# HG changeset patch # User Marcin Kuzminski # Date 2020-01-08 13:48:49 # Node ID 8d2996bef4b253af4bfa08795e3bfe56320f28a9 # Parent eb5eef3e9c43083b743d5926c6c84b1cdb850d55 # Parent b87f1db7492da25e7c864db99c2cc0cba954b2b7 release: Merge default into stable for release preparation diff --git a/.bumpversion.cfg b/.bumpversion.cfg --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 4.17.4 +current_version = 4.18.0 message = release: Bump version {current_version} to {new_version} [bumpversion:file:rhodecode/VERSION] diff --git a/.coveragerc b/.coveragerc --- a/.coveragerc +++ b/.coveragerc @@ -8,6 +8,7 @@ include = omit = rhodecode/lib/dbmigrate/* rhodecode/lib/paster_commands/* + rhodecode/lib/_vendor/* [report] diff --git a/.hgignore b/.hgignore --- a/.hgignore +++ b/.hgignore @@ -45,6 +45,7 @@ syntax: regexp ^rhodecode/public/js/rhodecode-components.html$ ^rhodecode/public/js/rhodecode-components.js$ ^rhodecode/public/js/scripts.js$ +^rhodecode/public/js/scripts.min.js$ ^rhodecode/public/js/src/components/root-styles.gen.html$ ^rhodecode/public/js/vendors/webcomponentsjs/ ^rhodecode\.db$ diff --git a/.release.cfg b/.release.cfg --- a/.release.cfg +++ b/.release.cfg @@ -5,25 +5,20 @@ done = false done = true [task:rc_tools_pinned] -done = true [task:fixes_on_stable] -done = true [task:pip2nix_generated] -done = true [task:changelog_updated] -done = true [task:generate_api_docs] -done = true + +[task:updated_translation] [release] -state = prepared -version = 4.17.4 - -[task:updated_translation] +state = in_progress +version = 4.18.0 [task:generate_js_routes] diff --git a/Gruntfile.js b/Gruntfile.js --- a/Gruntfile.js +++ b/Gruntfile.js @@ -13,9 +13,10 @@ module.exports = function(grunt) { grunt.loadNpmTasks('grunt-contrib-less'); grunt.loadNpmTasks('grunt-contrib-concat'); + grunt.loadNpmTasks('grunt-contrib-uglify'); grunt.loadNpmTasks('grunt-contrib-watch'); grunt.loadNpmTasks('grunt-contrib-jshint'); grunt.loadNpmTasks('grunt-contrib-copy'); grunt.loadNpmTasks('grunt-webpack'); - grunt.registerTask('default', ['less:production', 'less:components', 'copy', 'webpack', 'concat:dist']); + grunt.registerTask('default', ['less:production', 'less:components', 'copy', 'webpack', 'concat:dist', 'uglify:dist']); }; diff --git a/Makefile b/Makefile --- a/Makefile +++ b/Makefile @@ -17,6 +17,7 @@ test: test-clean: rm -rf coverage.xml htmlcov junit.xml pylint.log result find . -type d -name "__pycache__" -prune -exec rm -rf '{}' ';' + find . -type f \( -iname '.coverage.*' \) -exec rm '{}' ';' test-only: PYTHONHASHSEED=random \ @@ -28,7 +29,7 @@ test-only-mysql: PYTHONHASHSEED=random \ py.test -x -vv -r xw -p no:sugar --cov=rhodecode \ --cov-report=term-missing --cov-report=html \ - --ini-config-override='{"app:main": {"sqlalchemy.db1.url": "mysql://root:qweqwe@localhost/rhodecode_test"}}' \ + --ini-config-override='{"app:main": {"sqlalchemy.db1.url": "mysql://root:qweqwe@localhost/rhodecode_test?charset=utf8"}}' \ rhodecode test-only-postgres: diff --git a/README.rst b/README.rst --- a/README.rst +++ b/README.rst @@ -5,20 +5,24 @@ RhodeCode About ----- -``RhodeCode`` is a fast and powerful management tool for Mercurial_ and GIT_ -and Subversion_ with a built in push/pull server, full text search, -pull requests and powerful code-review system. It works on http/https, SSH and -has a few unique features like: +``RhodeCode`` is a fast and powerful source code management tool for +Mercurial_, GIT_ and Subversion_. 
It's main features are: + -- plugable architecture from Pyramid web-framework. -- advanced permission system with IP restrictions, inheritation, and user-groups. +- built in push/pull server +- SSH with key management support +- full text search. +- plugable authentication. +- pull requests and powerful code-review system. +- advanced permission system with IP restrictions, permission inheritation, and user-groups. - rich set of authentication plugins including LDAP, ActiveDirectory, SAML 2.0, Atlassian Crowd, Http-Headers, Pam, Token-Auth, OAuth. - live code-review chat, and reviewer rules. - full web based file editing. - unified multi vcs support. - snippets (gist) system. -- integration framework for Slack, CI systems, Webhooks. +- artfacts store for binaries. +- integration framework for Slack, CI systems, Webhooks, Jira, Redmine etc. - integration with all 3rd party issue trackers. @@ -41,7 +45,8 @@ Source code ----------- The latest sources can be obtained from official RhodeCode instance -https://code.rhodecode.com +https://code.rhodecode.com/rhodecode-enterprise-ce +https://code.rhodecode.com/rhodecode-vcsserver Contributions diff --git a/configs/development.ini b/configs/development.ini --- a/configs/development.ini +++ b/configs/development.ini @@ -1,24 +1,22 @@ - +## -*- coding: utf-8 -*- -################################################################################ -## RHODECODE COMMUNITY EDITION CONFIGURATION ## -################################################################################ +; ######################################### +; RHODECODE COMMUNITY EDITION CONFIGURATION +; ######################################### [DEFAULT] -## Debug flag sets all loggers to debug, and enables request tracking +; Debug flag sets all loggers to debug, and enables request tracking debug = true -################################################################################ -## EMAIL CONFIGURATION ## -## Uncomment and replace with the email address which should receive ## -## any error reports after an application crash ## -## Additionally these settings will be used by the RhodeCode mailing system ## -################################################################################ +; ######################################################################## +; EMAIL CONFIGURATION +; These settings will be used by the RhodeCode mailing system +; ######################################################################## -## prefix all emails subjects with given prefix, helps filtering out emails +; prefix all emails subjects with given prefix, helps filtering out emails #email_prefix = [RhodeCode] -## email FROM address all mails will be sent +; email FROM address all mails will be sent #app_email_from = rhodecode-noreply@localhost #smtp_server = mail.server.com @@ -29,82 +27,139 @@ debug = true #smtp_use_ssl = true [server:main] -## COMMON ## +; COMMON HOST/IP CONFIG host = 127.0.0.1 port = 5000 -########################################################### -## WAITRESS WSGI SERVER - Recommended for Development #### -########################################################### +; ################################################## +; WAITRESS WSGI SERVER - Recommended for Development +; ################################################## +; use server type use = egg:waitress#main -## number of worker threads + +; number of worker threads threads = 5 -## MAX BODY SIZE 100GB + +; MAX BODY SIZE 100GB max_request_body_size = 107374182400 -## Use poll instead of select, fixes file descriptors 
limits problems. -## May not work on old windows systems. + +; Use poll instead of select, fixes file descriptors limits problems. +; May not work on old windows systems. asyncore_use_poll = true -########################## -## GUNICORN WSGI SERVER ## -########################## -## run with gunicorn --log-config rhodecode.ini --paste rhodecode.ini +; ########################### +; GUNICORN APPLICATION SERVER +; ########################### +; run with gunicorn --log-config rhodecode.ini --paste rhodecode.ini + +; Module to use, this setting shouldn't be changed #use = egg:gunicorn#main -## Sets the number of process workers. More workers means more concurrent connections -## RhodeCode can handle at the same time. Each additional worker also it increases -## memory usage as each has it's own set of caches. -## Recommended value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers, but no more -## than 8-10 unless for really big deployments .e.g 700-1000 users. -## `instance_id = *` must be set in the [app:main] section below (which is the default) -## when using more than 1 worker. + +; Sets the number of process workers. More workers means more concurrent connections +; RhodeCode can handle at the same time. Each additional worker also it increases +; memory usage as each has it's own set of caches. +; Recommended value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers, but no more +; than 8-10 unless for really big deployments .e.g 700-1000 users. +; `instance_id = *` must be set in the [app:main] section below (which is the default) +; when using more than 1 worker. #workers = 2 -## process name visible in process list + +; Gunicorn access log level +#loglevel = info + +; Process name visible in process list #proc_name = rhodecode -## type of worker class, one of sync, gevent -## recommended for bigger setup is using of of other than sync one + +; Type of worker class, one of `sync`, `gevent` +; Recommended type is `gevent` #worker_class = gevent -## The maximum number of simultaneous clients. Valid only for Gevent + +; The maximum number of simultaneous clients. Valid only for gevent #worker_connections = 10 -## max number of requests that worker will handle before being gracefully -## restarted, could prevent memory leaks + +; Max number of requests that worker will handle before being gracefully restarted. +; Prevents memory leaks, jitter adds variability so not all workers are restarted at once. #max_requests = 1000 #max_requests_jitter = 30 -## amount of time a worker can spend with handling a request before it -## gets killed and restarted. Set to 6hrs + +; Amount of time a worker can spend with handling a request before it +; gets killed and restarted. By default set to 21600 (6hrs) +; Examples: 1800 (30min), 3600 (1hr), 7200 (2hr), 43200 (12h) #timeout = 21600 +; The maximum size of HTTP request line in bytes. +; 0 for unlimited +#limit_request_line = 0 + +; Limit the number of HTTP headers fields in a request. +; By default this value is 100 and can't be larger than 32768. +#limit_request_fields = 32768 + +; Limit the allowed size of an HTTP request header field. +; Value is a positive number or 0. +; Setting it to 0 will allow unlimited header field sizes. +#limit_request_field_size = 0 + +; Timeout for graceful workers restart. +; After receiving a restart signal, workers have this much time to finish +; serving requests. Workers still alive after the timeout (starting from the +; receipt of the restart signal) are force killed. 
+; Examples: 1800 (30min), 3600 (1hr), 7200 (2hr), 43200 (12h) +#graceful_timeout = 3600 + +# The number of seconds to wait for requests on a Keep-Alive connection. +# Generally set in the 1-5 seconds range. +#keepalive = 2 + +; Maximum memory usage that each worker can use before it will receive a +; graceful restart signal 0 = memory monitoring is disabled +; Examples: 268435456 (256MB), 536870912 (512MB) +; 1073741824 (1GB), 2147483648 (2GB), 4294967296 (4GB) +#memory_max_usage = 0 + +; How often in seconds to check for memory usage for each gunicorn worker +#memory_usage_check_interval = 60 + +; Threshold value for which we don't recycle worker if GarbageCollection +; frees up enough resources. Before each restart we try to run GC on worker +; in case we get enough free memory after that, restart will not happen. +#memory_usage_recovery_threshold = 0.8 + -## prefix middleware for RhodeCode. -## recommended when using proxy setup. -## allows to set RhodeCode under a prefix in server. -## eg https://server.com/custom_prefix. Enable `filter-with =` option below as well. -## And set your prefix like: `prefix = /custom_prefix` -## be sure to also set beaker.session.cookie_path = /custom_prefix if you need -## to make your cookies only work on prefix url +; Prefix middleware for RhodeCode. +; recommended when using proxy setup. +; allows to set RhodeCode under a prefix in server. +; eg https://server.com/custom_prefix. Enable `filter-with =` option below as well. +; And set your prefix like: `prefix = /custom_prefix` +; be sure to also set beaker.session.cookie_path = /custom_prefix if you need +; to make your cookies only work on prefix url [filter:proxy-prefix] use = egg:PasteDeploy#prefix prefix = / [app:main] -## The %(here)s variable will be replaced with the absolute path of parent directory -## of this file -## In addition ENVIRONMENT variables usage is possible, e.g -## sqlalchemy.db1.url = {ENV_RC_DB_URL} +; The %(here)s variable will be replaced with the absolute path of parent directory +; of this file +; In addition ENVIRONMENT variables usage is possible, e.g +; sqlalchemy.db1.url = {ENV_RC_DB_URL} use = egg:rhodecode-enterprise-ce -## enable proxy prefix middleware, defined above +; enable proxy prefix middleware, defined above #filter-with = proxy-prefix +; ############# +; DEBUG OPTIONS +; ############# + +pyramid.reload_templates = true + # During development the we want to have the debug toolbar enabled pyramid.includes = pyramid_debugtoolbar - rhodecode.lib.middleware.request_wrapper - -pyramid.reload_templates = true debugtoolbar.hosts = 0.0.0.0/0 debugtoolbar.exclude_prefixes = @@ -121,101 +176,100 @@ rhodecode.includes = # api prefix url rhodecode.api.url = /_admin/api - -## END RHODECODE PLUGINS ## +; enable debug style page +debug_style = true -## encryption key used to encrypt social plugin tokens, -## remote_urls with credentials etc, if not set it defaults to -## `beaker.session.secret` +; ################# +; END DEBUG OPTIONS +; ################# + +; encryption key used to encrypt social plugin tokens, +; remote_urls with credentials etc, if not set it defaults to +; `beaker.session.secret` #rhodecode.encrypted_values.secret = -## decryption strict mode (enabled by default). It controls if decryption raises -## `SignatureVerificationError` in case of wrong key, or damaged encryption data. +; decryption strict mode (enabled by default). It controls if decryption raises +; `SignatureVerificationError` in case of wrong key, or damaged encryption data. 
#rhodecode.encrypted_values.strict = false -## Pick algorithm for encryption. Either fernet (more secure) or aes (default) -## fernet is safer, and we strongly recommend switching to it. -## Due to backward compatibility aes is used as default. +; Pick algorithm for encryption. Either fernet (more secure) or aes (default) +; fernet is safer, and we strongly recommend switching to it. +; Due to backward compatibility aes is used as default. #rhodecode.encrypted_values.algorithm = fernet -## return gzipped responses from RhodeCode (static files/application) +; Return gzipped responses from RhodeCode (static files/application) gzip_responses = false -## auto-generate javascript routes file on startup +; Auto-generate javascript routes file on startup generate_js_files = false -## System global default language. -## All available languages: en(default), be, de, es, fr, it, ja, pl, pt, ru, zh +; System global default language. +; All available languages: en (default), be, de, es, fr, it, ja, pl, pt, ru, zh lang = en -## Perform a full repository scan and import on each server start. -## Settings this to true could lead to very long startup time. +; Perform a full repository scan and import on each server start. +; Settings this to true could lead to very long startup time. startup.import_repos = false -## Uncomment and set this path to use archive download cache. -## Once enabled, generated archives will be cached at this location -## and served from the cache during subsequent requests for the same archive of -## the repository. +; Uncomment and set this path to use archive download cache. +; Once enabled, generated archives will be cached at this location +; and served from the cache during subsequent requests for the same archive of +; the repository. #archive_cache_dir = /tmp/tarballcache -## URL at which the application is running. This is used for Bootstrapping -## requests in context when no web request is available. Used in ishell, or -## SSH calls. Set this for events to receive proper url for SSH calls. +; URL at which the application is running. This is used for Bootstrapping +; requests in context when no web request is available. Used in ishell, or +; SSH calls. Set this for events to receive proper url for SSH calls. app.base_url = http://rhodecode.local -## Unique application ID. Should be a random unique string for security. +; Unique application ID. Should be a random unique string for security. app_instance_uuid = rc-production -## Cut off limit for large diffs (size in bytes). If overall diff size on -## commit, or pull request exceeds this limit this diff will be displayed -## partially. E.g 512000 == 512Kb +; Cut off limit for large diffs (size in bytes). If overall diff size on +; commit, or pull request exceeds this limit this diff will be displayed +; partially. E.g 512000 == 512Kb cut_off_limit_diff = 512000 -## Cut off limit for large files inside diffs (size in bytes). Each individual -## file inside diff which exceeds this limit will be displayed partially. -## E.g 128000 == 128Kb +; Cut off limit for large files inside diffs (size in bytes). Each individual +; file inside diff which exceeds this limit will be displayed partially. +; E.g 128000 == 128Kb cut_off_limit_file = 128000 -## use cached version of vcs repositories everywhere. Recommended to be `true` +; Use cached version of vcs repositories everywhere. Recommended to be `true` vcs_full_cache = true -## Force https in RhodeCode, fixes https redirects, assumes it's always https. 
-## Normally this is controlled by proper http flags sent from http server +; Force https in RhodeCode, fixes https redirects, assumes it's always https. +; Normally this is controlled by proper flags sent from http server such as Nginx or Apache force_https = false -## use Strict-Transport-Security headers +; use Strict-Transport-Security headers use_htsts = false -## git rev filter option, --all is the default filter, if you need to -## hide all refs in changelog switch this to --branches --tags -git_rev_filter = --branches --tags - -# Set to true if your repos are exposed using the dumb protocol +; Set to true if your repos are exposed using the dumb protocol git_update_server_info = false -## RSS/ATOM feed options +; RSS/ATOM feed options rss_cut_off_limit = 256000 rss_items_per_page = 10 rss_include_diff = false -## gist URL alias, used to create nicer urls for gist. This should be an -## url that does rewrites to _admin/gists/{gistid}. -## example: http://gist.rhodecode.org/{gistid}. Empty means use the internal -## RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid} +; gist URL alias, used to create nicer urls for gist. This should be an +; url that does rewrites to _admin/gists/{gistid}. +; example: http://gist.rhodecode.org/{gistid}. Empty means use the internal +; RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid} gist_alias_url = -## List of views (using glob pattern syntax) that AUTH TOKENS could be -## used for access. -## Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it -## came from the the logged in user who own this authentication token. -## Additionally @TOKEN syntax can be used to bound the view to specific -## authentication token. Such view would be only accessible when used together -## with this authentication token -## -## list of all views can be found under `/_admin/permissions/auth_token_access` -## The list should be "," separated and on a single line. -## -## Most common views to enable: +; List of views (using glob pattern syntax) that AUTH TOKENS could be +; used for access. +; Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it +; came from the the logged in user who own this authentication token. +; Additionally @TOKEN syntax can be used to bound the view to specific +; authentication token. Such view would be only accessible when used together +; with this authentication token +; list of all views can be found under `/_admin/permissions/auth_token_access` +; The list should be "," separated and on a single line. +; Most common views to enable: + # RepoCommitsView:repo_commit_download # RepoCommitsView:repo_commit_patch # RepoCommitsView:repo_commit_raw @@ -226,164 +280,194 @@ gist_alias_url = # GistView:* api_access_controllers_whitelist = -## Default encoding used to convert from and to unicode -## can be also a comma separated list of encoding in case of mixed encodings +; Default encoding used to convert from and to unicode +; can be also a comma separated list of encoding in case of mixed encodings default_encoding = UTF-8 -## instance-id prefix -## a prefix key for this instance used for cache invalidation when running -## multiple instances of RhodeCode, make sure it's globally unique for -## all running RhodeCode instances. Leave empty if you don't use it +; instance-id prefix +; a prefix key for this instance used for cache invalidation when running +; multiple instances of RhodeCode, make sure it's globally unique for +; all running RhodeCode instances. 
Leave empty if you don't use it instance_id = -## Fallback authentication plugin. Set this to a plugin ID to force the usage -## of an authentication plugin also if it is disabled by it's settings. -## This could be useful if you are unable to log in to the system due to broken -## authentication settings. Then you can enable e.g. the internal RhodeCode auth -## module to log in again and fix the settings. -## -## Available builtin plugin IDs (hash is part of the ID): -## egg:rhodecode-enterprise-ce#rhodecode -## egg:rhodecode-enterprise-ce#pam -## egg:rhodecode-enterprise-ce#ldap -## egg:rhodecode-enterprise-ce#jasig_cas -## egg:rhodecode-enterprise-ce#headers -## egg:rhodecode-enterprise-ce#crowd +; Fallback authentication plugin. Set this to a plugin ID to force the usage +; of an authentication plugin also if it is disabled by it's settings. +; This could be useful if you are unable to log in to the system due to broken +; authentication settings. Then you can enable e.g. the internal RhodeCode auth +; module to log in again and fix the settings. +; Available builtin plugin IDs (hash is part of the ID): +; egg:rhodecode-enterprise-ce#rhodecode +; egg:rhodecode-enterprise-ce#pam +; egg:rhodecode-enterprise-ce#ldap +; egg:rhodecode-enterprise-ce#jasig_cas +; egg:rhodecode-enterprise-ce#headers +; egg:rhodecode-enterprise-ce#crowd + #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode -## alternative return HTTP header for failed authentication. Default HTTP -## response is 401 HTTPUnauthorized. Currently HG clients have troubles with -## handling that causing a series of failed authentication calls. -## Set this variable to 403 to return HTTPForbidden, or any other HTTP code -## This will be served instead of default 401 on bad authentication +; Flag to control loading of legacy plugins in py:/path format +auth_plugin.import_legacy_plugins = true + +; alternative return HTTP header for failed authentication. Default HTTP +; response is 401 HTTPUnauthorized. Currently HG clients have troubles with +; handling that causing a series of failed authentication calls. +; Set this variable to 403 to return HTTPForbidden, or any other HTTP code +; This will be served instead of default 401 on bad authentication auth_ret_code = -## use special detection method when serving auth_ret_code, instead of serving -## ret_code directly, use 401 initially (Which triggers credentials prompt) -## and then serve auth_ret_code to clients +; use special detection method when serving auth_ret_code, instead of serving +; ret_code directly, use 401 initially (Which triggers credentials prompt) +; and then serve auth_ret_code to clients auth_ret_code_detection = false -## locking return code. When repository is locked return this HTTP code. 2XX -## codes don't break the transactions while 4XX codes do +; locking return code. When repository is locked return this HTTP code. 2XX +; codes don't break the transactions while 4XX codes do lock_ret_code = 423 -## allows to change the repository location in settings page +; allows to change the repository location in settings page allow_repo_location_change = true -## allows to setup custom hooks in settings page +; allows to setup custom hooks in settings page allow_custom_hooks_settings = true -## Generated license token required for EE edition license. -## New generated token value can be found in Admin > settings > license page. +; Generated license token required for EE edition license. 
+; New generated token value can be found in Admin > settings > license page. license_token = -## supervisor connection uri, for managing supervisor and logs. +; This flag hides sensitive information on the license page such as token, and license data +license.hide_license_info = false + +; supervisor connection uri, for managing supervisor and logs. supervisor.uri = -## supervisord group name/id we only want this RC instance to handle + +; supervisord group name/id we only want this RC instance to handle supervisor.group_id = dev -## Display extended labs settings +; Display extended labs settings labs_settings_active = true -## Custom exception store path, defaults to TMPDIR -## This is used to store exception from RhodeCode in shared directory +; Custom exception store path, defaults to TMPDIR +; This is used to store exception from RhodeCode in shared directory #exception_tracker.store_path = -## File store configuration. This is used to store and serve uploaded files +; File store configuration. This is used to store and serve uploaded files file_store.enabled = true -## Storage backend, available options are: local + +; Storage backend, available options are: local file_store.backend = local -## path to store the uploaded binaries + +; path to store the uploaded binaries file_store.storage_path = %(here)s/data/file_store -#################################### -### CELERY CONFIG #### -#################################### -## run: /path/to/celery worker \ -## -E --beat --app rhodecode.lib.celerylib.loader \ -## --scheduler rhodecode.lib.celerylib.scheduler.RcScheduler \ -## --loglevel DEBUG --ini /path/to/rhodecode.ini +; ############# +; CELERY CONFIG +; ############# + +; manually run celery: /path/to/celery worker -E --beat --app rhodecode.lib.celerylib.loader --scheduler rhodecode.lib.celerylib.scheduler.RcScheduler --loglevel DEBUG --ini /path/to/rhodecode.ini use_celery = false -## connection url to the message broker (default rabbitmq) -celery.broker_url = amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost +; connection url to the message broker (default redis) +celery.broker_url = redis://localhost:6379/8 -## maximum tasks to execute before worker restart +; rabbitmq example +#celery.broker_url = amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost + +; maximum tasks to execute before worker restart celery.max_tasks_per_child = 100 -## tasks will never be sent to the queue, but executed locally instead. +; tasks will never be sent to the queue, but executed locally instead. celery.task_always_eager = false -##################################### -### DOGPILE CACHE #### -##################################### -## Default cache dir for caches. Putting this into a ramdisk -## can boost performance, eg. /tmpfs/data_ramdisk, however this directory might require -## large amount of space +; ############# +; DOGPILE CACHE +; ############# + +; Default cache dir for caches. Putting this into a ramdisk can boost performance. +; eg. /tmpfs/data_ramdisk, however this directory might require large amount of space cache_dir = %(here)s/data -## `cache_perms` cache settings for permission tree, auth TTL. 
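The `rc_cache.*` keys introduced in the hunk below declare dogpile.cache regions: a backend, an expiration time, and backend arguments all sharing one prefix. As a rough, non-authoritative sketch of how such prefixed settings map onto a region — using the stock `dogpile.cache.redis` backend rather than RhodeCode's custom `dogpile.cache.rc.*` backends, and a hypothetical `load_permission_tree` function:

```python
# Illustration only: stock dogpile.cache backend, not RhodeCode's dogpile.cache.rc.*
# wrappers; the settings dict mirrors the ini keys shown in the hunk below.
from dogpile.cache import make_region

settings = {
    'rc_cache.cache_perms.backend': 'dogpile.cache.redis',
    'rc_cache.cache_perms.expiration_time': 300,
    'rc_cache.cache_perms.arguments.host': 'localhost',
    'rc_cache.cache_perms.arguments.port': 6379,
    'rc_cache.cache_perms.arguments.db': 0,
    'rc_cache.cache_perms.arguments.redis_expiration_time': 7200,
    'rc_cache.cache_perms.arguments.distributed_lock': True,
}

# dogpile collects everything under the `arguments.` prefix into backend arguments
cache_perms = make_region().configure_from_config(settings, 'rc_cache.cache_perms.')


@cache_perms.cache_on_arguments()
def load_permission_tree(user_id):
    # hypothetical expensive lookup; cached for expiration_time seconds
    return {'user_id': user_id, 'permissions': []}
```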
+; ********************************************* +; `sql_cache_short` cache for heavy SQL queries +; Only supported backend is `memory_lru` +; ********************************************* +rc_cache.sql_cache_short.backend = dogpile.cache.rc.memory_lru +rc_cache.sql_cache_short.expiration_time = 30 + + +; ***************************************************** +; `cache_repo_longterm` cache for repo object instances +; Only supported backend is `memory_lru` +; ***************************************************** +rc_cache.cache_repo_longterm.backend = dogpile.cache.rc.memory_lru +; by default we use 30 Days, cache is still invalidated on push +rc_cache.cache_repo_longterm.expiration_time = 2592000 +; max items in LRU cache, set to smaller number to save memory, and expire last used caches +rc_cache.cache_repo_longterm.max_size = 10000 + + +; ************************************************* +; `cache_perms` cache for permission tree, auth TTL +; ************************************************* rc_cache.cache_perms.backend = dogpile.cache.rc.file_namespace rc_cache.cache_perms.expiration_time = 300 +; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set +#rc_cache.cache_perms.arguments.filename = /tmp/cache_perms.db -## alternative `cache_perms` redis backend with distributed lock +; alternative `cache_perms` redis backend with distributed lock #rc_cache.cache_perms.backend = dogpile.cache.rc.redis #rc_cache.cache_perms.expiration_time = 300 -## redis_expiration_time needs to be greater then expiration_time + +; redis_expiration_time needs to be greater then expiration_time #rc_cache.cache_perms.arguments.redis_expiration_time = 7200 -#rc_cache.cache_perms.arguments.socket_timeout = 30 + #rc_cache.cache_perms.arguments.host = localhost #rc_cache.cache_perms.arguments.port = 6379 #rc_cache.cache_perms.arguments.db = 0 -## more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends +#rc_cache.cache_perms.arguments.socket_timeout = 30 +; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends #rc_cache.cache_perms.arguments.distributed_lock = true -## `cache_repo` cache settings for FileTree, Readme, RSS FEEDS + +; *************************************************** +; `cache_repo` cache for file tree, Readme, RSS FEEDS +; *************************************************** rc_cache.cache_repo.backend = dogpile.cache.rc.file_namespace rc_cache.cache_repo.expiration_time = 2592000 +; file cache store path. 
Defaults to `cache_dir =` value or tempdir if both values are not set +#rc_cache.cache_repo.arguments.filename = /tmp/cache_repo.db -## alternative `cache_repo` redis backend with distributed lock +; alternative `cache_repo` redis backend with distributed lock #rc_cache.cache_repo.backend = dogpile.cache.rc.redis #rc_cache.cache_repo.expiration_time = 2592000 -## redis_expiration_time needs to be greater then expiration_time + +; redis_expiration_time needs to be greater then expiration_time #rc_cache.cache_repo.arguments.redis_expiration_time = 2678400 -#rc_cache.cache_repo.arguments.socket_timeout = 30 + #rc_cache.cache_repo.arguments.host = localhost #rc_cache.cache_repo.arguments.port = 6379 #rc_cache.cache_repo.arguments.db = 1 -## more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends +#rc_cache.cache_repo.arguments.socket_timeout = 30 +; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends #rc_cache.cache_repo.arguments.distributed_lock = true -## cache settings for SQL queries, this needs to use memory type backend -rc_cache.sql_cache_short.backend = dogpile.cache.rc.memory_lru -rc_cache.sql_cache_short.expiration_time = 30 -## `cache_repo_longterm` cache for repo object instances, this needs to use memory -## type backend as the objects kept are not pickle serializable -rc_cache.cache_repo_longterm.backend = dogpile.cache.rc.memory_lru -## by default we use 96H, this is using invalidation on push anyway -rc_cache.cache_repo_longterm.expiration_time = 345600 -## max items in LRU cache, reduce this number to save memory, and expire last used -## cached objects -rc_cache.cache_repo_longterm.max_size = 10000 +; ############## +; BEAKER SESSION +; ############## - -#################################### -### BEAKER SESSION #### -#################################### - -## .session.type is type of storage options for the session, current allowed -## types are file, ext:memcached, ext:redis, ext:database, and memory (default). +; beaker.session.type is type of storage options for the logged users sessions. Current allowed +; types are file, ext:redis, ext:database, ext:memcached, and memory (default if not specified). +; Fastest ones are Redis and ext:database beaker.session.type = file beaker.session.data_dir = %(here)s/data/sessions -## redis sessions +; Redis based sessions #beaker.session.type = ext:redis #beaker.session.url = redis://127.0.0.1:6379/2 -## db based session, fast, and allows easy management over logged in users +; DB based session, fast, and allows easy management over logged in users #beaker.session.type = ext:database #beaker.session.table_name = db_session #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode @@ -395,267 +479,275 @@ beaker.session.key = rhodecode beaker.session.secret = develop-rc-uytcxaz beaker.session.lock_dir = %(here)s/data/sessions/lock -## Secure encrypted cookie. Requires AES and AES python libraries -## you must disable beaker.session.secret to use this +; Secure encrypted cookie. 
Requires AES and AES python libraries +; you must disable beaker.session.secret to use this #beaker.session.encrypt_key = key_for_encryption #beaker.session.validate_key = validation_key -## sets session as invalid(also logging out user) if it haven not been -## accessed for given amount of time in seconds +; Sets session as invalid (also logging out user) if it haven not been +; accessed for given amount of time in seconds beaker.session.timeout = 2592000 beaker.session.httponly = true -## Path to use for the cookie. Set to prefix if you use prefix middleware + +; Path to use for the cookie. Set to prefix if you use prefix middleware #beaker.session.cookie_path = /custom_prefix -## uncomment for https secure cookie +; Set https secure cookie beaker.session.secure = false -## auto save the session to not to use .save() -beaker.session.auto = false - -## default cookie expiration time in seconds, set to `true` to set expire -## at browser close +; default cookie expiration time in seconds, set to `true` to set expire +; at browser close #beaker.session.cookie_expires = 3600 -################################### -## SEARCH INDEXING CONFIGURATION ## -################################### -## Full text search indexer is available in rhodecode-tools under -## `rhodecode-tools index` command +; ############################# +; SEARCH INDEXING CONFIGURATION +; ############################# -## WHOOSH Backend, doesn't require additional services to run -## it works good with few dozen repos +; Full text search indexer is available in rhodecode-tools under +; `rhodecode-tools index` command + +; WHOOSH Backend, doesn't require additional services to run +; it works good with few dozen repos search.module = rhodecode.lib.index.whoosh search.location = %(here)s/data/index -######################################## -### CHANNELSTREAM CONFIG #### -######################################## -## channelstream enables persistent connections and live notification -## in the system. It's also used by the chat system +; #################### +; CHANNELSTREAM CONFIG +; #################### + +; channelstream enables persistent connections and live notification +; in the system. It's also used by the chat system channelstream.enabled = false -## server address for channelstream server on the backend +; server address for channelstream server on the backend channelstream.server = 127.0.0.1:9800 -## location of the channelstream server from outside world -## use ws:// for http or wss:// for https. This address needs to be handled -## by external HTTP server such as Nginx or Apache -## see Nginx/Apache configuration examples in our docs +; location of the channelstream server from outside world +; use ws:// for http or wss:// for https. This address needs to be handled +; by external HTTP server such as Nginx or Apache +; see Nginx/Apache configuration examples in our docs channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream channelstream.secret = secret channelstream.history.location = %(here)s/channelstream_history -## Internal application path that Javascript uses to connect into. -## If you use proxy-prefix the prefix should be added before /_channelstream +; Internal application path that Javascript uses to connect into. 
+; If you use proxy-prefix the prefix should be added before /_channelstream channelstream.proxy_path = /_channelstream -################################### -## APPENLIGHT CONFIG ## -################################### +; ############################## +; MAIN RHODECODE DATABASE CONFIG +; ############################## + +#sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30 +#sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode +#sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode?charset=utf8 +; pymysql is an alternative driver for MySQL, use in case of problems with default one +#sqlalchemy.db1.url = mysql+pymysql://root:qweqwe@localhost/rhodecode + +sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30 + +; see sqlalchemy docs for other advanced settings +; print the sql statements to output +sqlalchemy.db1.echo = false + +; recycle the connections after this amount of seconds +sqlalchemy.db1.pool_recycle = 3600 +sqlalchemy.db1.convert_unicode = true + +; the number of connections to keep open inside the connection pool. +; 0 indicates no limit +#sqlalchemy.db1.pool_size = 5 + +; The number of connections to allow in connection pool "overflow", that is +; connections that can be opened above and beyond the pool_size setting, +; which defaults to five. +#sqlalchemy.db1.max_overflow = 10 + +; Connection check ping, used to detect broken database connections +; could be enabled to better handle cases if MySQL has gone away errors +#sqlalchemy.db1.ping_connection = true + +; ########## +; VCS CONFIG +; ########## +vcs.server.enable = true +vcs.server = localhost:9900 + +; Web server connectivity protocol, responsible for web based VCS operations +; Available protocols are: +; `http` - use http-rpc backend (default) +vcs.server.protocol = http + +; Push/Pull operations protocol, available options are: +; `http` - use http-rpc backend (default) +vcs.scm_app_implementation = http + +; Push/Pull operations hooks protocol, available options are: +; `http` - use http-rpc backend (default) +vcs.hooks.protocol = http + +; Host on which this instance is listening for hooks. If vcsserver is in other location +; this should be adjusted. +vcs.hooks.host = 127.0.0.1 + +; Start VCSServer with this instance as a subprocess, useful for development +vcs.start_server = false + +; List of enabled VCS backends, available options are: +; `hg` - mercurial +; `git` - git +; `svn` - subversion +vcs.backends = hg, git, svn + +; Wait this number of seconds before killing connection to the vcsserver +vcs.connection_timeout = 3600 + +; Compatibility version when creating SVN repositories. Defaults to newest version when commented out. +; Available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible +#vcs.svn.compatible_version = pre-1.8-compatible + -## Appenlight is tailored to work with RhodeCode, see -## http://appenlight.com for details how to obtain an account +; #################################################### +; Subversion proxy support (mod_dav_svn) +; Maps RhodeCode repo groups into SVN paths for Apache +; #################################################### + +; Enable or disable the config file generation. +svn.proxy.generate_config = false + +; Generate config file with `SVNListParentPath` set to `On`. +svn.proxy.list_parent_path = true + +; Set location and file name of generated config file. +svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf + +; alternative mod_dav config template. 
This needs to be a valid mako template +; Example template can be found in the source code: +; rhodecode/apps/svn_support/templates/mod-dav-svn.conf.mako +#svn.proxy.config_template = ~/.rccontrol/enterprise-1/custom_svn_conf.mako + +; Used as a prefix to the `Location` block in the generated config file. +; In most cases it should be set to `/`. +svn.proxy.location_root = / + +; Command to reload the mod dav svn configuration on change. +; Example: `/etc/init.d/apache2 reload` or /home/USER/apache_reload.sh +; Make sure user who runs RhodeCode process is allowed to reload Apache +#svn.proxy.reload_cmd = /etc/init.d/apache2 reload + +; If the timeout expires before the reload command finishes, the command will +; be killed. Setting it to zero means no timeout. Defaults to 10 seconds. +#svn.proxy.reload_timeout = 10 + +; #################### +; SSH Support Settings +; #################### -## Appenlight integration enabled +; Defines if a custom authorized_keys file should be created and written on +; any change user ssh keys. Setting this to false also disables possibility +; of adding SSH keys by users from web interface. Super admins can still +; manage SSH Keys. +ssh.generate_authorized_keyfile = false + +; Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding` +# ssh.authorized_keys_ssh_opts = + +; Path to the authorized_keys file where the generate entries are placed. +; It is possible to have multiple key files specified in `sshd_config` e.g. +; AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode +ssh.authorized_keys_file_path = ~/.ssh/authorized_keys_rhodecode + +; Command to execute the SSH wrapper. The binary is available in the +; RhodeCode installation directory. +; e.g ~/.rccontrol/community-1/profile/bin/rc-ssh-wrapper +ssh.wrapper_cmd = ~/.rccontrol/community-1/rc-ssh-wrapper + +; Allow shell when executing the ssh-wrapper command +ssh.wrapper_cmd_allow_shell = false + +; Enables logging, and detailed output send back to the client during SSH +; operations. Useful for debugging, shouldn't be used in production. +ssh.enable_debug_logging = true + +; Paths to binary executable, by default they are the names, but we can +; override them if we want to use a custom one +ssh.executable.hg = ~/.rccontrol/vcsserver-1/profile/bin/hg +ssh.executable.git = ~/.rccontrol/vcsserver-1/profile/bin/git +ssh.executable.svn = ~/.rccontrol/vcsserver-1/profile/bin/svnserve + +; Enables SSH key generator web interface. Disabling this still allows users +; to add their own keys. 
+ssh.enable_ui_key_generator = true + + +; ################# +; APPENLIGHT CONFIG +; ################# + +; Appenlight is tailored to work with RhodeCode, see +; http://appenlight.rhodecode.com for details how to obtain an account + +; Appenlight integration enabled appenlight = false appenlight.server_url = https://api.appenlight.com appenlight.api_key = YOUR_API_KEY #appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5 -## used for JS client +; used for JS client appenlight.api_public_key = YOUR_API_PUBLIC_KEY -## TWEAK AMOUNT OF INFO SENT HERE +; TWEAK AMOUNT OF INFO SENT HERE -## enables 404 error logging (default False) +; enables 404 error logging (default False) appenlight.report_404 = false -## time in seconds after request is considered being slow (default 1) +; time in seconds after request is considered being slow (default 1) appenlight.slow_request_time = 1 -## record slow requests in application -## (needs to be enabled for slow datastore recording and time tracking) +; record slow requests in application +; (needs to be enabled for slow datastore recording and time tracking) appenlight.slow_requests = true -## enable hooking to application loggers +; enable hooking to application loggers appenlight.logging = true -## minimum log level for log capture +; minimum log level for log capture appenlight.logging.level = WARNING -## send logs only from erroneous/slow requests -## (saves API quota for intensive logging) +; send logs only from erroneous/slow requests +; (saves API quota for intensive logging) appenlight.logging_on_error = false -## list of additional keywords that should be grabbed from environ object -## can be string with comma separated list of words in lowercase -## (by default client will always send following info: -## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that -## start with HTTP* this list be extended with additional keywords here +; list of additional keywords that should be grabbed from environ object +; can be string with comma separated list of words in lowercase +; (by default client will always send following info: +; 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that +; start with HTTP* this list be extended with additional keywords here appenlight.environ_keys_whitelist = -## list of keywords that should be blanked from request object -## can be string with comma separated list of words in lowercase -## (by default client will always blank keys that contain following words -## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf' -## this list be extended with additional keywords set here +; list of keywords that should be blanked from request object +; can be string with comma separated list of words in lowercase +; (by default client will always blank keys that contain following words +; 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf' +; this list be extended with additional keywords set here appenlight.request_keys_blacklist = -## list of namespaces that should be ignores when gathering log entries -## can be string with comma separated list of namespaces -## (by default the client ignores own entries: appenlight_client.client) +; list of namespaces that should be ignores when gathering log entries +; can be string with comma separated list of namespaces +; (by default the client ignores own entries: appenlight_client.client) appenlight.log_namespace_blacklist = -# enable debug style page -debug_style = true - -########################################### 
-### MAIN RHODECODE DATABASE CONFIG ### -########################################### -#sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30 -#sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode -#sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode?charset=utf8 -# pymysql is an alternative driver for MySQL, use in case of problems with default one -#sqlalchemy.db1.url = mysql+pymysql://root:qweqwe@localhost/rhodecode - -sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30 - -# see sqlalchemy docs for other advanced settings - -## print the sql statements to output -sqlalchemy.db1.echo = false -## recycle the connections after this amount of seconds -sqlalchemy.db1.pool_recycle = 3600 -sqlalchemy.db1.convert_unicode = true - -## the number of connections to keep open inside the connection pool. -## 0 indicates no limit -#sqlalchemy.db1.pool_size = 5 - -## the number of connections to allow in connection pool "overflow", that is -## connections that can be opened above and beyond the pool_size setting, -## which defaults to five. -#sqlalchemy.db1.max_overflow = 10 - -## Connection check ping, used to detect broken database connections -## could be enabled to better handle cases if MySQL has gone away errors -#sqlalchemy.db1.ping_connection = true - -################## -### VCS CONFIG ### -################## -vcs.server.enable = true -vcs.server = localhost:9900 - -## Web server connectivity protocol, responsible for web based VCS operations -## Available protocols are: -## `http` - use http-rpc backend (default) -vcs.server.protocol = http - -## Push/Pull operations protocol, available options are: -## `http` - use http-rpc backend (default) -vcs.scm_app_implementation = http - -## Push/Pull operations hooks protocol, available options are: -## `http` - use http-rpc backend (default) -vcs.hooks.protocol = http - -## Host on which this instance is listening for hooks. If vcsserver is in other location -## this should be adjusted. -vcs.hooks.host = 127.0.0.1 - -vcs.server.log_level = debug -## Start VCSServer with this instance as a subprocess, useful for development -vcs.start_server = false - -## List of enabled VCS backends, available options are: -## `hg` - mercurial -## `git` - git -## `svn` - subversion -vcs.backends = hg, git, svn - -vcs.connection_timeout = 3600 -## Compatibility version when creating SVN repositories. Defaults to newest version when commented out. -## Available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible -#vcs.svn.compatible_version = pre-1.8-compatible - - -############################################################ -### Subversion proxy support (mod_dav_svn) ### -### Maps RhodeCode repo groups into SVN paths for Apache ### -############################################################ -## Enable or disable the config file generation. -svn.proxy.generate_config = false -## Generate config file with `SVNListParentPath` set to `On`. -svn.proxy.list_parent_path = true -## Set location and file name of generated config file. -svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf -## alternative mod_dav config template. This needs to be a mako template -#svn.proxy.config_template = ~/.rccontrol/enterprise-1/custom_svn_conf.mako -## Used as a prefix to the `Location` block in the generated config file. -## In most cases it should be set to `/`. -svn.proxy.location_root = / -## Command to reload the mod dav svn configuration on change. 
-## Example: `/etc/init.d/apache2 reload` or /home/USER/apache_reload.sh -## Make sure user who runs RhodeCode process is allowed to reload Apache -#svn.proxy.reload_cmd = /etc/init.d/apache2 reload -## If the timeout expires before the reload command finishes, the command will -## be killed. Setting it to zero means no timeout. Defaults to 10 seconds. -#svn.proxy.reload_timeout = 10 - -############################################################ -### SSH Support Settings ### -############################################################ - -## Defines if a custom authorized_keys file should be created and written on -## any change user ssh keys. Setting this to false also disables possibility -## of adding SSH keys by users from web interface. Super admins can still -## manage SSH Keys. -ssh.generate_authorized_keyfile = false - -## Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding` -# ssh.authorized_keys_ssh_opts = - -## Path to the authorized_keys file where the generate entries are placed. -## It is possible to have multiple key files specified in `sshd_config` e.g. -## AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode -ssh.authorized_keys_file_path = ~/.ssh/authorized_keys_rhodecode - -## Command to execute the SSH wrapper. The binary is available in the -## RhodeCode installation directory. -## e.g ~/.rccontrol/community-1/profile/bin/rc-ssh-wrapper -ssh.wrapper_cmd = ~/.rccontrol/community-1/rc-ssh-wrapper - -## Allow shell when executing the ssh-wrapper command -ssh.wrapper_cmd_allow_shell = false - -## Enables logging, and detailed output send back to the client during SSH -## operations. Useful for debugging, shouldn't be used in production. -ssh.enable_debug_logging = true - -## Paths to binary executable, by default they are the names, but we can -## override them if we want to use a custom one -ssh.executable.hg = ~/.rccontrol/vcsserver-1/profile/bin/hg -ssh.executable.git = ~/.rccontrol/vcsserver-1/profile/bin/git -ssh.executable.svn = ~/.rccontrol/vcsserver-1/profile/bin/svnserve - -## Enables SSH key generator web interface. Disabling this still allows users -## to add their own keys. -ssh.enable_ui_key_generator = true - - -## Dummy marker to add new entries after. -## Add any custom entries below. Please don't remove. +; Dummy marker to add new entries after. +; Add any custom entries below. Please don't remove this marker. custom.conf = 1 -################################ -### LOGGING CONFIGURATION #### -################################ +; ##################### +; LOGGING CONFIGURATION +; ##################### [loggers] keys = root, sqlalchemy, beaker, celery, rhodecode, ssh_wrapper @@ -665,9 +757,9 @@ keys = console, console_sql [formatters] keys = generic, color_formatter, color_formatter_sql -############# -## LOGGERS ## -############# +; ####### +; LOGGERS +; ####### [logger_root] level = NOTSET handlers = console @@ -702,9 +794,9 @@ handlers = qualname = celery -############## -## HANDLERS ## -############## +; ######## +; HANDLERS +; ######## [handler_console] class = StreamHandler @@ -713,17 +805,17 @@ level = DEBUG formatter = color_formatter [handler_console_sql] -# "level = DEBUG" logs SQL queries and results. -# "level = INFO" logs SQL queries. -# "level = WARN" logs neither. (Recommended for production systems.) +; "level = DEBUG" logs SQL queries and results. +; "level = INFO" logs SQL queries. +; "level = WARN" logs neither. (Recommended for production systems.) 
class = StreamHandler args = (sys.stderr, ) level = WARN formatter = color_formatter_sql -################ -## FORMATTERS ## -################ +; ########## +; FORMATTERS +; ########## [formatter_generic] class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter diff --git a/configs/gunicorn_config.py b/configs/gunicorn_config.py --- a/configs/gunicorn_config.py +++ b/configs/gunicorn_config.py @@ -1,58 +1,26 @@ """ -gunicorn config extension and hooks. Sets additional configuration that is -available post the .ini config. - -- workers = ${cpu_number} -- threads = 1 -- proc_name = ${gunicorn_proc_name} -- worker_class = sync -- worker_connections = 10 -- max_requests = 1000 -- max_requests_jitter = 30 -- timeout = 21600 - +Gunicorn config extension and hooks. This config file adds some extra settings and memory management. +Gunicorn configuration should be managed by .ini files entries of RhodeCode or VCSServer """ -import multiprocessing +import gc +import os import sys +import math import time -import datetime import threading import traceback +import random from gunicorn.glogging import Logger +def get_workers(): + import multiprocessing + return multiprocessing.cpu_count() * 2 + 1 + # GLOBAL errorlog = '-' accesslog = '-' -loglevel = 'debug' - -# SECURITY - -# The maximum size of HTTP request line in bytes. -# 0 for unlimited -limit_request_line = 0 - -# Limit the number of HTTP headers fields in a request. -# By default this value is 100 and can't be larger than 32768. -limit_request_fields = 10240 - -# Limit the allowed size of an HTTP request header field. -# Value is a positive number or 0. -# Setting it to 0 will allow unlimited header field sizes. -limit_request_field_size = 0 - - -# Timeout for graceful workers restart. -# After receiving a restart signal, workers have this much time to finish -# serving requests. Workers still alive after the timeout (starting from the -# receipt of the restart signal) are force killed. -graceful_timeout = 30 - - -# The number of seconds to wait for requests on a Keep-Alive connection. -# Generally set in the 1-5 seconds range. 
-keepalive = 2 # SERVER MECHANICS @@ -63,38 +31,178 @@ tmp_upload_dir = None # Custom log format access_log_format = ( - '%(t)s [%(p)-8s] GNCRN %(h)-15s rqt:%(L)s %(s)s %(b)-6s "%(m)s:%(U)s %(q)s" usr:%(u)s "%(f)s" "%(a)s"') + '%(t)s %(p)s INFO [GNCRN] %(h)-15s rqt:%(L)s %(s)s %(b)-6s "%(m)s:%(U)s %(q)s" usr:%(u)s "%(f)s" "%(a)s"') # self adjust workers based on CPU count -# workers = multiprocessing.cpu_count() * 2 + 1 +# workers = get_workers() + + +def _get_process_rss(pid=None): + try: + import psutil + if pid: + proc = psutil.Process(pid) + else: + proc = psutil.Process() + return proc.memory_info().rss + except Exception: + return None -def post_fork(server, worker): - server.log.info("[<%-10s>] WORKER spawned", worker.pid) +def _get_config(ini_path): + + try: + import configparser + except ImportError: + import ConfigParser as configparser + try: + config = configparser.RawConfigParser() + config.read(ini_path) + return config + except Exception: + return None + + +def _time_with_offset(memory_usage_check_interval): + return time.time() - random.randint(0, memory_usage_check_interval/2.0) def pre_fork(server, worker): pass +def post_fork(server, worker): + + # memory spec defaults + _memory_max_usage = 0 + _memory_usage_check_interval = 60 + _memory_usage_recovery_threshold = 0.8 + + ini_path = os.path.abspath(server.cfg.paste) + conf = _get_config(ini_path) + + section = 'server:main' + if conf and conf.has_section(section): + + if conf.has_option(section, 'memory_max_usage'): + _memory_max_usage = conf.getint(section, 'memory_max_usage') + + if conf.has_option(section, 'memory_usage_check_interval'): + _memory_usage_check_interval = conf.getint(section, 'memory_usage_check_interval') + + if conf.has_option(section, 'memory_usage_recovery_threshold'): + _memory_usage_recovery_threshold = conf.getfloat(section, 'memory_usage_recovery_threshold') + + worker._memory_max_usage = _memory_max_usage + worker._memory_usage_check_interval = _memory_usage_check_interval + worker._memory_usage_recovery_threshold = _memory_usage_recovery_threshold + + # register memory last check time, with some random offset so we don't recycle all + # at once + worker._last_memory_check_time = _time_with_offset(_memory_usage_check_interval) + + if _memory_max_usage: + server.log.info("[%-10s] WORKER spawned with max memory set at %s", worker.pid, + _format_data_size(_memory_max_usage)) + else: + server.log.info("[%-10s] WORKER spawned", worker.pid) + + def pre_exec(server): server.log.info("Forked child, re-executing.") def on_starting(server): - server.log.info("Server is starting.") + server_lbl = '{} {}'.format(server.proc_name, server.address) + server.log.info("Server %s is starting.", server_lbl) def when_ready(server): - server.log.info("Server is ready. Spawning workers") + server.log.info("Server %s is ready. Spawning workers", server) def on_reload(server): pass +def _format_data_size(size, unit="B", precision=1, binary=True): + """Format a number using SI units (kilo, mega, etc.). + + ``size``: The number as a float or int. + + ``unit``: The unit name in plural form. Examples: "bytes", "B". + + ``precision``: How many digits to the right of the decimal point. Default + is 1. 0 suppresses the decimal point. + + ``binary``: If false, use base-10 decimal prefixes (kilo = K = 1000). + If true, use base-2 binary prefixes (kibi = Ki = 1024). + + ``full_name``: If false (default), use the prefix abbreviation ("k" or + "Ki"). If true, use the full prefix ("kilo" or "kibi"). 
If false, + use abbreviation ("k" or "Ki"). + + """ + + if not binary: + base = 1000 + multiples = ('', 'k', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y') + else: + base = 1024 + multiples = ('', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi', 'Yi') + + sign = "" + if size > 0: + m = int(math.log(size, base)) + elif size < 0: + sign = "-" + size = -size + m = int(math.log(size, base)) + else: + m = 0 + if m > 8: + m = 8 + + if m == 0: + precision = '%.0f' + else: + precision = '%%.%df' % precision + + size = precision % (size / math.pow(base, m)) + + return '%s%s %s%s' % (sign, size.strip(), multiples[m], unit) + + +def _check_memory_usage(worker): + memory_max_usage = worker._memory_max_usage + if not memory_max_usage: + return + + memory_usage_check_interval = worker._memory_usage_check_interval + memory_usage_recovery_threshold = memory_max_usage * worker._memory_usage_recovery_threshold + + elapsed = time.time() - worker._last_memory_check_time + if elapsed > memory_usage_check_interval: + mem_usage = _get_process_rss() + if mem_usage and mem_usage > memory_max_usage: + worker.log.info( + "memory usage %s > %s, forcing gc", + _format_data_size(mem_usage), _format_data_size(memory_max_usage)) + # Try to clean it up by forcing a full collection. + gc.collect() + mem_usage = _get_process_rss() + if mem_usage > memory_usage_recovery_threshold: + # Didn't clean up enough, we'll have to terminate. + worker.log.warning( + "memory usage %s > %s after gc, quitting", + _format_data_size(mem_usage), _format_data_size(memory_max_usage)) + # This will cause worker to auto-restart itself + worker.alive = False + worker._last_memory_check_time = time.time() + + def worker_int(worker): - worker.log.info("[<%-10s>] worker received INT or QUIT signal", worker.pid) + worker.log.info("[%-10s] worker received INT or QUIT signal", worker.pid) # get traceback info, on worker crash id2name = dict([(th.ident, th.name) for th in threading.enumerate()]) @@ -110,15 +218,15 @@ def worker_int(worker): def worker_abort(worker): - worker.log.info("[<%-10s>] worker received SIGABRT signal", worker.pid) + worker.log.info("[%-10s] worker received SIGABRT signal", worker.pid) def worker_exit(server, worker): - worker.log.info("[<%-10s>] worker exit", worker.pid) + worker.log.info("[%-10s] worker exit", worker.pid) def child_exit(server, worker): - worker.log.info("[<%-10s>] worker child exit", worker.pid) + worker.log.info("[%-10s] worker child exit", worker.pid) def pre_request(worker, req): @@ -129,9 +237,12 @@ def pre_request(worker, req): def post_request(worker, req, environ, resp): total_time = time.time() - worker.start_time + # Gunicorn sometimes has problems with reading the status_code + status_code = getattr(resp, 'status_code', '') worker.log.debug( - "GNCRN POST WORKER [cnt:%s]: %s %s resp: %s, Load Time: %.3fs", - worker.nr, req.method, req.path, resp.status_code, total_time) + "GNCRN POST WORKER [cnt:%s]: %s %s resp: %s, Load Time: %.4fs", + worker.nr, req.method, req.path, status_code, total_time) + _check_memory_usage(worker) class RhodeCodeLogger(Logger): diff --git a/configs/production.ini b/configs/production.ini --- a/configs/production.ini +++ b/configs/production.ini @@ -1,24 +1,22 @@ - +## -*- coding: utf-8 -*- -################################################################################ -## RHODECODE COMMUNITY EDITION CONFIGURATION ## -################################################################################ +; ######################################### +; RHODECODE COMMUNITY EDITION CONFIGURATION +; 
######################################### [DEFAULT] -## Debug flag sets all loggers to debug, and enables request tracking +; Debug flag sets all loggers to debug, and enables request tracking debug = false -################################################################################ -## EMAIL CONFIGURATION ## -## Uncomment and replace with the email address which should receive ## -## any error reports after an application crash ## -## Additionally these settings will be used by the RhodeCode mailing system ## -################################################################################ +; ######################################################################## +; EMAIL CONFIGURATION +; These settings will be used by the RhodeCode mailing system +; ######################################################################## -## prefix all emails subjects with given prefix, helps filtering out emails +; prefix all emails subjects with given prefix, helps filtering out emails #email_prefix = [RhodeCode] -## email FROM address all mails will be sent +; email FROM address all mails will be sent #app_email_from = rhodecode-noreply@localhost #smtp_server = mail.server.com @@ -29,168 +27,200 @@ debug = false #smtp_use_ssl = true [server:main] -## COMMON ## +; COMMON HOST/IP CONFIG host = 127.0.0.1 port = 5000 -########################################################### -## WAITRESS WSGI SERVER - Recommended for Development #### -########################################################### + +; ########################### +; GUNICORN APPLICATION SERVER +; ########################### + +; run with gunicorn --log-config rhodecode.ini --paste rhodecode.ini + +; Module to use, this setting shouldn't be changed +use = egg:gunicorn#main + +; Sets the number of process workers. More workers means more concurrent connections +; RhodeCode can handle at the same time. Each additional worker also it increases +; memory usage as each has it's own set of caches. +; Recommended value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers, but no more +; than 8-10 unless for really big deployments .e.g 700-1000 users. +; `instance_id = *` must be set in the [app:main] section below (which is the default) +; when using more than 1 worker. +workers = 2 + +; Gunicorn access log level +loglevel = info + +; Process name visible in process list +proc_name = rhodecode + +; Type of worker class, one of `sync`, `gevent` +; Recommended type is `gevent` +worker_class = gevent + +; The maximum number of simultaneous clients per worker. Valid only for gevent +worker_connections = 10 + +; Max number of requests that worker will handle before being gracefully restarted. +; Prevents memory leaks, jitter adds variability so not all workers are restarted at once. +max_requests = 1000 +max_requests_jitter = 30 -#use = egg:waitress#main -## number of worker threads -#threads = 5 -## MAX BODY SIZE 100GB -#max_request_body_size = 107374182400 -## Use poll instead of select, fixes file descriptors limits problems. -## May not work on old windows systems. -#asyncore_use_poll = true +; Amount of time a worker can spend with handling a request before it +; gets killed and restarted. By default set to 21600 (6hrs) +; Examples: 1800 (30min), 3600 (1hr), 7200 (2hr), 43200 (12h) +timeout = 21600 + +; The maximum size of HTTP request line in bytes. +; 0 for unlimited +limit_request_line = 0 + +; Limit the number of HTTP headers fields in a request. +; By default this value is 100 and can't be larger than 32768. 
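+; Raised to the allowed maximum here so that requests carrying many header
+; fields (for example headers added by reverse proxies or API clients) are not rejected.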
+limit_request_fields = 32768 + +; Limit the allowed size of an HTTP request header field. +; Value is a positive number or 0. +; Setting it to 0 will allow unlimited header field sizes. +limit_request_field_size = 0 + +; Timeout for graceful workers restart. +; After receiving a restart signal, workers have this much time to finish +; serving requests. Workers still alive after the timeout (starting from the +; receipt of the restart signal) are force killed. +; Examples: 1800 (30min), 3600 (1hr), 7200 (2hr), 43200 (12h) +graceful_timeout = 3600 + +# The number of seconds to wait for requests on a Keep-Alive connection. +# Generally set in the 1-5 seconds range. +keepalive = 2 + +; Maximum memory usage that each worker can use before it will receive a +; graceful restart signal 0 = memory monitoring is disabled +; Examples: 268435456 (256MB), 536870912 (512MB) +; 1073741824 (1GB), 2147483648 (2GB), 4294967296 (4GB) +memory_max_usage = 0 + +; How often in seconds to check for memory usage for each gunicorn worker +memory_usage_check_interval = 60 + +; Threshold value for which we don't recycle worker if GarbageCollection +; frees up enough resources. Before each restart we try to run GC on worker +; in case we get enough free memory after that, restart will not happen. +memory_usage_recovery_threshold = 0.8 -########################## -## GUNICORN WSGI SERVER ## -########################## -## run with gunicorn --log-config rhodecode.ini --paste rhodecode.ini - -use = egg:gunicorn#main -## Sets the number of process workers. More workers means more concurrent connections -## RhodeCode can handle at the same time. Each additional worker also it increases -## memory usage as each has it's own set of caches. -## Recommended value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers, but no more -## than 8-10 unless for really big deployments .e.g 700-1000 users. -## `instance_id = *` must be set in the [app:main] section below (which is the default) -## when using more than 1 worker. -workers = 2 -## process name visible in process list -proc_name = rhodecode -## type of worker class, one of sync, gevent -## recommended for bigger setup is using of of other than sync one -worker_class = gevent -## The maximum number of simultaneous clients. Valid only for Gevent -worker_connections = 10 -## max number of requests that worker will handle before being gracefully -## restarted, could prevent memory leaks -max_requests = 1000 -max_requests_jitter = 30 -## amount of time a worker can spend with handling a request before it -## gets killed and restarted. Set to 6hrs -timeout = 21600 - - -## prefix middleware for RhodeCode. -## recommended when using proxy setup. -## allows to set RhodeCode under a prefix in server. -## eg https://server.com/custom_prefix. Enable `filter-with =` option below as well. -## And set your prefix like: `prefix = /custom_prefix` -## be sure to also set beaker.session.cookie_path = /custom_prefix if you need -## to make your cookies only work on prefix url +; Prefix middleware for RhodeCode. +; recommended when using proxy setup. +; allows to set RhodeCode under a prefix in server. +; eg https://server.com/custom_prefix. Enable `filter-with =` option below as well. 
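+; (`filter-with = proxy-prefix` belongs in the [app:main] section, while the
+; filter itself is defined in the [filter:proxy-prefix] section further down).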
+; And set your prefix like: `prefix = /custom_prefix` +; be sure to also set beaker.session.cookie_path = /custom_prefix if you need +; to make your cookies only work on prefix url [filter:proxy-prefix] use = egg:PasteDeploy#prefix prefix = / [app:main] -## The %(here)s variable will be replaced with the absolute path of parent directory -## of this file -## In addition ENVIRONMENT variables usage is possible, e.g -## sqlalchemy.db1.url = {ENV_RC_DB_URL} +; The %(here)s variable will be replaced with the absolute path of parent directory +; of this file +; In addition ENVIRONMENT variables usage is possible, e.g +; sqlalchemy.db1.url = {ENV_RC_DB_URL} use = egg:rhodecode-enterprise-ce -## enable proxy prefix middleware, defined above +; enable proxy prefix middleware, defined above #filter-with = proxy-prefix -## encryption key used to encrypt social plugin tokens, -## remote_urls with credentials etc, if not set it defaults to -## `beaker.session.secret` +; encryption key used to encrypt social plugin tokens, +; remote_urls with credentials etc, if not set it defaults to +; `beaker.session.secret` #rhodecode.encrypted_values.secret = -## decryption strict mode (enabled by default). It controls if decryption raises -## `SignatureVerificationError` in case of wrong key, or damaged encryption data. +; decryption strict mode (enabled by default). It controls if decryption raises +; `SignatureVerificationError` in case of wrong key, or damaged encryption data. #rhodecode.encrypted_values.strict = false -## Pick algorithm for encryption. Either fernet (more secure) or aes (default) -## fernet is safer, and we strongly recommend switching to it. -## Due to backward compatibility aes is used as default. +; Pick algorithm for encryption. Either fernet (more secure) or aes (default) +; fernet is safer, and we strongly recommend switching to it. +; Due to backward compatibility aes is used as default. #rhodecode.encrypted_values.algorithm = fernet -## return gzipped responses from RhodeCode (static files/application) +; Return gzipped responses from RhodeCode (static files/application) gzip_responses = false -## auto-generate javascript routes file on startup +; Auto-generate javascript routes file on startup generate_js_files = false -## System global default language. -## All available languages: en(default), be, de, es, fr, it, ja, pl, pt, ru, zh +; System global default language. +; All available languages: en (default), be, de, es, fr, it, ja, pl, pt, ru, zh lang = en -## Perform a full repository scan and import on each server start. -## Settings this to true could lead to very long startup time. +; Perform a full repository scan and import on each server start. +; Settings this to true could lead to very long startup time. startup.import_repos = false -## Uncomment and set this path to use archive download cache. -## Once enabled, generated archives will be cached at this location -## and served from the cache during subsequent requests for the same archive of -## the repository. +; Uncomment and set this path to use archive download cache. +; Once enabled, generated archives will be cached at this location +; and served from the cache during subsequent requests for the same archive of +; the repository. #archive_cache_dir = /tmp/tarballcache -## URL at which the application is running. This is used for Bootstrapping -## requests in context when no web request is available. Used in ishell, or -## SSH calls. Set this for events to receive proper url for SSH calls. 
+; URL at which the application is running. This is used for Bootstrapping +; requests in context when no web request is available. Used in ishell, or +; SSH calls. Set this for events to receive proper url for SSH calls. app.base_url = http://rhodecode.local -## Unique application ID. Should be a random unique string for security. +; Unique application ID. Should be a random unique string for security. app_instance_uuid = rc-production -## Cut off limit for large diffs (size in bytes). If overall diff size on -## commit, or pull request exceeds this limit this diff will be displayed -## partially. E.g 512000 == 512Kb +; Cut off limit for large diffs (size in bytes). If overall diff size on +; commit, or pull request exceeds this limit this diff will be displayed +; partially. E.g 512000 == 512Kb cut_off_limit_diff = 512000 -## Cut off limit for large files inside diffs (size in bytes). Each individual -## file inside diff which exceeds this limit will be displayed partially. -## E.g 128000 == 128Kb +; Cut off limit for large files inside diffs (size in bytes). Each individual +; file inside diff which exceeds this limit will be displayed partially. +; E.g 128000 == 128Kb cut_off_limit_file = 128000 -## use cached version of vcs repositories everywhere. Recommended to be `true` +; Use cached version of vcs repositories everywhere. Recommended to be `true` vcs_full_cache = true -## Force https in RhodeCode, fixes https redirects, assumes it's always https. -## Normally this is controlled by proper http flags sent from http server +; Force https in RhodeCode, fixes https redirects, assumes it's always https. +; Normally this is controlled by proper flags sent from http server such as Nginx or Apache force_https = false -## use Strict-Transport-Security headers +; use Strict-Transport-Security headers use_htsts = false -## git rev filter option, --all is the default filter, if you need to -## hide all refs in changelog switch this to --branches --tags -git_rev_filter = --branches --tags - -# Set to true if your repos are exposed using the dumb protocol +; Set to true if your repos are exposed using the dumb protocol git_update_server_info = false -## RSS/ATOM feed options +; RSS/ATOM feed options rss_cut_off_limit = 256000 rss_items_per_page = 10 rss_include_diff = false -## gist URL alias, used to create nicer urls for gist. This should be an -## url that does rewrites to _admin/gists/{gistid}. -## example: http://gist.rhodecode.org/{gistid}. Empty means use the internal -## RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid} +; gist URL alias, used to create nicer urls for gist. This should be an +; url that does rewrites to _admin/gists/{gistid}. +; example: http://gist.rhodecode.org/{gistid}. Empty means use the internal +; RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid} gist_alias_url = -## List of views (using glob pattern syntax) that AUTH TOKENS could be -## used for access. -## Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it -## came from the the logged in user who own this authentication token. -## Additionally @TOKEN syntax can be used to bound the view to specific -## authentication token. Such view would be only accessible when used together -## with this authentication token -## -## list of all views can be found under `/_admin/permissions/auth_token_access` -## The list should be "," separated and on a single line. 
-## -## Most common views to enable: +; List of views (using glob pattern syntax) that AUTH TOKENS could be +; used for access. +; Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it +; came from the the logged in user who own this authentication token. +; Additionally @TOKEN syntax can be used to bound the view to specific +; authentication token. Such view would be only accessible when used together +; with this authentication token +; list of all views can be found under `/_admin/permissions/auth_token_access` +; The list should be "," separated and on a single line. +; Most common views to enable: + # RepoCommitsView:repo_commit_download # RepoCommitsView:repo_commit_patch # RepoCommitsView:repo_commit_raw @@ -201,164 +231,194 @@ gist_alias_url = # GistView:* api_access_controllers_whitelist = -## Default encoding used to convert from and to unicode -## can be also a comma separated list of encoding in case of mixed encodings +; Default encoding used to convert from and to unicode +; can be also a comma separated list of encoding in case of mixed encodings default_encoding = UTF-8 -## instance-id prefix -## a prefix key for this instance used for cache invalidation when running -## multiple instances of RhodeCode, make sure it's globally unique for -## all running RhodeCode instances. Leave empty if you don't use it +; instance-id prefix +; a prefix key for this instance used for cache invalidation when running +; multiple instances of RhodeCode, make sure it's globally unique for +; all running RhodeCode instances. Leave empty if you don't use it instance_id = -## Fallback authentication plugin. Set this to a plugin ID to force the usage -## of an authentication plugin also if it is disabled by it's settings. -## This could be useful if you are unable to log in to the system due to broken -## authentication settings. Then you can enable e.g. the internal RhodeCode auth -## module to log in again and fix the settings. -## -## Available builtin plugin IDs (hash is part of the ID): -## egg:rhodecode-enterprise-ce#rhodecode -## egg:rhodecode-enterprise-ce#pam -## egg:rhodecode-enterprise-ce#ldap -## egg:rhodecode-enterprise-ce#jasig_cas -## egg:rhodecode-enterprise-ce#headers -## egg:rhodecode-enterprise-ce#crowd +; Fallback authentication plugin. Set this to a plugin ID to force the usage +; of an authentication plugin also if it is disabled by it's settings. +; This could be useful if you are unable to log in to the system due to broken +; authentication settings. Then you can enable e.g. the internal RhodeCode auth +; module to log in again and fix the settings. +; Available builtin plugin IDs (hash is part of the ID): +; egg:rhodecode-enterprise-ce#rhodecode +; egg:rhodecode-enterprise-ce#pam +; egg:rhodecode-enterprise-ce#ldap +; egg:rhodecode-enterprise-ce#jasig_cas +; egg:rhodecode-enterprise-ce#headers +; egg:rhodecode-enterprise-ce#crowd + #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode -## alternative return HTTP header for failed authentication. Default HTTP -## response is 401 HTTPUnauthorized. Currently HG clients have troubles with -## handling that causing a series of failed authentication calls. -## Set this variable to 403 to return HTTPForbidden, or any other HTTP code -## This will be served instead of default 401 on bad authentication +; Flag to control loading of legacy plugins in py:/path format +auth_plugin.import_legacy_plugins = true + +; alternative return HTTP header for failed authentication. 
Default HTTP +; response is 401 HTTPUnauthorized. Currently HG clients have troubles with +; handling that causing a series of failed authentication calls. +; Set this variable to 403 to return HTTPForbidden, or any other HTTP code +; This will be served instead of default 401 on bad authentication auth_ret_code = -## use special detection method when serving auth_ret_code, instead of serving -## ret_code directly, use 401 initially (Which triggers credentials prompt) -## and then serve auth_ret_code to clients +; use special detection method when serving auth_ret_code, instead of serving +; ret_code directly, use 401 initially (Which triggers credentials prompt) +; and then serve auth_ret_code to clients auth_ret_code_detection = false -## locking return code. When repository is locked return this HTTP code. 2XX -## codes don't break the transactions while 4XX codes do +; locking return code. When repository is locked return this HTTP code. 2XX +; codes don't break the transactions while 4XX codes do lock_ret_code = 423 -## allows to change the repository location in settings page +; allows to change the repository location in settings page allow_repo_location_change = true -## allows to setup custom hooks in settings page +; allows to setup custom hooks in settings page allow_custom_hooks_settings = true -## Generated license token required for EE edition license. -## New generated token value can be found in Admin > settings > license page. +; Generated license token required for EE edition license. +; New generated token value can be found in Admin > settings > license page. license_token = -## supervisor connection uri, for managing supervisor and logs. +; This flag hides sensitive information on the license page such as token, and license data +license.hide_license_info = false + +; supervisor connection uri, for managing supervisor and logs. supervisor.uri = -## supervisord group name/id we only want this RC instance to handle + +; supervisord group name/id we only want this RC instance to handle supervisor.group_id = prod -## Display extended labs settings +; Display extended labs settings labs_settings_active = true -## Custom exception store path, defaults to TMPDIR -## This is used to store exception from RhodeCode in shared directory +; Custom exception store path, defaults to TMPDIR +; This is used to store exception from RhodeCode in shared directory #exception_tracker.store_path = -## File store configuration. This is used to store and serve uploaded files +; File store configuration. 
This is used to store and serve uploaded files file_store.enabled = true -## Storage backend, available options are: local + +; Storage backend, available options are: local file_store.backend = local -## path to store the uploaded binaries + +; path to store the uploaded binaries file_store.storage_path = %(here)s/data/file_store -#################################### -### CELERY CONFIG #### -#################################### -## run: /path/to/celery worker \ -## -E --beat --app rhodecode.lib.celerylib.loader \ -## --scheduler rhodecode.lib.celerylib.scheduler.RcScheduler \ -## --loglevel DEBUG --ini /path/to/rhodecode.ini +; ############# +; CELERY CONFIG +; ############# + +; manually run celery: /path/to/celery worker -E --beat --app rhodecode.lib.celerylib.loader --scheduler rhodecode.lib.celerylib.scheduler.RcScheduler --loglevel DEBUG --ini /path/to/rhodecode.ini use_celery = false -## connection url to the message broker (default rabbitmq) -celery.broker_url = amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost +; connection url to the message broker (default redis) +celery.broker_url = redis://localhost:6379/8 -## maximum tasks to execute before worker restart +; rabbitmq example +#celery.broker_url = amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost + +; maximum tasks to execute before worker restart celery.max_tasks_per_child = 100 -## tasks will never be sent to the queue, but executed locally instead. +; tasks will never be sent to the queue, but executed locally instead. celery.task_always_eager = false -##################################### -### DOGPILE CACHE #### -##################################### -## Default cache dir for caches. Putting this into a ramdisk -## can boost performance, eg. /tmpfs/data_ramdisk, however this directory might require -## large amount of space +; ############# +; DOGPILE CACHE +; ############# + +; Default cache dir for caches. Putting this into a ramdisk can boost performance. +; eg. /tmpfs/data_ramdisk, however this directory might require large amount of space cache_dir = %(here)s/data -## `cache_perms` cache settings for permission tree, auth TTL. +; ********************************************* +; `sql_cache_short` cache for heavy SQL queries +; Only supported backend is `memory_lru` +; ********************************************* +rc_cache.sql_cache_short.backend = dogpile.cache.rc.memory_lru +rc_cache.sql_cache_short.expiration_time = 30 + + +; ***************************************************** +; `cache_repo_longterm` cache for repo object instances +; Only supported backend is `memory_lru` +; ***************************************************** +rc_cache.cache_repo_longterm.backend = dogpile.cache.rc.memory_lru +; by default we use 30 Days, cache is still invalidated on push +rc_cache.cache_repo_longterm.expiration_time = 2592000 +; max items in LRU cache, set to smaller number to save memory, and expire last used caches +rc_cache.cache_repo_longterm.max_size = 10000 + + +; ************************************************* +; `cache_perms` cache for permission tree, auth TTL +; ************************************************* rc_cache.cache_perms.backend = dogpile.cache.rc.file_namespace rc_cache.cache_perms.expiration_time = 300 +; file cache store path. 
Defaults to `cache_dir =` value or tempdir if both values are not set +#rc_cache.cache_perms.arguments.filename = /tmp/cache_perms.db -## alternative `cache_perms` redis backend with distributed lock +; alternative `cache_perms` redis backend with distributed lock #rc_cache.cache_perms.backend = dogpile.cache.rc.redis #rc_cache.cache_perms.expiration_time = 300 -## redis_expiration_time needs to be greater then expiration_time + +; redis_expiration_time needs to be greater then expiration_time #rc_cache.cache_perms.arguments.redis_expiration_time = 7200 -#rc_cache.cache_perms.arguments.socket_timeout = 30 + #rc_cache.cache_perms.arguments.host = localhost #rc_cache.cache_perms.arguments.port = 6379 #rc_cache.cache_perms.arguments.db = 0 -## more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends +#rc_cache.cache_perms.arguments.socket_timeout = 30 +; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends #rc_cache.cache_perms.arguments.distributed_lock = true -## `cache_repo` cache settings for FileTree, Readme, RSS FEEDS + +; *************************************************** +; `cache_repo` cache for file tree, Readme, RSS FEEDS +; *************************************************** rc_cache.cache_repo.backend = dogpile.cache.rc.file_namespace rc_cache.cache_repo.expiration_time = 2592000 +; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set +#rc_cache.cache_repo.arguments.filename = /tmp/cache_repo.db -## alternative `cache_repo` redis backend with distributed lock +; alternative `cache_repo` redis backend with distributed lock #rc_cache.cache_repo.backend = dogpile.cache.rc.redis #rc_cache.cache_repo.expiration_time = 2592000 -## redis_expiration_time needs to be greater then expiration_time + +; redis_expiration_time needs to be greater then expiration_time #rc_cache.cache_repo.arguments.redis_expiration_time = 2678400 -#rc_cache.cache_repo.arguments.socket_timeout = 30 + #rc_cache.cache_repo.arguments.host = localhost #rc_cache.cache_repo.arguments.port = 6379 #rc_cache.cache_repo.arguments.db = 1 -## more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends +#rc_cache.cache_repo.arguments.socket_timeout = 30 +; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends #rc_cache.cache_repo.arguments.distributed_lock = true -## cache settings for SQL queries, this needs to use memory type backend -rc_cache.sql_cache_short.backend = dogpile.cache.rc.memory_lru -rc_cache.sql_cache_short.expiration_time = 30 -## `cache_repo_longterm` cache for repo object instances, this needs to use memory -## type backend as the objects kept are not pickle serializable -rc_cache.cache_repo_longterm.backend = dogpile.cache.rc.memory_lru -## by default we use 96H, this is using invalidation on push anyway -rc_cache.cache_repo_longterm.expiration_time = 345600 -## max items in LRU cache, reduce this number to save memory, and expire last used -## cached objects -rc_cache.cache_repo_longterm.max_size = 10000 +; ############## +; BEAKER SESSION +; ############## - -#################################### -### BEAKER SESSION #### -#################################### - -## .session.type is type of storage options for the session, current allowed -## types are file, ext:memcached, ext:redis, ext:database, and memory (default). +; beaker.session.type is type of storage options for the logged users sessions. 
Current allowed +; types are file, ext:redis, ext:database, ext:memcached, and memory (default if not specified). +; Fastest ones are Redis and ext:database beaker.session.type = file beaker.session.data_dir = %(here)s/data/sessions -## redis sessions +; Redis based sessions #beaker.session.type = ext:redis #beaker.session.url = redis://127.0.0.1:6379/2 -## db based session, fast, and allows easy management over logged in users +; DB based session, fast, and allows easy management over logged in users #beaker.session.type = ext:database #beaker.session.table_name = db_session #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode @@ -370,265 +430,275 @@ beaker.session.key = rhodecode beaker.session.secret = production-rc-uytcxaz beaker.session.lock_dir = %(here)s/data/sessions/lock -## Secure encrypted cookie. Requires AES and AES python libraries -## you must disable beaker.session.secret to use this +; Secure encrypted cookie. Requires AES and AES python libraries +; you must disable beaker.session.secret to use this #beaker.session.encrypt_key = key_for_encryption #beaker.session.validate_key = validation_key -## sets session as invalid(also logging out user) if it haven not been -## accessed for given amount of time in seconds +; Sets session as invalid (also logging out user) if it haven not been +; accessed for given amount of time in seconds beaker.session.timeout = 2592000 beaker.session.httponly = true -## Path to use for the cookie. Set to prefix if you use prefix middleware + +; Path to use for the cookie. Set to prefix if you use prefix middleware #beaker.session.cookie_path = /custom_prefix -## uncomment for https secure cookie +; Set https secure cookie beaker.session.secure = false -## auto save the session to not to use .save() -beaker.session.auto = false - -## default cookie expiration time in seconds, set to `true` to set expire -## at browser close +; default cookie expiration time in seconds, set to `true` to set expire +; at browser close #beaker.session.cookie_expires = 3600 -################################### -## SEARCH INDEXING CONFIGURATION ## -################################### -## Full text search indexer is available in rhodecode-tools under -## `rhodecode-tools index` command +; ############################# +; SEARCH INDEXING CONFIGURATION +; ############################# -## WHOOSH Backend, doesn't require additional services to run -## it works good with few dozen repos +; Full text search indexer is available in rhodecode-tools under +; `rhodecode-tools index` command + +; WHOOSH Backend, doesn't require additional services to run +; it works good with few dozen repos search.module = rhodecode.lib.index.whoosh search.location = %(here)s/data/index -######################################## -### CHANNELSTREAM CONFIG #### -######################################## -## channelstream enables persistent connections and live notification -## in the system. It's also used by the chat system +; #################### +; CHANNELSTREAM CONFIG +; #################### + +; channelstream enables persistent connections and live notification +; in the system. It's also used by the chat system channelstream.enabled = false -## server address for channelstream server on the backend +; server address for channelstream server on the backend channelstream.server = 127.0.0.1:9800 -## location of the channelstream server from outside world -## use ws:// for http or wss:// for https. 
This address needs to be handled -## by external HTTP server such as Nginx or Apache -## see Nginx/Apache configuration examples in our docs +; location of the channelstream server from outside world +; use ws:// for http or wss:// for https. This address needs to be handled +; by external HTTP server such as Nginx or Apache +; see Nginx/Apache configuration examples in our docs channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream channelstream.secret = secret channelstream.history.location = %(here)s/channelstream_history -## Internal application path that Javascript uses to connect into. -## If you use proxy-prefix the prefix should be added before /_channelstream +; Internal application path that Javascript uses to connect into. +; If you use proxy-prefix the prefix should be added before /_channelstream channelstream.proxy_path = /_channelstream -################################### -## APPENLIGHT CONFIG ## -################################### +; ############################## +; MAIN RHODECODE DATABASE CONFIG +; ############################## + +#sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30 +#sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode +#sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode?charset=utf8 +; pymysql is an alternative driver for MySQL, use in case of problems with default one +#sqlalchemy.db1.url = mysql+pymysql://root:qweqwe@localhost/rhodecode + +sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode + +; see sqlalchemy docs for other advanced settings +; print the sql statements to output +sqlalchemy.db1.echo = false + +; recycle the connections after this amount of seconds +sqlalchemy.db1.pool_recycle = 3600 +sqlalchemy.db1.convert_unicode = true + +; the number of connections to keep open inside the connection pool. +; 0 indicates no limit +#sqlalchemy.db1.pool_size = 5 + +; The number of connections to allow in connection pool "overflow", that is +; connections that can be opened above and beyond the pool_size setting, +; which defaults to five. +#sqlalchemy.db1.max_overflow = 10 + +; Connection check ping, used to detect broken database connections +; could be enabled to better handle cases if MySQL has gone away errors +#sqlalchemy.db1.ping_connection = true + +; ########## +; VCS CONFIG +; ########## +vcs.server.enable = true +vcs.server = localhost:9900 + +; Web server connectivity protocol, responsible for web based VCS operations +; Available protocols are: +; `http` - use http-rpc backend (default) +vcs.server.protocol = http + +; Push/Pull operations protocol, available options are: +; `http` - use http-rpc backend (default) +vcs.scm_app_implementation = http + +; Push/Pull operations hooks protocol, available options are: +; `http` - use http-rpc backend (default) +vcs.hooks.protocol = http + +; Host on which this instance is listening for hooks. If vcsserver is in other location +; this should be adjusted. +vcs.hooks.host = 127.0.0.1 + +; Start VCSServer with this instance as a subprocess, useful for development +vcs.start_server = false + +; List of enabled VCS backends, available options are: +; `hg` - mercurial +; `git` - git +; `svn` - subversion +vcs.backends = hg, git, svn + +; Wait this number of seconds before killing connection to the vcsserver +vcs.connection_timeout = 3600 + +; Compatibility version when creating SVN repositories. Defaults to newest version when commented out. 
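+; Only needed when the created repositories must remain usable by older
+; Subversion installations.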
+; Available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible +#vcs.svn.compatible_version = pre-1.8-compatible + -## Appenlight is tailored to work with RhodeCode, see -## http://appenlight.com for details how to obtain an account +; #################################################### +; Subversion proxy support (mod_dav_svn) +; Maps RhodeCode repo groups into SVN paths for Apache +; #################################################### + +; Enable or disable the config file generation. +svn.proxy.generate_config = false + +; Generate config file with `SVNListParentPath` set to `On`. +svn.proxy.list_parent_path = true + +; Set location and file name of generated config file. +svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf + +; alternative mod_dav config template. This needs to be a valid mako template +; Example template can be found in the source code: +; rhodecode/apps/svn_support/templates/mod-dav-svn.conf.mako +#svn.proxy.config_template = ~/.rccontrol/enterprise-1/custom_svn_conf.mako + +; Used as a prefix to the `Location` block in the generated config file. +; In most cases it should be set to `/`. +svn.proxy.location_root = / + +; Command to reload the mod dav svn configuration on change. +; Example: `/etc/init.d/apache2 reload` or /home/USER/apache_reload.sh +; Make sure user who runs RhodeCode process is allowed to reload Apache +#svn.proxy.reload_cmd = /etc/init.d/apache2 reload + +; If the timeout expires before the reload command finishes, the command will +; be killed. Setting it to zero means no timeout. Defaults to 10 seconds. +#svn.proxy.reload_timeout = 10 + +; #################### +; SSH Support Settings +; #################### -## Appenlight integration enabled +; Defines if a custom authorized_keys file should be created and written on +; any change user ssh keys. Setting this to false also disables possibility +; of adding SSH keys by users from web interface. Super admins can still +; manage SSH Keys. +ssh.generate_authorized_keyfile = false + +; Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding` +# ssh.authorized_keys_ssh_opts = + +; Path to the authorized_keys file where the generate entries are placed. +; It is possible to have multiple key files specified in `sshd_config` e.g. +; AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode +ssh.authorized_keys_file_path = ~/.ssh/authorized_keys_rhodecode + +; Command to execute the SSH wrapper. The binary is available in the +; RhodeCode installation directory. +; e.g ~/.rccontrol/community-1/profile/bin/rc-ssh-wrapper +ssh.wrapper_cmd = ~/.rccontrol/community-1/rc-ssh-wrapper + +; Allow shell when executing the ssh-wrapper command +ssh.wrapper_cmd_allow_shell = false + +; Enables logging, and detailed output send back to the client during SSH +; operations. Useful for debugging, shouldn't be used in production. +ssh.enable_debug_logging = false + +; Paths to binary executable, by default they are the names, but we can +; override them if we want to use a custom one +ssh.executable.hg = ~/.rccontrol/vcsserver-1/profile/bin/hg +ssh.executable.git = ~/.rccontrol/vcsserver-1/profile/bin/git +ssh.executable.svn = ~/.rccontrol/vcsserver-1/profile/bin/svnserve + +; Enables SSH key generator web interface. Disabling this still allows users +; to add their own keys. 
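+; Keys generated locally, e.g. with ssh-keygen, can still be pasted in manually.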
+ssh.enable_ui_key_generator = true + + +; ################# +; APPENLIGHT CONFIG +; ################# + +; Appenlight is tailored to work with RhodeCode, see +; http://appenlight.rhodecode.com for details how to obtain an account + +; Appenlight integration enabled appenlight = false appenlight.server_url = https://api.appenlight.com appenlight.api_key = YOUR_API_KEY #appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5 -## used for JS client +; used for JS client appenlight.api_public_key = YOUR_API_PUBLIC_KEY -## TWEAK AMOUNT OF INFO SENT HERE +; TWEAK AMOUNT OF INFO SENT HERE -## enables 404 error logging (default False) +; enables 404 error logging (default False) appenlight.report_404 = false -## time in seconds after request is considered being slow (default 1) +; time in seconds after request is considered being slow (default 1) appenlight.slow_request_time = 1 -## record slow requests in application -## (needs to be enabled for slow datastore recording and time tracking) +; record slow requests in application +; (needs to be enabled for slow datastore recording and time tracking) appenlight.slow_requests = true -## enable hooking to application loggers +; enable hooking to application loggers appenlight.logging = true -## minimum log level for log capture +; minimum log level for log capture appenlight.logging.level = WARNING -## send logs only from erroneous/slow requests -## (saves API quota for intensive logging) +; send logs only from erroneous/slow requests +; (saves API quota for intensive logging) appenlight.logging_on_error = false -## list of additional keywords that should be grabbed from environ object -## can be string with comma separated list of words in lowercase -## (by default client will always send following info: -## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that -## start with HTTP* this list be extended with additional keywords here +; list of additional keywords that should be grabbed from environ object +; can be string with comma separated list of words in lowercase +; (by default client will always send following info: +; 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that +; start with HTTP* this list be extended with additional keywords here appenlight.environ_keys_whitelist = -## list of keywords that should be blanked from request object -## can be string with comma separated list of words in lowercase -## (by default client will always blank keys that contain following words -## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf' -## this list be extended with additional keywords set here +; list of keywords that should be blanked from request object +; can be string with comma separated list of words in lowercase +; (by default client will always blank keys that contain following words +; 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf' +; this list be extended with additional keywords set here appenlight.request_keys_blacklist = -## list of namespaces that should be ignores when gathering log entries -## can be string with comma separated list of namespaces -## (by default the client ignores own entries: appenlight_client.client) +; list of namespaces that should be ignores when gathering log entries +; can be string with comma separated list of namespaces +; (by default the client ignores own entries: appenlight_client.client) appenlight.log_namespace_blacklist = - -########################################### -### MAIN RHODECODE DATABASE CONFIG ### 
-########################################### -#sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30 -#sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode -#sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode?charset=utf8 -# pymysql is an alternative driver for MySQL, use in case of problems with default one -#sqlalchemy.db1.url = mysql+pymysql://root:qweqwe@localhost/rhodecode - -sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode - -# see sqlalchemy docs for other advanced settings - -## print the sql statements to output -sqlalchemy.db1.echo = false -## recycle the connections after this amount of seconds -sqlalchemy.db1.pool_recycle = 3600 -sqlalchemy.db1.convert_unicode = true - -## the number of connections to keep open inside the connection pool. -## 0 indicates no limit -#sqlalchemy.db1.pool_size = 5 - -## the number of connections to allow in connection pool "overflow", that is -## connections that can be opened above and beyond the pool_size setting, -## which defaults to five. -#sqlalchemy.db1.max_overflow = 10 - -## Connection check ping, used to detect broken database connections -## could be enabled to better handle cases if MySQL has gone away errors -#sqlalchemy.db1.ping_connection = true - -################## -### VCS CONFIG ### -################## -vcs.server.enable = true -vcs.server = localhost:9900 - -## Web server connectivity protocol, responsible for web based VCS operations -## Available protocols are: -## `http` - use http-rpc backend (default) -vcs.server.protocol = http - -## Push/Pull operations protocol, available options are: -## `http` - use http-rpc backend (default) -vcs.scm_app_implementation = http - -## Push/Pull operations hooks protocol, available options are: -## `http` - use http-rpc backend (default) -vcs.hooks.protocol = http - -## Host on which this instance is listening for hooks. If vcsserver is in other location -## this should be adjusted. -vcs.hooks.host = 127.0.0.1 - -vcs.server.log_level = info -## Start VCSServer with this instance as a subprocess, useful for development -vcs.start_server = false - -## List of enabled VCS backends, available options are: -## `hg` - mercurial -## `git` - git -## `svn` - subversion -vcs.backends = hg, git, svn - -vcs.connection_timeout = 3600 -## Compatibility version when creating SVN repositories. Defaults to newest version when commented out. -## Available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible -#vcs.svn.compatible_version = pre-1.8-compatible - - -############################################################ -### Subversion proxy support (mod_dav_svn) ### -### Maps RhodeCode repo groups into SVN paths for Apache ### -############################################################ -## Enable or disable the config file generation. -svn.proxy.generate_config = false -## Generate config file with `SVNListParentPath` set to `On`. -svn.proxy.list_parent_path = true -## Set location and file name of generated config file. -svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf -## alternative mod_dav config template. This needs to be a mako template -#svn.proxy.config_template = ~/.rccontrol/enterprise-1/custom_svn_conf.mako -## Used as a prefix to the `Location` block in the generated config file. -## In most cases it should be set to `/`. -svn.proxy.location_root = / -## Command to reload the mod dav svn configuration on change. 
-## Example: `/etc/init.d/apache2 reload` or /home/USER/apache_reload.sh -## Make sure user who runs RhodeCode process is allowed to reload Apache -#svn.proxy.reload_cmd = /etc/init.d/apache2 reload -## If the timeout expires before the reload command finishes, the command will -## be killed. Setting it to zero means no timeout. Defaults to 10 seconds. -#svn.proxy.reload_timeout = 10 - -############################################################ -### SSH Support Settings ### -############################################################ - -## Defines if a custom authorized_keys file should be created and written on -## any change user ssh keys. Setting this to false also disables possibility -## of adding SSH keys by users from web interface. Super admins can still -## manage SSH Keys. -ssh.generate_authorized_keyfile = false - -## Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding` -# ssh.authorized_keys_ssh_opts = - -## Path to the authorized_keys file where the generate entries are placed. -## It is possible to have multiple key files specified in `sshd_config` e.g. -## AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode -ssh.authorized_keys_file_path = ~/.ssh/authorized_keys_rhodecode - -## Command to execute the SSH wrapper. The binary is available in the -## RhodeCode installation directory. -## e.g ~/.rccontrol/community-1/profile/bin/rc-ssh-wrapper -ssh.wrapper_cmd = ~/.rccontrol/community-1/rc-ssh-wrapper - -## Allow shell when executing the ssh-wrapper command -ssh.wrapper_cmd_allow_shell = false - -## Enables logging, and detailed output send back to the client during SSH -## operations. Useful for debugging, shouldn't be used in production. -ssh.enable_debug_logging = false - -## Paths to binary executable, by default they are the names, but we can -## override them if we want to use a custom one -ssh.executable.hg = ~/.rccontrol/vcsserver-1/profile/bin/hg -ssh.executable.git = ~/.rccontrol/vcsserver-1/profile/bin/git -ssh.executable.svn = ~/.rccontrol/vcsserver-1/profile/bin/svnserve - -## Enables SSH key generator web interface. Disabling this still allows users -## to add their own keys. -ssh.enable_ui_key_generator = true - - -## Dummy marker to add new entries after. -## Add any custom entries below. Please don't remove. +; Dummy marker to add new entries after. +; Add any custom entries below. Please don't remove this marker. custom.conf = 1 -################################ -### LOGGING CONFIGURATION #### -################################ +; ##################### +; LOGGING CONFIGURATION +; ##################### [loggers] keys = root, sqlalchemy, beaker, celery, rhodecode, ssh_wrapper @@ -638,9 +708,9 @@ keys = console, console_sql [formatters] keys = generic, color_formatter, color_formatter_sql -############# -## LOGGERS ## -############# +; ####### +; LOGGERS +; ####### [logger_root] level = NOTSET handlers = console @@ -675,9 +745,9 @@ handlers = qualname = celery -############## -## HANDLERS ## -############## +; ######## +; HANDLERS +; ######## [handler_console] class = StreamHandler @@ -686,17 +756,17 @@ level = INFO formatter = generic [handler_console_sql] -# "level = DEBUG" logs SQL queries and results. -# "level = INFO" logs SQL queries. -# "level = WARN" logs neither. (Recommended for production systems.) +; "level = DEBUG" logs SQL queries and results. +; "level = INFO" logs SQL queries. +; "level = WARN" logs neither. (Recommended for production systems.) 
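+; Use DEBUG only temporarily while diagnosing database issues; it is very verbose.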
class = StreamHandler args = (sys.stderr, ) level = WARN formatter = generic -################ -## FORMATTERS ## -################ +; ########## +; FORMATTERS +; ########## [formatter_generic] class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter diff --git a/default.nix b/default.nix --- a/default.nix +++ b/default.nix @@ -190,7 +190,7 @@ let postInstall = '' # check required files STATIC_CHECK="/robots.txt /502.html - /js/scripts.js /js/rhodecode-components.js + /js/scripts.min.js /js/rhodecode-components.js /css/style.css /css/style-polymer.css /css/style-ipython.css" for file in $STATIC_CHECK; diff --git a/docs/admin/repo_admin/repo-admin-tasks.rst b/docs/admin/repo_admin/repo-admin-tasks.rst --- a/docs/admin/repo_admin/repo-admin-tasks.rst +++ b/docs/admin/repo_admin/repo-admin-tasks.rst @@ -12,13 +12,50 @@ Here is how to force delete a repository .. code-block:: bash + :dedent: 1 # starts the ishell interactive prompt $ rccontrol ishell enterprise-1 .. code-block:: python + :dedent: 1 In [4]: from rhodecode.model.repo import RepoModel In [3]: repo = Repository.get_by_repo_name('test_repos/repo_with_prs') In [5]: RepoModel().delete(repo, forks='detach', pull_requests='delete') In [6]: Session().commit() + + +Below is a fully automated example to force delete repositories reading from a +file where each line is a repository name. This can be executed via simple CLI command +without entering the interactive shell. + +Save the below content as a file named `repo_delete_task.py` + + +.. code-block:: python + :dedent: 1 + + from rhodecode.model.db import * + from rhodecode.model.repo import RepoModel + with open('delete_repos.txt', 'rb') as f: + # read all lines from file + repos = f.readlines() + for repo_name in repos: + repo_name = repo_name.strip() # cleanup the name just in case + repo = Repository.get_by_repo_name(repo_name) + if not repo: + raise Exception('Repo with name {} not found'.format(repo_name)) + RepoModel().delete(repo, forks='detach', pull_requests='delete') + Session().commit() + print('Removed repository {}'.format(repo_name)) + + +The code above will read the names of repositories from a file called `delete_repos.txt` +Each lines should represent a single name e.g `repo_name_1` or `repo_group/repo_name_2` + +Run this line from CLI to execute the code from the `repo_delete_task.py` file and +exit the ishell after the execution:: + + echo "%run repo_delete_task.py" | rccontrol ishell Enterprise-1 + diff --git a/docs/admin/system_admin/nginx/nginx-config-example.rst b/docs/admin/system_admin/nginx/nginx-config-example.rst --- a/docs/admin/system_admin/nginx/nginx-config-example.rst +++ b/docs/admin/system_admin/nginx/nginx-config-example.rst @@ -110,6 +110,7 @@ Use the following example to configure N # gzip_types text/css text/javascript text/xml text/plain text/x-component application/javascript application/json application/xml application/rss+xml font/truetype font/opentype application/vnd.ms-fontobject image/svg+xml; # gzip_vary on; # gzip_disable "msie6"; + # expires 60d; # alias /path/to/.rccontrol/community-1/static; # alias /path/to/.rccontrol/enterprise-1/static; # } diff --git a/docs/admin/system_admin/tuning-rhodecode.rst b/docs/admin/system_admin/tuning-rhodecode.rst --- a/docs/admin/system_admin/tuning-rhodecode.rst +++ b/docs/admin/system_admin/tuning-rhodecode.rst @@ -9,7 +9,8 @@ may find some of the following methods u .. 
toctree:: tuning/tuning-gunicorn - tuning/tuning-vcs-memory-cache + tuning/tuning-vcs-server-memory-usage + tuning/tuning-rhodecode-memory-usage tuning/tuning-user-sessions-performance tuning/tuning-increase-db-performance tuning/tuning-scale-horizontally-cluster diff --git a/docs/admin/system_admin/tuning/tuning-gunicorn.rst b/docs/admin/system_admin/tuning/tuning-gunicorn.rst --- a/docs/admin/system_admin/tuning/tuning-gunicorn.rst +++ b/docs/admin/system_admin/tuning/tuning-gunicorn.rst @@ -25,26 +25,22 @@ 2. In the ``[server:main]`` section, cha .. code-block:: ini - use = egg:gunicorn#main - ## Sets the number of process workers. You must set `instance_id = *` - ## when this option is set to more than one worker, recommended - ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers - ## The `instance_id = *` must be set in the [app:main] section below - workers = 4 - ## process name - proc_name = rhodecode - ## type of worker class, one of sync, gevent - ## recommended for bigger setup is using of of other than sync one - worker_class = sync - ## The maximum number of simultaneous clients. Valid only for Gevent - #worker_connections = 10 - ## max number of requests that worker will handle before being gracefully - ## restarted, could prevent memory leaks - max_requests = 1000 - max_requests_jitter = 30 - ## amount of time a worker can spend with handling a request tuning-change-lfs-dir.before it - ## gets killed and restarted. Set to 6hrs - timeout = 21600 + ; Sets the number of process workers. More workers means more concurrent connections + ; RhodeCode can handle at the same time. Each additional worker also it increases + ; memory usage as each has it's own set of caches. + ; Recommended value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers, but no more + ; than 8-10 unless for really big deployments .e.g 700-1000 users. + ; `instance_id = *` must be set in the [app:main] section below (which is the default) + ; when using more than 1 worker. + workers = 6 + + ; Type of worker class, one of `sync`, `gevent` + ; Use `gevent` for rhodecode + worker_class = gevent + + ; The maximum number of simultaneous clients per worker. Valid only for gevent + worker_connections = 10 + 3. In the ``[app:main]`` section, set the ``instance_id`` property to ``*``. @@ -63,24 +59,19 @@ 5. In the ``[server:main]`` section, inc .. code-block:: ini - ## run with gunicorn --log-config vcsserver.ini --paste vcsserver.ini - use = egg:gunicorn#main - ## Sets the number of process workers. Recommended - ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers - workers = 4 - ## process name - proc_name = rhodecode_vcsserver - ## type of worker class, currently `sync` is the only option allowed. + ; Sets the number of process workers. More workers means more concurrent connections + ; RhodeCode can handle at the same time. Each additional worker also it increases + ; memory usage as each has it's own set of caches. + ; Recommended value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers, but no more + ; than 8-10 unless for really big deployments .e.g 700-1000 users. + ; `instance_id = *` must be set in the [app:main] section below (which is the default) + ; when using more than 1 worker. + workers = 8 + + ; Type of worker class, one of `sync`, `gevent` + ; Use `sync` for vcsserver worker_class = sync - ## The maximum number of simultaneous clients. 
Valid only for Gevent - #worker_connections = 10 - ## max number of requests that worker will handle before being gracefully - ## restarted, could prevent memory leaks - max_requests = 1000 - max_requests_jitter = 30 - ## amount of time a worker can spend with handling a request before it - ## gets killed and restarted. Set to 6hrs - timeout = 21600 + 6. Save your changes. 7. Restart your |RCE| instances, using the following command: @@ -109,17 +100,18 @@ 2. In the ``[server:main]`` section, cha .. code-block:: ini - ## type of worker class, one of sync, gevent - ## recommended for bigger setup is using of of other than sync one + ; Type of worker class, one of `sync`, `gevent` + ; Use `gevent` for rhodecode worker_class = gevent - ## The maximum number of simultaneous clients. Valid only for Gevent + + ; The maximum number of simultaneous clients per worker. Valid only for gevent worker_connections = 30 .. note:: `Gevent` is currently only supported for Enterprise/Community instances. - VCSServer doesn't yet support gevent. + VCSServer doesn't support gevent. diff --git a/docs/admin/system_admin/tuning/tuning-rhodecode-memory-usage.rst b/docs/admin/system_admin/tuning/tuning-rhodecode-memory-usage.rst new file mode 100644 --- /dev/null +++ b/docs/admin/system_admin/tuning/tuning-rhodecode-memory-usage.rst @@ -0,0 +1,26 @@ +.. _adjust-rhodecode-mem: + +RhodeCode Memory Usage +---------------------- + +Starting from Version 4.18.X RhodeCode has a built-in memory monitor for gunicorn workers. +Enabling this can limit the maximum amount of memory the system can use. Each worker +for RhodeCode is monitored independently. +To enable memory management, make sure the following settings are present inside the `[app:main]` section of +the :file:`home/{user}/.rccontrol/{instance-id}/rhodecode.ini` file. + + + + ; Maximum memory usage that each worker can use before it will receive a + ; graceful restart signal. 0 = memory monitoring is disabled + ; Examples: 268435456 (256MB), 536870912 (512MB) + ; 1073741824 (1GB), 2147483648 (2GB), 4294967296 (4GB) + memory_max_usage = 1073741824 + + ; How often in seconds to check for memory usage for each gunicorn worker + memory_usage_check_interval = 60 + + ; Threshold value for which we don't recycle the worker if GarbageCollection + ; frees up enough resources. Before each restart we try to run GC on the worker + ; and if we get enough free memory after that, the restart will not happen. + memory_usage_recovery_threshold = 0.8 diff --git a/docs/admin/system_admin/tuning/tuning-scale-horizontally-cluster.rst b/docs/admin/system_admin/tuning/tuning-scale-horizontally-cluster.rst --- a/docs/admin/system_admin/tuning/tuning-scale-horizontally-cluster.rst +++ b/docs/admin/system_admin/tuning/tuning-scale-horizontally-cluster.rst @@ -57,7 +57,7 @@ Here's an overview what components shoul - `nginx` acting as a load-balancer. - `postgresql-server` used for database and sessions. - `redis-server` used for storing shared caches. - - optionally `rabbitmq-server` for `Celery` if used. + - optionally `rabbitmq-server` or `redis` for `Celery` if used. - optionally if `Celery` is used Enterprise/Community instance + VCSServer. - optionally mailserver that can be shared by other instances. - optionally channelstream server to handle live communication for all instances.
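For reference, the byte values used by ``memory_max_usage`` above are plain powers of two (1073741824 bytes = 1GB = 1024 ** 3), and ``memory_usage_recovery_threshold`` is a fraction of that limit. The following is a small illustrative sketch, not taken from the RhodeCode or gunicorn sources, assuming only that the third-party `psutil` package is installed, of how those two settings can be read together:

.. code-block:: python

    import gc
    import os

    import psutil  # third-party dependency, assumed to be available

    # values mirroring the sample configuration above
    memory_max_usage = 1073741824           # 1GB expressed in bytes
    memory_usage_recovery_threshold = 0.8   # fraction of the limit kept after GC

    def should_restart_current_worker():
        """Check the current process, mimicking a per-worker memory check."""
        proc = psutil.Process(os.getpid())
        rss = proc.memory_info().rss  # resident memory in bytes
        if rss <= memory_max_usage:
            return False
        # per the config comment above, a GC pass is attempted before recycling
        gc.collect()
        rss = proc.memory_info().rss
        # one plausible reading of the threshold: restart only if usage stays
        # above threshold * limit after the garbage collection pass
        return rss > memory_max_usage * memory_usage_recovery_threshold

    if __name__ == '__main__':
        print(should_restart_current_worker())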
@@ -263,6 +263,7 @@ 6) Configure `Nginx`_ as reverse proxy o gzip_types text/css text/javascript text/xml text/plain text/x-component application/javascript application/json application/xml application/rss+xml font/truetype font/opentype application/vnd.ms-fontobject image/svg+xml; gzip_vary on; gzip_disable "msie6"; + expires 60d; #alias /home/rcdev/.rccontrol/community-1/static; alias /home/rcdev/.rccontrol/enterprise-1/static; } @@ -372,16 +373,16 @@ Using Celery with cluster If `Celery` is used we recommend setting also an instance of Enterprise/Community+VCSserver -on the node that is running `RabbitMQ`_. Those instances will be used to executed async -tasks on the `rc-node-1`. This is the most efficient setup. `Celery` usually -handles tasks such as sending emails, forking repositories, importing +on the node that is running `RabbitMQ`_ or `Redis`_. Those instances will be used to +executed async tasks on the `rc-node-1`. This is the most efficient setup. +`Celery` usually handles tasks such as sending emails, forking repositories, importing repositories from external location etc. Using workers on instance that has the direct access to disks used by NFS as well as email server gives noticeable performance boost. Running local workers to the NFS storage results in faster execution of forking large repositories or sending lots of emails. Those instances need to be configured in the same way as for other nodes. -The instance in rc-node-1 can be added to the cluser, but we don't recommend doing it. +The instance in rc-node-1 can be added to the cluster, but we don't recommend doing it. For best results let it be isolated to only executing `Celery` tasks in the cluster setup. diff --git a/docs/admin/system_admin/tuning/tuning-vcs-memory-cache.rst b/docs/admin/system_admin/tuning/tuning-vcs-server-memory-usage.rst rename from docs/admin/system_admin/tuning/tuning-vcs-memory-cache.rst rename to docs/admin/system_admin/tuning/tuning-vcs-server-memory-usage.rst --- a/docs/admin/system_admin/tuning/tuning-vcs-memory-cache.rst +++ b/docs/admin/system_admin/tuning/tuning-vcs-server-memory-usage.rst @@ -1,8 +1,26 @@ -.. _adjust-vcs-mem-cache: +.. _adjust-vcs-server-mem: -VCSServer Memory Cache +VCSServer Memory Usage ---------------------- -The VCS Server mamory cache can be adjusted to work best with the resources -available to your |RCE| instance. If you find that memory resources are under -pressure, see the :ref:`vcs-server-maintain` section for details. +Starting from Version 4.18.X RhodeCode has a builtin memory monitor for gunicorn workers. +Enabling this can limit the maximum amount of memory system can use. Each worker +for VCS Server is monitored independently. +To enable Memory management make sure to have following settings inside `[app:main] section` of +:file:`home/{user}/.rccontrol/{instance-id}/vcsserver.ini` file. + + + + ; Maximum memory usage that each worker can use before it will receive a + ; graceful restart signal 0 = memory monitoring is disabled + ; Examples: 268435456 (256MB), 536870912 (512MB) + ; 1073741824 (1GB), 2147483648 (2GB), 4294967296 (4GB) + memory_max_usage = 1073741824 + + ; How often in seconds to check for memory usage for each gunicorn worker + memory_usage_check_interval = 60 + + ; Threshold value for which we don't recycle worker if GarbageCollection + ; frees up enough resources. Before each restart we try to run GC on worker + ; in case we get enough free memory after that, restart will not happen. 
+ memory_usage_recovery_threshold = 0.8 diff --git a/docs/admin/system_admin/vcs-server.rst b/docs/admin/system_admin/vcs-server.rst --- a/docs/admin/system_admin/vcs-server.rst +++ b/docs/admin/system_admin/vcs-server.rst @@ -110,35 +110,39 @@ match, for example: .. _vcs-server-maintain: -VCS Server Memory Optimization -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +VCS Server Cache Optimization +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -To optimize the VCS server to manage the cache and memory usage efficiently, you need to -configure the following options in the -:file:`/home/{user}/.rccontrol/{vcsserver-id}/vcsserver.ini` file. Once -configured, restart the VCS Server. By default we use an optimal settings, but in certain -conditions tunning expiration_time and max_size can affect memory usage and performance +To optimize the VCS server to manage the cache and memory usage efficiently, it's recommended to +configure the Redis backend for VCSServer caches. +Once configured, restart the VCS Server. + +Make sure Redis is installed and running. +Open :file:`/home/{user}/.rccontrol/{vcsserver-id}/vcsserver.ini` +file and ensure the below settings for `repo_object` type cache are set: .. code-block:: ini - ## cache region for storing repo_objects cache - rc_cache.repo_object.backend = dogpile.cache.rc.memory_lru + ; ensure the default file based cache is *commented out* + ##rc_cache.repo_object.backend = dogpile.cache.rc.file_namespace + ##rc_cache.repo_object.expiration_time = 2592000 - ## cache auto-expires after N seconds, setting this to 0 disabled cache - rc_cache.repo_object.expiration_time = 300 + ; `repo_object` cache settings for vcs methods for repositories + rc_cache.repo_object.backend = dogpile.cache.rc.redis_msgpack - ## max size of LRU, old values will be discarded if the size of cache reaches max_size - ## Sets the maximum number of items stored in the cache, before the cache - ## starts to be cleared. + ; cache auto-expires after N seconds + ; Examples: 86400 (1Day), 604800 (7Days), 1209600 (14Days), 2592000 (30days), 7776000 (90Days) + rc_cache.repo_object.expiration_time = 2592000 + + ; redis_expiration_time needs to be greater then expiration_time + rc_cache.repo_object.arguments.redis_expiration_time = 3592000 - ## As a general rule of thumb, running this value at 120 resulted in a - ## 5GB cache. Running it at 240 resulted in a 9GB cache. Your results - ## will differ based on usage patterns and |repo| sizes. - - ## Tweaking this value to run at a fairly constant memory load on your - ## server will help performance. - - rc_cache.repo_object.max_size = 120 + rc_cache.repo_object.arguments.host = localhost + rc_cache.repo_object.arguments.port = 6379 + rc_cache.repo_object.arguments.db = 5 + rc_cache.repo_object.arguments.socket_timeout = 30 + ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends + rc_cache.repo_object.arguments.distributed_lock = true To clear the cache completely, you can restart the VCS Server. @@ -190,25 +194,6 @@ For a more detailed explanation of the l \port Set the port number on which the VCS Server will be available. - \locale - Set the locale the VCS Server expects. - - \workers - Set the number of process workers.Recommended - value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers - - \max_requests - The maximum number of requests a worker will process before restarting. - Any value greater than zero will limit the number of requests a work - will process before automatically restarting. 
This is a simple method - to help limit the damage of memory leaks. - - \max_requests_jitter - The maximum jitter to add to the max_requests setting. - The jitter causes the restart per worker to be randomized by - randint(0, max_requests_jitter). This is intended to stagger worker - restarts to avoid all workers restarting at the same time. - .. note:: @@ -216,63 +201,139 @@ For a more detailed explanation of the l .. code-block:: ini - ################################################################################ - # RhodeCode VCSServer with HTTP Backend - configuration # - # # - ################################################################################ - + ; ################################# + ; RHODECODE VCSSERVER CONFIGURATION + ; ################################# [server:main] - ## COMMON ## + ; COMMON HOST/IP CONFIG host = 127.0.0.1 port = 10002 - ########################## - ## GUNICORN WSGI SERVER ## - ########################## - ## run with gunicorn --log-config vcsserver.ini --paste vcsserver.ini + ; ########################### + ; GUNICORN APPLICATION SERVER + ; ########################### + + ; run with gunicorn --log-config rhodecode.ini --paste rhodecode.ini + + ; Module to use, this setting shouldn't be changed use = egg:gunicorn#main - ## Sets the number of process workers. Recommended - ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers - workers = 3 - ## process name + + ; Sets the number of process workers. More workers means more concurrent connections + ; RhodeCode can handle at the same time. Each additional worker also it increases + ; memory usage as each has it's own set of caches. + ; Recommended value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers, but no more + ; than 8-10 unless for really big deployments .e.g 700-1000 users. + ; `instance_id = *` must be set in the [app:main] section below (which is the default) + ; when using more than 1 worker. + workers = 6 + + ; Gunicorn access log level + loglevel = info + + ; Process name visible in process list proc_name = rhodecode_vcsserver - ## type of worker class, one of sync, gevent - ## recommended for bigger setup is using of of other than sync one + + ; Type of worker class, one of sync, gevent + ; currently `sync` is the only option allowed. worker_class = sync - ## The maximum number of simultaneous clients. Valid only for Gevent - #worker_connections = 10 - ## max number of requests that worker will handle before being gracefully - ## restarted, could prevent memory leaks + + ; The maximum number of simultaneous clients. Valid only for gevent + worker_connections = 10 + + ; Max number of requests that worker will handle before being gracefully restarted. + ; Prevents memory leaks, jitter adds variability so not all workers are restarted at once. max_requests = 1000 max_requests_jitter = 30 - ## amount of time a worker can spend with handling a request before it - ## gets killed and restarted. Set to 6hrs + + ; Amount of time a worker can spend with handling a request before it + ; gets killed and restarted. By default set to 21600 (6hrs) + ; Examples: 1800 (30min), 3600 (1hr), 7200 (2hr), 43200 (12h) timeout = 21600 + ; The maximum size of HTTP request line in bytes. + ; 0 for unlimited + limit_request_line = 0 + + ; Limit the number of HTTP headers fields in a request. + ; By default this value is 100 and can't be larger than 32768. + limit_request_fields = 32768 + + ; Limit the allowed size of an HTTP request header field. + ; Value is a positive number or 0. 
+ ; Setting it to 0 will allow unlimited header field sizes. + limit_request_field_size = 0 + + ; Timeout for graceful workers restart. + ; After receiving a restart signal, workers have this much time to finish + ; serving requests. Workers still alive after the timeout (starting from the + ; receipt of the restart signal) are force killed. + ; Examples: 1800 (30min), 3600 (1hr), 7200 (2hr), 43200 (12h) + graceful_timeout = 3600 + + # The number of seconds to wait for requests on a Keep-Alive connection. + # Generally set in the 1-5 seconds range. + keepalive = 2 + + ; Maximum memory usage that each worker can use before it will receive a + ; graceful restart signal 0 = memory monitoring is disabled + ; Examples: 268435456 (256MB), 536870912 (512MB) + ; 1073741824 (1GB), 2147483648 (2GB), 4294967296 (4GB) + memory_max_usage = 1073741824 + + ; How often in seconds to check for memory usage for each gunicorn worker + memory_usage_check_interval = 60 + + ; Threshold value for which we don't recycle worker if GarbageCollection + ; frees up enough resources. Before each restart we try to run GC on worker + ; in case we get enough free memory after that, restart will not happen. + memory_usage_recovery_threshold = 0.8 + + [app:main] use = egg:rhodecode-vcsserver pyramid.default_locale_name = en pyramid.includes = - ## default locale used by VCS systems + ; default locale used by VCS systems locale = en_US.UTF-8 - # cache regions, please don't change - beaker.cache.regions = repo_object - beaker.cache.repo_object.type = memorylru - beaker.cache.repo_object.max_items = 100 - # cache auto-expires after N seconds - beaker.cache.repo_object.expire = 300 - beaker.cache.repo_object.enabled = true + ; ############# + ; DOGPILE CACHE + ; ############# + + ; Default cache dir for caches. Putting this into a ramdisk can boost performance. + ; eg. 
/tmpfs/data_ramdisk, however this directory might require large amount of space + cache_dir = %(here)s/data + + ; ********************************************************** + ; `repo_object` cache with redis backend + ; recommended for larger instance, or for better performance + ; ********************************************************** + + ; `repo_object` cache settings for vcs methods for repositories + rc_cache.repo_object.backend = dogpile.cache.rc.redis_msgpack + ; cache auto-expires after N seconds + ; Examples: 86400 (1Day), 604800 (7Days), 1209600 (14Days), 2592000 (30days), 7776000 (90Days) + rc_cache.repo_object.expiration_time = 2592000 - ################################ - ### LOGGING CONFIGURATION #### - ################################ + ; redis_expiration_time needs to be greater then expiration_time + rc_cache.repo_object.arguments.redis_expiration_time = 3592000 + + rc_cache.repo_object.arguments.host = localhost + rc_cache.repo_object.arguments.port = 6379 + rc_cache.repo_object.arguments.db = 5 + rc_cache.repo_object.arguments.socket_timeout = 30 + ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends + rc_cache.repo_object.arguments.distributed_lock = true + + ; ##################### + ; LOGGING CONFIGURATION + ; ##################### [loggers] - keys = root, vcsserver, beaker + keys = root, vcsserver [handlers] keys = console @@ -280,9 +341,9 @@ For a more detailed explanation of the l [formatters] keys = generic - ############# - ## LOGGERS ## - ############# + ; ####### + ; LOGGERS + ; ####### [logger_root] level = NOTSET handlers = console @@ -293,29 +354,23 @@ For a more detailed explanation of the l qualname = vcsserver propagate = 1 - [logger_beaker] - level = DEBUG - handlers = - qualname = beaker - propagate = 1 - - ############## - ## HANDLERS ## - ############## + ; ######## + ; HANDLERS + ; ######## [handler_console] class = StreamHandler - args = (sys.stderr,) - level = DEBUG + args = (sys.stderr, ) + level = INFO formatter = generic - ################ - ## FORMATTERS ## - ################ + ; ########## + ; FORMATTERS + ; ########## [formatter_generic] - format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s + format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s datefmt = %Y-%m-%d %H:%M:%S diff --git a/docs/api/methods/pull-request-methods.rst b/docs/api/methods/pull-request-methods.rst --- a/docs/api/methods/pull-request-methods.rst +++ b/docs/api/methods/pull-request-methods.rst @@ -39,7 +39,7 @@ close_pull_request comment_pull_request -------------------- -.. py:function:: comment_pull_request(apiuser, pullrequestid, repoid=, message=, commit_id=, status=, comment_type=, resolves_comment_id=, userid=>) +.. py:function:: comment_pull_request(apiuser, pullrequestid, repoid=, message=, commit_id=, status=, comment_type=, resolves_comment_id=, extra_recipients=, userid=>) Comment on the pull request specified with the `pullrequestid`, in the |repo| specified by the `repoid`, and optionally change the @@ -63,6 +63,11 @@ comment_pull_request :type status: str :param comment_type: Comment type, one of: 'note', 'todo' :type comment_type: Optional(str), default: 'note' + :param resolves_comment_id: id of comment which this one will resolve + :type resolves_comment_id: Optional(int) + :param extra_recipients: list of user ids or usernames to add + notifications for this comment. 
Acts like a CC for notification + :type extra_recipients: Optional(list) :param userid: Comment on the pull request as this user :type userid: Optional(str or int) @@ -126,7 +131,7 @@ create_pull_request get_pull_request ---------------- -.. py:function:: get_pull_request(apiuser, pullrequestid, repoid=) +.. py:function:: get_pull_request(apiuser, pullrequestid, repoid=, merge_state=) Get a pull request based on the given ID. @@ -137,6 +142,9 @@ get_pull_request :type repoid: str or int :param pullrequestid: ID of the requested pull request. :type pullrequestid: int + :param merge_state: Optional calculate merge state for each repository. + This could result in longer time to fetch the data + :type merge_state: bool Example output: @@ -250,7 +258,7 @@ get_pull_request_comments get_pull_requests ----------------- -.. py:function:: get_pull_requests(apiuser, repoid, status=, merge_state=) +.. py:function:: get_pull_requests(apiuser, repoid, status=, merge_state=) Get all pull requests from the repository specified in `repoid`. diff --git a/docs/api/methods/repo-methods.rst b/docs/api/methods/repo-methods.rst --- a/docs/api/methods/repo-methods.rst +++ b/docs/api/methods/repo-methods.rst @@ -28,7 +28,7 @@ add_field_to_repo comment_commit -------------- -.. py:function:: comment_commit(apiuser, repoid, commit_id, message, status=, comment_type=, resolves_comment_id=, userid=>) +.. py:function:: comment_commit(apiuser, repoid, commit_id, message, status=, comment_type=, resolves_comment_id=, extra_recipients=, userid=>) Set a commit comment, and optionally change the status of the commit. @@ -45,6 +45,11 @@ comment_commit :type status: str :param comment_type: Comment type, one of: 'note', 'todo' :type comment_type: Optional(str), default: 'note' + :param resolves_comment_id: id of comment which this one will resolve + :type resolves_comment_id: Optional(int) + :param extra_recipients: list of user ids or usernames to add + notifications for this comment. Acts like a CC for notification + :type extra_recipients: Optional(list) :param userid: Set the user name of the comment creator. :type userid: Optional(str or int) @@ -66,7 +71,7 @@ comment_commit create_repo ----------- -.. py:function:: create_repo(apiuser, repo_name, repo_type, owner=>, description=, private=, clone_uri=, push_uri=, landing_rev=, enable_statistics=, enable_locking=, enable_downloads=, copy_permissions=) +.. py:function:: create_repo(apiuser, repo_name, repo_type, owner=>, description=, private=, clone_uri=, push_uri=, landing_rev=, enable_statistics=, enable_locking=, enable_downloads=, copy_permissions=) Creates a repository. @@ -97,7 +102,7 @@ create_repo :type clone_uri: str :param push_uri: set push_uri :type push_uri: str - :param landing_rev: : + :param landing_rev: :, e.g branch:default, book:dev, rev:abcd :type landing_rev: str :param enable_locking: :type enable_locking: bool @@ -169,7 +174,7 @@ delete_repo fork_repo --------- -.. py:function:: fork_repo(apiuser, repoid, fork_name, owner=>, description=, private=, clone_uri=, landing_rev=, copy_permissions=) +.. py:function:: fork_repo(apiuser, repoid, fork_name, owner=>, description=, private=, clone_uri=, landing_rev=, copy_permissions=) Creates a fork of the specified |repo|. @@ -198,7 +203,7 @@ fork_repo :type copy_permissions: bool :param private: Make the fork private. The default is False. :type private: bool - :param landing_rev: Set the landing revision. The default is tip. + :param landing_rev: Set the landing revision. 
E.g branch:default, book:dev, rev:abcd Example output: @@ -1085,7 +1090,7 @@ strip update_repo ----------- -.. py:function:: update_repo(apiuser, repoid, repo_name=, owner=>, description=, private=, clone_uri=, push_uri=, landing_rev=, fork_of=, enable_statistics=, enable_locking=, enable_downloads=, fields=) +.. py:function:: update_repo(apiuser, repoid, repo_name=, owner=>, description=, private=, clone_uri=, push_uri=, landing_rev=, fork_of=, enable_statistics=, enable_locking=, enable_downloads=, fields=) Updates a repository with the given information. @@ -1117,7 +1122,7 @@ update_repo :type private: bool :param clone_uri: Update the |repo| clone URI. :type clone_uri: str - :param landing_rev: Set the |repo| landing revision. Default is ``rev:tip``. + :param landing_rev: Set the |repo| landing revision. e.g branch:default, book:dev, rev:abcd :type landing_rev: str :param enable_statistics: Enable statistics on the |repo|, (True | False). :type enable_statistics: bool diff --git a/docs/api/methods/search-methods.rst b/docs/api/methods/search-methods.rst --- a/docs/api/methods/search-methods.rst +++ b/docs/api/methods/search-methods.rst @@ -6,7 +6,7 @@ search methods search ------ -.. py:function:: search(apiuser, search_query, search_type, page_limit=, page=, search_sort=, repo_name=, repo_group_name=) +.. py:function:: search(apiuser, search_query, search_type, page_limit=, page=, search_sort=, repo_name=, repo_group_name=) Fetch Full Text Search results using API. @@ -23,9 +23,15 @@ search :type page_limit: Optional(int) :param page: Page number. Default first page. :type page: Optional(int) - :param search_sort: Search sort order. Default newfirst. The following are valid options: - * newfirst - * oldfirst + :param search_sort: Search sort order.Must start with asc: or desc: Default desc:date. + The following are valid options: + * asc|desc:message.raw + * asc|desc:date + * asc|desc:author.email.raw + * asc|desc:message.raw + * newfirst (old legacy equal to desc:date) + * oldfirst (old legacy equal to asc:date) + :type search_sort: Optional(str) :param repo_name: Filter by one repo. Default is all. :type repo_name: Optional(str) diff --git a/docs/api/methods/store-methods.rst b/docs/api/methods/store-methods.rst --- a/docs/api/methods/store-methods.rst +++ b/docs/api/methods/store-methods.rst @@ -6,7 +6,7 @@ store methods file_store_add (EE only) ------------------------ -.. py:function:: file_store_add(apiuser, filename, content) +.. py:function:: file_store_add(apiuser, filename, content, description=) Upload API for the file_store @@ -19,6 +19,8 @@ file_store_add (EE only) :type apiuser: AuthUser :param filename: name of the file uploaded :type filename: str + :param description: Optional description for added file + :type description: str :param content: base64 encoded content of the uploaded file :type content: str @@ -35,3 +37,148 @@ file_store_add (EE only) error : null +file_store_add_with_acl (EE only) +--------------------------------- + +.. py:function:: file_store_add_with_acl(apiuser, filename, content, description=, scope_user_id=, scope_repo_id=, scope_repo_group_id=) + + Upload API for the file_store + + Example usage from CLI:: + rhodecode-api --instance-name=enterprise-1 upload_file "{"content": "$(cat image.jpg | base64)", "filename":"image.jpg", "scope_repo_id":101}" + + This command takes the following options: + + :param apiuser: This is filled automatically from the |authtoken|. 
+ :type apiuser: AuthUser + :param filename: name of the file uploaded + :type filename: str + :param description: Optional description for added file + :type description: str + :param content: base64 encoded content of the uploaded file + :type content: str + + :param scope_user_id: Optionally bind this file to user. + This will check ACL in such way only this user can access the file. + :type scope_user_id: int + :param scope_repo_id: Optionally bind this file to repository. + This will check ACL in such way only user with proper access to such + repository can access the file. + :type scope_repo_id: int + :param scope_repo_group_id: Optionally bind this file to repository group. + This will check ACL in such way only user with proper access to such + repository group can access the file. + :type scope_repo_group_id: int + + Example output: + + .. code-block:: bash + + id : + result: { + "access_path": "/_file_store/download/84d156f7-8323-4ad3-9fce-4a8e88e1deaf-0.jpg", + "access_path_fqn": "http://server.domain.com/_file_store/download/84d156f7-8323-4ad3-9fce-4a8e88e1deaf-0.jpg", + "store_fid": "84d156f7-8323-4ad3-9fce-4a8e88e1deaf-0.jpg" + } + error : null + + +file_store_get_info (EE only) +----------------------------- + +.. py:function:: file_store_get_info(apiuser, store_fid) + + Get artifact data. + + Example output: + + .. code-block:: bash + + id : + result: { + "artifact": { + "access_path_fqn": "https://rhodecode.example.com/_file_store/download/0-031c2aa0-0d56-49a7-9ba3-b570bdd342ab.jpg", + "created_on": "2019-10-15T16:25:35.491", + "description": "my upload", + "downloaded_times": 1, + "file_uid": "0-031c2aa0-0d56-49a7-9ba3-b570bdd342ab.jpg", + "filename": "example.jpg", + "filename_org": "0-031c2aa0-0d56-49a7-9ba3-b570bdd342ab.jpg", + "hidden": false, + "metadata": [ + { + "artifact": "0-031c2aa0-0d56-49a7-9ba3-b570bdd342ab.jpg", + "key": "yellow", + "section": "tags", + "value": "bar" + } + ], + "sha256": "818dff0f44574dfb6814d38e6bf3c60c5943d1d13653398ecddaedf2f6a5b04d", + "size": 18599, + "uploaded_by": { + "email": "admin@rhodecode.com", + "emails": [ + "admin@rhodecode.com" + ], + "firstname": "Admin", + "lastname": "LastName", + "user_id": 2, + "username": "admin" + } + } + } + error : null + + +file_store_add_metadata (EE only) +--------------------------------- + +.. py:function:: file_store_add_metadata(apiuser, store_fid, section, key, value, value_type=) + + Add metadata into artifact. The metadata consist of section, key, value. eg. + section='tags', 'key'='tag_name', value='1' + + :param apiuser: This is filled automatically from the |authtoken|. + :type apiuser: AuthUser + + :param store_fid: file uid, e.g 0-d054cb71-91ab-44e2-9e4b-23fe14b4d74a.mp4 + :type store_fid: str + + :param section: Section name to add metadata + :type section: str + + :param key: Key to add as metadata + :type key: str + + :param value: Value to add as metadata + :type value: str + + :param value_type: Optional type, default is 'unicode' other types are: + int, list, bool, unicode, str + + :type value_type: str + + Example output: + + .. 
code-block:: bash + + id : + result: { + "metadata": [ + { + "artifact": "0-d054cb71-91ab-44e2-9e4b-23fe14b4d74a.mp4", + "key": "secret", + "section": "tags", + "value": "1" + }, + { + "artifact": "0-d054cb71-91ab-44e2-9e4b-23fe14b4d74a.mp4", + "key": "video", + "section": "tags", + "value": "1" + } + ] + } + error : null + + diff --git a/docs/api/methods/user-group-methods.rst b/docs/api/methods/user-group-methods.rst --- a/docs/api/methods/user-group-methods.rst +++ b/docs/api/methods/user-group-methods.rst @@ -72,7 +72,9 @@ create_user_group :param active: Set this group as active. :type active: Optional(``True`` | ``False``) :param sync: Set enabled or disabled the automatically sync from - external authentication types like ldap. + external authentication types like ldap. If User Group will be named like + one from e.g ldap and sync flag is enabled members will be synced automatically. + Sync type when enabled via API is set to `manual_api` :type sync: Optional(``True`` | ``False``) Example output: @@ -391,7 +393,9 @@ update_user_group :param active: Set the group as active. :type active: Optional(``True`` | ``False``) :param sync: Set enabled or disabled the automatically sync from - external authentication types like ldap. + external authentication types like ldap. If User Group will be named like + one from e.g ldap and sync flag is enabled members will be synced automatically. + Sync type when enabled via API is set to `manual_api` :type sync: Optional(``True`` | ``False``) Example output: diff --git a/docs/api/methods/user-methods.rst b/docs/api/methods/user-methods.rst --- a/docs/api/methods/user-methods.rst +++ b/docs/api/methods/user-methods.rst @@ -6,7 +6,7 @@ user methods create_user ----------- -.. py:function:: create_user(apiuser, username, email, password=, firstname=, lastname=, active=, admin=, extern_name=, extern_type=, force_password_change=, create_personal_repo_group=) +.. py:function:: create_user(apiuser, username, email, password=, firstname=, lastname=, description=, active=, admin=, extern_name=, extern_type=, force_password_change=, create_personal_repo_group=) Creates a new user and returns the new user object. @@ -27,6 +27,8 @@ create_user :type firstname: Optional(str) :param lastname: Set the new user surname. :type lastname: Optional(str) + :param description: Set user description, or short bio. Metatags are allowed. + :type description: Optional(str) :param active: Set the user as active. :type active: Optional(``True`` | ``False``) :param admin: Give the new user admin rights. @@ -155,6 +157,7 @@ get_user "extern_name": "rhodecode", "extern_type": "rhodecode", "firstname": "username", + "description": "user description", "ip_addresses": [], "language": null, "last_login": "Timestamp", @@ -268,7 +271,7 @@ get_users update_user ----------- -.. py:function:: update_user(apiuser, userid, username=, email=, password=, firstname=, lastname=, active=, admin=, extern_type=, extern_name=) +.. py:function:: update_user(apiuser, userid, username=, email=, password=, firstname=, lastname=, description=, active=, admin=, extern_type=, extern_name=) Updates the details for the specified user, if that user exists. @@ -291,6 +294,8 @@ update_user :type firstname: Optional(str) :param lastname: Set the new surname. :type lastname: Optional(str) + :param description: Set user description, or short bio. Metatags are allowed. + :type description: Optional(str) :param active: Set the new user as active. 
:type active: Optional(``True`` | ``False``) :param admin: Give the user admin rights. diff --git a/docs/contributing/dev-setup.rst b/docs/contributing/dev-setup.rst --- a/docs/contributing/dev-setup.rst +++ b/docs/contributing/dev-setup.rst @@ -19,7 +19,7 @@ Setup Nix Package Manager To install the Nix Package Manager, please run:: - $ curl https://nixos.org/nix/install | sh + $ curl https://nixos.org/releases/nix/nix-2.0.4/install | sh or go to https://nixos.org/nix/ and follow the installation instructions. Once this is correctly set up on your system, you should be able to use the diff --git a/docs/index.rst b/docs/index.rst --- a/docs/index.rst +++ b/docs/index.rst @@ -79,6 +79,12 @@ and commit files and |repos| while manag contributing/contributing .. toctree:: + :maxdepth: 2 + :caption: RhodeCode Control Documentation + + RhodeCode Installer + +.. toctree:: :maxdepth: 1 :caption: About diff --git a/docs/install/configure-celery.rst b/docs/install/configure-celery.rst --- a/docs/install/configure-celery.rst +++ b/docs/install/configure-celery.rst @@ -11,16 +11,20 @@ and import repositories in async way. It repository sync in scheduler. If you decide to use Celery you also need a working message queue. -The recommended and fully supported message broker is rabbitmq_. +There are two fully supported message brokers: rabbitmq_ and redis_ (recommended). + +Since release 4.18.X we recommend using redis_ as the backend, since it's generally +easier to work with and results in a simpler stack, as redis is generally recommended +for caching purposes. In order to install and configure Celery, follow these steps: -1. Install RabbitMQ, see the documentation on the Celery website for - `rabbitmq installation`_, or `rabbitmq website installation`_ +1. Install RabbitMQ or Redis as a message queue; see the documentation on the Celery website for + `redis installation`_ or `rabbitmq installation`_ -1a. As en example configuration after installation, you can run:: +1a. If you choose RabbitMQ, an example configuration after installation would look like this:: sudo rabbitmqctl add_user rcuser secret_password sudo rabbitmqctl add_vhost rhodevhost @@ -45,6 +49,10 @@ 3. Configure Celery in the Set the broker_url as minimal settings required to enable operation. If used our example data from pt 1a, here is how the broker url should look like:: + # for Redis + celery.broker_url = redis://localhost:6379/8 + + # for RabbitMQ celery.broker_url = amqp://rcuser:secret_password@localhost:5672/rhodevhost Full configuration example is below: @@ -57,7 +65,7 @@ 3. Configure Celery in the #################################### use_celery = true - celery.broker_url = amqp://rcuser:secret@localhost:5672/rhodevhost + celery.broker_url = redis://localhost:6379/8 # maximum tasks to execute before worker restart celery.max_tasks_per_child = 100 @@ -69,6 +77,8 @@ 3. Configure Celery in the .. _python: http://www.python.org/ .. _mercurial: http://mercurial.selenic.com/ .. _celery: http://celeryproject.org/ +.. _redis: http://redis.io +.. _redis installation: https://redis.io/topics/quickstart .. _rabbitmq: http://www.rabbitmq.com/ .. _rabbitmq installation: http://docs.celeryproject.org/en/latest/getting-started/brokers/rabbitmq.html .. _rabbitmq website installation: http://www.rabbitmq.com/download.html diff --git a/docs/install/database-string.rst b/docs/install/database-string.rst --- a/docs/install/database-string.rst +++ b/docs/install/database-string.rst @@ -35,4 +35,3 @@ 2.
When you open the file, find the data # see sqlalchemy docs for other advanced settings sqlalchemy.db1.echo = false sqlalchemy.db1.pool_recycle = 3600 - sqlalchemy.db1.convert_unicode = true diff --git a/docs/issue-trackers/issue-trackers.rst b/docs/issue-trackers/issue-trackers.rst --- a/docs/issue-trackers/issue-trackers.rst +++ b/docs/issue-trackers/issue-trackers.rst @@ -38,44 +38,39 @@ default one. See the instructions in :re .. _issue-tr-eg-ref: + Jira Integration ---------------- -* Regex = ``(?:^#|\s#)(\w+-\d+)`` -* URL = ``https://myissueserver.com/browse/${id}`` -* Issue Prefix = ``#`` +Please check examples in the view for configuration the issue trackers. + Confluence (Wiki) ----------------- -* Regex = ``(?:conf-)([A-Z0-9]+)`` -* URL = ``https://example.atlassian.net/display/wiki/${id}/${repo_name}`` -* issue prefix = ``CONF-`` +Please check examples in the view for configuration the issue trackers. + Redmine Integration ------------------- -* Regex = ``(issue-+\d+)`` -* URL = ``https://myissueserver.com/redmine/issue/${id}`` -* Issue Prefix = ``issue-`` +Please check examples in the view for configuration the issue trackers. + -Redmine (wiki) --------------- +Redmine wiki Integration +------------------------ -* Regex = ``(?:wiki-)([a-zA-Z0-9]+)`` -* URL = ``https://example.com/redmine/projects/wiki/${repo_name}`` -* Issue prefix = ``Issue-`` +Please check examples in the view for configuration the issue trackers. + Pivotal Tracker --------------- -* Regex = ``(?:pivot-)(?\d+)-(?\d+)`` -* URL = ``https://www.pivotaltracker.com/s/projects/${project_id}/stories/${story}`` -* Issue prefix = ``Piv-`` +Please check examples in the view for configuration the issue trackers. + Trello ------ -* Regex = ``(?:trello-)(?[a-zA-Z0-9]+)`` -* URL = ``https://trello.com/example.com/${card_id}`` -* Issue prefix = ``Trello-`` +Please check examples in the view for configuration the issue trackers. + diff --git a/docs/release-notes/release-notes-4.17.0.rst b/docs/release-notes/release-notes-4.17.0.rst --- a/docs/release-notes/release-notes-4.17.0.rst +++ b/docs/release-notes/release-notes-4.17.0.rst @@ -62,7 +62,7 @@ Performance Fixes ^^^^^ -- hHooks: fixed more unicode problems with new pull-request link generator. +- Hooks: fixed more unicode problems with new pull-request link generator. - Mercurial: fix ssh-server support for mercurial custom options. - Pull requests: updated metadata information for failed merges with multiple heads. - Pull requests: calculate ancestor in the same way as creation mode. diff --git a/docs/release-notes/release-notes-4.18.0.rst b/docs/release-notes/release-notes-4.18.0.rst new file mode 100644 --- /dev/null +++ b/docs/release-notes/release-notes-4.18.0.rst @@ -0,0 +1,230 @@ +|RCE| 4.18.0 |RNS| +------------------ + +Release Date +^^^^^^^^^^^^ + +- 2020-01-05 + + +New Features +^^^^^^^^^^^^ + +- Artifacts: are no longer in BETA. New info page is available for uploaded artifacts + which exposes some useful information like sha256, various access urls etc, and also + allows deletion of artifacts, and updating their description. +- Artifacts: support new download url based on access to artifacts using new auth-token types. +- Artifacts: added ability to store artifacts using API, and internal cli upload. + This allows uploading of artifacts that can have 100s of GBs in size efficiently. +- Artifacts: added metadata logic to store various extra custom data for artifacts. +- Comments: added support for adding comment attachments using the artifacts logic. 
+ Logged in users can now pick or drag and drop attachments into comment forms. +- Comments: enable linkification of certain patterns on comments in repo/pull request scopes. + This will render now active links to commits, pull-requests mentioned in comments body. +- Jira: new update integration plugin. + Plugin now fetches possible transitions from tickets and show them to users in the interface. + Allow sending extra attributes during a transition like `resolution` message. +- Navigation: Added new consistent and contextual way of creating new objects + likes gists, repositories, and repository groups using dedicated action (with a `+` sign) + available in the top navigation. +- Hovercards: added new tooltips and hovercards to expose certain information for objects shown in UI. + RhodeCode usernames, issues, pull-requests will have active hovercard logic that will + load extra information about them and exposing them to users. +- Files: all readme files found in repository file browser will be now rendered, allowing having readme per directory. +- Search: expose line counts in search files information. +- Audit-logs: expose download user audit logs as JSON file. +- Users: added description field for users. + Allows users to write a short BIO, or description of their role in the organization. +- Users: allow super-admins to change bound authentication type for users. + E.g internal rhodecode accounts can be changed to ldap easily from user settings page. +- Pull requests: simplified the UI for display view, hide less important information and expose the most important ones. +- Pull requests: add merge check that detects WIP marker in title. + Usually WIP in title means unfinished task that needs still some work, such marker will prevent accidental merges. +- Pull requests: TODO comments have now a dedicated box below reviewers to keep track + of important TODOs that still need attention before review process is finalized. +- Pull requests: participants of pull request will receive an email about update of a + pull requests with a small summary of changes made. +- Pull requests: change the naming from #NUM into !NUM. + !NUM format is now parsed and linkified in comments and commit messages. +- Pull requests: pull requests which state is changing can now be viewed with a limited view. +- Pull requests: re-organize merge/close buttons and merge checks according to the new UI. +- Pull requests: update commits button allows a force-refresh update now using dropdown option. +- Pull requests: added quick filter to grid view to filter/search pull requests in a repository. +- Pull requests: closing a pull-request without a merge requires additional confirmation now. +- Pull requests: merge checks will now show which files caused conflicts and are blocking the merge. +- Emails: updated all generated emails design and cleanup the data fields they expose. + a) More consistent UI for all types of emails. b) Improved formatting of plaintext emails + c) Added reply link to comment type emails for quicker response action. + + +General +^^^^^^^ + +- Artifacts: don't show hidden artifacts, allow showing them via a GET ?hidden=1 flag. + Hidden artifacts are for example comment attachments. +- UI: new commits page, according to the new design, which started on 4.17.X release lines +- UI: use explicit named actions like "create user" instead of generic "save" which is bad UX. +- UI: fixed problems with generating last change in repository groups. 
+ There's now a new logic that checks all objects inside group for latest update time. +- API: add artifact `get_info`, and `store_metadata` methods. +- API: allowed to specify extra recipients for pr/commit comments api methods. +- Vcsserver: set file based cache as default for vcsserver which can be shared + across multiple workers saving memory usage. +- Vcsserver: added redis as possible cache backend for even greater performance. +- Dependencies: bumped GIT version to 2.23.0 +- Dependencies: bumped SVN version to 1.12.2 +- Dependencies: bumped Mercurial version to 5.1.1 and hg-evolve to 9.1.0 +- Search: added logic for sorting ElasticSearch6 backend search results. +- User bookmarks: make it easier to re-organize existing entries. +- Data grids: hide pagination for single pages in grids. +- Gists: UX, removed private/public gist buttons and replaced them with radio group. +- Gunicorn: moved all configuration of gunicorn workers to .ini files. +- Gunicorn: added worker memory management allowing setting maximum per-worker memory usage. +- Automation: moved update groups task into celery task +- Cache commits: add option to refresh caches manually from advanced pages. +- Pull requests: add indication of state change in list of pull-requests and actually show them in the list. +- Cache keys: register and self cleanup cache keys used for invalidation to prevent leaking lot of them into DB on worker recycle +- Repo groups: removed locking inheritance flag from repo-groups. We'll deprecate this soon and this only brings in confusion +- System snapshot: improved formatting for better readability +- System info: expose data about vcsserver. +- Packages: updated celery to 4.3.0 and switch default backend to redis instead of RabbitMQ. + Redis is stable enough and easier to install. Having Redis simplifies the stack as it's used in other parts of RhodeCode. +- Dependencies: bumped alembic to 1.2.1 +- Dependencies: bumped amqp==2.5.2 and kombu==4.6.6 +- Dependencies: bumped atomicwrites==1.3.0 +- Dependencies: bumped cffi==1.12.3 +- Dependencies: bumped configparser==4.0.2 +- Dependencies: bumped deform==2.0.8 +- Dependencies: bumped dogpile.cache==0.9.0 +- Dependencies: bumped hupper==1.8.1 +- Dependencies: bumped mako to 1.1.0 +- Dependencies: bumped markupsafe to 1.1.1 +- Dependencies: bumped packaging==19.2 +- Dependencies: bumped paste==3.2.1 +- Dependencies: bumped pastescript==3.2.0 +- Dependencies: bumped pathlib2 to 2.3.4 +- Dependencies: bumped pluggy==0.13.0 +- Dependencies: bumped psutil to 5.6.3 +- Dependencies: bumped psutil==5.6.5 +- Dependencies: bumped psycopg2==2.8.4 +- Dependencies: bumped pycurl to 7.43.0.3 +- Dependencies: bumped pyotp==2.3.0 +- Dependencies: bumped pyparsing to 2.4.2 +- Dependencies: bumped pyramid-debugtoolbar==4.5.1 +- Dependencies: bumped pyramid-mako to 1.1.0 +- Dependencies: bumped redis to 3.3.8 +- Dependencies: bumped sqlalchemy to 1.3.8 +- Dependencies: bumped sqlalchemy==1.3.11 +- Dependencies: bumped test libraries. +- Dependencies: freeze alembic==1.3.1 +- Dependencies: freeze python-dateutil +- Dependencies: freeze redis==3.3.11 +- Dependencies: freeze supervisor==4.1.0 + + +Security +^^^^^^^^ + +- Security: fixed issues with exposing wrong http status (403) indicating repository with + given name exists and we don't have permissions to it. This was exposed in the redirection + logic of the global pull-request page. In case of redirection we also exposed + repository name in the URL. 
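To make the `extra_recipients` comment parameter mentioned above more concrete, here is a minimal sketch of an API call. It assumes the standard JSON-RPC style endpoint at `/_admin/api`, a reachable server URL and a valid auth token; the repository name, commit id and recipients below are placeholders only:

.. code-block:: python

    import requests  # third-party HTTP client, assumed to be installed

    API_URL = 'https://rhodecode.example.com/_admin/api'  # placeholder server
    AUTH_TOKEN = 'secret-auth-token'                      # placeholder token

    payload = {
        'id': 1,
        'auth_token': AUTH_TOKEN,
        'method': 'comment_commit',
        'args': {
            'repoid': 'test_repos/repo_with_prs',   # placeholder repository
            'commit_id': 'abcdef1234567890',        # placeholder commit hash
            'message': 'Automated build note',
            # acts like a CC list for the notification (user ids or usernames)
            'extra_recipients': ['some_reviewer', 2],
        },
    }

    response = requests.post(API_URL, json=payload)
    print(response.json())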
+ + +Performance +^^^^^^^^^^^ + +- Core: many various small improvements and optimizations to make rhodecode faster then before. +- VCSServer: new cache implementation for remote functions. + Single worker shared caches that can use redis/file-cache. + This greatly improves performance on larger instances, and doesn't trigger cache + re-calculation on worker restarts. +- GIT: switched internal git operations from Dulwich to libgit2 in order to obtain better performance and scalability. +- SSH: skip loading unneeded application parts for SSH to make execution of ssh commands faster. +- Main page: main page will now load repositories and repositories groups using partial DB calls instead of big JSON files. + In case of many repositories in root this could lead to very slow page rendering. +- Admin pages: made all grids use same DB based partial loading logic. We'll no longer fetch + all objects into JSON for display purposes. This significantly improves speed of those pages in case + of many objects shown in them. +- Summary page: use non-memory cache for readme, and cleanup cache for repo stats. + This change won't re-cache after worker restarts and can be shared across all workers +- Files: only check for git_lfs/hg_largefiles if they are enabled. + This speeds up fetching of files if they are not LF and very big. +- Vcsserver: added support for streaming data from the remote methods. This allows + to stream very large files without taking up memory, mostly for usage in SVN when + downloading large binaries from vcs system. +- Files: added streaming remote attributes for vcsserver. + This change enables streaming raw content or raw downloads of large files without + transferring them over to enterprise for pack & repack using msgpack. + Msgpack has a limit of 2gb and generally pack+repack for ~2gb is very slow. +- Files: ensure over size limit files never do any content fetching when viewing such files. +- VCSServer: skip host verification to speed up pycurl calls. +- User-bookmarks: cache fetching of bookmarks since this is quite expensive query to + make with joinedload on repos/repo groups. +- Goto-switcher: reduce query data to only required attributes for speedups. +- My account: owner/watched repos are now loaded only using DB queries. + + +Fixes +^^^^^ + +- Mercurial: move imports from top-level to prevent from loading mercurial code on hook execution for svn/git. +- GIT: limit sync-fetch logic to only retrieve tags/ and heads/ with default execution arguments. +- GIT: fixed issue with git submodules detection. +- SVN: fix checkout url for ssh+svn backend not having special prefix resulting in incorrect command shown. +- SVN: fixed problem with showing empty directories. +- OAuth: use a vendored version of `authomatic` library, and switch Bitbucket authentication to use oauth2. +- Diffs: handle paths with quotes in diffs. +- Diffs: fixed outdated files in pull-requests re-using the filediff raw_id for anchor generation. Fixes #5567 +- Diffs: toggle race condition on sticky vs wide-diff-mode that caused some display problems on larger diffs. +- Pull requests: handle exceptions in state change and improve logging. +- Pull requests: fixed title/description generation for single commits which are numbers. +- Pull requests: changed the source of changes to be using shadow repos if it exists. + In case of `git push -f` and rebase we lost commits in the repo resulting in + problems of displaying versions of pull-requests. 
+- Pull requests: handle case when removing existing files from a repository in compare versions diff. +- Files: don't expose copy content helper in case of binary files. +- Registration: properly expose first_name/last_name into email on user registration. +- Markup renderers: fixed broken code highlight for rst files. +- Ui: make super admin be named consistently across ui. +- Audit logs: fixed search cases with special chars such as `-`. + + +Upgrade notes +^^^^^^^^^^^^^ + +- New Automation task. We've changed the logic for updating latest change inside repository group. + New logic includes scanning for changes in all nested objects. Since this is a heavy task + a new dedicated scheduler task has been created to update it automatically on a scheduled base. + Please review in `admin > settings > automation` to enable this task. + +- New safer encryption algorithm. Some setting values are encrypted before storing it inside the database. + To keep full backward compatibility old AES algorithm is used. + If you wish to enable a safer option set fernet encryption instead inside rhodecode.ini + `rhodecode.encrypted_values.algorithm = fernet` + +- Pull requests UI changes. We've simplified the UI on pull requests page. + Please review the new UI to prevent surprises. All actions from old UI should be still possible with the new one. + +- Redis is now a default recommended backend for Celery and replaces previous rabbitmq. + Redis is generally easier to manage and install, and it's also very stable for usage + in the scheduler/celery async tasks. Since we also recommend Redis for caches the application + stack can be simplified by removing rabbitmq and replacing it with single Redis instance. + +- Recommendation for using Redis as the new cache backend on vcsserver. + Since Version 4.18.0 VCSServer has a new cache implementation for VCS data. + By default, for simplicity the cache type is file based. We strongly recommend using + Redis instead for better Performance and scalability + Please review vcsserver.ini settings under: + `rc_cache.repo_object.backend = dogpile.cache.rc.redis_msgpack` + +- New memory monitoring for Gunicorn workers. Starting from 4.18 release a option was added + to limit the maximum amount of memory used by a worker. + Please review new settings in `[server:main]` section for memory management in both + rhodecode.ini and vcsserver.ini:: + + ; Maximum memory usage that each worker can use before it will receive a + ; graceful restart signal 0 = memory monitoring is disabled + ; Examples: 268435456 (256MB), 536870912 (512MB) + ; 1073741824 (1GB), 2147483648 (2GB), 4294967296 (4GB) + memory_max_usage = 0 diff --git a/docs/release-notes/release-notes.rst b/docs/release-notes/release-notes.rst --- a/docs/release-notes/release-notes.rst +++ b/docs/release-notes/release-notes.rst @@ -9,6 +9,7 @@ Release Notes .. toctree:: :maxdepth: 1 + release-notes-4.18.0.rst release-notes-4.17.4.rst release-notes-4.17.3.rst release-notes-4.17.2.rst diff --git a/docs/tools/install-tools.rst b/docs/tools/install-tools.rst --- a/docs/tools/install-tools.rst +++ b/docs/tools/install-tools.rst @@ -57,9 +57,12 @@ To install |RCT|, use the following step 1. Set up a ``virtualenv`` on your local machine, see virtualenv_ instructions here. -2. Install |RCT| using pip. Full url with token is available at https://rhodecode.com/u/#rhodecode-tools - ``pip install -I https://dls.rhodecode.com/dls//rhodecode-tools/latest`` +2. Install |RCT| using pip. 
All downloadable versions of |RCT| are available at: + `https://code.rhodecode.com/rhodecode-tools-ce/artifacts` + Example installation:: + + pip install -I https://code.rhodecode.com/rhodecode-tools-ce/artifacts/download/0-10ac93f4-bb7d-4b97-baea-68110743dd5a.tar.gz Once |RCT| is installed using these steps there are a few extra configuration changes you can make. These are explained in more detail in the diff --git a/docs/usage/keyboard-shortcuts.rst b/docs/usage/keyboard-shortcuts.rst --- a/docs/usage/keyboard-shortcuts.rst +++ b/docs/usage/keyboard-shortcuts.rst @@ -56,4 +56,4 @@ Repository Shortcuts Go to the repository settings page. \--:kbd:`gO` - Go to the repository permissions settings. + Go to the repository access permissions settings. diff --git a/grunt_config.json b/grunt_config.json --- a/grunt_config.json +++ b/grunt_config.json @@ -99,6 +99,12 @@ "nonull": true } }, + "uglify": { + "dist": { + "src": "<%= dirs.js.dest %>/scripts.js", + "dest": "<%= dirs.js.dest %>/scripts.min.js" + } + }, "less": { "development": { "options": { diff --git a/package.json b/package.json --- a/package.json +++ b/package.json @@ -22,6 +22,7 @@ "grunt-contrib-less": "^1.1.0", "grunt-contrib-watch": "^0.6.1", "grunt-webpack": "^3.1.3", + "grunt-contrib-uglify": "^4.0.1", "jquery": "1.11.3", "mark.js": "8.11.1", "jshint": "^2.9.1-rc3", diff --git a/pkgs/node-packages.nix b/pkgs/node-packages.nix --- a/pkgs/node-packages.nix +++ b/pkgs/node-packages.nix @@ -211,13 +211,13 @@ let sha512 = "yiUk09opTEnE1lK+tb501ENb+yQBi4p++Ep0eGJAHesVYKVMPNgPphVKkIizkDaU+n0SE+zXfTsRbYyOMDYXSg=="; }; }; - "@polymer/polymer-3.2.0" = { + "@polymer/polymer-3.3.0" = { name = "_at_polymer_slash_polymer"; packageName = "@polymer/polymer"; - version = "3.2.0"; - src = fetchurl { - url = "https://registry.npmjs.org/@polymer/polymer/-/polymer-3.2.0.tgz"; - sha512 = "L6uV1oM6T6xbwbVx6t3biG5T2VSSB03LxnIrUd9M2pr6RkHVPFHJ37pC5MUwBAEhkGFJif7eks7fdMMSGZTeEQ=="; + version = "3.3.0"; + src = fetchurl { + url = "https://registry.npmjs.org/@polymer/polymer/-/polymer-3.3.0.tgz"; + sha512 = "rij7suomS7DxdBamnwr/Xa0V5hpypf7I9oYKseF2FWz5Xh2a3wJNpVjgJy1adXVCxqIyPhghsrthnfCt7EblsQ=="; }; }; "@types/clone-0.1.30" = { @@ -229,13 +229,13 @@ let sha1 = "e7365648c1b42136a59c7d5040637b3b5c83b614"; }; }; - "@types/node-6.14.6" = { + "@types/node-6.14.9" = { name = "_at_types_slash_node"; packageName = "@types/node"; - version = "6.14.6"; - src = fetchurl { - url = "https://registry.npmjs.org/@types/node/-/node-6.14.6.tgz"; - sha512 = "rFs9zCFtSHuseiNXxYxFlun8ibu+jtZPgRM+2ILCmeLiGeGLiIGxuOzD+cNyHegI1GD+da3R/cIbs9+xCLp13w=="; + version = "6.14.9"; + src = fetchurl { + url = "https://registry.npmjs.org/@types/node/-/node-6.14.9.tgz"; + sha512 = "leP/gxHunuazPdZaCvsCefPQxinqUDsCxCR5xaDUrY2MkYxQRFZZwU5e7GojyYsGB7QVtCi7iVEl/hoFXQYc+w=="; }; }; "@types/parse5-2.2.34" = { @@ -409,22 +409,22 @@ let sha512 = "mJ3QKWtCchL1vhU/kZlJnLPuQZnlDOdZsyP0bbLWPGdYsQDnSBvyTLhzwBA3QAMlzEL9V4JHygEmK6/OTEyytA=="; }; }; - "@webcomponents/shadycss-1.9.1" = { + "@webcomponents/shadycss-1.9.2" = { name = "_at_webcomponents_slash_shadycss"; packageName = "@webcomponents/shadycss"; - version = "1.9.1"; - src = fetchurl { - url = "https://registry.npmjs.org/@webcomponents/shadycss/-/shadycss-1.9.1.tgz"; - sha512 = "IaZOnWOKXHghqk/WfPNDRIgDBi3RsVPY2IFAw6tYiL9UBGvQRy5R6uC+Fk7qTZsReTJ0xh5MTT8yAcb3MUR4mQ=="; - }; - }; - "@webcomponents/webcomponentsjs-2.2.10" = { + version = "1.9.2"; + src = fetchurl { + url = "https://registry.npmjs.org/@webcomponents/shadycss/-/shadycss-1.9.2.tgz"; 
+ sha512 = "GsD7RpDVrVdgC6e+D8zQia8RGNmEGQ9/qotnVPQYPrIXhGS5xSt6ZED9YmuHz3HbLqY+E54tE1EK3tjLzSCGrw=="; + }; + }; + "@webcomponents/webcomponentsjs-2.3.0" = { name = "_at_webcomponents_slash_webcomponentsjs"; packageName = "@webcomponents/webcomponentsjs"; - version = "2.2.10"; - src = fetchurl { - url = "https://registry.npmjs.org/@webcomponents/webcomponentsjs/-/webcomponentsjs-2.2.10.tgz"; - sha512 = "5dzhUhP+h0qMiK0IWb7VNb0OGBoXO3AuI6Qi8t9PoKT50s5L1jv0xnwnLq+cFgPuTB8FLTNP8xIDmyoOsKBy9Q=="; + version = "2.3.0"; + src = fetchurl { + url = "https://registry.npmjs.org/@webcomponents/webcomponentsjs/-/webcomponentsjs-2.3.0.tgz"; + sha512 = "sR6FOrNnnncRuoJDqq9QxtRsJMbIvASw4vnJwIYKVlKO3AMc+NAr/bIQNnUiTTE9pBDTJkFpVaUdjJaRdsjmyA=="; }; }; "@xtuc/ieee754-1.2.0" = { @@ -499,22 +499,22 @@ let sha1 = "82ffb02b29e662ae53bdc20af15947706739c536"; }; }; - "ajv-6.10.0" = { + "ajv-6.10.2" = { name = "ajv"; packageName = "ajv"; - version = "6.10.0"; - src = fetchurl { - url = "https://registry.npmjs.org/ajv/-/ajv-6.10.0.tgz"; - sha512 = "nffhOpkymDECQyR0mnsUtoCE8RlX38G0rYP+wgLWFyZuUyuuojSSvi/+euOiQBIn63whYwYVIIH1TvE3tu4OEg=="; - }; - }; - "ajv-keywords-3.4.0" = { + version = "6.10.2"; + src = fetchurl { + url = "https://registry.npmjs.org/ajv/-/ajv-6.10.2.tgz"; + sha512 = "TXtUUEYHuaTEbLZWIKUr5pmBuhDLy+8KYtPYdcV8qC+pOZL+NKqYwvWSRrVXHn+ZmRRAu8vJTAznH7Oag6RVRw=="; + }; + }; + "ajv-keywords-3.4.1" = { name = "ajv-keywords"; packageName = "ajv-keywords"; - version = "3.4.0"; - src = fetchurl { - url = "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-3.4.0.tgz"; - sha512 = "aUjdRFISbuFOl0EIZc+9e4FfZp0bDZgAdOOf30bJmw8VM9v84SHyVyxDfbWxpGYbdZD/9XoKxfHVNmxPkhwyGw=="; + version = "3.4.1"; + src = fetchurl { + url = "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-3.4.1.tgz"; + sha512 = "RO1ibKvd27e6FEShVFfPALuHI3WjSVNeK5FIsmme/LYRNxjKuNj+Dt7bucLa6NdSv3JcVTyMlm9kGR84z1XpaQ=="; }; }; "align-text-0.1.4" = { @@ -806,13 +806,13 @@ let sha1 = "b6bbe0b0674b9d719708ca38de8c237cb526c3d1"; }; }; - "async-2.6.2" = { + "async-2.6.3" = { name = "async"; packageName = "async"; - version = "2.6.2"; - src = fetchurl { - url = "https://registry.npmjs.org/async/-/async-2.6.2.tgz"; - sha512 = "H1qVYh1MYhEEFLsP97cVKqCGo7KfCyTt6uEWqsTBr9SO84oK9Uwbyd/yCW+6rKJLHksBNUVWZDAjfS+Ccx0Bbg=="; + version = "2.6.3"; + src = fetchurl { + url = "https://registry.npmjs.org/async/-/async-2.6.3.tgz"; + sha512 = "zflvls11DCy+dQWzTW2dzuilv8Z5X/pjfmZOWba6TNIVDm+2UDaJmXSOXlasHKfNBs8oo3M0aT50fDEWfKZjXg=="; }; }; "async-each-1.0.3" = { @@ -1400,13 +1400,13 @@ let sha512 = "5T6P4xPgpp0YDFvSWwEZ4NoE3aM4QBQXDzmVbraCkFj8zHM+mba8SyqB5DbZWyR7mYHo6Y7BdQo3MoA4m0TeQg=="; }; }; - "base64-js-1.3.0" = { + "base64-js-1.3.1" = { name = "base64-js"; packageName = "base64-js"; - version = "1.3.0"; - src = fetchurl { - url = "https://registry.npmjs.org/base64-js/-/base64-js-1.3.0.tgz"; - sha512 = "ccav/yGvoa80BQDljCxsmmQ3Xvx60/UpBIij5QN21W3wBi/hhIC9OoO+KLpu9IJTS9j4DRVJ3aDDF9cMSoa2lw=="; + version = "1.3.1"; + src = fetchurl { + url = "https://registry.npmjs.org/base64-js/-/base64-js-1.3.1.tgz"; + sha512 = "mLQ4i2QO1ytvGWFWmcngKO//JXAQueZvwEKtjgQFM4jIK0kU+ytMfplL8j+n5mspOfjHwoAg+9yhb7BwAHm36g=="; }; }; "bcrypt-pbkdf-1.0.2" = { @@ -1445,13 +1445,13 @@ let sha512 = "Un7MIEDdUC5gNpcGDV97op1Ywk748MpHcFTHoYs6qnj1Z3j7I53VG3nwZhKzoBZmbdRNnb6WRdFlwl7tSDuZGw=="; }; }; - "bluebird-3.5.4" = { + "bluebird-3.7.1" = { name = "bluebird"; packageName = "bluebird"; - version = "3.5.4"; - src = fetchurl { - url = "https://registry.npmjs.org/bluebird/-/bluebird-3.5.4.tgz"; - 
sha512 = "FG+nFEZChJrbQ9tIccIfZJBz3J7mLrAhxakAbnrJWn8d7aKOC+LWifa0G+p4ZqKp4y13T7juYvdhq9NzKdsrjw=="; + version = "3.7.1"; + src = fetchurl { + url = "https://registry.npmjs.org/bluebird/-/bluebird-3.7.1.tgz"; + sha512 = "DdmyoGCleJnkbp3nkbxTLJ18rjDsE4yCggEwKNXkeV123sPNfOCYeDoeuOY+F2FrSjO1YXcTU+dsy96KMy+gcg=="; }; }; "bn.js-4.11.8" = { @@ -1670,22 +1670,22 @@ let sha1 = "b534e7c734c4f81ec5fbe8aca2ad24354b962c6c"; }; }; - "caniuse-db-1.0.30000967" = { + "caniuse-db-1.0.30001006" = { name = "caniuse-db"; packageName = "caniuse-db"; - version = "1.0.30000967"; - src = fetchurl { - url = "https://registry.npmjs.org/caniuse-db/-/caniuse-db-1.0.30000967.tgz"; - sha512 = "70gk6cLSD5rItxnZ7WUxyCpM9LAjEb1tVzlENQfXQXZS/IiGnfAC6u32G5cZFlDBKjNPBIta/QSx5CZLZepxRA=="; - }; - }; - "caniuse-lite-1.0.30000967" = { + version = "1.0.30001006"; + src = fetchurl { + url = "https://registry.npmjs.org/caniuse-db/-/caniuse-db-1.0.30001006.tgz"; + sha512 = "Xn25grc0GXATFnnEX+KP3IwEv6ZdHs4CALyLKvK8pBeeBe+hSpqy3/GyKBgEp4hn6o+bI+GNeNeQBf9PBOK0EQ=="; + }; + }; + "caniuse-lite-1.0.30001006" = { name = "caniuse-lite"; packageName = "caniuse-lite"; - version = "1.0.30000967"; - src = fetchurl { - url = "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30000967.tgz"; - sha512 = "rUBIbap+VJfxTzrM4akJ00lkvVb5/n5v3EGXfWzSH5zT8aJmGzjA8HWhJ4U6kCpzxozUSnB+yvAYDRPY6mRpgQ=="; + version = "1.0.30001006"; + src = fetchurl { + url = "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001006.tgz"; + sha512 = "MXnUVX27aGs/QINz+QG1sWSLDr3P1A3Hq5EUWoIt0T7K24DuvMxZEnh3Y5aHlJW6Bz2aApJdSewdYLd8zQnUuw=="; }; }; "caseless-0.12.0" = { @@ -1733,31 +1733,31 @@ let sha512 = "Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ=="; }; }; - "chokidar-2.1.5" = { + "chokidar-2.1.8" = { name = "chokidar"; packageName = "chokidar"; - version = "2.1.5"; - src = fetchurl { - url = "https://registry.npmjs.org/chokidar/-/chokidar-2.1.5.tgz"; - sha512 = "i0TprVWp+Kj4WRPtInjexJ8Q+BqTE909VpH8xVhXrJkoc5QC8VO9TryGOqTr+2hljzc1sC62t22h5tZePodM/A=="; - }; - }; - "chownr-1.1.1" = { + version = "2.1.8"; + src = fetchurl { + url = "https://registry.npmjs.org/chokidar/-/chokidar-2.1.8.tgz"; + sha512 = "ZmZUazfOzf0Nve7duiCKD23PFSCs4JPoYyccjUFF3aQkQadqBhfzhjkwBH2mNOG9cTBwhamM37EIsIkZw3nRgg=="; + }; + }; + "chownr-1.1.3" = { name = "chownr"; packageName = "chownr"; - version = "1.1.1"; - src = fetchurl { - url = "https://registry.npmjs.org/chownr/-/chownr-1.1.1.tgz"; - sha512 = "j38EvO5+LHX84jlo6h4UzmOwi0UgW61WRyPtJz4qaadK5eY3BTS5TY/S1Stc3Uk2lIM6TPevAlULiEJwie860g=="; - }; - }; - "chrome-trace-event-1.0.0" = { + version = "1.1.3"; + src = fetchurl { + url = "https://registry.npmjs.org/chownr/-/chownr-1.1.3.tgz"; + sha512 = "i70fVHhmV3DtTl6nqvZOnIjbY0Pe4kAUjwHj8z0zAdgBtYrJyYwLKCCuRBQ5ppkyL0AkN7HKRnETdmdp1zqNXw=="; + }; + }; + "chrome-trace-event-1.0.2" = { name = "chrome-trace-event"; packageName = "chrome-trace-event"; - version = "1.0.0"; - src = fetchurl { - url = "https://registry.npmjs.org/chrome-trace-event/-/chrome-trace-event-1.0.0.tgz"; - sha512 = "xDbVgyfDTT2piup/h8dK/y4QZfJRSa73bw1WZ8b4XM1o7fsFubUVGYcE+1ANtOzJJELGpYoG2961z0Z6OAld9A=="; + version = "1.0.2"; + src = fetchurl { + url = "https://registry.npmjs.org/chrome-trace-event/-/chrome-trace-event-1.0.2.tgz"; + sha512 = "9e/zx1jw7B4CO+c/RXoCsfg/x1AfUBioy4owYH0bJprEYAx5hRFLRhWBqHAG57D0ZM4H7vxbP7bPe0VwhQRYDQ=="; }; }; "cipher-base-1.0.4" = { @@ -1958,13 +1958,13 @@ let sha1 = "168a4701756b6a7f51a12ce0c97bfa28c084ed63"; }; }; - "colors-1.3.3" = { + 
"colors-1.4.0" = { name = "colors"; packageName = "colors"; - version = "1.3.3"; - src = fetchurl { - url = "https://registry.npmjs.org/colors/-/colors-1.3.3.tgz"; - sha512 = "mmGt/1pZqYRjMxB1axhTo16/snVZ5krrKkcmMeVKxzECMMXoCgnvTPp10QgHfcbQZw8Dq2jMNG6je4JlWU0gWg=="; + version = "1.4.0"; + src = fetchurl { + url = "https://registry.npmjs.org/colors/-/colors-1.4.0.tgz"; + sha512 = "a+UqTh4kgZg/SlGvfbzDHpgRu7AAQOmmqRHJnxhRZICKFUT91brVhNNt58CMWU9PsBbv3PDCZUHbVxuDiH2mtA=="; }; }; "combined-stream-1.0.8" = { @@ -2003,6 +2003,15 @@ let sha512 = "6tvAOO+D6OENvRAh524Dh9jcfKTYDQAqvqezbCW82xj5X0pSrcpxtvRKHLG0yBY6SD7PSDrJaj+0AiOcKVd1Xg=="; }; }; + "commander-2.20.3" = { + name = "commander"; + packageName = "commander"; + version = "2.20.3"; + src = fetchurl { + url = "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz"; + sha512 = "GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ=="; + }; + }; "commondir-1.0.1" = { name = "commondir"; packageName = "commondir"; @@ -2093,13 +2102,13 @@ let sha512 = "Y+SQCF+0NoWQryez2zXn5J5knmr9z/9qSQt7fbL78u83rxmigOy8X5+BFn8CFSuX+nKT8gpYwJX68ekqtQt6ZA=="; }; }; - "core-js-2.6.5" = { + "core-js-2.6.10" = { name = "core-js"; packageName = "core-js"; - version = "2.6.5"; - src = fetchurl { - url = "https://registry.npmjs.org/core-js/-/core-js-2.6.5.tgz"; - sha512 = "klh/kDpwX8hryYL14M9w/xei6vrv6sE8gTHDG7/T/+SEovB/G4ejwcfE/CBzO6Edsu+OETZMZ3wcX/EjUkrl5A=="; + version = "2.6.10"; + src = fetchurl { + url = "https://registry.npmjs.org/core-js/-/core-js-2.6.10.tgz"; + sha512 = "I39t74+4t+zau64EN1fE5v2W31Adtc/REhzWN+gWRRXg6WH5qAsZm62DHpQ1+Yhe4047T55jvzz7MUqF/dBBlA=="; }; }; "core-util-is-1.0.2" = { @@ -2237,13 +2246,13 @@ let sha1 = "ddd52c587033f49e94b71fc55569f252e8ff5f85"; }; }; - "cyclist-0.2.2" = { + "cyclist-1.0.1" = { name = "cyclist"; packageName = "cyclist"; - version = "0.2.2"; - src = fetchurl { - url = "https://registry.npmjs.org/cyclist/-/cyclist-0.2.2.tgz"; - sha1 = "1b33792e11e914a2fd6d6ed6447464444e5fa640"; + version = "1.0.1"; + src = fetchurl { + url = "https://registry.npmjs.org/cyclist/-/cyclist-1.0.1.tgz"; + sha1 = "596e9698fd0c80e12038c2b82d6eb1b35b6224d9"; }; }; "dashdash-1.14.1" = { @@ -2435,13 +2444,13 @@ let sha512 = "gd3ypIPfOMr9h5jIKq8E3sHOTCjeirnl0WK5ZdS1AW0Odt0b1PaWaHdJ4Qk4klv+YB9aJBS7mESXjFoDQPu6DA=="; }; }; - "dom-serializer-0.1.1" = { + "dom-serializer-0.2.1" = { name = "dom-serializer"; packageName = "dom-serializer"; - version = "0.1.1"; - src = fetchurl { - url = "https://registry.npmjs.org/dom-serializer/-/dom-serializer-0.1.1.tgz"; - sha512 = "l0IU0pPzLWSHBcieZbpOKgkIn3ts3vAh7ZuFyXNwJxJXk/c4Gwj9xaTJwIDVQCXawWD0qb3IzMGH5rglQaO0XA=="; + version = "0.2.1"; + src = fetchurl { + url = "https://registry.npmjs.org/dom-serializer/-/dom-serializer-0.2.1.tgz"; + sha512 = "sK3ujri04WyjwQXVoK4PU3y8ula1stq10GJZpqHIUgoGZdsGzAGu65BnU3d08aTVSvO7mGPZUc0wTEDL+qGE0Q=="; }; }; "dom5-2.3.0" = { @@ -2471,6 +2480,15 @@ let sha512 = "BSKB+TSpMpFI/HOxCNr1O8aMOTZ8hT3pM3GQ0w/mWRmkhEDSFJkkyzz4XQsBV44BChwGkrDfMyjVD0eA2aFV3w=="; }; }; + "domelementtype-2.0.1" = { + name = "domelementtype"; + packageName = "domelementtype"; + version = "2.0.1"; + src = fetchurl { + url = "https://registry.npmjs.org/domelementtype/-/domelementtype-2.0.1.tgz"; + sha512 = "5HOHUDsYZWV8FGWN0Njbr/Rn7f/eWSQi1v7+HsUVwXgn8nWWlL64zKDkS0n8ZmQ3mlWOMuXOnR+7Nx/5tMO5AQ=="; + }; + }; "domhandler-2.3.0" = { name = "domhandler"; packageName = "domhandler"; @@ -2498,6 +2516,15 @@ let sha512 = 
"3VduRWLxx9hbVr42QieQN25mx/I61/mRdUSuxAmDGdDqZIN8qtP7tcKMa3KfpJjuGjOJGYYUzzeq6eGDnkzesA=="; }; }; + "duplexer-0.1.1" = { + name = "duplexer"; + packageName = "duplexer"; + version = "0.1.1"; + src = fetchurl { + url = "https://registry.npmjs.org/duplexer/-/duplexer-0.1.1.tgz"; + sha1 = "ace6ff808c1ce66b57d1ebf97977acb02334cfc1"; + }; + }; "duplexify-3.7.1" = { name = "duplexify"; packageName = "duplexify"; @@ -2516,22 +2543,22 @@ let sha1 = "3a83a904e54353287874c564b7549386849a98c9"; }; }; - "electron-to-chromium-1.3.133" = { + "electron-to-chromium-1.3.302" = { name = "electron-to-chromium"; packageName = "electron-to-chromium"; - version = "1.3.133"; - src = fetchurl { - url = "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.3.133.tgz"; - sha512 = "lyoC8aoqbbDqsprb6aPdt9n3DpOZZzdz/T4IZKsR0/dkZIxnJVUjjcpOSwA66jPRIOyDAamCTAUqweU05kKNSg=="; - }; - }; - "elliptic-6.4.1" = { + version = "1.3.302"; + src = fetchurl { + url = "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.3.302.tgz"; + sha512 = "1qConyiVEbj4xZRBXqtGR003+9tV0rJF0PS6aeO0Ln/UL637js9hdwweCl07meh/kJoI2N4W8q3R3g3F5z46ww=="; + }; + }; + "elliptic-6.5.1" = { name = "elliptic"; packageName = "elliptic"; - version = "6.4.1"; - src = fetchurl { - url = "https://registry.npmjs.org/elliptic/-/elliptic-6.4.1.tgz"; - sha512 = "BsXLz5sqX8OHcsh7CqBMztyXARmGQ3LWPtGjJi6DiJHq5C/qvi9P3OqgswKSDftbu8+IoI/QDTAm2fFnQ9SZSQ=="; + version = "6.5.1"; + src = fetchurl { + url = "https://registry.npmjs.org/elliptic/-/elliptic-6.5.1.tgz"; + sha512 = "xvJINNLbTeWQjrl6X+7eQCrIy/YPv5XCpKW6kB5mKvtnGILoLDcySuwomfdzt0BMdLNVnuRNTuzKNHj0bva1Cg=="; }; }; "emojis-list-2.1.0" = { @@ -2543,13 +2570,13 @@ let sha1 = "4daa4d9db00f9819880c79fa457ae5b09a1fd389"; }; }; - "end-of-stream-1.4.1" = { + "end-of-stream-1.4.4" = { name = "end-of-stream"; packageName = "end-of-stream"; - version = "1.4.1"; - src = fetchurl { - url = "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.1.tgz"; - sha512 = "1MkrZNvWTKCaigbn+W15elq2BB/L22nqrSY5DKlo3X6+vclJm8Bb5djXJBmEX6fS3+zCh/F4VBK5Z2KxJt4s2Q=="; + version = "1.4.4"; + src = fetchurl { + url = "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz"; + sha512 = "+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q=="; }; }; "enhanced-resolve-3.4.1" = { @@ -2561,13 +2588,13 @@ let sha1 = "0421e339fd71419b3da13d129b3979040230476e"; }; }; - "enhanced-resolve-4.1.0" = { + "enhanced-resolve-4.1.1" = { name = "enhanced-resolve"; packageName = "enhanced-resolve"; - version = "4.1.0"; - src = fetchurl { - url = "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-4.1.0.tgz"; - sha512 = "F/7vkyTtyc/llOIn8oWclcB25KdRaiPBpZYDgJHgh/UHtpgT2p2eldQgtQnLtUvfMKPKxbRaQM/hHkvLHt1Vng=="; + version = "4.1.1"; + src = fetchurl { + url = "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-4.1.1.tgz"; + sha512 = "98p2zE+rL7/g/DzMHMTF4zZlCgeVdJ7yr6xzEpJRYwFYrGi9ANdn5DnJURg6RpBkyk60XYDnWIv51VfIhfNGuA=="; }; }; "entities-1.0.0" = { @@ -2579,13 +2606,13 @@ let sha1 = "b2987aa3821347fcde642b24fdfc9e4fb712bf26"; }; }; - "entities-1.1.2" = { + "entities-2.0.0" = { name = "entities"; packageName = "entities"; - version = "1.1.2"; - src = fetchurl { - url = "https://registry.npmjs.org/entities/-/entities-1.1.2.tgz"; - sha512 = "f2LZMYl1Fzu7YSBKg+RoROelpOaNrcGmE9AZubeDfrCEia483oW4MI4VyFd5VNHIgQ/7qm1I0wUHK1eJnn2y2w=="; + version = "2.0.0"; + src = fetchurl { + url = "https://registry.npmjs.org/entities/-/entities-2.0.0.tgz"; + sha512 = 
"D9f7V0JSRwIxlRI2mjMqufDrRDnx8p+eEOz7aUM9SuvF8gsBzra0/6tbjl1m8eQHrZlYj6PxqE00hZ1SAIKPLw=="; }; }; "errno-0.1.7" = { @@ -2597,13 +2624,13 @@ let sha512 = "MfrRBDWzIWifgq6tJj60gkAwtLNb6sQPlcFrSOflcP1aFmmruKQ2wRnze/8V6kgyz7H3FF8Npzv78mZ7XLLflg=="; }; }; - "es-abstract-1.13.0" = { + "es-abstract-1.16.0" = { name = "es-abstract"; packageName = "es-abstract"; - version = "1.13.0"; - src = fetchurl { - url = "https://registry.npmjs.org/es-abstract/-/es-abstract-1.13.0.tgz"; - sha512 = "vDZfg/ykNxQVwup/8E1BZhVzFfBxs9NqMzGcvIJrqg5k2/5Za2bWo40dK2J1pgLngZ7c+Shh8lwYtLGyrwPutg=="; + version = "1.16.0"; + src = fetchurl { + url = "https://registry.npmjs.org/es-abstract/-/es-abstract-1.16.0.tgz"; + sha512 = "xdQnfykZ9JMEiasTAJZJdMWCQ1Vm00NBw79/AWi7ELfZuuPCSOMDZbT9mkOfSctVtfhb+sAAzrm+j//GjjLHLg=="; }; }; "es-to-primitive-1.2.0" = { @@ -2687,22 +2714,22 @@ let sha512 = "64RBB++fIOAXPw3P9cy89qfMlvZEXZkqqJkjqqXIvzP5ezRZjW+lPWjw35UX/3EhUPFYbg5ER4JYgDw4007/DQ=="; }; }; - "estraverse-4.2.0" = { + "estraverse-4.3.0" = { name = "estraverse"; packageName = "estraverse"; - version = "4.2.0"; - src = fetchurl { - url = "https://registry.npmjs.org/estraverse/-/estraverse-4.2.0.tgz"; - sha1 = "0dee3fed31fcd469618ce7342099fc1afa0bdb13"; - }; - }; - "esutils-2.0.2" = { + version = "4.3.0"; + src = fetchurl { + url = "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz"; + sha512 = "39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw=="; + }; + }; + "esutils-2.0.3" = { name = "esutils"; packageName = "esutils"; - version = "2.0.2"; - src = fetchurl { - url = "https://registry.npmjs.org/esutils/-/esutils-2.0.2.tgz"; - sha1 = "0abf4f1caa5bcb1f7a9d8acc6dea4faaa04bac9b"; + version = "2.0.3"; + src = fetchurl { + url = "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz"; + sha512 = "kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g=="; }; }; "eventemitter2-0.4.14" = { @@ -2867,6 +2894,15 @@ let sha1 = "c14c5b3bf14d7417ffbfd990c0a7495cd9f337bc"; }; }; + "figures-1.7.0" = { + name = "figures"; + packageName = "figures"; + version = "1.7.0"; + src = fetchurl { + url = "https://registry.npmjs.org/figures/-/figures-1.7.0.tgz"; + sha1 = "cbe1e3affcf1cd44b80cadfed28dc793a9701d2e"; + }; + }; "file-sync-cmp-0.1.1" = { name = "file-sync-cmp"; packageName = "file-sync-cmp"; @@ -2948,13 +2984,13 @@ let sha512 = "lNaHNVymajmk0OJMBn8fVUAU1BtDeKIqKoVhk4xAALB57aALg6b4W0MfJ/cUE0g9YBXy5XhSlPIpYIJ7HaY/3Q=="; }; }; - "flatten-1.0.2" = { + "flatten-1.0.3" = { name = "flatten"; packageName = "flatten"; - version = "1.0.2"; - src = fetchurl { - url = "https://registry.npmjs.org/flatten/-/flatten-1.0.2.tgz"; - sha1 = "dae46a9d78fbe25292258cc1e780a41d95c03782"; + version = "1.0.3"; + src = fetchurl { + url = "https://registry.npmjs.org/flatten/-/flatten-1.0.3.tgz"; + sha512 = "dVsPA/UwQ8+2uoFe5GHtiBMu48dWLTdsuEd7CKGlZlD78r1TTWBvDuFaFGKCo/ZfEr95Uk56vZoX86OsHkUeIg=="; }; }; "flush-write-stream-1.1.1" = { @@ -3128,13 +3164,13 @@ let sha1 = "4a973f635b9190f715d10987d5c00fd2815ebe3d"; }; }; - "glob-7.1.4" = { + "glob-7.1.5" = { name = "glob"; packageName = "glob"; - version = "7.1.4"; - src = fetchurl { - url = "https://registry.npmjs.org/glob/-/glob-7.1.4.tgz"; - sha512 = "hkLPepehmnKk41pUGm3sYxoFs/umurYfYJCerbXEyFIWcAzvpipAgVkBqqT9RBKMGjnq6kMuyYwha6csxbiM1A=="; + version = "7.1.5"; + src = fetchurl { + url = "https://registry.npmjs.org/glob/-/glob-7.1.5.tgz"; + sha512 = "J9dlskqUXK1OeTOYBEn5s8aMukWMwWfs+rPTn/jn50Ux4MNXVhubL1wu/j2t+H4NVI+cXEcCaYellqaPVGXNqQ=="; }; }; 
"glob-parent-3.1.0" = { @@ -3218,13 +3254,13 @@ let sha1 = "15a4806a57547cb2d2dbf27f42e89a8c3451b364"; }; }; - "graceful-fs-4.1.15" = { + "graceful-fs-4.2.3" = { name = "graceful-fs"; packageName = "graceful-fs"; - version = "4.1.15"; - src = fetchurl { - url = "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.1.15.tgz"; - sha512 = "6uHUhOPEBgQ24HM+r6b/QwWfZq+yiFcipKFrOFiBEnWdy5sdzYoi+pJeQaPI5qOLRFqWmAXUPQNsielzdLoecA=="; + version = "4.2.3"; + src = fetchurl { + url = "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.3.tgz"; + sha512 = "a30VEBm4PEdx1dRB7MFK7BejejvCvBronbLjht+sHuGYj8PHs7M/5Z+rt5lw551vZ7yfTCj4Vuyy3mSJytDWRQ=="; }; }; "grunt-0.4.5" = { @@ -3281,6 +3317,15 @@ let sha1 = "3bbdec0b75d12ceaa55d62943625c0b0861cdf6f"; }; }; + "grunt-contrib-uglify-4.0.1" = { + name = "grunt-contrib-uglify"; + packageName = "grunt-contrib-uglify"; + version = "4.0.1"; + src = fetchurl { + url = "https://registry.npmjs.org/grunt-contrib-uglify/-/grunt-contrib-uglify-4.0.1.tgz"; + sha512 = "dwf8/+4uW1+7pH72WButOEnzErPGmtUvc8p08B0eQS/6ON0WdeQu0+WFeafaPTbbY1GqtS25lsHWaDeiTQNWPg=="; + }; + }; "grunt-contrib-watch-0.6.1" = { name = "grunt-contrib-watch"; packageName = "grunt-contrib-watch"; @@ -3335,6 +3380,15 @@ let sha512 = "SaZ8K8lG4iTxs7ClZxOWCf3kxqS2y+Eel8SbaEGgBKwhAp6e45beIu+vhBZRLX3vonKML2kjemKsQ21REaqNFQ=="; }; }; + "gzip-size-3.0.0" = { + name = "gzip-size"; + packageName = "gzip-size"; + version = "3.0.0"; + src = fetchurl { + url = "https://registry.npmjs.org/gzip-size/-/gzip-size-3.0.0.tgz"; + sha1 = "546188e9bdc337f673772f81660464b389dce520"; + }; + }; "har-schema-1.0.5" = { name = "har-schema"; packageName = "har-schema"; @@ -3695,15 +3749,6 @@ let sha1 = "f30f716c8e2bd346c7b67d3df3915566a7c05607"; }; }; - "indexof-0.0.1" = { - name = "indexof"; - packageName = "indexof"; - version = "0.0.1"; - src = fetchurl { - url = "https://registry.npmjs.org/indexof/-/indexof-0.0.1.tgz"; - sha1 = "82dc336d232b9062179d05ab3293a66059fd435d"; - }; - }; "inflight-1.0.6" = { name = "inflight"; packageName = "inflight"; @@ -3740,6 +3785,15 @@ let sha1 = "633c2c83e3da42a502f52466022480f4208261de"; }; }; + "inherits-2.0.4" = { + name = "inherits"; + packageName = "inherits"; + version = "2.0.4"; + src = fetchurl { + url = "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz"; + sha512 = "k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ=="; + }; + }; "ini-1.3.5" = { name = "ini"; packageName = "ini"; @@ -4424,13 +4478,13 @@ let sha1 = "fadd834b9683073da179b3eae6d9c0d15053f73e"; }; }; - "lodash-4.17.11" = { + "lodash-4.17.15" = { name = "lodash"; packageName = "lodash"; - version = "4.17.11"; - src = fetchurl { - url = "https://registry.npmjs.org/lodash/-/lodash-4.17.11.tgz"; - sha512 = "cQKh8igo5QUhZ7lg38DYWAxMvjSAKG0A8wGSVimP07SIUEK2UO+arSRKbRZWtelMtN5V0Hkwh5ryOto/SshYIg=="; + version = "4.17.15"; + src = fetchurl { + url = "https://registry.npmjs.org/lodash/-/lodash-4.17.15.tgz"; + sha512 = "8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A=="; }; }; "lodash.camelcase-4.3.0" = { @@ -4577,6 +4631,15 @@ let sha1 = "de819fdbcd84dccd8fae59c6aeb79615b9d266ac"; }; }; + "maxmin-2.1.0" = { + name = "maxmin"; + packageName = "maxmin"; + version = "2.1.0"; + src = fetchurl { + url = "https://registry.npmjs.org/maxmin/-/maxmin-2.1.0.tgz"; + sha1 = "4d3b220903d95eee7eb7ac7fa864e72dc09a3166"; + }; + }; "md5.js-1.3.5" = { name = "md5.js"; packageName = "md5.js"; @@ -4604,6 +4667,15 @@ let sha1 = 
"3a9a20b8462523e447cfbc7e8bb80ed667bfc552"; }; }; + "memory-fs-0.5.0" = { + name = "memory-fs"; + packageName = "memory-fs"; + version = "0.5.0"; + src = fetchurl { + url = "https://registry.npmjs.org/memory-fs/-/memory-fs-0.5.0.tgz"; + sha512 = "jA0rdU5KoQMC0e6ppoNRtpp6vjFq6+NY7r8hywnC7V+1Xj/MtHwGIbB1QaK/dunyjWteJzmkpd7ooeWg10T7GA=="; + }; + }; "micromatch-3.1.10" = { name = "micromatch"; packageName = "micromatch"; @@ -4730,13 +4802,13 @@ let sha512 = "zHo8v+otD1J10j/tC+VNoGK9keCuByhKovAvdn74dmxJl9+mWHnx6EMsDN4lgRoMI/eYo2nchAxniIbUPb5onw=="; }; }; - "mixin-deep-1.3.1" = { + "mixin-deep-1.3.2" = { name = "mixin-deep"; packageName = "mixin-deep"; - version = "1.3.1"; - src = fetchurl { - url = "https://registry.npmjs.org/mixin-deep/-/mixin-deep-1.3.1.tgz"; - sha512 = "8ZItLHeEgaqEvd5lYBXfm4EZSFCX29Jb9K+lAHhDKzReKBQKj3R+7NOF6tjqYi9t4oI8VUfaWITJQm86wnXGNQ=="; + version = "1.3.2"; + src = fetchurl { + url = "https://registry.npmjs.org/mixin-deep/-/mixin-deep-1.3.2.tgz"; + sha512 = "WRoDn//mXBiJ1H40rqa3vH0toePwSsGb45iInWlTySa+Uu4k3tYUSxa2v1KqAiLtvlrSzaExqS1gtk96A9zvEA=="; }; }; "mkdirp-0.5.1" = { @@ -4784,13 +4856,13 @@ let sha1 = "5608aeadfc00be6c2901df5f9861788de0d597c8"; }; }; - "nan-2.13.2" = { + "nan-2.14.0" = { name = "nan"; packageName = "nan"; - version = "2.13.2"; - src = fetchurl { - url = "https://registry.npmjs.org/nan/-/nan-2.13.2.tgz"; - sha512 = "TghvYc72wlMGMVMluVo9WRJc0mB8KxxF/gZ4YYFy7V2ZQX9l7rgbPg7vjS9mt6U5HXODVFVI2bOduCzwOMv/lw=="; + version = "2.14.0"; + src = fetchurl { + url = "https://registry.npmjs.org/nan/-/nan-2.14.0.tgz"; + sha512 = "INOFj37C7k3AfaNTtX8RhsTw7qRy7eLET14cROi9+5HAVbbHuIWUHEauBv5qT4Av2tWasiTY1Jw6puUNqRJXQg=="; }; }; "nanomatch-1.2.13" = { @@ -4829,13 +4901,13 @@ let sha512 = "rmTZ9kz+f3rCvK2TD1Ue/oZlns7OGoIWP4fc3llxxRXlOkHKoWPPWJOfFYpITabSow43QJbRIoHQXtt10VldyQ=="; }; }; - "node-libs-browser-2.2.0" = { + "node-libs-browser-2.2.1" = { name = "node-libs-browser"; packageName = "node-libs-browser"; - version = "2.2.0"; - src = fetchurl { - url = "https://registry.npmjs.org/node-libs-browser/-/node-libs-browser-2.2.0.tgz"; - sha512 = "5MQunG/oyOaBdttrL40dA7bUfPORLRWMUJLQtMg7nluxUvk5XwnLdL9twQHFAjRx/y7mIMkLKT9++qPbbk6BZA=="; + version = "2.2.1"; + src = fetchurl { + url = "https://registry.npmjs.org/node-libs-browser/-/node-libs-browser-2.2.1.tgz"; + sha512 = "h/zcD8H9kaDZ9ALUWwlBUDo6TKF8a7qBSCSEGfjTVIYeqsioSKaAX+BN7NgiMGp6iSIXZ3PxgCu8KS3b71YK5Q=="; }; }; "nopt-1.0.10" = { @@ -4973,6 +5045,15 @@ let sha1 = "7e7d858b781bd7c991a41ba975ed3812754e998c"; }; }; + "object-inspect-1.6.0" = { + name = "object-inspect"; + packageName = "object-inspect"; + version = "1.6.0"; + src = fetchurl { + url = "https://registry.npmjs.org/object-inspect/-/object-inspect-1.6.0.tgz"; + sha512 = "GJzfBZ6DgDAmnuaM3104jR4s1Myxr3Y3zfIyN4z3UdqN69oSRacNK8UhnobDdC+7J2AHCjGwxQubNJfE70SXXQ=="; + }; + }; "object-keys-1.1.1" = { name = "object-keys"; packageName = "object-keys"; @@ -5117,13 +5198,13 @@ let sha512 = "vvcXsLAJ9Dr5rQOPk7toZQZJApBl2K4J6dANSsEuh6QI41JYcsS/qhTGa9ErIUUgK3WNQoJYvylxvjqmiqEA9Q=="; }; }; - "p-limit-2.2.0" = { + "p-limit-2.2.1" = { name = "p-limit"; packageName = "p-limit"; - version = "2.2.0"; - src = fetchurl { - url = "https://registry.npmjs.org/p-limit/-/p-limit-2.2.0.tgz"; - sha512 = "pZbTJpoUsCzV48Mc9Nh51VbwO0X9cuPFE8gYwx9BTCt9SF8/b7Zljd2fVgOxhIF/HDTKgpVzs+GPhyKfjLLFRQ=="; + version = "2.2.1"; + src = fetchurl { + url = "https://registry.npmjs.org/p-limit/-/p-limit-2.2.1.tgz"; + sha512 = 
"85Tk+90UCVWvbDavCLKPOLC9vvY8OwEX/RtKF+/1OADJMVlFfEHOiMTPVyxg7mk/dKa+ipdHm0OUkTvCpMTuwg=="; }; }; "p-locate-2.0.0" = { @@ -5171,13 +5252,13 @@ let sha512 = "0DTvPVU3ed8+HNXOu5Bs+o//Mbdj9VNQMUOe9oKCwh8l0GNwpTDMKCWbRjgtD291AWnkAgkqA/LOnQS8AmS1tw=="; }; }; - "parallel-transform-1.1.0" = { + "parallel-transform-1.2.0" = { name = "parallel-transform"; packageName = "parallel-transform"; - version = "1.1.0"; - src = fetchurl { - url = "https://registry.npmjs.org/parallel-transform/-/parallel-transform-1.1.0.tgz"; - sha1 = "d410f065b05da23081fcd10f28854c29bda33b06"; + version = "1.2.0"; + src = fetchurl { + url = "https://registry.npmjs.org/parallel-transform/-/parallel-transform-1.2.0.tgz"; + sha512 = "P2vSmIu38uIlvdcU7fDkyrxj33gTUy/ABO5ZUbGowxNCopBq/OoD42bP4UmMrJoPyk4Uqf0mu3mtWBhHCZD8yg=="; }; }; "param-case-2.1.1" = { @@ -5189,13 +5270,13 @@ let sha1 = "df94fd8cf6531ecf75e6bef9a0858fbc72be2247"; }; }; - "parse-asn1-5.1.4" = { + "parse-asn1-5.1.5" = { name = "parse-asn1"; packageName = "parse-asn1"; - version = "5.1.4"; - src = fetchurl { - url = "https://registry.npmjs.org/parse-asn1/-/parse-asn1-5.1.4.tgz"; - sha512 = "Qs5duJcuvNExRfFZ99HDD3z4mAi3r9Wl/FOjEOijlxwCZs7E7mW2vjTpgQ4J8LpTF8x5v+1Vn5UQFejmWT11aw=="; + version = "5.1.5"; + src = fetchurl { + url = "https://registry.npmjs.org/parse-asn1/-/parse-asn1-5.1.5.tgz"; + sha512 = "jkMYn1dcJqF6d5CpU689bq7w/b5ALS9ROVSpQDPrZsqqesUJii9qutvoT5ltGedNXMO2e16YUWIghG9KxaViTQ=="; }; }; "parse-filepath-1.0.2" = { @@ -5252,13 +5333,13 @@ let sha1 = "b363e55e8006ca6fe21784d2db22bd15d7917f14"; }; }; - "path-browserify-0.0.0" = { + "path-browserify-0.0.1" = { name = "path-browserify"; packageName = "path-browserify"; - version = "0.0.0"; - src = fetchurl { - url = "https://registry.npmjs.org/path-browserify/-/path-browserify-0.0.0.tgz"; - sha1 = "a0b870729aae214005b7d5032ec2cbbb0fb4451a"; + version = "0.0.1"; + src = fetchurl { + url = "https://registry.npmjs.org/path-browserify/-/path-browserify-0.0.1.tgz"; + sha512 = "BapA40NHICOS+USX9SN4tyhq+A2RrN/Ws5F0Z5aMHDp98Fl86lX8Oti8B7uN93L4Ifv4fHOEA+pQw87gmMO/lQ=="; }; }; "path-dirname-1.0.2" = { @@ -5711,6 +5792,15 @@ let sha1 = "d4f4562b0ce3696e41ac52d0e002e57a635dc6dc"; }; }; + "pretty-bytes-3.0.1" = { + name = "pretty-bytes"; + packageName = "pretty-bytes"; + version = "3.0.1"; + src = fetchurl { + url = "https://registry.npmjs.org/pretty-bytes/-/pretty-bytes-3.0.1.tgz"; + sha1 = "27d0008d778063a0b4811bb35c79f1bd5d5fbccf"; + }; + }; "pretty-error-2.1.1" = { name = "pretty-error"; packageName = "pretty-error"; @@ -5738,13 +5828,13 @@ let sha1 = "7332300e840161bda3e69a1d1d91a7d4bc16f182"; }; }; - "process-nextick-args-2.0.0" = { + "process-nextick-args-2.0.1" = { name = "process-nextick-args"; packageName = "process-nextick-args"; - version = "2.0.0"; - src = fetchurl { - url = "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.0.tgz"; - sha512 = "MtEC1TqN0EU5nephaJ4rAtThHtC86dNN9qCuEhtshvpVBkAW5ZO7BASN9REnF9eoXGcRub+pFuKEpOHE+HbEMw=="; + version = "2.0.1"; + src = fetchurl { + url = "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz"; + sha512 = "3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag=="; }; }; "promise-7.3.1" = { @@ -5990,13 +6080,13 @@ let sha1 = "747c914e049614a4c9cfbba629871ad1d2927716"; }; }; - "reduce-function-call-1.0.2" = { + "reduce-function-call-1.0.3" = { name = "reduce-function-call"; packageName = "reduce-function-call"; - version = "1.0.2"; - src = fetchurl { - url = 
"https://registry.npmjs.org/reduce-function-call/-/reduce-function-call-1.0.2.tgz"; - sha1 = "5a200bf92e0e37751752fe45b0ab330fd4b6be99"; + version = "1.0.3"; + src = fetchurl { + url = "https://registry.npmjs.org/reduce-function-call/-/reduce-function-call-1.0.3.tgz"; + sha512 = "Hl/tuV2VDgWgCSEeWMLwxLZqX7OK59eU1guxXsRKTAyeYimivsKdtcV4fu3r710tpG5GmDKDhQ0HSZLExnNmyQ=="; }; }; "regenerate-1.4.0" = { @@ -6152,13 +6242,13 @@ let sha1 = "97f717b69d48784f5f526a6c5aa8ffdda055a4d1"; }; }; - "resolve-1.10.1" = { + "resolve-1.12.0" = { name = "resolve"; packageName = "resolve"; - version = "1.10.1"; - src = fetchurl { - url = "https://registry.npmjs.org/resolve/-/resolve-1.10.1.tgz"; - sha512 = "KuIe4mf++td/eFb6wkaPbMDnP6kObCaEtIDuHOUED6MNUo4K670KZUHuuvYPZDxNF0WVLw49n06M2m2dXphEzA=="; + version = "1.12.0"; + src = fetchurl { + url = "https://registry.npmjs.org/resolve/-/resolve-1.12.0.tgz"; + sha512 = "B/dOmuoAik5bKcD6s6nXDCjzUKnaDvdkRyAk6rsmsKLipWj4797iothd7jmmUhWTfinVMU+wc56rYKsit2Qy4w=="; }; }; "resolve-cwd-2.0.0" = { @@ -6224,13 +6314,13 @@ let sha1 = "e439be2aaee327321952730f99a8929e4fc50582"; }; }; - "rimraf-2.6.3" = { + "rimraf-2.7.1" = { name = "rimraf"; packageName = "rimraf"; - version = "2.6.3"; - src = fetchurl { - url = "https://registry.npmjs.org/rimraf/-/rimraf-2.6.3.tgz"; - sha512 = "mwqeW5XsA2qAejG46gYdENaxXjx9onRNCfn7L0duuP4hCuTIi/QO7PDK07KJfp1d+izWPrzEJDcSqBa0OZQriA=="; + version = "2.7.1"; + src = fetchurl { + url = "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz"; + sha512 = "uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w=="; }; }; "ripemd160-2.0.2" = { @@ -6260,6 +6350,15 @@ let sha512 = "Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g=="; }; }; + "safe-buffer-5.2.0" = { + name = "safe-buffer"; + packageName = "safe-buffer"; + version = "5.2.0"; + src = fetchurl { + url = "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.0.tgz"; + sha512 = "fZEwUGbVl7kouZs1jCdMLdt95hdIv0ZeHg6L7qPeciMZhZ+/gdesW4wgTARkrFWEpspjEATAzUGPG8N2jJiwbg=="; + }; + }; "safe-regex-1.1.0" = { name = "safe-regex"; packageName = "safe-regex"; @@ -6305,22 +6404,22 @@ let sha1 = "0e7350acdec80b1108528786ec1d4418d11b396d"; }; }; - "semver-5.7.0" = { + "semver-5.7.1" = { name = "semver"; packageName = "semver"; - version = "5.7.0"; - src = fetchurl { - url = "https://registry.npmjs.org/semver/-/semver-5.7.0.tgz"; - sha512 = "Ya52jSX2u7QKghxeoFGpLwCtGlt7j0oY9DYb5apt9nPlJ42ID+ulTXESnt/qAQcoSERyZ5sl3LDIOw0nAn/5DA=="; - }; - }; - "serialize-javascript-1.7.0" = { + version = "5.7.1"; + src = fetchurl { + url = "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz"; + sha512 = "sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ=="; + }; + }; + "serialize-javascript-1.9.1" = { name = "serialize-javascript"; packageName = "serialize-javascript"; - version = "1.7.0"; - src = fetchurl { - url = "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-1.7.0.tgz"; - sha512 = "ke8UG8ulpFOxO8f8gRYabHQe/ZntKlcig2Mp+8+URDP1D8vJZ0KUt7LYo07q25Z/+JVSgpr/cui9PIp5H6/+nA=="; + version = "1.9.1"; + src = fetchurl { + url = "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-1.9.1.tgz"; + sha512 = "0Vb/54WJ6k5v8sSWN09S0ora+Hnr+cX40r9F170nT+mSkaxltoE/7R3OrIdBSUv1OoiobH1QoWQbCnAO+e8J1A=="; }; }; "set-blocking-2.0.0" = { @@ -6332,22 +6431,13 @@ let sha1 = "045f9782d011ae9a6803ddd382b24392b3d890f7"; }; }; - "set-value-0.4.3" = { + "set-value-2.0.1" = { name = "set-value"; 
packageName = "set-value"; - version = "0.4.3"; - src = fetchurl { - url = "https://registry.npmjs.org/set-value/-/set-value-0.4.3.tgz"; - sha1 = "7db08f9d3d22dc7f78e53af3c3bf4666ecdfccf1"; - }; - }; - "set-value-2.0.0" = { - name = "set-value"; - packageName = "set-value"; - version = "2.0.0"; - src = fetchurl { - url = "https://registry.npmjs.org/set-value/-/set-value-2.0.0.tgz"; - sha512 = "hw0yxk9GT/Hr5yJEYnHNKYXkIA8mVJgd9ditYZCe16ZczcaELYYcfvaXesNACk2O8O0nTiPQcQhGUQj8JLzeeg=="; + version = "2.0.1"; + src = fetchurl { + url = "https://registry.npmjs.org/set-value/-/set-value-2.0.1.tgz"; + sha512 = "JxHc1weCN68wRY0fhCoXpyK55m/XPHafOmK4UWD7m2CI14GMcFypt4w/0+NV5f/ZMby2F6S2wwA7fgynh9gWSw=="; }; }; "setimmediate-1.0.5" = { @@ -6665,6 +6755,24 @@ let sha512 = "nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw=="; }; }; + "string.prototype.trimleft-2.1.0" = { + name = "string.prototype.trimleft"; + packageName = "string.prototype.trimleft"; + version = "2.1.0"; + src = fetchurl { + url = "https://registry.npmjs.org/string.prototype.trimleft/-/string.prototype.trimleft-2.1.0.tgz"; + sha512 = "FJ6b7EgdKxxbDxc79cOlok6Afd++TTs5szo+zJTUyow3ycrRfJVE2pq3vcN53XexvKZu/DJMDfeI/qMiZTrjTw=="; + }; + }; + "string.prototype.trimright-2.1.0" = { + name = "string.prototype.trimright"; + packageName = "string.prototype.trimright"; + version = "2.1.0"; + src = fetchurl { + url = "https://registry.npmjs.org/string.prototype.trimright/-/string.prototype.trimright-2.1.0.tgz"; + sha512 = "fXZTSV55dNBwv16uw+hh5jkghxSnc5oHq+5K/gXgizHwAvMetdAJlHqqoFC1FSDVPYWLkAKl2cxpUT41sV7nSg=="; + }; + }; "string_decoder-0.10.31" = { name = "string_decoder"; packageName = "string_decoder"; @@ -6683,13 +6791,13 @@ let sha512 = "n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg=="; }; }; - "string_decoder-1.2.0" = { + "string_decoder-1.3.0" = { name = "string_decoder"; packageName = "string_decoder"; - version = "1.2.0"; - src = fetchurl { - url = "https://registry.npmjs.org/string_decoder/-/string_decoder-1.2.0.tgz"; - sha512 = "6YqyX6ZWEYguAxgZzHGL7SsCeGx3V2TtOTqZz1xSTSWnqsbWwbptafNyvf/ACquZUXV3DANr5BDIwNYe1mN42w=="; + version = "1.3.0"; + src = fetchurl { + url = "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz"; + sha512 = "hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA=="; }; }; "stringstream-0.0.6" = { @@ -6836,13 +6944,13 @@ let sha512 = "/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ=="; }; }; - "timers-browserify-2.0.10" = { + "timers-browserify-2.0.11" = { name = "timers-browserify"; packageName = "timers-browserify"; - version = "2.0.10"; - src = fetchurl { - url = "https://registry.npmjs.org/timers-browserify/-/timers-browserify-2.0.10.tgz"; - sha512 = "YvC1SV1XdOUaL6gx5CoGroT3Gu49pK9+TZ38ErPldOWW4j49GI1HKs9DV+KGq/w6y+LZ72W1c8cKz2vzY+qpzg=="; + version = "2.0.11"; + src = fetchurl { + url = "https://registry.npmjs.org/timers-browserify/-/timers-browserify-2.0.11.tgz"; + sha512 = "60aV6sgJ5YEbzUdn9c8kYGIqOubPoUdqQCul3SBAsRCZ40s6Y5cMcrW4dt3/k/EsbLVJNl9n6Vz3fTc+k2GeKQ=="; }; }; "tiny-emitter-2.1.0" = { @@ -6944,13 +7052,13 @@ let sha1 = "30c6203e1e66b841a88701ed8858f1725d94b026"; }; }; - "tslib-1.9.3" = { + "tslib-1.10.0" = { name = "tslib"; packageName = "tslib"; - version = "1.9.3"; - src = fetchurl { - url = "https://registry.npmjs.org/tslib/-/tslib-1.9.3.tgz"; - sha512 = 
"4krF8scpejhaOgqzBEcGM7yDIEfi0/8+8zDRZhNZZ2kjmHJ4hv3zCbQWxoJGz1iw5U0Jl0nma13xzHXcncMavQ=="; + version = "1.10.0"; + src = fetchurl { + url = "https://registry.npmjs.org/tslib/-/tslib-1.10.0.tgz"; + sha512 = "qOebF53frne81cf0S9B41ByenJ3/IuH8yJKngAX35CmiZySA0khhkovshKK+jGCaMnVomla7gVlIcc3EvKPbTQ=="; }; }; "tty-browserify-0.0.0" = { @@ -7016,6 +7124,15 @@ let sha512 = "Y2VsbPVs0FIshJztycsO2SfPk7/KAF/T72qzv9u5EpQ4kB2hQoHlhNQTsNyy6ul7lQtqJN/AoWeS23OzEiEFxw=="; }; }; + "uglify-js-3.6.7" = { + name = "uglify-js"; + packageName = "uglify-js"; + version = "3.6.7"; + src = fetchurl { + url = "https://registry.npmjs.org/uglify-js/-/uglify-js-3.6.7.tgz"; + sha512 = "4sXQDzmdnoXiO+xvmTzQsfIiwrjUCSA95rSP4SEd8tDb51W2TiDOlL76Hl+Kw0Ie42PSItCW8/t6pBNCF2R48A=="; + }; + }; "uglify-to-browserify-1.0.2" = { name = "uglify-to-browserify"; packageName = "uglify-to-browserify"; @@ -7079,13 +7196,13 @@ let sha1 = "8cdd8fbac4e2d2ea1e7e2e8097c42f442280f85b"; }; }; - "union-value-1.0.0" = { + "union-value-1.0.1" = { name = "union-value"; packageName = "union-value"; - version = "1.0.0"; - src = fetchurl { - url = "https://registry.npmjs.org/union-value/-/union-value-1.0.0.tgz"; - sha1 = "5c71c34cb5bad5dcebe3ea0cd08207ba5aa1aea4"; + version = "1.0.1"; + src = fetchurl { + url = "https://registry.npmjs.org/union-value/-/union-value-1.0.1.tgz"; + sha512 = "tJfXmxMeWYnczCVs7XAEvIV7ieppALdyepWMkHkwciRpZraG/xwT+s2JN8+pr1+8jCRf80FFzvr+MpQeeoF4Xg=="; }; }; "uniq-1.0.1" = { @@ -7115,13 +7232,13 @@ let sha512 = "Vmp0jIp2ln35UTXuryvjzkjGdRyf9b2lTXuSYUiPmzRcl3FDtYqAwOnTJkAngD9SWhnoJzDbTKwaOrZ+STtxNQ=="; }; }; - "unique-slug-2.0.1" = { + "unique-slug-2.0.2" = { name = "unique-slug"; packageName = "unique-slug"; - version = "2.0.1"; - src = fetchurl { - url = "https://registry.npmjs.org/unique-slug/-/unique-slug-2.0.1.tgz"; - sha512 = "n9cU6+gITaVu7VGj1Z8feKMmfAjEAQGhwD9fE3zvpRRa0wEIx8ODYkVGfSc94M2OX00tUFV8wH3zYbm1I8mxFg=="; + version = "2.0.2"; + src = fetchurl { + url = "https://registry.npmjs.org/unique-slug/-/unique-slug-2.0.2.tgz"; + sha512 = "zoWr9ObaxALD3DOPfjPSqxt4fnZiWblxHIgeWqW8x7UqDzEtHEQLzji2cuJYQFCU6KmoJikOYAZlrTHHebjx2w=="; }; }; "unset-value-1.0.0" = { @@ -7133,13 +7250,13 @@ let sha1 = "8376873f7d2335179ffb1e6fc3a8ed0dfc8ab559"; }; }; - "upath-1.1.2" = { + "upath-1.2.0" = { name = "upath"; packageName = "upath"; - version = "1.1.2"; - src = fetchurl { - url = "https://registry.npmjs.org/upath/-/upath-1.1.2.tgz"; - sha512 = "kXpym8nmDmlCBr7nKdIx8P2jNBa+pBpIUFRnKJ4dr8htyYGJFokkr2ZvERRtUN+9SY+JqXouNgUPtv6JQva/2Q=="; + version = "1.2.0"; + src = fetchurl { + url = "https://registry.npmjs.org/upath/-/upath-1.2.0.tgz"; + sha512 = "aZwGpamFO61g3OlfT7OQCHqhGnW43ieH9WZeP7QxN/G/jS4jfqUkZxoryvJgVPEcrl5NL/ggHsSmLMHuH64Lhg=="; }; }; "upper-case-1.1.3" = { @@ -7160,6 +7277,15 @@ let sha512 = "KY9Frmirql91X2Qgjry0Wd4Y+YTdrdZheS8TFwvkbLWf/G5KNJDCh6pKL5OZctEW4+0Baa5idK2ZQuELRwPznQ=="; }; }; + "uri-path-1.0.0" = { + name = "uri-path"; + packageName = "uri-path"; + version = "1.0.0"; + src = fetchurl { + url = "https://registry.npmjs.org/uri-path/-/uri-path-1.0.0.tgz"; + sha1 = "9747f018358933c31de0fccfd82d138e67262e32"; + }; + }; "urix-0.1.0" = { name = "urix"; packageName = "urix"; @@ -7232,22 +7358,22 @@ let sha1 = "8a16a05d445657a3aea5eecc5b12a4fa5379772c"; }; }; - "uuid-3.3.2" = { + "uuid-3.3.3" = { name = "uuid"; packageName = "uuid"; - version = "3.3.2"; - src = fetchurl { - url = "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz"; - sha512 = 
"yXJmeNaw3DnnKAOKJE51sL/ZaYfWJRl1pK9dr19YFCu0ObS231AB1/LbqTKRAQ5kw8A90rA6fr4riOUpTZvQZA=="; - }; - }; - "v8-compile-cache-2.0.3" = { + version = "3.3.3"; + src = fetchurl { + url = "https://registry.npmjs.org/uuid/-/uuid-3.3.3.tgz"; + sha512 = "pW0No1RGHgzlpHJO1nsVrHKpOEIxkGg1xB+v0ZmdNH5OAeAwzAVrCnI2/6Mtx+Uys6iaylxa+D3g4j63IKKjSQ=="; + }; + }; + "v8-compile-cache-2.1.0" = { name = "v8-compile-cache"; packageName = "v8-compile-cache"; - version = "2.0.3"; - src = fetchurl { - url = "https://registry.npmjs.org/v8-compile-cache/-/v8-compile-cache-2.0.3.tgz"; - sha512 = "CNmdbwQMBjwr9Gsmohvm0pbL954tJrNzf6gWL3K+QMQf00PF7ERGrEiLgjuU3mKreLC2MeGhUsNV9ybTbLgd3w=="; + version = "2.1.0"; + src = fetchurl { + url = "https://registry.npmjs.org/v8-compile-cache/-/v8-compile-cache-2.1.0.tgz"; + sha512 = "usZBT3PW+LOjM25wbqIlZwPeJV+3OSz3M1k1Ws8snlW39dZyYL9lOGC5FgPVHfk0jKmjiDV8Z0mIbVQPiwFs7g=="; }; }; "v8flags-3.1.3" = { @@ -7277,13 +7403,13 @@ let sha1 = "3a105ca17053af55d6e270c1f8288682e18da400"; }; }; - "vm-browserify-0.0.4" = { + "vm-browserify-1.1.0" = { name = "vm-browserify"; packageName = "vm-browserify"; - version = "0.0.4"; - src = fetchurl { - url = "https://registry.npmjs.org/vm-browserify/-/vm-browserify-0.0.4.tgz"; - sha1 = "5d7ea45bbef9e4a6ff65f95438e0a87c357d5a73"; + version = "1.1.0"; + src = fetchurl { + url = "https://registry.npmjs.org/vm-browserify/-/vm-browserify-1.1.0.tgz"; + sha512 = "iq+S7vZJE60yejDYM0ek6zg308+UZsdtPExWP9VZoCFCz1zkJoXFnAX7aZfd/ZwrkidzdUZL0C/ryW+JwAiIGw=="; }; }; "watchpack-1.6.0" = { @@ -7331,13 +7457,13 @@ let sha1 = "fc571588c8558da77be9efb6debdc5a3b172bdc2"; }; }; - "webpack-sources-1.3.0" = { + "webpack-sources-1.4.3" = { name = "webpack-sources"; packageName = "webpack-sources"; - version = "1.3.0"; - src = fetchurl { - url = "https://registry.npmjs.org/webpack-sources/-/webpack-sources-1.3.0.tgz"; - sha512 = "OiVgSrbGu7NEnEvQJJgdSFPl2qWKkWq5lHMhgiToIiN9w34EBnjYzSYs+VbL5KoYiLNtFFa7BZIKxRED3I32pA=="; + version = "1.4.3"; + src = fetchurl { + url = "https://registry.npmjs.org/webpack-sources/-/webpack-sources-1.4.3.tgz"; + sha512 = "lgTS3Xhv1lCOKo7SA5TjKXMjpSM4sBjNV5+q2bqesbSPs5FjGmU6jjtBSkX9b4qW87vDIsCIlUPOEhbZrMdjeQ=="; }; }; "webpack-uglify-js-plugin-1.1.9" = { @@ -7430,13 +7556,13 @@ let sha1 = "b5243d8f3ec1aa35f1364605bc0d1036e30ab69f"; }; }; - "xtend-4.0.1" = { + "xtend-4.0.2" = { name = "xtend"; packageName = "xtend"; - version = "4.0.1"; - src = fetchurl { - url = "https://registry.npmjs.org/xtend/-/xtend-4.0.1.tgz"; - sha1 = "a5c6d532be656e23db820efb943a1f04998d63af"; + version = "4.0.2"; + src = fetchurl { + url = "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz"; + sha512 = "LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ=="; }; }; "y18n-4.0.0" = { @@ -7514,9 +7640,9 @@ let sources."@polymer/paper-toast-3.0.1" sources."@polymer/paper-toggle-button-3.0.1" sources."@polymer/paper-tooltip-3.0.1" - sources."@polymer/polymer-3.2.0" + sources."@polymer/polymer-3.3.0" sources."@types/clone-0.1.30" - sources."@types/node-6.14.6" + sources."@types/node-6.14.9" sources."@types/parse5-2.2.34" sources."@webassemblyjs/ast-1.7.10" sources."@webassemblyjs/floating-point-hex-parser-1.7.10" @@ -7536,8 +7662,8 @@ let sources."@webassemblyjs/wasm-parser-1.7.10" sources."@webassemblyjs/wast-parser-1.7.10" sources."@webassemblyjs/wast-printer-1.7.10" - sources."@webcomponents/shadycss-1.9.1" - sources."@webcomponents/webcomponentsjs-2.2.10" + sources."@webcomponents/shadycss-1.9.2" + sources."@webcomponents/webcomponentsjs-2.3.0" 
sources."@xtuc/ieee754-1.2.0" sources."@xtuc/long-4.2.1" sources."abbrev-1.1.1" @@ -7549,7 +7675,7 @@ let ]; }) sources."ajv-4.11.8" - sources."ajv-keywords-3.4.0" + sources."ajv-keywords-3.4.1" (sources."align-text-0.1.4" // { dependencies = [ sources."kind-of-3.2.2" @@ -7615,20 +7741,20 @@ let (sources."babel-core-6.26.3" // { dependencies = [ sources."json5-0.5.1" - sources."lodash-4.17.11" + sources."lodash-4.17.15" sources."minimatch-3.0.4" ]; }) (sources."babel-generator-6.26.1" // { dependencies = [ - sources."lodash-4.17.11" + sources."lodash-4.17.15" ]; }) sources."babel-helper-builder-binary-assignment-operator-visitor-6.24.1" sources."babel-helper-call-delegate-6.24.1" (sources."babel-helper-define-map-6.26.0" // { dependencies = [ - sources."lodash-4.17.11" + sources."lodash-4.17.15" ]; }) sources."babel-helper-explode-assignable-expression-6.24.1" @@ -7638,7 +7764,7 @@ let sources."babel-helper-optimise-call-expression-6.24.1" (sources."babel-helper-regex-6.26.0" // { dependencies = [ - sources."lodash-4.17.11" + sources."lodash-4.17.15" ]; }) sources."babel-helper-remap-async-to-generator-6.24.1" @@ -7656,7 +7782,7 @@ let sources."babel-plugin-transform-es2015-block-scoped-functions-6.22.0" (sources."babel-plugin-transform-es2015-block-scoping-6.26.0" // { dependencies = [ - sources."lodash-4.17.11" + sources."lodash-4.17.15" ]; }) sources."babel-plugin-transform-es2015-classes-6.24.1" @@ -7685,23 +7811,23 @@ let sources."babel-preset-env-1.7.0" (sources."babel-register-6.26.0" // { dependencies = [ - sources."lodash-4.17.11" + sources."lodash-4.17.15" ]; }) sources."babel-runtime-6.26.0" (sources."babel-template-6.26.0" // { dependencies = [ - sources."lodash-4.17.11" + sources."lodash-4.17.15" ]; }) (sources."babel-traverse-6.26.0" // { dependencies = [ - sources."lodash-4.17.11" + sources."lodash-4.17.15" ]; }) (sources."babel-types-6.26.0" // { dependencies = [ - sources."lodash-4.17.11" + sources."lodash-4.17.15" ]; }) sources."babylon-6.18.0" @@ -7711,11 +7837,11 @@ let sources."define-property-1.0.0" ]; }) - sources."base64-js-1.3.0" + sources."base64-js-1.3.1" sources."bcrypt-pbkdf-1.0.2" sources."big.js-5.2.2" sources."binary-extensions-1.13.1" - sources."bluebird-3.5.4" + sources."bluebird-3.7.1" sources."bn.js-4.11.8" sources."boolbase-1.0.0" sources."boom-2.10.1" @@ -7739,11 +7865,11 @@ let sources."builtin-status-codes-3.0.0" (sources."cacache-10.0.4" // { dependencies = [ - sources."glob-7.1.4" - sources."graceful-fs-4.1.15" + sources."glob-7.1.5" + sources."graceful-fs-4.2.3" sources."lru-cache-4.1.5" sources."minimatch-3.0.4" - sources."rimraf-2.6.3" + sources."rimraf-2.7.1" ]; }) sources."cache-base-1.0.1" @@ -7754,18 +7880,18 @@ let sources."browserslist-1.7.7" ]; }) - sources."caniuse-db-1.0.30000967" - sources."caniuse-lite-1.0.30000967" + sources."caniuse-db-1.0.30001006" + sources."caniuse-lite-1.0.30001006" sources."caseless-0.12.0" sources."center-align-0.1.3" sources."chalk-0.5.1" - (sources."chokidar-2.1.5" // { + (sources."chokidar-2.1.8" // { dependencies = [ sources."is-glob-4.0.1" ]; }) - sources."chownr-1.1.1" - sources."chrome-trace-event-1.0.0" + sources."chownr-1.1.3" + sources."chrome-trace-event-1.0.2" sources."cipher-base-1.0.4" (sources."clap-1.2.3" // { dependencies = [ @@ -7801,7 +7927,7 @@ let }) (sources."cli-1.0.1" // { dependencies = [ - sources."glob-7.1.4" + sources."glob-7.1.5" sources."minimatch-3.0.4" ]; }) @@ -7825,24 +7951,29 @@ let sources."colormin-1.1.2" sources."colors-0.6.2" sources."combined-stream-1.0.8" - 
sources."commander-2.14.1" + sources."commander-2.20.3" sources."commondir-1.0.1" sources."component-emitter-1.3.0" sources."concat-map-0.0.1" (sources."concat-stream-1.6.2" // { dependencies = [ sources."readable-stream-2.3.6" + sources."safe-buffer-5.1.2" sources."string_decoder-1.1.1" ]; }) sources."console-browserify-1.1.0" sources."constants-browserify-1.0.0" - sources."convert-source-map-1.6.0" + (sources."convert-source-map-1.6.0" // { + dependencies = [ + sources."safe-buffer-5.1.2" + ]; + }) (sources."copy-concurrently-1.0.5" // { dependencies = [ - sources."glob-7.1.4" + sources."glob-7.1.5" sources."minimatch-3.0.4" - sources."rimraf-2.6.3" + sources."rimraf-2.7.1" ]; }) sources."copy-descriptor-0.1.1" @@ -7852,7 +7983,7 @@ let sources."minimatch-3.0.4" ]; }) - sources."core-js-2.6.5" + sources."core-js-2.6.10" sources."core-util-is-1.0.2" sources."create-ecdh-4.0.3" sources."create-hash-1.2.0" @@ -7876,7 +8007,7 @@ let sources."cssesc-0.1.0" sources."cssnano-3.10.0" sources."csso-2.3.2" - sources."cyclist-0.2.2" + sources."cyclist-1.0.1" (sources."dashdash-1.14.1" // { dependencies = [ sources."assert-plus-1.0.0" @@ -7899,9 +8030,10 @@ let sources."diffie-hellman-5.0.3" sources."dir-glob-2.2.2" sources."dom-converter-0.2.0" - (sources."dom-serializer-0.1.1" // { - dependencies = [ - sources."entities-1.1.2" + (sources."dom-serializer-0.2.1" // { + dependencies = [ + sources."domelementtype-2.0.1" + sources."entities-2.0.0" ]; }) (sources."dom5-2.3.0" // { @@ -7915,25 +8047,31 @@ let sources."domhandler-2.3.0" sources."domutils-1.5.1" sources."dropzone-5.5.1" + sources."duplexer-0.1.1" (sources."duplexify-3.7.1" // { dependencies = [ sources."readable-stream-2.3.6" + sources."safe-buffer-5.1.2" sources."string_decoder-1.1.1" ]; }) sources."ecc-jsbn-0.1.2" - sources."electron-to-chromium-1.3.133" - sources."elliptic-6.4.1" + sources."electron-to-chromium-1.3.302" + sources."elliptic-6.5.1" sources."emojis-list-2.1.0" - sources."end-of-stream-1.4.1" - (sources."enhanced-resolve-4.1.0" // { - dependencies = [ - sources."graceful-fs-4.1.15" + sources."end-of-stream-1.4.4" + (sources."enhanced-resolve-4.1.1" // { + dependencies = [ + sources."graceful-fs-4.2.3" + sources."memory-fs-0.5.0" + sources."readable-stream-2.3.6" + sources."safe-buffer-5.1.2" + sources."string_decoder-1.1.1" ]; }) sources."entities-1.0.0" sources."errno-0.1.7" - sources."es-abstract-1.13.0" + sources."es-abstract-1.16.0" sources."es-to-primitive-1.2.0" sources."es6-templates-0.2.3" sources."escape-string-regexp-1.0.5" @@ -7941,8 +8079,8 @@ let sources."espree-3.5.4" sources."esprima-1.0.4" sources."esrecurse-4.2.1" - sources."estraverse-4.2.0" - sources."esutils-2.0.2" + sources."estraverse-4.3.0" + sources."esutils-2.0.3" sources."eventemitter2-0.4.14" sources."events-3.0.0" sources."evp_bytestokey-1.0.3" @@ -7986,6 +8124,7 @@ let sources."fastparse-1.1.2" sources."favico.js-0.3.10" sources."faye-websocket-0.4.4" + sources."figures-1.7.0" sources."file-sync-cmp-0.1.1" (sources."fill-range-4.0.0" // { dependencies = [ @@ -8003,10 +8142,11 @@ let }) sources."fined-1.2.0" sources."flagged-respawn-1.0.1" - sources."flatten-1.0.2" + sources."flatten-1.0.3" (sources."flush-write-stream-1.1.1" // { dependencies = [ sources."readable-stream-2.3.6" + sources."safe-buffer-5.1.2" sources."string_decoder-1.1.1" ]; }) @@ -8018,12 +8158,13 @@ let (sources."from2-2.3.0" // { dependencies = [ sources."readable-stream-2.3.6" + sources."safe-buffer-5.1.2" sources."string_decoder-1.1.1" ]; }) 
(sources."fs-write-stream-atomic-1.0.10" // { dependencies = [ - sources."graceful-fs-4.1.15" + sources."graceful-fs-4.2.3" ]; }) sources."fs.realpath-1.0.0" @@ -8059,7 +8200,7 @@ let sources."globals-9.18.0" (sources."globby-7.1.1" // { dependencies = [ - sources."glob-7.1.4" + sources."glob-7.1.5" sources."minimatch-3.0.4" ]; }) @@ -8094,7 +8235,7 @@ let (sources."grunt-contrib-jshint-0.12.0" // { dependencies = [ sources."jshint-2.9.7" - sources."lodash-4.17.11" + sources."lodash-4.17.15" sources."minimatch-3.0.4" ]; }) @@ -8102,14 +8243,21 @@ let dependencies = [ sources."ansi-regex-2.1.1" sources."ansi-styles-2.2.1" - sources."async-2.6.2" + sources."async-2.6.3" sources."chalk-1.1.3" sources."has-ansi-2.0.0" - sources."lodash-4.17.11" + sources."lodash-4.17.15" sources."strip-ansi-3.0.1" sources."supports-color-2.0.0" ]; }) + (sources."grunt-contrib-uglify-4.0.1" // { + dependencies = [ + sources."ansi-styles-3.2.1" + sources."chalk-2.4.2" + sources."supports-color-5.5.0" + ]; + }) (sources."grunt-contrib-watch-0.6.1" // { dependencies = [ sources."async-0.2.10" @@ -8132,9 +8280,10 @@ let sources."grunt-legacy-util-0.2.0" (sources."grunt-webpack-3.1.3" // { dependencies = [ - sources."lodash-4.17.11" - ]; - }) + sources."lodash-4.17.15" + ]; + }) + sources."gzip-size-3.0.0" sources."har-schema-1.0.5" sources."har-validator-4.2.1" sources."has-1.0.3" @@ -8161,6 +8310,12 @@ let (sources."html-minifier-3.5.21" // { dependencies = [ sources."commander-2.17.1" + sources."source-map-0.6.1" + (sources."uglify-js-3.4.10" // { + dependencies = [ + sources."commander-2.19.0" + ]; + }) ]; }) (sources."html-webpack-plugin-3.2.0" // { @@ -8168,7 +8323,7 @@ let sources."big.js-3.2.0" sources."json5-0.5.1" sources."loader-utils-0.2.17" - sources."lodash-4.17.11" + sources."lodash-4.17.15" ]; }) sources."htmlparser2-3.8.3" @@ -8193,7 +8348,7 @@ let dependencies = [ sources."find-up-3.0.0" sources."locate-path-3.0.0" - sources."p-limit-2.2.0" + sources."p-limit-2.2.1" sources."p-locate-3.0.0" sources."p-try-2.2.0" sources."pkg-dir-3.0.0" @@ -8202,9 +8357,8 @@ let sources."imports-loader-0.7.1" sources."imurmurhash-0.1.4" sources."indexes-of-1.0.1" - sources."indexof-0.0.1" sources."inflight-1.0.6" - sources."inherits-2.0.3" + sources."inherits-2.0.4" sources."ini-1.3.5" sources."interpret-1.1.0" sources."invariant-2.2.4" @@ -8250,7 +8404,7 @@ let sources."jsesc-1.3.0" (sources."jshint-2.10.2" // { dependencies = [ - sources."lodash-4.17.11" + sources."lodash-4.17.15" sources."minimatch-3.0.4" ]; }) @@ -8271,7 +8425,7 @@ let sources."lcid-2.0.0" (sources."less-2.7.3" // { dependencies = [ - sources."graceful-fs-4.1.15" + sources."graceful-fs-4.2.3" ]; }) (sources."liftoff-2.5.0" // { @@ -8298,11 +8452,22 @@ let sources."map-visit-1.0.0" sources."mark.js-8.11.1" sources."math-expression-evaluator-1.2.17" + (sources."maxmin-2.1.0" // { + dependencies = [ + sources."ansi-regex-2.1.1" + sources."ansi-styles-2.2.1" + sources."chalk-1.1.3" + sources."has-ansi-2.0.0" + sources."strip-ansi-3.0.1" + sources."supports-color-2.0.0" + ]; + }) sources."md5.js-1.3.5" sources."mem-4.3.0" (sources."memory-fs-0.4.1" // { dependencies = [ sources."readable-stream-2.3.6" + sources."safe-buffer-5.1.2" sources."string_decoder-1.1.1" ]; }) @@ -8317,7 +8482,7 @@ let sources."minimatch-0.2.14" sources."minimist-1.2.0" sources."mississippi-2.0.0" - (sources."mixin-deep-1.3.1" // { + (sources."mixin-deep-1.3.2" // { dependencies = [ sources."is-extendable-1.0.1" ]; @@ -8331,25 +8496,30 @@ let sources."mousetrap-1.6.3" 
(sources."move-concurrently-1.0.1" // { dependencies = [ - sources."glob-7.1.4" + sources."glob-7.1.5" sources."minimatch-3.0.4" - sources."rimraf-2.6.3" + sources."rimraf-2.7.1" ]; }) sources."ms-2.0.0" - sources."nan-2.13.2" + sources."nan-2.14.0" sources."nanomatch-1.2.13" sources."neo-async-2.6.1" sources."nice-try-1.0.5" sources."no-case-2.3.2" - (sources."node-libs-browser-2.2.0" // { + (sources."node-libs-browser-2.2.1" // { dependencies = [ (sources."readable-stream-2.3.6" // { dependencies = [ sources."string_decoder-1.1.1" ]; }) - sources."string_decoder-1.2.0" + sources."safe-buffer-5.1.2" + (sources."string_decoder-1.3.0" // { + dependencies = [ + sources."safe-buffer-5.2.0" + ]; + }) ]; }) sources."nopt-1.0.10" @@ -8380,6 +8550,7 @@ let sources."kind-of-3.2.2" ]; }) + sources."object-inspect-1.6.0" sources."object-keys-1.1.1" sources."object-visit-1.0.1" sources."object.defaults-1.1.0" @@ -8399,14 +8570,15 @@ let sources."p-locate-2.0.0" sources."p-try-1.0.0" sources."pako-1.0.10" - (sources."parallel-transform-1.1.0" // { + (sources."parallel-transform-1.2.0" // { dependencies = [ sources."readable-stream-2.3.6" + sources."safe-buffer-5.1.2" sources."string_decoder-1.1.1" ]; }) sources."param-case-2.1.1" - sources."parse-asn1-5.1.4" + sources."parse-asn1-5.1.5" sources."parse-filepath-1.0.2" sources."parse-passwd-1.0.0" sources."parse5-3.0.3" @@ -8416,7 +8588,7 @@ let ]; }) sources."pascalcase-0.1.1" - sources."path-browserify-0.0.0" + sources."path-browserify-0.0.1" sources."path-dirname-1.0.2" sources."path-exists-3.0.0" sources."path-is-absolute-1.0.1" @@ -8527,10 +8699,11 @@ let sources."postcss-value-parser-3.3.1" sources."postcss-zindex-2.2.0" sources."prepend-http-1.0.4" + sources."pretty-bytes-3.0.1" sources."pretty-error-2.1.1" sources."private-0.1.8" sources."process-0.11.10" - sources."process-nextick-args-2.0.0" + sources."process-nextick-args-2.0.1" sources."promise-7.3.1" sources."promise-inflight-1.0.1" sources."prr-1.0.1" @@ -8555,8 +8728,9 @@ let }) (sources."readdirp-2.2.1" // { dependencies = [ - sources."graceful-fs-4.1.15" + sources."graceful-fs-4.2.3" sources."readable-stream-2.3.6" + sources."safe-buffer-5.1.2" sources."string_decoder-1.1.1" ]; }) @@ -8571,11 +8745,7 @@ let sources."balanced-match-0.4.2" ]; }) - (sources."reduce-function-call-1.0.2" // { - dependencies = [ - sources."balanced-match-0.4.2" - ]; - }) + sources."reduce-function-call-1.0.3" sources."regenerate-1.4.0" sources."regenerator-runtime-0.11.1" sources."regenerator-transform-0.10.1" @@ -8601,7 +8771,7 @@ let sources."request-2.81.0" sources."require-directory-2.1.1" sources."require-main-filename-1.0.1" - sources."resolve-1.10.1" + sources."resolve-1.12.0" sources."resolve-cwd-2.0.0" sources."resolve-dir-1.0.1" sources."resolve-from-3.0.0" @@ -8611,20 +8781,20 @@ let sources."rimraf-2.2.8" sources."ripemd160-2.0.2" sources."run-queue-1.0.3" - sources."safe-buffer-5.1.2" + sources."safe-buffer-5.2.0" sources."safe-regex-1.1.0" sources."safer-buffer-2.1.2" sources."sax-1.2.4" (sources."schema-utils-0.4.7" // { dependencies = [ - sources."ajv-6.10.0" + sources."ajv-6.10.2" ]; }) sources."select-1.1.2" - sources."semver-5.7.0" - sources."serialize-javascript-1.7.0" + sources."semver-5.7.1" + sources."serialize-javascript-1.9.1" sources."set-blocking-2.0.0" - (sources."set-value-2.0.0" // { + (sources."set-value-2.0.1" // { dependencies = [ sources."extend-shallow-2.0.1" ]; @@ -8701,6 +8871,7 @@ let (sources."stream-browserify-2.0.2" // { dependencies = [ sources."readable-stream-2.3.6" 
+ sources."safe-buffer-5.1.2" sources."string_decoder-1.1.1" ]; }) @@ -8708,6 +8879,7 @@ let (sources."stream-http-2.8.3" // { dependencies = [ sources."readable-stream-2.3.6" + sources."safe-buffer-5.1.2" sources."string_decoder-1.1.1" ]; }) @@ -8720,6 +8892,8 @@ let sources."strip-ansi-4.0.0" ]; }) + sources."string.prototype.trimleft-2.1.0" + sources."string.prototype.trimright-2.1.0" sources."string_decoder-0.10.31" sources."stringstream-0.0.6" sources."strip-ansi-0.3.0" @@ -8740,10 +8914,11 @@ let (sources."through2-2.0.5" // { dependencies = [ sources."readable-stream-2.3.6" + sources."safe-buffer-5.1.2" sources."string_decoder-1.1.1" ]; }) - sources."timers-browserify-2.0.10" + sources."timers-browserify-2.0.11" sources."tiny-emitter-2.1.0" (sources."tiny-lr-fork-0.0.5" // { dependencies = [ @@ -8766,27 +8941,27 @@ let (sources."ts-loader-1.3.3" // { dependencies = [ sources."big.js-3.2.0" - sources."colors-1.3.3" + sources."colors-1.4.0" sources."enhanced-resolve-3.4.1" - sources."graceful-fs-4.1.15" + sources."graceful-fs-4.2.3" sources."json5-0.5.1" sources."loader-utils-0.2.17" sources."tapable-0.2.9" ]; }) - sources."tslib-1.9.3" + sources."tslib-1.10.0" sources."tty-browserify-0.0.0" sources."tunnel-agent-0.6.0" sources."tweetnacl-0.14.5" sources."typedarray-0.0.6" (sources."uglify-es-3.3.10" // { dependencies = [ + sources."commander-2.14.1" sources."source-map-0.6.1" ]; }) - (sources."uglify-js-3.4.10" // { - dependencies = [ - sources."commander-2.19.0" + (sources."uglify-js-3.6.7" // { + dependencies = [ sources."source-map-0.6.1" ]; }) @@ -8799,16 +8974,11 @@ let sources."unc-path-regex-0.1.2" sources."underscore-1.7.0" sources."underscore.string-2.2.1" - (sources."union-value-1.0.0" // { - dependencies = [ - sources."extend-shallow-2.0.1" - sources."set-value-0.4.3" - ]; - }) + sources."union-value-1.0.1" sources."uniq-1.0.1" sources."uniqs-2.0.0" sources."unique-filename-1.1.1" - sources."unique-slug-2.0.1" + sources."unique-slug-2.0.2" (sources."unset-value-1.0.0" // { dependencies = [ (sources."has-value-0.3.1" // { @@ -8819,13 +8989,14 @@ let sources."has-values-0.1.4" ]; }) - sources."upath-1.1.2" + sources."upath-1.2.0" sources."upper-case-1.1.3" (sources."uri-js-4.2.2" // { dependencies = [ sources."punycode-2.1.1" ]; }) + sources."uri-path-1.0.0" sources."urix-0.1.0" (sources."url-0.11.0" // { dependencies = [ @@ -8833,12 +9004,16 @@ let ]; }) sources."use-3.1.1" - sources."util-0.11.1" + (sources."util-0.11.1" // { + dependencies = [ + sources."inherits-2.0.3" + ]; + }) sources."util-deprecate-1.0.2" sources."util.promisify-1.0.0" sources."utila-0.4.0" - sources."uuid-3.3.2" - sources."v8-compile-cache-2.0.3" + sources."uuid-3.3.3" + sources."v8-compile-cache-2.1.0" sources."v8flags-3.1.3" sources."vendors-1.0.3" (sources."verror-1.10.0" // { @@ -8846,16 +9021,16 @@ let sources."assert-plus-1.0.0" ]; }) - sources."vm-browserify-0.0.4" + sources."vm-browserify-1.1.0" (sources."watchpack-1.6.0" // { dependencies = [ - sources."graceful-fs-4.1.15" + sources."graceful-fs-4.2.3" ]; }) sources."waypoints-4.0.1" (sources."webpack-4.23.1" // { dependencies = [ - sources."ajv-6.10.0" + sources."ajv-6.10.2" ]; }) (sources."webpack-cli-3.1.2" // { @@ -8871,7 +9046,7 @@ let sources."source-map-0.4.4" ]; }) - (sources."webpack-sources-1.3.0" // { + (sources."webpack-sources-1.4.3" // { dependencies = [ sources."source-map-0.6.1" ]; @@ -8904,14 +9079,14 @@ let ]; }) sources."wrappy-1.0.2" - sources."xtend-4.0.1" + sources."xtend-4.0.2" sources."y18n-4.0.0" 
sources."yallist-2.1.2" (sources."yargs-12.0.5" // { dependencies = [ sources."find-up-3.0.0" sources."locate-path-3.0.0" - sources."p-limit-2.2.0" + sources."p-limit-2.2.1" sources."p-locate-3.0.0" sources."p-try-2.2.0" ]; diff --git a/pkgs/python-packages.nix b/pkgs/python-packages.nix --- a/pkgs/python-packages.nix +++ b/pkgs/python-packages.nix @@ -5,7 +5,7 @@ self: super: { "alembic" = super.buildPythonPackage { - name = "alembic-1.0.10"; + name = "alembic-1.3.1"; doCheck = false; propagatedBuildInputs = [ self."sqlalchemy" @@ -14,22 +14,22 @@ self: super: { self."python-dateutil" ]; src = fetchurl { - url = "https://files.pythonhosted.org/packages/6e/8b/fa3bd058cccd5e9177fea4efa26bfb769228fdd3178436ad5e05830ef6ef/alembic-1.0.10.tar.gz"; - sha256 = "1dwl0264r6ri2jyrjr68am04x538ab26xwy4crqjnnhm4alwm3c2"; + url = "https://files.pythonhosted.org/packages/84/64/493c45119dce700a4b9eeecc436ef9e8835ab67bae6414f040cdc7b58f4b/alembic-1.3.1.tar.gz"; + sha256 = "1cl2chk5jx0rf4hmsd5lljic7iifw17yv3y5xawvp4i14jvpn9s9"; }; meta = { license = [ pkgs.lib.licenses.mit ]; }; }; "amqp" = super.buildPythonPackage { - name = "amqp-2.3.1"; + name = "amqp-2.5.2"; doCheck = false; propagatedBuildInputs = [ self."vine" ]; src = fetchurl { - url = "https://files.pythonhosted.org/packages/1b/32/242ff76cd802766f11c89c72f3389b5c8de4bdfbab406137b90c5fae8b05/amqp-2.3.1.tar.gz"; - sha256 = "0wlfnvhmfrn7c8qif2jyvsm63ibdxp02ss564qwrvqfhz0di72s0"; + url = "https://files.pythonhosted.org/packages/92/1d/433541994a5a69f4ad2fff39746ddbb0bdedb0ea0d85673eb0db68a7edd9/amqp-2.5.2.tar.gz"; + sha256 = "13dhhfxjrqcjybnq4zahg92mydhpg2l76nxcmq7d560687wsxwbp"; }; meta = { license = [ pkgs.lib.licenses.bsdOriginal ]; @@ -63,33 +63,22 @@ self: super: { }; }; "atomicwrites" = super.buildPythonPackage { - name = "atomicwrites-1.2.1"; + name = "atomicwrites-1.3.0"; doCheck = false; src = fetchurl { - url = "https://files.pythonhosted.org/packages/ac/ed/a311712ef6b4355035489f665e63e1a73f9eb371929e3c98e5efd451069e/atomicwrites-1.2.1.tar.gz"; - sha256 = "1vmkbw9j0qammwxbxycrs39gvdg4lc2d4lk98kwf8ag2manyi6pc"; + url = "https://files.pythonhosted.org/packages/ec/0f/cd484ac8820fed363b374af30049adc8fd13065720fd4f4c6be8a2309da7/atomicwrites-1.3.0.tar.gz"; + sha256 = "19ngcscdf3jsqmpcxn6zl5b6anmsajb6izp1smcd1n02midl9abm"; }; meta = { license = [ pkgs.lib.licenses.mit ]; }; }; "attrs" = super.buildPythonPackage { - name = "attrs-18.2.0"; + name = "attrs-19.3.0"; doCheck = false; src = fetchurl { - url = "https://files.pythonhosted.org/packages/0f/9e/26b1d194aab960063b266170e53c39f73ea0d0d3f5ce23313e0ec8ee9bdf/attrs-18.2.0.tar.gz"; - sha256 = "0s9ydh058wmmf5v391pym877x4ahxg45dw6a0w4c7s5wgpigdjqh"; - }; - meta = { - license = [ pkgs.lib.licenses.mit ]; - }; - }; - "authomatic" = super.buildPythonPackage { - name = "authomatic-0.1.0.post1"; - doCheck = false; - src = fetchurl { - url = "https://code.rhodecode.com/upstream/authomatic/artifacts/download/0-4fe9c041-a567-4f84-be4c-7efa2a606d3c.tar.gz?md5=f6bdc3c769688212db68233e8d2b0383"; - sha256 = "0pc716mva0ym6xd8jwzjbjp8dqxy9069wwwv2aqwb8lyhl4757ab"; + url = "https://files.pythonhosted.org/packages/98/c3/2c227e66b5e896e15ccdae2e00bbc69aa46e9a8ce8869cc5fa96310bf612/attrs-19.3.0.tar.gz"; + sha256 = "0wky4h28n7xnr6xv69p9z6kv8bzn50d10c3drmd9ds8gawbcxdzp"; }; meta = { license = [ pkgs.lib.licenses.mit ]; @@ -146,11 +135,11 @@ self: super: { }; }; "billiard" = super.buildPythonPackage { - name = "billiard-3.5.0.3"; + name = "billiard-3.6.1.0"; doCheck = false; src = fetchurl { - url = 
"https://files.pythonhosted.org/packages/39/ac/f5571210cca2e4f4532e38aaff242f26c8654c5e2436bee966c230647ccc/billiard-3.5.0.3.tar.gz"; - sha256 = "1riwiiwgb141151md4ykx49qrz749akj5k8g290ji9bsqjyj4yqx"; + url = "https://files.pythonhosted.org/packages/68/1d/2aea8fbb0b1e1260a8a2e77352de2983d36d7ac01207cf14c2b9c6cc860e/billiard-3.6.1.0.tar.gz"; + sha256 = "09hzy3aqi7visy4vmf4xiish61n0rq5nd3iwjydydps8yrs9r05q"; }; meta = { license = [ pkgs.lib.licenses.bsdOriginal ]; @@ -183,30 +172,42 @@ self: super: { }; }; "celery" = super.buildPythonPackage { - name = "celery-4.1.1"; + name = "celery-4.3.0"; doCheck = false; propagatedBuildInputs = [ self."pytz" self."billiard" self."kombu" + self."vine" ]; src = fetchurl { - url = "https://files.pythonhosted.org/packages/e9/cf/a4c0597effca20c57eb586324e41d1180bc8f13a933da41e0646cff69f02/celery-4.1.1.tar.gz"; - sha256 = "1xbir4vw42n2ir9lanhwl7w69zpmj7lbi66fxm2b7pyvkcss7wni"; + url = "https://files.pythonhosted.org/packages/a2/4b/d020836f751617e907e84753a41c92231cd4b673ff991b8ee9da52361323/celery-4.3.0.tar.gz"; + sha256 = "1y8y0gbgkwimpxqnxq2rm5qz2vy01fvjiybnpm00y5rzd2m34iac"; }; meta = { license = [ pkgs.lib.licenses.bsdOriginal ]; }; }; + "certifi" = super.buildPythonPackage { + name = "certifi-2019.11.28"; + doCheck = false; + src = fetchurl { + url = "https://files.pythonhosted.org/packages/41/bf/9d214a5af07debc6acf7f3f257265618f1db242a3f8e49a9b516f24523a6/certifi-2019.11.28.tar.gz"; + sha256 = "07qg6864bk4qxa8akr967amlmsq9v310hp039mcpjx6dliylrdi5"; + }; + meta = { + license = [ pkgs.lib.licenses.mpl20 { fullName = "Mozilla Public License 2.0 (MPL 2.0)"; } ]; + }; + }; "cffi" = super.buildPythonPackage { - name = "cffi-1.12.2"; + name = "cffi-1.12.3"; doCheck = false; propagatedBuildInputs = [ self."pycparser" ]; src = fetchurl { - url = "https://files.pythonhosted.org/packages/64/7c/27367b38e6cc3e1f49f193deb761fe75cda9f95da37b67b422e62281fcac/cffi-1.12.2.tar.gz"; - sha256 = "19qfks2djya8vix95bmg3xzipjb8w9b8mbj4j5k2hqkc8j58f4z1"; + url = "https://files.pythonhosted.org/packages/93/1a/ab8c62b5838722f29f3daffcc8d4bd61844aa9b5f437341cc890ceee483b/cffi-1.12.3.tar.gz"; + sha256 = "0x075521fxwv0mfp4cqzk7lvmw4n94bjw601qkcv314z5s182704"; }; meta = { license = [ pkgs.lib.licenses.mit ]; @@ -243,6 +244,17 @@ self: super: { license = [ pkgs.lib.licenses.bsdOriginal ]; }; }; + "chardet" = super.buildPythonPackage { + name = "chardet-3.0.4"; + doCheck = false; + src = fetchurl { + url = "https://files.pythonhosted.org/packages/fc/bb/a5768c230f9ddb03acc9ef3f0d4a3cf93462473795d18e9535498c8f929d/chardet-3.0.4.tar.gz"; + sha256 = "1bpalpia6r5x1kknbk11p1fzph56fmmnp405ds8icksd3knr5aw4"; + }; + meta = { + license = [ { fullName = "LGPL"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ]; + }; + }; "click" = super.buildPythonPackage { name = "click-7.0"; doCheck = false; @@ -285,16 +297,27 @@ self: super: { }; }; "configparser" = super.buildPythonPackage { - name = "configparser-3.7.4"; + name = "configparser-4.0.2"; doCheck = false; src = fetchurl { - url = "https://files.pythonhosted.org/packages/e2/1c/83fd53748d8245cb9a3399f705c251d3fc0ce7df04450aac1cfc49dd6a0f/configparser-3.7.4.tar.gz"; - sha256 = "0xac32886ihs2xg7w1gppcq2sgin5qsm8lqwijs5xifq9w0x0q6s"; + url = "https://files.pythonhosted.org/packages/16/4f/48975536bd488d3a272549eb795ac4a13a5f7fcdc8995def77fbef3532ee/configparser-4.0.2.tar.gz"; + sha256 = "1priacxym85yjcf68hh38w55nqswaxp71ryjyfdk222kg9l85ln7"; }; meta = { license = [ pkgs.lib.licenses.mit ]; }; }; + "contextlib2" = 
super.buildPythonPackage { + name = "contextlib2-0.6.0.post1"; + doCheck = false; + src = fetchurl { + url = "https://files.pythonhosted.org/packages/02/54/669207eb72e3d8ae8b38aa1f0703ee87a0e9f88f30d3c0a47bebdb6de242/contextlib2-0.6.0.post1.tar.gz"; + sha256 = "0bhnr2ac7wy5l85ji909gyljyk85n92w8pdvslmrvc8qih4r1x01"; + }; + meta = { + license = [ pkgs.lib.licenses.psfl ]; + }; + }; "cov-core" = super.buildPythonPackage { name = "cov-core-1.15.0"; doCheck = false; @@ -310,11 +333,11 @@ self: super: { }; }; "coverage" = super.buildPythonPackage { - name = "coverage-4.5.3"; + name = "coverage-4.5.4"; doCheck = false; src = fetchurl { - url = "https://files.pythonhosted.org/packages/82/70/2280b5b29a0352519bb95ab0ef1ea942d40466ca71c53a2085bdeff7b0eb/coverage-4.5.3.tar.gz"; - sha256 = "02f6m073qdispn96rc616hg0rnmw1pgqzw3bgxwiwza4zf9hirlx"; + url = "https://files.pythonhosted.org/packages/85/d5/818d0e603685c4a613d56f065a721013e942088047ff1027a632948bdae6/coverage-4.5.4.tar.gz"; + sha256 = "0p0j4di6h8k6ica7jwwj09azdcg4ycxq60i9qsskmsg94cd9yzg0"; }; meta = { license = [ pkgs.lib.licenses.asl20 ]; @@ -361,7 +384,7 @@ self: super: { }; }; "deform" = super.buildPythonPackage { - name = "deform-2.0.7"; + name = "deform-2.0.8"; doCheck = false; propagatedBuildInputs = [ self."chameleon" @@ -372,8 +395,8 @@ self: super: { self."zope.deprecation" ]; src = fetchurl { - url = "https://files.pythonhosted.org/packages/cf/a1/bc234527b8f181de9acd80e796483c00007658d1e32b7de78f1c2e004d9a/deform-2.0.7.tar.gz"; - sha256 = "0jnpi0zr2hjvbmiz6nm33yqv976dn9lf51vhlzqc0i75xcr9rwig"; + url = "https://files.pythonhosted.org/packages/21/d0/45fdf891a82722c02fc2da319cf2d1ae6b5abf9e470ad3762135a895a868/deform-2.0.8.tar.gz"; + sha256 = "0wbjv98sib96649aqaygzxnrkclyy50qij2rha6fn1i4c86bfdl9"; }; meta = { license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ]; @@ -417,14 +440,14 @@ self: super: { }; }; "dogpile.cache" = super.buildPythonPackage { - name = "dogpile.cache-0.7.1"; + name = "dogpile.cache-0.9.0"; doCheck = false; propagatedBuildInputs = [ self."decorator" ]; src = fetchurl { - url = "https://files.pythonhosted.org/packages/84/3e/dbf1cfc5228f1d3dca80ef714db2c5aaec5cd9efaf54d7e3daef6bc48b19/dogpile.cache-0.7.1.tar.gz"; - sha256 = "0caazmrzhnfqb5yrp8myhw61ny637jj69wcngrpbvi31jlcpy6v9"; + url = "https://files.pythonhosted.org/packages/ac/6a/9ac405686a94b7f009a20a50070a5786b0e1aedc707b88d40d0c4b51a82e/dogpile.cache-0.9.0.tar.gz"; + sha256 = "0sr1fn6b4k5bh0cscd9yi8csqxvj4ngzildav58x5p694mc86j5k"; }; meta = { license = [ pkgs.lib.licenses.bsdOriginal ]; @@ -514,14 +537,14 @@ self: super: { }; }; "elasticsearch2" = super.buildPythonPackage { - name = "elasticsearch2-2.5.0"; + name = "elasticsearch2-2.5.1"; doCheck = false; propagatedBuildInputs = [ self."urllib3" ]; src = fetchurl { - url = "https://files.pythonhosted.org/packages/84/77/63cf63d4ba11d913b5278406f2a37b0712bec6fc85edfb6151a33eaeba25/elasticsearch2-2.5.0.tar.gz"; - sha256 = "0ky0q16lbvz022yv6q3pix7aamf026p1y994537ccjf0p0dxnbxr"; + url = "https://files.pythonhosted.org/packages/f6/09/f9b24aa6b1120bea371cd57ef6f57c7694cf16660469456a8be6c2bdbe22/elasticsearch2-2.5.1.tar.gz"; + sha256 = "19k2znpjfyp0hrq73cz7pjyj289040xpsxsm0xhh4jfh6y551g7k"; }; meta = { license = [ pkgs.lib.licenses.asl20 ]; @@ -666,16 +689,44 @@ self: super: { }; }; "hupper" = super.buildPythonPackage { - name = "hupper-1.6.1"; + name = "hupper-1.9.1"; doCheck = false; src = fetchurl { - url = 
"https://files.pythonhosted.org/packages/85/d9/e005d357b11249c5d70ddf5b7adab2e4c0da4e8b0531ff146917a04fe6c0/hupper-1.6.1.tar.gz"; - sha256 = "0d3cvkc8ssgwk54wvhbifj56ry97qi10pfzwfk8vwzzcikbfp3zy"; + url = "https://files.pythonhosted.org/packages/09/3a/4f215659f31eeffe364a984dba486bfa3907bfcc54b7013bdfe825cebb5f/hupper-1.9.1.tar.gz"; + sha256 = "0pyg879fv9mbwlnbzw2a3234qqycqs9l97h5mpkmk0bvxhi2471v"; }; meta = { license = [ pkgs.lib.licenses.mit ]; }; }; + "idna" = super.buildPythonPackage { + name = "idna-2.8"; + doCheck = false; + src = fetchurl { + url = "https://files.pythonhosted.org/packages/ad/13/eb56951b6f7950cadb579ca166e448ba77f9d24efc03edd7e55fa57d04b7/idna-2.8.tar.gz"; + sha256 = "01rlkigdxg17sf9yar1jl8n18ls59367wqh59hnawlyg53vb6my3"; + }; + meta = { + license = [ pkgs.lib.licenses.bsdOriginal { fullName = "BSD-like"; } ]; + }; + }; + "importlib-metadata" = super.buildPythonPackage { + name = "importlib-metadata-0.23"; + doCheck = false; + propagatedBuildInputs = [ + self."zipp" + self."contextlib2" + self."configparser" + self."pathlib2" + ]; + src = fetchurl { + url = "https://files.pythonhosted.org/packages/5d/44/636bcd15697791943e2dedda0dbe098d8530a38d113b202817133e0b06c0/importlib_metadata-0.23.tar.gz"; + sha256 = "09mdqdfv5rdrwz80jh9m379gxmvk2vhjfz0fg53hid00icvxf65a"; + }; + meta = { + license = [ pkgs.lib.licenses.asl20 ]; + }; + }; "infrae.cache" = super.buildPythonPackage { name = "infrae.cache-1.0.1"; doCheck = false; @@ -703,11 +754,11 @@ self: super: { }; }; "ipaddress" = super.buildPythonPackage { - name = "ipaddress-1.0.22"; + name = "ipaddress-1.0.23"; doCheck = false; src = fetchurl { - url = "https://files.pythonhosted.org/packages/97/8d/77b8cedcfbf93676148518036c6b1ce7f8e14bf07e95d7fd4ddcb8cc052f/ipaddress-1.0.22.tar.gz"; - sha256 = "0b570bm6xqpjwqis15pvdy6lyvvzfndjvkynilcddjj5x98wfimi"; + url = "https://files.pythonhosted.org/packages/b9/9a/3e9da40ea28b8210dd6504d3fe9fe7e013b62bf45902b458d1cdc3c34ed9/ipaddress-1.0.23.tar.gz"; + sha256 = "1qp743h30s04m3cg3yk3fycad930jv17q7dsslj4mfw0jlvf1y5p"; }; meta = { license = [ pkgs.lib.licenses.psfl ]; @@ -859,14 +910,15 @@ self: super: { }; }; "kombu" = super.buildPythonPackage { - name = "kombu-4.2.1"; + name = "kombu-4.6.6"; doCheck = false; propagatedBuildInputs = [ self."amqp" + self."importlib-metadata" ]; src = fetchurl { - url = "https://files.pythonhosted.org/packages/39/9f/556b988833abede4a80dbd18b2bdf4e8ff4486dd482ed45da961347e8ed2/kombu-4.2.1.tar.gz"; - sha256 = "10lh3hncvw67fz0k5vgbx3yh9gjfpqdlia1f13i28cgnc1nfrbc6"; + url = "https://files.pythonhosted.org/packages/20/e6/bc2d9affba6138a1dc143f77fef253e9e08e238fa7c0688d917c09005e96/kombu-4.6.6.tar.gz"; + sha256 = "11mxpcy8mg1l35bgbhba70v29bydr2hrhdbdlb4lg98m3m5vaq0p"; }; meta = { license = [ pkgs.lib.licenses.bsdOriginal ]; @@ -884,14 +936,14 @@ self: super: { }; }; "mako" = super.buildPythonPackage { - name = "mako-1.0.7"; + name = "mako-1.1.0"; doCheck = false; propagatedBuildInputs = [ self."markupsafe" ]; src = fetchurl { - url = "https://files.pythonhosted.org/packages/eb/f3/67579bb486517c0d49547f9697e36582cd19dafb5df9e687ed8e22de57fa/Mako-1.0.7.tar.gz"; - sha256 = "1bi5gnr8r8dva06qpyx4kgjc6spm2k1y908183nbbaylggjzs0jf"; + url = "https://files.pythonhosted.org/packages/b0/3c/8dcd6883d009f7cae0f3157fb53e9afb05a0d3d33b3db1268ec2e6f4a56b/Mako-1.1.0.tar.gz"; + sha256 = "0jqa3qfpykyn4fmkn0kh6043sfls7br8i2bsdbccazcvk9cijsd3"; }; meta = { license = [ pkgs.lib.licenses.mit ]; @@ -909,25 +961,14 @@ self: super: { }; }; "markupsafe" = super.buildPythonPackage { - name 
= "markupsafe-1.1.0"; + name = "markupsafe-1.1.1"; doCheck = false; src = fetchurl { - url = "https://files.pythonhosted.org/packages/ac/7e/1b4c2e05809a4414ebce0892fe1e32c14ace86ca7d50c70f00979ca9b3a3/MarkupSafe-1.1.0.tar.gz"; - sha256 = "1lxirjypbdd3l9jl4vliilhfnhy7c7f2vlldqg1b0i74khn375sf"; + url = "https://files.pythonhosted.org/packages/b9/2e/64db92e53b86efccfaea71321f597fa2e1b2bd3853d8ce658568f7a13094/MarkupSafe-1.1.1.tar.gz"; + sha256 = "0sqipg4fk7xbixqd8kq6rlkxj664d157bdwbh93farcphf92x1r9"; }; meta = { - license = [ pkgs.lib.licenses.bsdOriginal ]; - }; - }; - "meld3" = super.buildPythonPackage { - name = "meld3-1.0.2"; - doCheck = false; - src = fetchurl { - url = "https://files.pythonhosted.org/packages/45/a0/317c6422b26c12fe0161e936fc35f36552069ba8e6f7ecbd99bbffe32a5f/meld3-1.0.2.tar.gz"; - sha256 = "0n4mkwlpsqnmn0dm0wm5hn9nkda0nafl0jdy5sdl5977znh59dzp"; - }; - meta = { - license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ]; + license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.bsd3 ]; }; }; "mistune" = super.buildPythonPackage { @@ -942,14 +983,18 @@ self: super: { }; }; "mock" = super.buildPythonPackage { - name = "mock-1.0.1"; + name = "mock-3.0.5"; doCheck = false; + propagatedBuildInputs = [ + self."six" + self."funcsigs" + ]; src = fetchurl { - url = "https://files.pythonhosted.org/packages/a2/52/7edcd94f0afb721a2d559a5b9aae8af4f8f2c79bc63fdbe8a8a6c9b23bbe/mock-1.0.1.tar.gz"; - sha256 = "0kzlsbki6q0awf89rc287f3aj8x431lrajf160a70z0ikhnxsfdq"; + url = "https://files.pythonhosted.org/packages/2e/ab/4fe657d78b270aa6a32f027849513b829b41b0f28d9d8d7f8c3d29ea559a/mock-3.0.5.tar.gz"; + sha256 = "1hrp6j0yrx2xzylfv02qa8kph661m6yq4p0mc8fnimch9j4psrc3"; }; meta = { - license = [ pkgs.lib.licenses.bsdOriginal ]; + license = [ pkgs.lib.licenses.bsdOriginal { fullName = "OSI Approved :: BSD License"; } ]; }; }; "more-itertools" = super.buildPythonPackage { @@ -1029,14 +1074,18 @@ self: super: { }; }; "packaging" = super.buildPythonPackage { - name = "packaging-15.2"; + name = "packaging-19.2"; doCheck = false; + propagatedBuildInputs = [ + self."pyparsing" + self."six" + ]; src = fetchurl { - url = "https://files.pythonhosted.org/packages/24/c4/185da1304f07047dc9e0c46c31db75c0351bd73458ac3efad7da3dbcfbe1/packaging-15.2.tar.gz"; - sha256 = "1zn60w84bxvw6wypffka18ca66pa1k2cfrq3cq8fnsfja5m3k4ng"; + url = "https://files.pythonhosted.org/packages/5a/2f/449ded84226d0e2fda8da9252e5ee7731bdf14cd338f622dfcd9934e0377/packaging-19.2.tar.gz"; + sha256 = "0izwlz9h0bw171a1chr311g2y7n657zjaf4mq4rgm8pp9lbj9f98"; }; meta = { - license = [ pkgs.lib.licenses.asl20 ]; + license = [ pkgs.lib.licenses.bsdOriginal { fullName = "BSD or Apache License, Version 2.0"; } pkgs.lib.licenses.asl20 ]; }; }; "pandocfilters" = super.buildPythonPackage { @@ -1051,14 +1100,14 @@ self: super: { }; }; "paste" = super.buildPythonPackage { - name = "paste-3.0.8"; + name = "paste-3.2.1"; doCheck = false; propagatedBuildInputs = [ self."six" ]; src = fetchurl { - url = "https://files.pythonhosted.org/packages/66/65/e3acf1663438483c1f6ced0b6c6f3b90da9f0faacb0a6e2aa0f3f9f4b235/Paste-3.0.8.tar.gz"; - sha256 = "05w1sh6ky4d7pmdb8nv82n13w22jcn3qsagg5ih3hjmbws9kkwf4"; + url = "https://files.pythonhosted.org/packages/0d/86/7008b5563594e8a63763f05212a3eb84c85f0b2eff834e5697716e56bca9/Paste-3.2.1.tar.gz"; + sha256 = "1vjxr8n1p31c9x9rh8g0f34yisa9028cxpvn36q7g1s0m2b9x71x"; }; meta = { license = [ pkgs.lib.licenses.mit ]; @@ -1076,7 +1125,7 @@ self: super: { }; }; "pastescript" = super.buildPythonPackage { - 
name = "pastescript-3.1.0"; + name = "pastescript-3.2.0"; doCheck = false; propagatedBuildInputs = [ self."paste" @@ -1084,23 +1133,23 @@ self: super: { self."six" ]; src = fetchurl { - url = "https://files.pythonhosted.org/packages/9e/1d/14db1c283eb21a5d36b6ba1114c13b709629711e64acab653d9994fe346f/PasteScript-3.1.0.tar.gz"; - sha256 = "02qcxjjr32ks7a6d4f533wl34ysc7yhwlrfcyqwqbzr52250v4fs"; + url = "https://files.pythonhosted.org/packages/ff/47/45c6f5a3cb8f5abf786fea98dbb8d02400a55768a9b623afb7df12346c61/PasteScript-3.2.0.tar.gz"; + sha256 = "1b3jq7xh383nvrrlblk05m37345bv97xrhx77wshllba3h7mq3wv"; }; meta = { license = [ pkgs.lib.licenses.mit ]; }; }; "pathlib2" = super.buildPythonPackage { - name = "pathlib2-2.3.4"; + name = "pathlib2-2.3.5"; doCheck = false; propagatedBuildInputs = [ self."six" self."scandir" ]; src = fetchurl { - url = "https://files.pythonhosted.org/packages/b5/f4/9c7cc726ece2498b6c8b62d3262aa43f59039b953fe23c9964ac5e18d40b/pathlib2-2.3.4.tar.gz"; - sha256 = "1y0f9rkm1924zrc5dn4bwxlhgdkbml82lkcc28l5rgmr7d918q24"; + url = "https://files.pythonhosted.org/packages/94/d8/65c86584e7e97ef824a1845c72bbe95d79f5b306364fa778a3c3e401b309/pathlib2-2.3.5.tar.gz"; + sha256 = "0s4qa8c082fdkb17izh4mfgwrjd1n5pya18wvrbwqdvvb5xs9nbc"; }; meta = { license = [ pkgs.lib.licenses.mit ]; @@ -1175,48 +1224,51 @@ self: super: { }; }; "pluggy" = super.buildPythonPackage { - name = "pluggy-0.11.0"; + name = "pluggy-0.13.1"; doCheck = false; + propagatedBuildInputs = [ + self."importlib-metadata" + ]; src = fetchurl { - url = "https://files.pythonhosted.org/packages/0d/a1/862ab336e8128fde20981d2c1aa8506693412daf5083b1911d539412676b/pluggy-0.11.0.tar.gz"; - sha256 = "10511a54dvafw1jrk75mrhml53c7b7w4yaw7241696lc2hfvr895"; + url = "https://files.pythonhosted.org/packages/f8/04/7a8542bed4b16a65c2714bf76cf5a0b026157da7f75e87cc88774aa10b14/pluggy-0.13.1.tar.gz"; + sha256 = "1c35qyhvy27q9ih9n899f3h4sdnpgq027dbiilly2qb5cvgarchm"; }; meta = { license = [ pkgs.lib.licenses.mit ]; }; }; "prompt-toolkit" = super.buildPythonPackage { - name = "prompt-toolkit-1.0.16"; + name = "prompt-toolkit-1.0.18"; doCheck = false; propagatedBuildInputs = [ self."six" self."wcwidth" ]; src = fetchurl { - url = "https://files.pythonhosted.org/packages/f1/03/bb36771dc9fa7553ac4bdc639a9ecdf6fda0ff4176faf940d97e3c16e41d/prompt_toolkit-1.0.16.tar.gz"; - sha256 = "1d65hm6nf0cbq0q0121m60zzy4s1fpg9fn761s1yxf08dridvkn1"; + url = "https://files.pythonhosted.org/packages/c5/64/c170e5b1913b540bf0c8ab7676b21fdd1d25b65ddeb10025c6ca43cccd4c/prompt_toolkit-1.0.18.tar.gz"; + sha256 = "09h1153wgr5x2ny7ds0w2m81n3bb9j8hjb8sjfnrg506r01clkyx"; }; meta = { license = [ pkgs.lib.licenses.bsdOriginal ]; }; }; "psutil" = super.buildPythonPackage { - name = "psutil-5.5.1"; + name = "psutil-5.6.5"; doCheck = false; src = fetchurl { - url = "https://files.pythonhosted.org/packages/c7/01/7c30b247cdc5ba29623faa5c8cf1f1bbf7e041783c340414b0ed7e067c64/psutil-5.5.1.tar.gz"; - sha256 = "045qaqvn6k90bj5bcy259yrwcd2afgznaav3sfhphy9b8ambzkkj"; + url = "https://files.pythonhosted.org/packages/03/9a/95c4b3d0424426e5fd94b5302ff74cea44d5d4f53466e1228ac8e73e14b4/psutil-5.6.5.tar.gz"; + sha256 = "0isil5jxwwd8awz54qk28rpgjg43i5l6yl70g40vxwa4r4m56lfh"; }; meta = { license = [ pkgs.lib.licenses.bsdOriginal ]; }; }; "psycopg2" = super.buildPythonPackage { - name = "psycopg2-2.8.3"; + name = "psycopg2-2.8.4"; doCheck = false; src = fetchurl { - url = 
"https://files.pythonhosted.org/packages/5c/1c/6997288da181277a0c29bc39a5f9143ff20b8c99f2a7d059cfb55163e165/psycopg2-2.8.3.tar.gz"; - sha256 = "0ms4kx0p5n281l89awccix4d05ybmdngnjjpi9jbzd0rhf1nwyl9"; + url = "https://files.pythonhosted.org/packages/84/d7/6a93c99b5ba4d4d22daa3928b983cec66df4536ca50b22ce5dcac65e4e71/psycopg2-2.8.4.tar.gz"; + sha256 = "1djvh98pi4hjd8rxbq8qzc63bg8v78k33yg6pl99wak61b6fb67q"; }; meta = { license = [ pkgs.lib.licenses.zpl21 { fullName = "GNU Library or Lesser General Public License (LGPL)"; } { fullName = "LGPL with exceptions or ZPL"; } ]; @@ -1234,11 +1286,11 @@ self: super: { }; }; "py" = super.buildPythonPackage { - name = "py-1.6.0"; + name = "py-1.8.0"; doCheck = false; src = fetchurl { - url = "https://files.pythonhosted.org/packages/4f/38/5f427d1eedae73063ce4da680d2bae72014995f9fdeaa57809df61c968cd/py-1.6.0.tar.gz"; - sha256 = "1wcs3zv9wl5m5x7p16avqj2gsrviyb23yvc3pr330isqs0sh98q6"; + url = "https://files.pythonhosted.org/packages/f1/5a/87ca5909f400a2de1561f1648883af74345fe96349f34f737cdfc94eba8c/py-1.8.0.tar.gz"; + sha256 = "0lsy1gajva083pzc7csj1cvbmminb7b4l6a0prdzyb3fd829nqyw"; }; meta = { license = [ pkgs.lib.licenses.mit ]; @@ -1271,28 +1323,28 @@ self: super: { }; }; "pyasn1" = super.buildPythonPackage { - name = "pyasn1-0.4.5"; + name = "pyasn1-0.4.8"; doCheck = false; src = fetchurl { - url = "https://files.pythonhosted.org/packages/46/60/b7e32f6ff481b8a1f6c8f02b0fd9b693d1c92ddd2efb038ec050d99a7245/pyasn1-0.4.5.tar.gz"; - sha256 = "1xqh3jh2nfi2bflk5a0vn59y3pp1vn54f3ksx652sid92gz2096s"; + url = "https://files.pythonhosted.org/packages/a4/db/fffec68299e6d7bad3d504147f9094830b704527a7fc098b721d38cc7fa7/pyasn1-0.4.8.tar.gz"; + sha256 = "1fnhbi3rmk47l9851gbik0flfr64vs5j0hbqx24cafjap6gprxxf"; }; meta = { license = [ pkgs.lib.licenses.bsdOriginal ]; }; }; "pyasn1-modules" = super.buildPythonPackage { - name = "pyasn1-modules-0.2.5"; + name = "pyasn1-modules-0.2.6"; doCheck = false; propagatedBuildInputs = [ self."pyasn1" ]; src = fetchurl { - url = "https://files.pythonhosted.org/packages/ec/0b/69620cb04a016e4a1e8e352e8a42717862129b574b3479adb2358a1f12f7/pyasn1-modules-0.2.5.tar.gz"; - sha256 = "15nvfx0vnl8akdlv3k6s0n80vqvryj82bm040jdsn7wmyxl1ywpg"; + url = "https://files.pythonhosted.org/packages/f1/a9/a1ef72a0e43feff643cf0130a08123dea76205e7a0dda37e3efb5f054a31/pyasn1-modules-0.2.6.tar.gz"; + sha256 = "08hph9j1r018drnrny29l7dl2q0cin78csswrhwrh8jmq61pmha3"; }; meta = { - license = [ pkgs.lib.licenses.bsdOriginal ]; + license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.bsd2 ]; }; }; "pycparser" = super.buildPythonPackage { @@ -1318,11 +1370,11 @@ self: super: { }; }; "pycurl" = super.buildPythonPackage { - name = "pycurl-7.43.0.2"; + name = "pycurl-7.43.0.3"; doCheck = false; src = fetchurl { - url = "https://files.pythonhosted.org/packages/e8/e4/0dbb8735407189f00b33d84122b9be52c790c7c3b25286826f4e1bdb7bde/pycurl-7.43.0.2.tar.gz"; - sha256 = "1915kb04k1j4y6k1dx1sgnbddxrl9r1n4q928if2lkrdm73xy30g"; + url = "https://files.pythonhosted.org/packages/ac/b3/0f3979633b7890bab6098d84c84467030b807a1e2b31f5d30103af5a71ca/pycurl-7.43.0.3.tar.gz"; + sha256 = "13nsvqhvnmnvfk75s8iynqsgszyv06cjp4drd3psi7zpbh63623g"; }; meta = { license = [ pkgs.lib.licenses.mit { fullName = "LGPL/MIT"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ]; @@ -1351,22 +1403,22 @@ self: super: { }; }; "pyotp" = super.buildPythonPackage { - name = "pyotp-2.2.7"; + name = "pyotp-2.3.0"; doCheck = false; src = fetchurl { - url = 
"https://files.pythonhosted.org/packages/b1/ab/477cda97b6ca7baced5106471cb1ac1fe698d1b035983b9f8ee3422989eb/pyotp-2.2.7.tar.gz"; - sha256 = "00p69nw431f0s2ilg0hnd77p1l22m06p9rq4f8zfapmavnmzw3xy"; + url = "https://files.pythonhosted.org/packages/f7/15/395c4945ea6bc37e8811280bb675615cb4c2b2c1cd70bdc43329da91a386/pyotp-2.3.0.tar.gz"; + sha256 = "18d13ikra1iq0xyfqfm72zhgwxi2qi9ps6z1a6zmqp4qrn57wlzw"; }; meta = { license = [ pkgs.lib.licenses.mit ]; }; }; "pyparsing" = super.buildPythonPackage { - name = "pyparsing-2.3.0"; + name = "pyparsing-2.4.5"; doCheck = false; src = fetchurl { - url = "https://files.pythonhosted.org/packages/d0/09/3e6a5eeb6e04467b737d55f8bba15247ac0876f98fae659e58cd744430c6/pyparsing-2.3.0.tar.gz"; - sha256 = "14k5v7n3xqw8kzf42x06bzp184spnlkya2dpjyflax6l3yrallzk"; + url = "https://files.pythonhosted.org/packages/00/32/8076fa13e832bb4dcff379f18f228e5a53412be0631808b9ca2610c0f566/pyparsing-2.4.5.tar.gz"; + sha256 = "0fk8gsybiw1gm146mkjdjvaajwh20xwvpv4j7syh2zrnpq0j19jc"; }; meta = { license = [ pkgs.lib.licenses.mit ]; @@ -1396,7 +1448,7 @@ self: super: { }; }; "pyramid-debugtoolbar" = super.buildPythonPackage { - name = "pyramid-debugtoolbar-4.5"; + name = "pyramid-debugtoolbar-4.5.1"; doCheck = false; propagatedBuildInputs = [ self."pyramid" @@ -1406,8 +1458,8 @@ self: super: { self."ipaddress" ]; src = fetchurl { - url = "https://files.pythonhosted.org/packages/14/28/1f240239af340d19ee271ac62958158c79edb01a44ad8c9885508dd003d2/pyramid_debugtoolbar-4.5.tar.gz"; - sha256 = "0x2p3409pnx66n6dx5vc0mk2r1cp1ydr8mp120w44r9pwcngbibl"; + url = "https://files.pythonhosted.org/packages/88/21/74e7fa52edc74667e29403bd0cb4f2bb74dc4014711de313868001bf639f/pyramid_debugtoolbar-4.5.1.tar.gz"; + sha256 = "0hgf6i1fzvq43m9vjdmb24nnv8fwp7sdzrx9bcwrgpy24n07am9a"; }; meta = { license = [ { fullName = "Repoze Public License"; } pkgs.lib.licenses.bsdOriginal ]; @@ -1447,15 +1499,15 @@ self: super: { }; }; "pyramid-mako" = super.buildPythonPackage { - name = "pyramid-mako-1.0.2"; + name = "pyramid-mako-1.1.0"; doCheck = false; propagatedBuildInputs = [ self."pyramid" self."mako" ]; src = fetchurl { - url = "https://files.pythonhosted.org/packages/f1/92/7e69bcf09676d286a71cb3bbb887b16595b96f9ba7adbdc239ffdd4b1eb9/pyramid_mako-1.0.2.tar.gz"; - sha256 = "18gk2vliq8z4acblsl6yzgbvnr9rlxjlcqir47km7kvlk1xri83d"; + url = "https://files.pythonhosted.org/packages/63/7b/5e2af68f675071a6bad148c1c393928f0ef5fcd94e95cbf53b89d6471a83/pyramid_mako-1.1.0.tar.gz"; + sha256 = "1qj0m091mnii86j2q1d82yir22nha361rvhclvg3s70z8iiwhrh0"; }; meta = { license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ]; @@ -1473,44 +1525,46 @@ self: super: { }; }; "pytest" = super.buildPythonPackage { - name = "pytest-3.8.2"; + name = "pytest-4.6.5"; doCheck = false; propagatedBuildInputs = [ self."py" self."six" - self."setuptools" + self."packaging" self."attrs" - self."more-itertools" self."atomicwrites" self."pluggy" + self."importlib-metadata" + self."wcwidth" self."funcsigs" self."pathlib2" + self."more-itertools" ]; src = fetchurl { - url = "https://files.pythonhosted.org/packages/5f/d2/7f77f406ac505abda02ab4afb50d06ebf304f6ea42fca34f8f37529106b2/pytest-3.8.2.tar.gz"; - sha256 = "18nrwzn61kph2y6gxwfz9ms68rfvr9d4vcffsxng9p7jk9z18clk"; + url = "https://files.pythonhosted.org/packages/2a/c6/1d1f32f6a5009900521b12e6560fb6b7245b0d4bc3fb771acd63d10e30e1/pytest-4.6.5.tar.gz"; + sha256 = "0iykwwfp4h181nd7rsihh2120b0rkawlw7rvbl19sgfspncr3hwg"; }; meta = { license = [ 
pkgs.lib.licenses.mit ]; }; }; "pytest-cov" = super.buildPythonPackage { - name = "pytest-cov-2.6.0"; + name = "pytest-cov-2.7.1"; doCheck = false; propagatedBuildInputs = [ self."pytest" self."coverage" ]; src = fetchurl { - url = "https://files.pythonhosted.org/packages/d9/e2/58f90a316fbd94dd50bf5c826a23f3f5d079fb3cc448c1e9f0e3c33a3d2a/pytest-cov-2.6.0.tar.gz"; - sha256 = "0qnpp9y3ygx4jk4pf5ad71fh2skbvnr6gl54m7rg5qysnx4g0q73"; + url = "https://files.pythonhosted.org/packages/bb/0f/3db7ff86801883b21d5353b258c994b1b8e2abbc804e2273b8d0fd19004b/pytest-cov-2.7.1.tar.gz"; + sha256 = "0filvmmyqm715azsl09ql8hy2x7h286n6d8z5x42a1wpvvys83p0"; }; meta = { license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.mit ]; }; }; "pytest-profiling" = super.buildPythonPackage { - name = "pytest-profiling-1.3.0"; + name = "pytest-profiling-1.7.0"; doCheck = false; propagatedBuildInputs = [ self."six" @@ -1518,62 +1572,63 @@ self: super: { self."gprof2dot" ]; src = fetchurl { - url = "https://files.pythonhosted.org/packages/f5/34/4626126e041a51ef50a80d0619519b18d20aef249aac25b0d0fdd47e57ee/pytest-profiling-1.3.0.tar.gz"; - sha256 = "08r5afx5z22yvpmsnl91l4amsy1yxn8qsmm61mhp06mz8zjs51kb"; + url = "https://files.pythonhosted.org/packages/39/70/22a4b33739f07f1732a63e33bbfbf68e0fa58cfba9d200e76d01921eddbf/pytest-profiling-1.7.0.tar.gz"; + sha256 = "0abz9gi26jpcfdzgsvwad91555lpgdc8kbymicmms8k2fqa8z4wk"; }; meta = { license = [ pkgs.lib.licenses.mit ]; }; }; "pytest-runner" = super.buildPythonPackage { - name = "pytest-runner-4.2"; + name = "pytest-runner-5.1"; doCheck = false; src = fetchurl { - url = "https://files.pythonhosted.org/packages/9e/b7/fe6e8f87f9a756fd06722216f1b6698ccba4d269eac6329d9f0c441d0f93/pytest-runner-4.2.tar.gz"; - sha256 = "1gkpyphawxz38ni1gdq1fmwyqcg02m7ypzqvv46z06crwdxi2gyj"; + url = "https://files.pythonhosted.org/packages/d9/6d/4b41a74b31720e25abd4799be72d54811da4b4d0233e38b75864dcc1f7ad/pytest-runner-5.1.tar.gz"; + sha256 = "0ykfcnpp8c22winj63qzc07l5axwlc9ikl8vn05sc32gv3417815"; }; meta = { license = [ pkgs.lib.licenses.mit ]; }; }; "pytest-sugar" = super.buildPythonPackage { - name = "pytest-sugar-0.9.1"; + name = "pytest-sugar-0.9.2"; doCheck = false; propagatedBuildInputs = [ self."pytest" self."termcolor" + self."packaging" ]; src = fetchurl { - url = "https://files.pythonhosted.org/packages/3e/6a/a3f909083079d03bde11d06ab23088886bbe25f2c97fbe4bb865e2bf05bc/pytest-sugar-0.9.1.tar.gz"; - sha256 = "0b4av40dv30727m54v211r0nzwjp2ajkjgxix6j484qjmwpw935b"; + url = "https://files.pythonhosted.org/packages/55/59/f02f78d1c80f7e03e23177f60624c8106d4f23d124c921df103f65692464/pytest-sugar-0.9.2.tar.gz"; + sha256 = "1asq7yc4g8bx2sn7yy974mhc9ywvaihasjab4inkirdwn9s7mn7w"; }; meta = { license = [ pkgs.lib.licenses.bsdOriginal ]; }; }; "pytest-timeout" = super.buildPythonPackage { - name = "pytest-timeout-1.3.2"; + name = "pytest-timeout-1.3.3"; doCheck = false; propagatedBuildInputs = [ self."pytest" ]; src = fetchurl { - url = "https://files.pythonhosted.org/packages/8c/3e/1b6a319d12ae7baa3acb7c18ff2c8630a09471a0319d43535c683b4d03eb/pytest-timeout-1.3.2.tar.gz"; - sha256 = "09wnmzvnls2mnsdz7x3c3sk2zdp6jl4dryvyj5i8hqz16q2zq5qi"; + url = "https://files.pythonhosted.org/packages/13/48/7a166eaa29c1dca6cc253e3ba5773ff2e4aa4f567c1ea3905808e95ac5c1/pytest-timeout-1.3.3.tar.gz"; + sha256 = "1cczcjhw4xx5sjkhxlhc5c1bkr7x6fcyx12wrnvwfckshdvblc2a"; }; meta = { license = [ pkgs.lib.licenses.mit { fullName = "DFSG approved"; } ]; }; }; "python-dateutil" = super.buildPythonPackage { - name = 
"python-dateutil-2.8.0"; + name = "python-dateutil-2.8.1"; doCheck = false; propagatedBuildInputs = [ self."six" ]; src = fetchurl { - url = "https://files.pythonhosted.org/packages/ad/99/5b2e99737edeb28c71bcbec5b5dda19d0d9ef3ca3e92e3e925e7c0bb364c/python-dateutil-2.8.0.tar.gz"; - sha256 = "17nsfhy4xdz1khrfxa61vd7pmvd5z0wa3zb6v4gb4kfnykv0b668"; + url = "https://files.pythonhosted.org/packages/be/ed/5bbc91f03fa4c839c4c7360375da77f9659af5f7086b7a7bdda65771c8e0/python-dateutil-2.8.1.tar.gz"; + sha256 = "0g42w7k5007iv9dam6gnja2ry8ydwirh99mgdll35s12pyfzxsvk"; }; meta = { license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.asl20 { fullName = "Dual License"; } ]; @@ -1647,11 +1702,11 @@ self: super: { }; }; "pytz" = super.buildPythonPackage { - name = "pytz-2018.4"; + name = "pytz-2019.2"; doCheck = false; src = fetchurl { - url = "https://files.pythonhosted.org/packages/10/76/52efda4ef98e7544321fd8d5d512e11739c1df18b0649551aeccfb1c8376/pytz-2018.4.tar.gz"; - sha256 = "0jgpqx3kk2rhv81j1izjxvmx8d0x7hzs1857pgqnixic5wq2ar60"; + url = "https://files.pythonhosted.org/packages/27/c0/fbd352ca76050952a03db776d241959d5a2ee1abddfeb9e2a53fdb489be4/pytz-2019.2.tar.gz"; + sha256 = "0ckb27hhjc8i8gcdvk4d9avld62b7k52yjijc60s2m3y8cpb7h16"; }; meta = { license = [ pkgs.lib.licenses.mit ]; @@ -1669,11 +1724,11 @@ self: super: { }; }; "redis" = super.buildPythonPackage { - name = "redis-2.10.6"; + name = "redis-3.3.11"; doCheck = false; src = fetchurl { - url = "https://files.pythonhosted.org/packages/09/8d/6d34b75326bf96d4139a2ddd8e74b80840f800a0a79f9294399e212cb9a7/redis-2.10.6.tar.gz"; - sha256 = "03vcgklykny0g0wpvqmy8p6azi2s078317wgb2xjv5m2rs9sjb52"; + url = "https://files.pythonhosted.org/packages/06/ca/00557c74279d2f256d3c42cabf237631355f3a132e4c74c2000e6647ad98/redis-3.3.11.tar.gz"; + sha256 = "1hicqbi5xl92hhml82awrr2rxl9jar5fp8nbcycj9qgmsdwc43wd"; }; meta = { license = [ pkgs.lib.licenses.mit ]; @@ -1707,18 +1762,24 @@ self: super: { }; }; "requests" = super.buildPythonPackage { - name = "requests-2.9.1"; + name = "requests-2.22.0"; doCheck = false; + propagatedBuildInputs = [ + self."chardet" + self."idna" + self."urllib3" + self."certifi" + ]; src = fetchurl { - url = "https://files.pythonhosted.org/packages/f9/6d/07c44fb1ebe04d069459a189e7dab9e4abfe9432adcd4477367c25332748/requests-2.9.1.tar.gz"; - sha256 = "0zsqrzlybf25xscgi7ja4s48y2abf9wvjkn47wh984qgs1fq2xy5"; + url = "https://files.pythonhosted.org/packages/01/62/ddcf76d1d19885e8579acb1b1df26a852b03472c0e46d2b959a714c90608/requests-2.22.0.tar.gz"; + sha256 = "1d5ybh11jr5sm7xp6mz8fyc7vrp4syifds91m7sj60xalal0gq0i"; }; meta = { license = [ pkgs.lib.licenses.asl20 ]; }; }; "rhodecode-enterprise-ce" = super.buildPythonPackage { - name = "rhodecode-enterprise-ce-4.17.4"; + name = "rhodecode-enterprise-ce-4.18.0"; buildInputs = [ self."pytest" self."py" @@ -1738,7 +1799,6 @@ self: super: { doCheck = true; propagatedBuildInputs = [ self."amqp" - self."authomatic" self."babel" self."beaker" self."bleach" @@ -1808,7 +1868,6 @@ self: super: { self."venusian" self."weberror" self."webhelpers2" - self."webhelpers" self."webob" self."whoosh" self."wsgiref" @@ -1823,6 +1882,7 @@ self: super: { self."nbconvert" self."nbformat" self."jupyter-client" + self."jupyter-core" self."alembic" self."invoke" self."bumpversion" @@ -1854,7 +1914,7 @@ self: super: { }; }; "rhodecode-tools" = super.buildPythonPackage { - name = "rhodecode-tools-1.2.1"; + name = "rhodecode-tools-1.4.0"; doCheck = false; propagatedBuildInputs = [ self."click" @@ -1871,8 +1931,8 @@ self: 
super: { self."elasticsearch1-dsl" ]; src = fetchurl { - url = "https://code.rhodecode.com/rhodecode-tools-ce/artifacts/download/0-10ac93f4-bb7d-4b97-baea-68110743dd5a.tar.gz?md5=962dc77c06aceee62282b98d33149661"; - sha256 = "1vfhgf46inbx7jvlfx4fdzh3vz7lh37r291gzb5hx447pfm3qllg"; + url = "https://code.rhodecode.com/rhodecode-tools-ce/artifacts/download/0-ed54e749-2ef5-4bc7-ae7f-7900e3c2aa15.tar.gz?sha256=76f024bad3a1e55fdb3d64f13f5b77ff21a12fee699918de2110fe21effd5a3a"; + sha256 = "0fjszppj3zhh47g1i6b9xqps28gzfxdkzwb47pdmzrd1sfx29w3n"; }; meta = { license = [ { fullName = "Apache 2.0 and Proprietary"; } ]; @@ -1916,11 +1976,11 @@ self: super: { }; }; "setuptools" = super.buildPythonPackage { - name = "setuptools-41.0.1"; + name = "setuptools-44.0.0"; doCheck = false; src = fetchurl { - url = "https://files.pythonhosted.org/packages/1d/64/a18a487b4391a05b9c7f938b94a16d80305bf0369c6b0b9509e86165e1d3/setuptools-41.0.1.zip"; - sha256 = "04sns22y2hhsrwfy1mha2lgslvpjsjsz8xws7h2rh5a7ylkd28m2"; + url = "https://files.pythonhosted.org/packages/b0/f3/44da7482ac6da3f36f68e253cb04de37365b3dba9036a3c70773b778b485/setuptools-44.0.0.zip"; + sha256 = "025h5cnxcmda1893l6i12hrwdvs1n8r31qs6q4pkif2v7rrggfp5"; }; meta = { license = [ pkgs.lib.licenses.mit ]; @@ -1960,11 +2020,11 @@ self: super: { }; }; "sqlalchemy" = super.buildPythonPackage { - name = "sqlalchemy-1.1.18"; + name = "sqlalchemy-1.3.11"; doCheck = false; src = fetchurl { - url = "https://files.pythonhosted.org/packages/cc/4d/96d93ff77cd67aca7618e402191eee3490d8f5f245d6ab7622d35fe504f4/SQLAlchemy-1.1.18.tar.gz"; - sha256 = "1ab4ysip6irajfbxl9wy27kv76miaz8h6759hfx92499z4dcf3lb"; + url = "https://files.pythonhosted.org/packages/34/5c/0e1d7ad0ca52544bb12f9cb8d5cc454af45821c92160ffedd38db0a317f6/SQLAlchemy-1.3.11.tar.gz"; + sha256 = "12izpqqgy738ndn7qqn962qxi8qw2xb9vg2i880x12paklg599dg"; }; meta = { license = [ pkgs.lib.licenses.mit ]; @@ -1997,14 +2057,11 @@ self: super: { }; }; "supervisor" = super.buildPythonPackage { - name = "supervisor-4.0.3"; + name = "supervisor-4.1.0"; doCheck = false; - propagatedBuildInputs = [ - self."meld3" - ]; src = fetchurl { - url = "https://files.pythonhosted.org/packages/97/48/f38bf70bd9282d1a18d591616557cc1a77a1c627d57dff66ead65c891dc8/supervisor-4.0.3.tar.gz"; - sha256 = "17hla7mx6w5m5jzkkjxgqa8wpswqmfhbhf49f692hw78fg0ans7p"; + url = "https://files.pythonhosted.org/packages/de/87/ee1ad8fa533a4b5f2c7623f4a2b585d3c1947af7bed8e65bc7772274320e/supervisor-4.1.0.tar.gz"; + sha256 = "10q36sa1jqljyyyl7cif52akpygl5kmlqq9x91hmx53f8zh6zj1d"; }; meta = { license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ]; @@ -2033,18 +2090,18 @@ self: super: { }; }; "testpath" = super.buildPythonPackage { - name = "testpath-0.4.2"; + name = "testpath-0.4.4"; doCheck = false; src = fetchurl { - url = "https://files.pythonhosted.org/packages/06/30/9a7e917066d851d8b4117e85794b5f14516419ea714a8a2681ec6aa8a981/testpath-0.4.2.tar.gz"; - sha256 = "1y40hywscnnyb734pnzm55nd8r8kp1072bjxbil83gcd53cv755n"; + url = "https://files.pythonhosted.org/packages/2c/b3/5d57205e896d8998d77ad12aa42ebce75cd97d8b9a97d00ba078c4c9ffeb/testpath-0.4.4.tar.gz"; + sha256 = "0zpcmq22dz79ipvvsfnw1ykpjcaj6xyzy7ws77s5b5ql3hka7q30"; }; meta = { license = [ ]; }; }; "traitlets" = super.buildPythonPackage { - name = "traitlets-4.3.2"; + name = "traitlets-4.3.3"; doCheck = false; propagatedBuildInputs = [ self."ipython-genutils" @@ -2053,8 +2110,8 @@ self: super: { self."enum34" ]; src = fetchurl { - url = 
"https://files.pythonhosted.org/packages/a5/98/7f5ef2fe9e9e071813aaf9cb91d1a732e0a68b6c44a32b38cb8e14c3f069/traitlets-4.3.2.tar.gz"; - sha256 = "0dbq7sx26xqz5ixs711k5nc88p8a0nqyz6162pwks5dpcz9d4jww"; + url = "https://files.pythonhosted.org/packages/75/b0/43deb021bc943f18f07cbe3dac1d681626a48997b7ffa1e7fb14ef922b21/traitlets-4.3.3.tar.gz"; + sha256 = "1xsrwgivpkxlbr4dfndfsi098s29yqgswgjc1qqn69yxklvfw8yh"; }; meta = { license = [ pkgs.lib.licenses.bsdOriginal ]; @@ -2100,11 +2157,11 @@ self: super: { }; }; "urllib3" = super.buildPythonPackage { - name = "urllib3-1.24.1"; + name = "urllib3-1.25.2"; doCheck = false; src = fetchurl { - url = "https://files.pythonhosted.org/packages/b1/53/37d82ab391393565f2f831b8eedbffd57db5a718216f82f1a8b4d381a1c1/urllib3-1.24.1.tar.gz"; - sha256 = "08lwd9f3hqznyf32vnzwvp87pchx062nkbgyrf67rwlkgj0jk5fy"; + url = "https://files.pythonhosted.org/packages/9a/8b/ea6d2beb2da6e331e9857d0a60b79ed4f72dcbc4e2c7f2d2521b0480fda2/urllib3-1.25.2.tar.gz"; + sha256 = "1nq2k4pss1ihsjh02r41sqpjpm5rfqkjfysyq7g7n2i1p7c66c55"; }; meta = { license = [ pkgs.lib.licenses.mit ]; @@ -2144,11 +2201,11 @@ self: super: { }; }; "waitress" = super.buildPythonPackage { - name = "waitress-1.3.0"; + name = "waitress-1.3.1"; doCheck = false; src = fetchurl { - url = "https://files.pythonhosted.org/packages/43/50/9890471320d5ad22761ae46661cf745f487b1c8c4ec49352b99e1078b970/waitress-1.3.0.tar.gz"; - sha256 = "09j5dzbbcxib7vdskhx39s1qsydlr4n2p2png71d7mjnr9pnwajf"; + url = "https://files.pythonhosted.org/packages/a6/e6/708da7bba65898e5d759ade8391b1077e49d07be0b0223c39f5be04def56/waitress-1.3.1.tar.gz"; + sha256 = "1iysl8ka3l4cdrr0r19fh1cv28q41mwpvgsb81ji7k4shkb0k3i7"; }; meta = { license = [ pkgs.lib.licenses.zpl21 ]; @@ -2193,20 +2250,6 @@ self: super: { license = [ pkgs.lib.licenses.mit ]; }; }; - "webhelpers" = super.buildPythonPackage { - name = "webhelpers-1.3"; - doCheck = false; - propagatedBuildInputs = [ - self."markupsafe" - ]; - src = fetchurl { - url = "https://files.pythonhosted.org/packages/ee/68/4d07672821d514184357f1552f2dad923324f597e722de3b016ca4f7844f/WebHelpers-1.3.tar.gz"; - sha256 = "10x5i82qdkrvyw18gsybwggfhfpl869siaab89vnndi9x62g51pa"; - }; - meta = { - license = [ pkgs.lib.licenses.bsdOriginal ]; - }; - }; "webhelpers2" = super.buildPythonPackage { name = "webhelpers2-2.0"; doCheck = false; @@ -2283,6 +2326,20 @@ self: super: { license = [ { fullName = "PSF or ZPL"; } ]; }; }; + "zipp" = super.buildPythonPackage { + name = "zipp-0.6.0"; + doCheck = false; + propagatedBuildInputs = [ + self."more-itertools" + ]; + src = fetchurl { + url = "https://files.pythonhosted.org/packages/57/dd/585d728479d97d25aeeb9aa470d36a4ad8d0ba5610f84e14770128ce6ff7/zipp-0.6.0.tar.gz"; + sha256 = "13ndkf7vklw978a4gdl1yfvn8hch28429a0iam67sg4nrp5v261p"; + }; + meta = { + license = [ pkgs.lib.licenses.mit ]; + }; + }; "zope.cachedescriptors" = super.buildPythonPackage { name = "zope.cachedescriptors-4.3.1"; doCheck = false; diff --git a/pytest.ini b/pytest.ini --- a/pytest.ini +++ b/pytest.ini @@ -9,8 +9,11 @@ vcsserver_config_http = rhodecode/tests/ addopts = --pdbcls=IPython.terminal.debugger:TerminalPdb + --strict-markers markers = vcs_operations: Mark tests depending on a running RhodeCode instance. xfail_backends: Mark tests as xfail for given backends. skip_backends: Mark tests as skipped for given backends. 
+ backends: Mark backends + dbs: database markers for running tests for given DB diff --git a/requirements.txt b/requirements.txt --- a/requirements.txt +++ b/requirements.txt @@ -1,13 +1,10 @@ ## dependencies -amqp==2.3.1 -# not released authomatic that has updated some oauth providers -https://code.rhodecode.com/upstream/authomatic/artifacts/download/0-4fe9c041-a567-4f84-be4c-7efa2a606d3c.tar.gz?md5=f6bdc3c769688212db68233e8d2b0383#egg=authomatic==0.1.0.post1 - +amqp==2.5.2 babel==1.3 beaker==1.9.1 bleach==3.1.0 -celery==4.1.1 +celery==4.3.0 channelstream==0.5.2 click==7.0 colander==1.7.0 @@ -16,9 +13,9 @@ https://code.rhodecode.com/upstream/conf cssselect==1.0.3 cryptography==2.6.1 decorator==4.1.2 -deform==2.0.7 +deform==2.0.8 docutils==0.14.0 -dogpile.cache==0.7.1 +dogpile.cache==0.9.0 dogpile.core==0.4.1 formencode==1.2.4 future==0.14.3 @@ -26,55 +23,54 @@ futures==3.0.2 infrae.cache==1.0.1 iso8601==0.1.12 itsdangerous==0.24 -kombu==4.2.1 +kombu==4.6.6 lxml==4.2.5 -mako==1.0.7 +mako==1.1.0 markdown==2.6.11 -markupsafe==1.1.0 +markupsafe==1.1.1 msgpack-python==0.5.6 -pyotp==2.2.7 -packaging==15.2 -pathlib2==2.3.4 -paste==3.0.8 +pyotp==2.3.0 +packaging==19.2 +pathlib2==2.3.5 +paste==3.2.1 pastedeploy==2.0.1 -pastescript==3.1.0 +pastescript==3.2.0 peppercorn==0.6 -psutil==5.5.1 +psutil==5.6.5 py-bcrypt==0.4 -pycurl==7.43.0.2 +pycurl==7.43.0.3 pycrypto==2.6.1 pygments==2.4.2 -pyparsing==2.3.0 -pyramid-debugtoolbar==4.5.0 -pyramid-mako==1.0.2 +pyparsing==2.4.5 +pyramid-debugtoolbar==4.5.1 +pyramid-mako==1.1.0 pyramid==1.10.4 pyramid_mailer==0.15.1 -python-dateutil +python-dateutil==2.8.1 python-ldap==3.1.0 python-memcached==1.59 python-pam==1.8.4 python-saml==2.4.2 -pytz==2018.4 +pytz==2019.2 tzlocal==1.5.1 pyzmq==14.6.0 py-gfm==0.1.4 -redis==2.10.6 +redis==3.3.11 repoze.lru==0.7 -requests==2.9.1 +requests==2.22.0 routes==2.4.1 simplejson==3.16.0 six==1.11.0 -sqlalchemy==1.1.18 +sqlalchemy==1.3.11 sshpubkeys==3.1.0 subprocess32==3.5.4 -supervisor==4.0.3 +supervisor==4.1.0 translationstring==1.3 -urllib3==1.24.1 +urllib3==1.25.2 urlobject==2.4.3 venusian==1.2.0 weberror==0.10.3 webhelpers2==2.0 -webhelpers==1.3 webob==1.8.5 whoosh==2.7.4 wsgiref==0.1.2 @@ -87,17 +83,18 @@ zope.interface==4.6.0 mysql-python==1.2.5 pymysql==0.8.1 pysqlite==2.8.3 -psycopg2==2.8.3 +psycopg2==2.8.4 # IPYTHON RENDERING # entrypoints backport, pypi version doesn't support egg installs https://code.rhodecode.com/upstream/entrypoints/artifacts/download/0-8e9ee9e4-c4db-409c-b07e-81568fd1832d.tar.gz?md5=3a027b8ff1d257b91fe257de6c43357d#egg=entrypoints==0.2.2.rhodecode-upstream1 nbconvert==5.3.1 nbformat==4.4.0 -jupyter_client==5.0.0 +jupyter-client==5.0.0 +jupyter-core==4.5.0 ## cli tools -alembic==1.0.10 +alembic==1.3.1 invoke==0.13.0 bumpversion==0.5.3 @@ -105,14 +102,15 @@ bumpversion==0.5.3 gevent==1.4.0 greenlet==0.4.15 gunicorn==19.9.0 -waitress==1.3.0 +waitress==1.3.1 ## debug ipdb==0.12.0 ipython==5.1.0 -## rhodecode-tools, special case -https://code.rhodecode.com/rhodecode-tools-ce/artifacts/download/0-10ac93f4-bb7d-4b97-baea-68110743dd5a.tar.gz?md5=962dc77c06aceee62282b98d33149661#egg=rhodecode-tools==1.2.1 +## rhodecode-tools, special case, use file://PATH.tar.gz#egg=rhodecode-tools==X.Y.Z, to test local version +https://code.rhodecode.com/rhodecode-tools-ce/artifacts/download/0-ed54e749-2ef5-4bc7-ae7f-7900e3c2aa15.tar.gz?sha256=76f024bad3a1e55fdb3d64f13f5b77ff21a12fee699918de2110fe21effd5a3a#egg=rhodecode-tools==1.4.0 + ## appenlight appenlight-client==0.6.26 diff --git a/requirements_pinned.txt 
b/requirements_pinned.txt --- a/requirements_pinned.txt +++ b/requirements_pinned.txt @@ -1,19 +1,27 @@ # contains not directly required libraries we want to pin the version. -atomicwrites==1.2.1 -attrs==18.2.0 -billiard==3.5.0.3 +atomicwrites==1.3.0 +attrs==19.3.0 +asn1crypto==0.24.0 +billiard==3.6.1.0 +cffi==1.12.3 chameleon==2.24 -cffi==1.12.2 +configparser==4.0.2 +contextlib2==0.6.0.post1 ecdsa==0.13.2 -hupper==1.6.1 gnureadline==6.3.8 +hupper==1.9.1 +ipaddress==1.0.23 +importlib-metadata==0.23 jinja2==2.9.6 jsonschema==2.6.0 +pluggy==0.13.1 +pyasn1-modules==0.2.6 pyramid-jinja2==2.7 -pluggy==0.11.0 -setproctitle==1.1.10 scandir==1.10.0 +setproctitle==1.1.10 tempita==0.5.2 +testpath==0.4.4 +transaction==2.4.0 vine==1.3.0 -configparser==3.7.4 +wcwidth==0.1.7 diff --git a/requirements_test.txt b/requirements_test.txt --- a/requirements_test.txt +++ b/requirements_test.txt @@ -1,16 +1,16 @@ # test related requirements -pytest==3.8.2 -py==1.6.0 -pytest-cov==2.6.0 -pytest-sugar==0.9.1 -pytest-runner==4.2.0 -pytest-profiling==1.3.0 -pytest-timeout==1.3.2 +pytest==4.6.5 +py==1.8.0 +pytest-cov==2.7.1 +pytest-sugar==0.9.2 +pytest-runner==5.1.0 +pytest-profiling==1.7.0 +pytest-timeout==1.3.3 gprof2dot==2017.9.19 -mock==1.0.1 +mock==3.0.5 cov-core==1.15.0 -coverage==4.5.3 +coverage==4.5.4 webtest==2.0.33 beautifulsoup4==4.6.3 diff --git a/rhodecode/VERSION b/rhodecode/VERSION --- a/rhodecode/VERSION +++ b/rhodecode/VERSION @@ -1,1 +1,1 @@ -4.17.4 \ No newline at end of file +4.18.0 \ No newline at end of file diff --git a/rhodecode/__init__.py b/rhodecode/__init__.py --- a/rhodecode/__init__.py +++ b/rhodecode/__init__.py @@ -45,7 +45,7 @@ PYRAMID_SETTINGS = {} EXTENSIONS = {} __version__ = ('.'.join((str(each) for each in VERSION[:3]))) -__dbversion__ = 98 # defines current db version for migrations +__dbversion__ = 103 # defines current db version for migrations __platform__ = platform.system() __license__ = 'AGPLv3, and Commercial License' __author__ = 'RhodeCode GmbH' diff --git a/rhodecode/api/__init__.py b/rhodecode/api/__init__.py --- a/rhodecode/api/__init__.py +++ b/rhodecode/api/__init__.py @@ -122,7 +122,7 @@ def jsonrpc_response(request, result): return response -def jsonrpc_error(request, message, retid=None, code=None): +def jsonrpc_error(request, message, retid=None, code=None, headers=None): """ Generate a Response object with a JSON-RPC error body @@ -132,10 +132,12 @@ def jsonrpc_error(request, message, reti """ err_dict = {'id': retid, 'result': None, 'error': message} body = render(DEFAULT_RENDERER, err_dict, request=request).encode('utf-8') + return Response( body=body, status=code, - content_type='application/json' + content_type='application/json', + headerlist=headers ) @@ -287,8 +289,7 @@ def request_view(request): }) # register some common functions for usage - attach_context_attributes( - TemplateArgs(), request, request.rpc_user.user_id) + attach_context_attributes(TemplateArgs(), request, request.rpc_user.user_id) try: ret_value = func(**call_params) @@ -298,9 +299,13 @@ def request_view(request): except Exception: log.exception('Unhandled exception occurred on api call: %s', func) exc_info = sys.exc_info() - store_exception(id(exc_info), exc_info, prefix='rhodecode-api') + exc_id, exc_type_name = store_exception( + id(exc_info), exc_info, prefix='rhodecode-api') + error_headers = [('RhodeCode-Exception-Id', str(exc_id)), + ('RhodeCode-Exception-Type', str(exc_type_name))] return jsonrpc_error( - request, retid=request.rpc_id, message='Internal server error') + 
request, retid=request.rpc_id, message='Internal server error', + headers=error_headers) def setup_request(request): @@ -333,6 +338,7 @@ def setup_request(request): raise JSONRPCError("Content-Length is 0") raw_body = request.body + log.debug("Loading JSON body now") try: json_body = json.loads(raw_body) except ValueError as e: @@ -359,7 +365,7 @@ def setup_request(request): request.rpc_params = json_body['args'] \ if isinstance(json_body['args'], dict) else {} - log.debug('method: %s, params: %s', request.rpc_method, request.rpc_params) + log.debug('method: %s, params: %.10240r', request.rpc_method, request.rpc_params) except KeyError as e: raise JSONRPCError('Incorrect JSON data. Missing %s' % e) diff --git a/rhodecode/api/tests/test_close_pull_request.py b/rhodecode/api/tests/test_close_pull_request.py --- a/rhodecode/api/tests/test_close_pull_request.py +++ b/rhodecode/api/tests/test_close_pull_request.py @@ -49,7 +49,7 @@ class TestClosePullRequest(object): assert_ok(id_, expected, response.body) journal = UserLog.query()\ .filter(UserLog.user_id == author) \ - .order_by('user_log_id') \ + .order_by(UserLog.user_log_id.asc()) \ .filter(UserLog.repository_id == repo)\ .all() assert journal[-1].action == 'repo.pull_request.close' diff --git a/rhodecode/api/tests/test_comment_commit.py b/rhodecode/api/tests/test_comment_commit.py --- a/rhodecode/api/tests/test_comment_commit.py +++ b/rhodecode/api/tests/test_comment_commit.py @@ -20,7 +20,7 @@ import pytest -from rhodecode.model.db import ChangesetStatus +from rhodecode.model.db import ChangesetStatus, User from rhodecode.api.tests.utils import ( build_data, api_call, assert_error, assert_ok) @@ -79,3 +79,38 @@ class TestCommentCommit(object): 'success': True } assert_ok(id_, expected, given=response.body) + + def test_api_comment_commit_with_extra_recipients(self, backend, user_util): + + commit_id = backend.repo.scm_instance().get_commit('tip').raw_id + + user1 = user_util.create_user() + user1_id = user1.user_id + user2 = user_util.create_user() + user2_id = user2.user_id + + id_, params = build_data( + self.apikey, 'comment_commit', repoid=backend.repo_name, + commit_id=commit_id, + message='abracadabra', + extra_recipients=[user1.user_id, user2.username]) + + response = api_call(self.app, params) + repo = backend.repo.scm_instance() + + expected = { + 'msg': 'Commented on commit `%s` for repository `%s`' % ( + repo.get_commit().raw_id, backend.repo_name), + 'status_change': None, + 'success': True + } + + assert_ok(id_, expected, given=response.body) + # check user1/user2 inbox for notification + user1 = User.get(user1_id) + assert 1 == len(user1.notifications) + assert 'abracadabra' in user1.notifications[0].notification.body + + user2 = User.get(user2_id) + assert 1 == len(user2.notifications) + assert 'abracadabra' in user2.notifications[0].notification.body diff --git a/rhodecode/api/tests/test_comment_pull_request.py b/rhodecode/api/tests/test_comment_pull_request.py --- a/rhodecode/api/tests/test_comment_pull_request.py +++ b/rhodecode/api/tests/test_comment_pull_request.py @@ -21,7 +21,7 @@ import pytest from rhodecode.model.comment import CommentsModel -from rhodecode.model.db import UserLog +from rhodecode.model.db import UserLog, User from rhodecode.model.pull_request import PullRequestModel from rhodecode.tests import TEST_USER_ADMIN_LOGIN from rhodecode.api.tests.utils import ( @@ -65,11 +65,48 @@ class TestCommentPullRequest(object): journal = UserLog.query()\ .filter(UserLog.user_id == author)\ 
.filter(UserLog.repository_id == repo) \ - .order_by('user_log_id') \ + .order_by(UserLog.user_log_id.asc()) \ .all() assert journal[-1].action == 'repo.pull_request.comment.create' @pytest.mark.backends("git", "hg") + def test_api_comment_pull_request_with_extra_recipients(self, pr_util, user_util): + pull_request = pr_util.create_pull_request() + + user1 = user_util.create_user() + user1_id = user1.user_id + user2 = user_util.create_user() + user2_id = user2.user_id + + id_, params = build_data( + self.apikey, 'comment_pull_request', + repoid=pull_request.target_repo.repo_name, + pullrequestid=pull_request.pull_request_id, + message='test message', + extra_recipients=[user1.user_id, user2.username] + ) + response = api_call(self.app, params) + pull_request = PullRequestModel().get(pull_request.pull_request_id) + + comments = CommentsModel().get_comments( + pull_request.target_repo.repo_id, pull_request=pull_request) + + expected = { + 'pull_request_id': pull_request.pull_request_id, + 'comment_id': comments[-1].comment_id, + 'status': {'given': None, 'was_changed': None} + } + assert_ok(id_, expected, response.body) + # check user1/user2 inbox for notification + user1 = User.get(user1_id) + assert 1 == len(user1.notifications) + assert 'test message' in user1.notifications[0].notification.body + + user2 = User.get(user2_id) + assert 1 == len(user2.notifications) + assert 'test message' in user2.notifications[0].notification.body + + @pytest.mark.backends("git", "hg") def test_api_comment_pull_request_change_status( self, pr_util, no_notifications): pull_request = pr_util.create_pull_request() diff --git a/rhodecode/api/tests/test_create_user.py b/rhodecode/api/tests/test_create_user.py --- a/rhodecode/api/tests/test_create_user.py +++ b/rhodecode/api/tests/test_create_user.py @@ -82,6 +82,7 @@ class TestCreateUser(object): self.apikey, 'create_user', username=username, email=email, + description='CTO of Things', password='example') response = api_call(self.app, params) diff --git a/rhodecode/api/tests/test_fts_search.py b/rhodecode/api/tests/test_fts_search.py --- a/rhodecode/api/tests/test_fts_search.py +++ b/rhodecode/api/tests/test_fts_search.py @@ -27,6 +27,16 @@ from rhodecode.api.tests.utils import ( @pytest.mark.usefixtures("testuser_api", "app") class TestApiSearch(object): + @pytest.mark.parametrize("sort_dir", [ + "asc", + "desc", + ]) + @pytest.mark.parametrize("sort", [ + "xxx", + "author_email", + "date", + "message", + ]) @pytest.mark.parametrize("query, expected_hits, expected_paths", [ ('todo', 23, [ 'vcs/backends/hg/inmemory.py', @@ -55,10 +65,11 @@ class TestApiSearch(object): 'vcs/tests/test_cli.py']), ('owner:michał test', 0, []), ]) - def test_search_content_results(self, query, expected_hits, expected_paths): + def test_search_content_results(self, sort_dir, sort, query, expected_hits, expected_paths): id_, params = build_data( self.apikey_regular, 'search', search_query=query, + search_sort='{}:{}'.format(sort_dir, sort), search_type='content') response = api_call(self.app, params) @@ -70,6 +81,16 @@ class TestApiSearch(object): for expected_path in expected_paths: assert expected_path in paths + @pytest.mark.parametrize("sort_dir", [ + "asc", + "desc", + ]) + @pytest.mark.parametrize("sort", [ + "xxx", + "date", + "file", + "size", + ]) @pytest.mark.parametrize("query, expected_hits, expected_paths", [ ('readme.rst', 3, []), ('test*', 75, []), @@ -77,10 +98,11 @@ class TestApiSearch(object): ('extension:rst', 48, []), ('extension:rst api', 24, []), ]) - def 
test_search_file_paths(self, query, expected_hits, expected_paths): + def test_search_file_paths(self, sort_dir, sort, query, expected_hits, expected_paths): id_, params = build_data( self.apikey_regular, 'search', search_query=query, + search_sort='{}:{}'.format(sort_dir, sort), search_type='path') response = api_call(self.app, params) diff --git a/rhodecode/api/tests/test_get_method.py b/rhodecode/api/tests/test_get_method.py --- a/rhodecode/api/tests/test_get_method.py +++ b/rhodecode/api/tests/test_get_method.py @@ -50,6 +50,7 @@ class TestGetMethod(object): {'apiuser': '', 'comment_type': "", 'commit_id': '', + 'extra_recipients': '', 'message': '', 'repoid': '', 'request': '', diff --git a/rhodecode/api/tests/test_get_repo_changeset.py b/rhodecode/api/tests/test_get_repo_changeset.py --- a/rhodecode/api/tests/test_get_repo_changeset.py +++ b/rhodecode/api/tests/test_get_repo_changeset.py @@ -54,7 +54,7 @@ class TestGetRepoChangeset(object): details=details, ) response = api_call(self.app, params) - expected = 'commit_id must be a string value' + expected = "commit_id must be a string value got instead" assert_error(id_, expected, given=response.body) @pytest.mark.parametrize("details", ['basic', 'extended', 'full']) @@ -137,5 +137,5 @@ class TestGetRepoChangeset(object): details=details, ) response = api_call(self.app, params) - expected = 'commit_id must be a string value' + expected = "commit_id must be a string value got instead" assert_error(id_, expected, given=response.body) diff --git a/rhodecode/api/tests/test_get_repo_comments.py b/rhodecode/api/tests/test_get_repo_comments.py --- a/rhodecode/api/tests/test_get_repo_comments.py +++ b/rhodecode/api/tests/test_get_repo_comments.py @@ -31,36 +31,38 @@ from rhodecode.api.tests.utils import ( @pytest.fixture() def make_repo_comments_factory(request): - def maker(repo): - user = User.get_first_super_admin() - commit = repo.scm_instance()[0] + class Make(object): + + def make_comments(self, repo): + user = User.get_first_super_admin() + commit = repo.scm_instance()[0] - commit_id = commit.raw_id - file_0 = commit.affected_files[0] - comments = [] + commit_id = commit.raw_id + file_0 = commit.affected_files[0] + comments = [] - # general - CommentsModel().create( - text='General Comment', repo=repo, user=user, commit_id=commit_id, - comment_type=ChangesetComment.COMMENT_TYPE_NOTE, send_email=False) + # general + CommentsModel().create( + text='General Comment', repo=repo, user=user, commit_id=commit_id, + comment_type=ChangesetComment.COMMENT_TYPE_NOTE, send_email=False) - # inline - CommentsModel().create( - text='Inline Comment', repo=repo, user=user, commit_id=commit_id, - f_path=file_0, line_no='n1', - comment_type=ChangesetComment.COMMENT_TYPE_NOTE, send_email=False) + # inline + CommentsModel().create( + text='Inline Comment', repo=repo, user=user, commit_id=commit_id, + f_path=file_0, line_no='n1', + comment_type=ChangesetComment.COMMENT_TYPE_NOTE, send_email=False) - # todo - CommentsModel().create( - text='INLINE TODO Comment', repo=repo, user=user, commit_id=commit_id, - f_path=file_0, line_no='n1', - comment_type=ChangesetComment.COMMENT_TYPE_TODO, send_email=False) + # todo + CommentsModel().create( + text='INLINE TODO Comment', repo=repo, user=user, commit_id=commit_id, + f_path=file_0, line_no='n1', + comment_type=ChangesetComment.COMMENT_TYPE_TODO, send_email=False) - @request.addfinalizer - def cleanup(): - for comment in comments: - Session().delete(comment) - return maker + @request.addfinalizer + def cleanup(): + 
for comment in comments: + Session().delete(comment) + return Make() @pytest.mark.usefixtures("testuser_api", "app") @@ -76,7 +78,7 @@ class TestGetRepo(object): make_repo_comments_factory, filters, expected_count): commits = [{'message': 'A'}, {'message': 'B'}] repo = backend.create_repo(commits=commits) - make_repo_comments_factory(repo) + make_repo_comments_factory.make_comments(repo) api_call_params = {'repoid': repo.repo_name,} api_call_params.update(filters) @@ -92,12 +94,13 @@ class TestGetRepo(object): assert len(result) == expected_count - def test_api_get_repo_comments_wrong_comment_typ(self, backend_hg): + def test_api_get_repo_comments_wrong_comment_type( + self, make_repo_comments_factory, backend_hg): + commits = [{'message': 'A'}, {'message': 'B'}] + repo = backend_hg.create_repo(commits=commits) + make_repo_comments_factory.make_comments(repo) - repo = backend_hg.create_repo() - make_repo_comments_factory(repo) - - api_call_params = {'repoid': repo.repo_name,} + api_call_params = {'repoid': repo.repo_name} api_call_params.update({'comment_type': 'bogus'}) expected = 'comment_type must be one of `{}` got {}'.format( diff --git a/rhodecode/api/tests/test_get_server_info.py b/rhodecode/api/tests/test_get_server_info.py --- a/rhodecode/api/tests/test_get_server_info.py +++ b/rhodecode/api/tests/test_get_server_info.py @@ -25,7 +25,7 @@ from rhodecode.model.scm import ScmModel from rhodecode.api.tests.utils import build_data, api_call, assert_ok -@pytest.fixture +@pytest.fixture() def http_host_stub(): """ To ensure that we can get an IP address, this test shall run with a diff --git a/rhodecode/api/tests/test_merge_pull_request.py b/rhodecode/api/tests/test_merge_pull_request.py --- a/rhodecode/api/tests/test_merge_pull_request.py +++ b/rhodecode/api/tests/test_merge_pull_request.py @@ -120,7 +120,7 @@ class TestMergePullRequest(object): journal = UserLog.query()\ .filter(UserLog.user_id == author)\ .filter(UserLog.repository_id == repo) \ - .order_by('user_log_id') \ + .order_by(UserLog.user_log_id.asc()) \ .all() assert journal[-2].action == 'repo.pull_request.merge' assert journal[-1].action == 'repo.pull_request.close' @@ -221,7 +221,7 @@ class TestMergePullRequest(object): journal = UserLog.query() \ .filter(UserLog.user_id == merge_user_id) \ .filter(UserLog.repository_id == repo) \ - .order_by('user_log_id') \ + .order_by(UserLog.user_log_id.asc()) \ .all() assert journal[-2].action == 'repo.pull_request.merge' assert journal[-1].action == 'repo.pull_request.close' diff --git a/rhodecode/api/tests/test_update_user.py b/rhodecode/api/tests/test_update_user.py --- a/rhodecode/api/tests/test_update_user.py +++ b/rhodecode/api/tests/test_update_user.py @@ -42,7 +42,8 @@ class TestUpdateUser(object): ('extern_name', None), ('active', False), ('active', True), - ('password', 'newpass') + ('password', 'newpass'), + ('description', 'CTO 4 Life') ]) def test_api_update_user(self, name, expected, user_util): usr = user_util.create_user() diff --git a/rhodecode/api/tests/utils.py b/rhodecode/api/tests/utils.py --- a/rhodecode/api/tests/utils.py +++ b/rhodecode/api/tests/utils.py @@ -20,11 +20,16 @@ import random +import pytest from rhodecode.api.utils import get_origin from rhodecode.lib.ext_json import json +def jsonify(obj): + return json.loads(json.dumps(obj)) + + API_URL = '/_admin/api' @@ -42,12 +47,16 @@ def assert_call_ok(id_, given): def assert_ok(id_, expected, given): + given = json.loads(given) + if given.get('error'): + pytest.fail("Unexpected ERROR in success response: 
{}".format(given['error'])) + expected = jsonify({ 'id': id_, 'error': None, 'result': expected }) - given = json.loads(given) + assert expected == given @@ -61,10 +70,6 @@ def assert_error(id_, expected, given): assert expected == given -def jsonify(obj): - return json.loads(json.dumps(obj)) - - def build_data(apikey, method, **kw): """ Builds API data with given random ID diff --git a/rhodecode/api/views/pull_request_api.py b/rhodecode/api/views/pull_request_api.py --- a/rhodecode/api/views/pull_request_api.py +++ b/rhodecode/api/views/pull_request_api.py @@ -451,7 +451,7 @@ def comment_pull_request( request, apiuser, pullrequestid, repoid=Optional(None), message=Optional(None), commit_id=Optional(None), status=Optional(None), comment_type=Optional(ChangesetComment.COMMENT_TYPE_NOTE), - resolves_comment_id=Optional(None), + resolves_comment_id=Optional(None), extra_recipients=Optional([]), userid=Optional(OAttr('apiuser'))): """ Comment on the pull request specified with the `pullrequestid`, @@ -476,6 +476,11 @@ def comment_pull_request( :type status: str :param comment_type: Comment type, one of: 'note', 'todo' :type comment_type: Optional(str), default: 'note' + :param resolves_comment_id: id of comment which this one will resolve + :type resolves_comment_id: Optional(int) + :param extra_recipients: list of user ids or usernames to add + notifications for this comment. Acts like a CC for notification + :type extra_recipients: Optional(list) :param userid: Comment on the pull request as this user :type userid: Optional(str or int) @@ -521,6 +526,7 @@ def comment_pull_request( commit_id = Optional.extract(commit_id) comment_type = Optional.extract(comment_type) resolves_comment_id = Optional.extract(resolves_comment_id) + extra_recipients = Optional.extract(extra_recipients) if not message and not status: raise JSONRPCError( @@ -580,7 +586,8 @@ def comment_pull_request( renderer=renderer, comment_type=comment_type, resolves_comment_id=resolves_comment_id, - auth_user=auth_user + auth_user=auth_user, + extra_recipients=extra_recipients ) if allowed_to_change_status and status: @@ -888,7 +895,9 @@ def update_pull_request( with pull_request.set_state(PullRequest.STATE_UPDATING): if PullRequestModel().has_valid_update_type(pull_request): - update_response = PullRequestModel().update_commits(pull_request) + db_user = apiuser.get_instance() + update_response = PullRequestModel().update_commits( + pull_request, db_user) commit_changes = update_response.changes or commit_changes Session().commit() diff --git a/rhodecode/api/views/repo_api.py b/rhodecode/api/views/repo_api.py --- a/rhodecode/api/views/repo_api.py +++ b/rhodecode/api/views/repo_api.py @@ -617,9 +617,7 @@ def get_repo_fts_tree(request, apiuser, cache_namespace_uid = 'cache_repo.{}'.format(repo_id) region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid) - @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, - condition=cache_on) - def compute_fts_tree(repo_id, commit_id, root_path, cache_ver): + def compute_fts_tree(cache_ver, repo_id, commit_id, root_path): return ScmModel().get_fts_data(repo_id, commit_id, root_path) try: @@ -640,7 +638,7 @@ def get_repo_fts_tree(request, apiuser, 'with caching: %s[TTL: %ss]' % ( repo_id, commit_id, cache_on, cache_seconds or 0)) - tree_files = compute_fts_tree(repo_id, commit_id, root_path, 'v1') + tree_files = compute_fts_tree(rc_cache.FILE_TREE_CACHE_VER, repo_id, commit_id, root_path) return tree_files except Exception: @@ -714,7 +712,7 @@ def create_repo( 
private=Optional(False), clone_uri=Optional(None), push_uri=Optional(None), - landing_rev=Optional('rev:tip'), + landing_rev=Optional(None), enable_statistics=Optional(False), enable_locking=Optional(False), enable_downloads=Optional(False), @@ -749,7 +747,7 @@ def create_repo( :type clone_uri: str :param push_uri: set push_uri :type push_uri: str - :param landing_rev: : + :param landing_rev: :, e.g branch:default, book:dev, rev:abcd :type landing_rev: str :param enable_locking: :type enable_locking: bool @@ -793,7 +791,6 @@ def create_repo( copy_permissions = Optional.extract(copy_permissions) clone_uri = Optional.extract(clone_uri) push_uri = Optional.extract(push_uri) - landing_commit_ref = Optional.extract(landing_rev) defs = SettingsModel().get_default_repo_settings(strip_prefix=True) if isinstance(private, Optional): @@ -807,8 +804,15 @@ def create_repo( if isinstance(enable_downloads, Optional): enable_downloads = defs.get('repo_enable_downloads') + landing_ref, _label = ScmModel.backend_landing_ref(repo_type) + ref_choices, _labels = ScmModel().get_repo_landing_revs(request.translate) + ref_choices = list(set(ref_choices + [landing_ref])) + + landing_commit_ref = Optional.extract(landing_rev) or landing_ref + schema = repo_schema.RepoSchema().bind( repo_type_options=rhodecode.BACKENDS.keys(), + repo_ref_options=ref_choices, repo_type=repo_type, # user caller user=apiuser) @@ -958,7 +962,7 @@ def update_repo( owner=Optional(OAttr('apiuser')), description=Optional(''), private=Optional(False), clone_uri=Optional(None), push_uri=Optional(None), - landing_rev=Optional('rev:tip'), fork_of=Optional(None), + landing_rev=Optional(None), fork_of=Optional(None), enable_statistics=Optional(False), enable_locking=Optional(False), enable_downloads=Optional(False), fields=Optional('')): @@ -993,7 +997,7 @@ def update_repo( :type private: bool :param clone_uri: Update the |repo| clone URI. :type clone_uri: str - :param landing_rev: Set the |repo| landing revision. Default is ``rev:tip``. + :param landing_rev: Set the |repo| landing revision. e.g branch:default, book:dev, rev:abcd :type landing_rev: str :param enable_statistics: Enable statistics on the |repo|, (True | False). :type enable_statistics: bool @@ -1049,8 +1053,10 @@ def update_repo( repo_enable_downloads=enable_downloads if not isinstance(enable_downloads, Optional) else repo.enable_downloads) + landing_ref, _label = ScmModel.backend_landing_ref(repo.repo_type) ref_choices, _labels = ScmModel().get_repo_landing_revs( request.translate, repo=repo) + ref_choices = list(set(ref_choices + [landing_ref])) old_values = repo.get_api_data() repo_type = repo.repo_type @@ -1128,7 +1134,7 @@ def fork_repo(request, apiuser, repoid, description=Optional(''), private=Optional(False), clone_uri=Optional(None), - landing_rev=Optional('rev:tip'), + landing_rev=Optional(None), copy_permissions=Optional(False)): """ Creates a fork of the specified |repo|. @@ -1158,7 +1164,7 @@ def fork_repo(request, apiuser, repoid, :type copy_permissions: bool :param private: Make the fork private. The default is False. :type private: bool - :param landing_rev: Set the landing revision. The default is tip. + :param landing_rev: Set the landing revision. 
E.g branch:default, book:dev, rev:abcd Example output: @@ -1210,11 +1216,17 @@ def fork_repo(request, apiuser, repoid, description = Optional.extract(description) copy_permissions = Optional.extract(copy_permissions) clone_uri = Optional.extract(clone_uri) - landing_commit_ref = Optional.extract(landing_rev) + + landing_ref, _label = ScmModel.backend_landing_ref(repo.repo_type) + ref_choices, _labels = ScmModel().get_repo_landing_revs(request.translate) + ref_choices = list(set(ref_choices + [landing_ref])) + landing_commit_ref = Optional.extract(landing_rev) or landing_ref + private = Optional.extract(private) schema = repo_schema.RepoSchema().bind( repo_type_options=rhodecode.BACKENDS.keys(), + repo_ref_options=ref_choices, repo_type=repo.repo_type, # user caller user=apiuser) @@ -1538,7 +1550,7 @@ def lock(request, apiuser, repoid, locke def comment_commit( request, apiuser, repoid, commit_id, message, status=Optional(None), comment_type=Optional(ChangesetComment.COMMENT_TYPE_NOTE), - resolves_comment_id=Optional(None), + resolves_comment_id=Optional(None), extra_recipients=Optional([]), userid=Optional(OAttr('apiuser'))): """ Set a commit comment, and optionally change the status of the commit. @@ -1556,6 +1568,11 @@ def comment_commit( :type status: str :param comment_type: Comment type, one of: 'note', 'todo' :type comment_type: Optional(str), default: 'note' + :param resolves_comment_id: id of comment which this one will resolve + :type resolves_comment_id: Optional(int) + :param extra_recipients: list of user ids or usernames to add + notifications for this comment. Acts like a CC for notification + :type extra_recipients: Optional(list) :param userid: Set the user name of the comment creator. :type userid: Optional(str or int) @@ -1592,6 +1609,7 @@ def comment_commit( status = Optional.extract(status) comment_type = Optional.extract(comment_type) resolves_comment_id = Optional.extract(resolves_comment_id) + extra_recipients = Optional.extract(extra_recipients) allowed_statuses = [x[0] for x in ChangesetStatus.STATUSES] if status and status not in allowed_statuses: @@ -1620,7 +1638,8 @@ def comment_commit( renderer=renderer, comment_type=comment_type, resolves_comment_id=resolves_comment_id, - auth_user=apiuser + auth_user=apiuser, + extra_recipients=extra_recipients ) if status: # also do a status change diff --git a/rhodecode/api/views/search_api.py b/rhodecode/api/views/search_api.py --- a/rhodecode/api/views/search_api.py +++ b/rhodecode/api/views/search_api.py @@ -33,7 +33,7 @@ log = logging.getLogger(__name__) @jsonrpc_method() def search(request, apiuser, search_query, search_type, page_limit=Optional(10), - page=Optional(1), search_sort=Optional('newfirst'), + page=Optional(1), search_sort=Optional('desc:date'), repo_name=Optional(None), repo_group_name=Optional(None)): """ Fetch Full Text Search results using API. @@ -51,9 +51,15 @@ def search(request, apiuser, search_quer :type page_limit: Optional(int) :param page: Page number. Default first page. :type page: Optional(int) - :param search_sort: Search sort order. Default newfirst. The following are valid options: - * newfirst - * oldfirst + :param search_sort: Search sort order.Must start with asc: or desc: Default desc:date. + The following are valid options: + * asc|desc:message.raw + * asc|desc:date + * asc|desc:author.email.raw + * asc|desc:message.raw + * newfirst (old legacy equal to desc:date) + * oldfirst (old legacy equal to asc:date) + :type search_sort: Optional(str) :param repo_name: Filter by one repo. 
Default is all. :type repo_name: Optional(str) @@ -101,7 +107,7 @@ def search(request, apiuser, search_quer searcher.cleanup() if not search_result['error']: - data['execution_time'] = '%s results (%.3f seconds)' % ( + data['execution_time'] = '%s results (%.4f seconds)' % ( search_result['count'], search_result['runtime']) else: diff --git a/rhodecode/api/views/user_api.py b/rhodecode/api/views/user_api.py --- a/rhodecode/api/views/user_api.py +++ b/rhodecode/api/views/user_api.py @@ -75,6 +75,7 @@ def get_user(request, apiuser, userid=Op "extern_name": "rhodecode", "extern_type": "rhodecode", "firstname": "username", + "description": "user description", "ip_addresses": [], "language": null, "last_login": "Timestamp", @@ -159,7 +160,7 @@ def get_users(request, apiuser): @jsonrpc_method() def create_user(request, apiuser, username, email, password=Optional(''), - firstname=Optional(''), lastname=Optional(''), + firstname=Optional(''), lastname=Optional(''), description=Optional(''), active=Optional(True), admin=Optional(False), extern_name=Optional('rhodecode'), extern_type=Optional('rhodecode'), @@ -185,6 +186,8 @@ def create_user(request, apiuser, userna :type firstname: Optional(str) :param lastname: Set the new user surname. :type lastname: Optional(str) + :param description: Set user description, or short bio. Metatags are allowed. + :type description: Optional(str) :param active: Set the user as active. :type active: Optional(``True`` | ``False``) :param admin: Give the new user admin rights. @@ -250,6 +253,7 @@ def create_user(request, apiuser, userna email = Optional.extract(email) first_name = Optional.extract(firstname) last_name = Optional.extract(lastname) + description = Optional.extract(description) active = Optional.extract(active) admin = Optional.extract(admin) extern_type = Optional.extract(extern_type) @@ -267,6 +271,7 @@ def create_user(request, apiuser, userna last_name=last_name, active=active, admin=admin, + description=description, extern_type=extern_type, extern_name=extern_name, )) @@ -280,6 +285,7 @@ def create_user(request, apiuser, userna email=schema_data['email'], firstname=schema_data['first_name'], lastname=schema_data['last_name'], + description=schema_data['description'], active=schema_data['active'], admin=schema_data['admin'], extern_type=schema_data['extern_type'], @@ -307,7 +313,7 @@ def create_user(request, apiuser, userna def update_user(request, apiuser, userid, username=Optional(None), email=Optional(None), password=Optional(None), firstname=Optional(None), lastname=Optional(None), - active=Optional(None), admin=Optional(None), + description=Optional(None), active=Optional(None), admin=Optional(None), extern_type=Optional(None), extern_name=Optional(None), ): """ Updates the details for the specified user, if that user exists. @@ -331,6 +337,8 @@ def update_user(request, apiuser, userid :type firstname: Optional(str) :param lastname: Set the new surname. :type lastname: Optional(str) + :param description: Set user description, or short bio. Metatags are allowed. + :type description: Optional(str) :param active: Set the new user as active. :type active: Optional(``True`` | ``False``) :param admin: Give the user admin rights. 
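The `search` API shown above now expects `search_sort` in `asc:<field>` / `desc:<field>` form while still accepting the legacy `newfirst` and `oldfirst` values. A tiny illustrative normalizer (not taken from the codebase) makes the accepted shapes explicit::

    LEGACY_SORT = {'newfirst': 'desc:date', 'oldfirst': 'asc:date'}

    def parse_search_sort(search_sort='desc:date'):
        # map the legacy aliases first, then split into direction and field
        search_sort = LEGACY_SORT.get(search_sort, search_sort)
        direction, _, field = search_sort.partition(':')
        if direction not in ('asc', 'desc') or not field:
            raise ValueError(
                'expected asc:<field> or desc:<field>, got %r' % search_sort)
        return direction, field

    assert parse_search_sort('newfirst') == ('desc', 'date')
    assert parse_search_sort('asc:author.email.raw') == ('asc', 'author.email.raw')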
@@ -379,6 +387,7 @@ def update_user(request, apiuser, userid store_update(updates, email, 'email') store_update(updates, firstname, 'name') store_update(updates, lastname, 'lastname') + store_update(updates, description, 'description') store_update(updates, active, 'active') store_update(updates, admin, 'admin') store_update(updates, extern_name, 'extern_name') diff --git a/rhodecode/api/views/user_group_api.py b/rhodecode/api/views/user_group_api.py --- a/rhodecode/api/views/user_group_api.py +++ b/rhodecode/api/views/user_group_api.py @@ -191,7 +191,9 @@ def create_user_group( :param active: Set this group as active. :type active: Optional(``True`` | ``False``) :param sync: Set enabled or disabled the automatically sync from - external authentication types like ldap. + external authentication types like ldap. If User Group will be named like + one from e.g ldap and sync flag is enabled members will be synced automatically. + Sync type when enabled via API is set to `manual_api` :type sync: Optional(``True`` | ``False``) Example output: @@ -303,7 +305,9 @@ def update_user_group(request, apiuser, :param active: Set the group as active. :type active: Optional(``True`` | ``False``) :param sync: Set enabled or disabled the automatically sync from - external authentication types like ldap. + external authentication types like ldap. If User Group will be named like + one from e.g ldap and sync flag is enabled members will be synced automatically. + Sync type when enabled via API is set to `manual_api` :type sync: Optional(``True`` | ``False``) Example output: diff --git a/rhodecode/apps/_base/__init__.py b/rhodecode/apps/_base/__init__.py --- a/rhodecode/apps/_base/__init__.py +++ b/rhodecode/apps/_base/__init__.py @@ -25,9 +25,11 @@ import operator from pyramid import compat from pyramid.httpexceptions import HTTPFound, HTTPForbidden, HTTPBadRequest -from rhodecode.lib import helpers as h, diffs +from rhodecode.lib import helpers as h, diffs, rc_cache from rhodecode.lib.utils2 import ( StrictAttributeDict, str2bool, safe_int, datetime_to_time, safe_unicode) +from rhodecode.lib.markup_renderer import MarkupRenderer, relative_links +from rhodecode.lib.vcs.backends.base import EmptyCommit from rhodecode.lib.vcs.exceptions import RepositoryRequirementError from rhodecode.model import repo from rhodecode.model import repo_group @@ -36,6 +38,7 @@ from rhodecode.model import user from rhodecode.model.db import User from rhodecode.model.scm import ScmModel from rhodecode.model.settings import VcsSettingsModel +from rhodecode.model.repo import ReadmeFinder log = logging.getLogger(__name__) @@ -222,6 +225,7 @@ class RepoAppView(BaseAppView): self.db_repo = request.db_repo self.db_repo_name = self.db_repo.repo_name self.db_repo_pull_requests = ScmModel().get_pull_requests(self.db_repo) + self.db_repo_artifacts = ScmModel().get_artifacts(self.db_repo) def _handle_missing_requirements(self, error): log.error( @@ -237,6 +241,7 @@ class RepoAppView(BaseAppView): c.rhodecode_db_repo = self.db_repo c.repo_name = self.db_repo_name c.repository_pull_requests = self.db_repo_pull_requests + c.repository_artifacts = self.db_repo_artifacts c.repository_is_user_following = ScmModel().is_following_repo( self.db_repo_name, self._rhodecode_user.user_id) self.path_filter = PathFilter(None) @@ -305,6 +310,69 @@ class RepoAppView(BaseAppView): settings = settings_model.get_general_settings() return settings.get(settings_key, default) + def _get_repo_setting(self, target_repo, settings_key, default=False): + settings_model 
= VcsSettingsModel(repo=target_repo) + settings = settings_model.get_repo_settings_inherited() + return settings.get(settings_key, default) + + def _get_readme_data(self, db_repo, renderer_type, commit_id=None, path='/'): + log.debug('Looking for README file at path %s', path) + if commit_id: + landing_commit_id = commit_id + else: + landing_commit = db_repo.get_landing_commit() + if isinstance(landing_commit, EmptyCommit): + return None, None + landing_commit_id = landing_commit.raw_id + + cache_namespace_uid = 'cache_repo.{}'.format(db_repo.repo_id) + region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid) + start = time.time() + + @region.conditional_cache_on_arguments(namespace=cache_namespace_uid) + def generate_repo_readme(repo_id, _commit_id, _repo_name, _readme_search_path, _renderer_type): + readme_data = None + readme_filename = None + + commit = db_repo.get_commit(_commit_id) + log.debug("Searching for a README file at commit %s.", _commit_id) + readme_node = ReadmeFinder(_renderer_type).search(commit, path=_readme_search_path) + + if readme_node: + log.debug('Found README node: %s', readme_node) + relative_urls = { + 'raw': h.route_path( + 'repo_file_raw', repo_name=_repo_name, + commit_id=commit.raw_id, f_path=readme_node.path), + 'standard': h.route_path( + 'repo_files', repo_name=_repo_name, + commit_id=commit.raw_id, f_path=readme_node.path), + } + readme_data = self._render_readme_or_none(commit, readme_node, relative_urls) + readme_filename = readme_node.unicode_path + + return readme_data, readme_filename + + readme_data, readme_filename = generate_repo_readme( + db_repo.repo_id, landing_commit_id, db_repo.repo_name, path, renderer_type,) + compute_time = time.time() - start + log.debug('Repo README for path %s generated and computed in %.4fs', + path, compute_time) + return readme_data, readme_filename + + def _render_readme_or_none(self, commit, readme_node, relative_urls): + log.debug('Found README file `%s` rendering...', readme_node.path) + renderer = MarkupRenderer() + try: + html_source = renderer.render( + readme_node.content, filename=readme_node.path) + if relative_urls: + return relative_links(html_source, relative_urls) + return html_source + except Exception: + log.exception( + "Exception while trying to render the README") + def get_recache_flag(self): for flag_name in ['force_recache', 'force-recache', 'no-cache']: flag_val = self.request.GET.get(flag_name) @@ -464,7 +532,7 @@ class BaseReferencesView(RepoAppView): def load_refs_context(self, ref_items, partials_template): _render = self.request.get_partial_renderer(partials_template) - pre_load = ["author", "date", "message"] + pre_load = ["author", "date", "message", "parents"] is_svn = h.is_svn(self.rhodecode_vcs_repo) is_hg = h.is_hg(self.rhodecode_vcs_repo) diff --git a/rhodecode/apps/admin/__init__.py b/rhodecode/apps/admin/__init__.py --- a/rhodecode/apps/admin/__init__.py +++ b/rhodecode/apps/admin/__init__.py @@ -140,7 +140,6 @@ def admin_routes(config): name='admin_settings_visual_update', pattern='/settings/visual/update') - config.add_route( name='admin_settings_issuetracker', pattern='/settings/issue-tracker') @@ -378,6 +377,10 @@ def admin_routes(config): name='edit_user_audit_logs', pattern='/users/{user_id:\d+}/edit/audit', user_route=True) + config.add_route( + name='edit_user_audit_logs_download', + pattern='/users/{user_id:\d+}/edit/audit/download', user_route=True) + # user caches config.add_route( name='edit_user_caches', @@ -411,6 +414,10 @@ def admin_routes(config): 
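The README handling above memoizes the rendered HTML with a cache key built from the repo, commit id, search path and renderer type, so a moved landing commit naturally produces a fresh key instead of needing explicit invalidation. A standalone sketch of that pattern on plain dogpile.cache, where the in-memory backend, namespace and fake render step are all illustrative rather than the project's rc_cache wiring::

    import time
    from dogpile.cache import make_region

    region = make_region().configure('dogpile.cache.memory')

    @region.cache_on_arguments(namespace='cache_repo.42')
    def generate_repo_readme(repo_id, commit_id, readme_search_path):
        # stand-in for the expensive ReadmeFinder + MarkupRenderer work
        time.sleep(0.5)
        return '<h1>README for repo %s at %s</h1>' % (repo_id, commit_id)

    generate_repo_readme(42, 'deadbeef', '/')   # computed (~0.5s)
    generate_repo_readme(42, 'deadbeef', '/')   # served from the cache
    generate_repo_readme(42, 'cafebabe', '/')   # new commit id -> new key, recomputed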
pattern='/repos') config.add_route( + name='repos_data', + pattern='/repos_data') + + config.add_route( name='repo_new', pattern='/repos/new') diff --git a/rhodecode/apps/admin/tests/test_admin_auth_settings.py b/rhodecode/apps/admin/tests/test_admin_auth_settings.py --- a/rhodecode/apps/admin/tests/test_admin_auth_settings.py +++ b/rhodecode/apps/admin/tests/test_admin_auth_settings.py @@ -155,7 +155,7 @@ class TestAuthSettingsView(object): response = self._post_ldap_settings(params, override={ 'port': invalid_port_value, }) - assertr = AssertResponse(response) + assertr = response.assert_response() assertr.element_contains( '.form .field #port ~ .error-message', invalid_port_value) diff --git a/rhodecode/apps/admin/tests/test_admin_repos.py b/rhodecode/apps/admin/tests/test_admin_repos.py --- a/rhodecode/apps/admin/tests/test_admin_repos.py +++ b/rhodecode/apps/admin/tests/test_admin_repos.py @@ -47,6 +47,7 @@ def route_path(name, params=None, **kwar base_url = { 'repos': ADMIN_PREFIX + '/repos', + 'repos_data': ADMIN_PREFIX + '/repos_data', 'repo_new': ADMIN_PREFIX + '/repos/new', 'repo_create': ADMIN_PREFIX + '/repos/create', @@ -70,24 +71,25 @@ def _get_permission_for_user(user, repo) @pytest.mark.usefixtures("app") class TestAdminRepos(object): - def test_repo_list(self, autologin_user, user_util): + def test_repo_list(self, autologin_user, user_util, xhr_header): repo = user_util.create_repo() repo_name = repo.repo_name response = self.app.get( - route_path('repos'), status=200) + route_path('repos_data'), status=200, + extra_environ=xhr_header) response.mustcontain(repo_name) def test_create_page_restricted_to_single_backend(self, autologin_user, backend): with mock.patch('rhodecode.BACKENDS', {'git': 'git'}): response = self.app.get(route_path('repo_new'), status=200) - assert_response = AssertResponse(response) + assert_response = response.assert_response() element = assert_response.get_element('#repo_type') assert element.text_content() == '\ngit\n' def test_create_page_non_restricted_backends(self, autologin_user, backend): response = self.app.get(route_path('repo_new'), status=200) - assert_response = AssertResponse(response) + assert_response = response.assert_response() assert_response.element_contains('#repo_type', 'git') assert_response.element_contains('#repo_type', 'svn') assert_response.element_contains('#repo_type', 'hg') diff --git a/rhodecode/apps/admin/tests/test_admin_repository_groups.py b/rhodecode/apps/admin/tests/test_admin_repository_groups.py --- a/rhodecode/apps/admin/tests/test_admin_repository_groups.py +++ b/rhodecode/apps/admin/tests/test_admin_repository_groups.py @@ -84,7 +84,7 @@ class TestAdminRepositoryGroups(object): fixture.create_repo_group('test_repo_group') response = self.app.get(route_path( 'repo_groups_data'), extra_environ=xhr_header) - response.mustcontain('"name_raw": "test_repo_group"') + response.mustcontain('Edit'.format('test_repo_group')) fixture.destroy_repo_group('test_repo_group') def test_new(self, autologin_user): diff --git a/rhodecode/apps/admin/tests/test_admin_settings.py b/rhodecode/apps/admin/tests/test_admin_settings.py --- a/rhodecode/apps/admin/tests/test_admin_settings.py +++ b/rhodecode/apps/admin/tests/test_admin_settings.py @@ -367,7 +367,7 @@ class TestAdminSettingsVcs(object): def test_has_an_input_for_invalidation_of_inline_comments(self): response = self.app.get(route_path('admin_settings_vcs')) - assert_response = AssertResponse(response) + assert_response = response.assert_response() 
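As the updated admin tests above show, the repository list now fetches its rows from the new `repos_data` JSON endpoint over XHR instead of rendering them into the page. A minimal webtest sketch of consuming that endpoint outside the project's fixtures, assuming the default `/_admin` prefix, the standard XHR environ header and an already configured, authenticated WSGI app::

    from webtest import TestApp

    def fetch_repos_grid(wsgi_app):
        app = TestApp(wsgi_app)
        xhr_environ = {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'}
        response = app.get('/_admin/repos_data',
                           extra_environ=xhr_environ, status=200)
        grid = response.json  # DataTables-style payload
        return grid['data'], grid['recordsTotal'], grid['recordsFiltered']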
assert_response.one_element_exists( '[name=rhodecode_use_outdated_comments]') @@ -412,14 +412,14 @@ class TestAdminSettingsVcs(object): setting = SettingsModel().get_setting_by_name(setting_key) assert setting.app_settings_value is new_value - @pytest.fixture + @pytest.fixture() def disable_sql_cache(self, request): patcher = mock.patch( 'rhodecode.lib.caching_query.FromCache.process_query') request.addfinalizer(patcher.stop) patcher.start() - @pytest.fixture + @pytest.fixture() def form_defaults(self): from rhodecode.apps.admin.views.settings import AdminSettingsView return AdminSettingsView._form_defaults() @@ -518,7 +518,7 @@ class TestOpenSourceLicenses(object): response = self.app.get( route_path('admin_settings_open_source'), status=200) - assert_response = AssertResponse(response) + assert_response = response.assert_response() assert_response.element_contains( '.panel-heading', 'Licenses of Third Party Packages') for license_data in sample_licenses: @@ -528,7 +528,7 @@ class TestOpenSourceLicenses(object): def test_records_can_be_read(self, autologin_user): response = self.app.get( route_path('admin_settings_open_source'), status=200) - assert_response = AssertResponse(response) + assert_response = response.assert_response() assert_response.element_contains( '.panel-heading', 'Licenses of Third Party Packages') @@ -726,7 +726,7 @@ class TestAdminSettingsIssueTracker(obje IssueTrackerSettingsModel().delete_entries(self.uid) def test_delete_issuetracker_pattern( - self, autologin_user, backend, csrf_token, settings_util): + self, autologin_user, backend, csrf_token, settings_util, xhr_header): pattern = 'issuetracker_pat' uid = md5(pattern) settings_util.create_rhodecode_setting( @@ -734,10 +734,9 @@ class TestAdminSettingsIssueTracker(obje post_url = route_path('admin_settings_issuetracker_delete') post_data = { - '_method': 'delete', 'uid': uid, 'csrf_token': csrf_token } - self.app.post(post_url, post_data, status=302) + self.app.post(post_url, post_data, extra_environ=xhr_header, status=200) settings = SettingsModel().get_all_settings() assert 'rhodecode_%s%s' % (self.SHORT_PATTERN_KEY, uid) not in settings diff --git a/rhodecode/apps/admin/tests/test_admin_users.py b/rhodecode/apps/admin/tests/test_admin_users.py --- a/rhodecode/apps/admin/tests/test_admin_users.py +++ b/rhodecode/apps/admin/tests/test_admin_users.py @@ -91,6 +91,9 @@ def route_path(name, params=None, **kwar 'edit_user_audit_logs': ADMIN_PREFIX + '/users/{user_id}/edit/audit', + 'edit_user_audit_logs_download': + ADMIN_PREFIX + '/users/{user_id}/edit/audit/download', + }[name].format(**kwargs) if params: @@ -318,7 +321,6 @@ class TestAdminUsersView(TestController) route_path('edit_user_emails', user_id=user_id)) response.mustcontain(no=['example@rhodecode.com']) - def test_create(self, request, xhr_header): self.log_user() username = 'newtestuser' @@ -333,6 +335,7 @@ class TestAdminUsersView(TestController) response = self.app.post(route_path('users_create'), params={ 'username': username, 'password': password, + 'description': 'mr CTO', 'password_confirmation': password_confirmation, 'firstname': name, 'active': True, @@ -381,6 +384,7 @@ class TestAdminUsersView(TestController) 'name': name, 'active': False, 'lastname': lastname, + 'description': 'mr CTO', 'email': email, 'csrf_token': self.csrf_token, }) @@ -418,6 +422,7 @@ class TestAdminUsersView(TestController) ('email', {'email': 'some@email.com'}), ('language', {'language': 'de'}), ('language', {'language': 'en'}), + ('description', {'description': 'hello 
CTO'}), # ('new_password', {'new_password': 'foobar123', # 'password_confirmation': 'foobar123'}) ]) @@ -515,7 +520,7 @@ class TestAdminUsersView(TestController) route_path('user_delete', user_id=new_user.user_id), params={'csrf_token': self.csrf_token}) - assert_session_flash(response, 'Successfully deleted user') + assert_session_flash(response, 'Successfully deleted user `{}`'.format(username)) def test_delete_owner_of_repository(self, request, user_util): self.log_user() @@ -531,8 +536,7 @@ class TestAdminUsersView(TestController) params={'csrf_token': self.csrf_token}) msg = 'user "%s" still owns 1 repositories and cannot be removed. ' \ - 'Switch owners or remove those repositories:%s' % (username, - obj_name) + 'Switch owners or remove those repositories:%s' % (username, obj_name) assert_session_flash(response, msg) fixture.destroy_repo(obj_name) @@ -542,6 +546,7 @@ class TestAdminUsersView(TestController) usr = user_util.create_user(auto_cleanup=False) username = usr.username fixture.create_repo(obj_name, cur_user=usr.username) + Session().commit() new_user = Session().query(User)\ .filter(User.username == username).one() @@ -583,8 +588,7 @@ class TestAdminUsersView(TestController) params={'csrf_token': self.csrf_token}) msg = 'user "%s" still owns 1 repository groups and cannot be removed. ' \ - 'Switch owners or remove those repository groups:%s' % (username, - obj_name) + 'Switch owners or remove those repository groups:%s' % (username, obj_name) assert_session_flash(response, msg) fixture.destroy_repo_group(obj_name) @@ -635,8 +639,7 @@ class TestAdminUsersView(TestController) params={'csrf_token': self.csrf_token}) msg = 'user "%s" still owns 1 user groups and cannot be removed. ' \ - 'Switch owners or remove those user groups:%s' % (username, - obj_name) + 'Switch owners or remove those user groups:%s' % (username, obj_name) assert_session_flash(response, msg) fixture.destroy_user_group(obj_name) @@ -779,3 +782,13 @@ class TestAdminUsersView(TestController) user = self.log_user() self.app.get( route_path('edit_user_audit_logs', user_id=user['user_id'])) + + def test_audit_log_page_download(self): + user = self.log_user() + user_id = user['user_id'] + response = self.app.get( + route_path('edit_user_audit_logs_download', user_id=user_id)) + + assert response.content_disposition == \ + 'attachment; filename=user_{}_audit_logs.json'.format(user_id) + assert response.content_type == "application/json" diff --git a/rhodecode/apps/admin/views/audit_logs.py b/rhodecode/apps/admin/views/audit_logs.py --- a/rhodecode/apps/admin/views/audit_logs.py +++ b/rhodecode/apps/admin/views/audit_logs.py @@ -28,7 +28,7 @@ from rhodecode.model.db import joinedloa from rhodecode.lib.user_log_filter import user_log_filter from rhodecode.lib.auth import LoginRequired, HasPermissionAllDecorator from rhodecode.lib.utils2 import safe_int -from rhodecode.lib.helpers import Page +from rhodecode.lib.helpers import SqlPage log = logging.getLogger(__name__) @@ -62,13 +62,16 @@ class AdminAuditLogsView(BaseAppView): p = safe_int(self.request.GET.get('page', 1), 1) - def url_generator(**kw): + def url_generator(page_num): + query_params = { + 'page': page_num + } if c.search_term: - kw['filter'] = c.search_term - return self.request.current_route_path(_query=kw) + query_params['filter'] = c.search_term + return self.request.current_route_path(_query=query_params) - c.audit_logs = Page(users_log, page=p, items_per_page=10, - url=url_generator) + c.audit_logs = SqlPage(users_log, page=p, items_per_page=10, + 
url_maker=url_generator) return self._get_template_context(c) @LoginRequired() diff --git a/rhodecode/apps/admin/views/main_views.py b/rhodecode/apps/admin/views/main_views.py --- a/rhodecode/apps/admin/views/main_views.py +++ b/rhodecode/apps/admin/views/main_views.py @@ -25,7 +25,7 @@ from pyramid.view import view_config from rhodecode.apps._base import BaseAppView from rhodecode.lib import helpers as h -from rhodecode.lib.auth import (LoginRequired, NotAnonymous) +from rhodecode.lib.auth import (LoginRequired, NotAnonymous, HasRepoPermissionAny) from rhodecode.model.db import PullRequest @@ -66,6 +66,13 @@ class AdminMainView(BaseAppView): pull_request_id = pull_request.pull_request_id repo_name = pull_request.target_repo.repo_name + # NOTE(marcink): + # check permissions so we don't redirect to repo that we don't have access to + # exposing it's name + target_repo_perm = HasRepoPermissionAny( + 'repository.read', 'repository.write', 'repository.admin')(repo_name) + if not target_repo_perm: + raise HTTPNotFound() raise HTTPFound( h.route_path('pullrequest_show', repo_name=repo_name, diff --git a/rhodecode/apps/admin/views/permissions.py b/rhodecode/apps/admin/views/permissions.py --- a/rhodecode/apps/admin/views/permissions.py +++ b/rhodecode/apps/admin/views/permissions.py @@ -68,7 +68,8 @@ class AdminPermissionsView(BaseAppView, c.user = User.get_default_user(refresh=True) - app_settings = SettingsModel().get_all_settings() + app_settings = c.rc_config + defaults = { 'anonymous': c.user.active, 'default_register_message': app_settings.get( diff --git a/rhodecode/apps/admin/views/process_management.py b/rhodecode/apps/admin/views/process_management.py --- a/rhodecode/apps/admin/views/process_management.py +++ b/rhodecode/apps/admin/views/process_management.py @@ -47,7 +47,7 @@ class AdminProcessManagementView(BaseApp 'name': proc.name(), 'mem_rss': mem.rss, 'mem_vms': mem.vms, - 'cpu_percent': proc.cpu_percent(), + 'cpu_percent': proc.cpu_percent(interval=0.1), 'create_time': proc.create_time(), 'cmd': ' '.join(proc.cmdline()), }) diff --git a/rhodecode/apps/admin/views/repo_groups.py b/rhodecode/apps/admin/views/repo_groups.py --- a/rhodecode/apps/admin/views/repo_groups.py +++ b/rhodecode/apps/admin/views/repo_groups.py @@ -19,6 +19,8 @@ # and proprietary license terms, please see https://rhodecode.com/licenses/ import datetime import logging +import time + import formencode import formencode.htmlfill @@ -63,7 +65,7 @@ class AdminRepoGroupsView(BaseAppView, D # and display only those we have ADMIN right groups_with_admin_rights = RepoGroupList( RepoGroup.query().all(), - perm_set=['group.admin']) + perm_set=['group.admin'], extra_kwargs=dict(user=self._rhodecode_user)) c.repo_groups = RepoGroup.groups_choices( groups=groups_with_admin_rights, show_empty_group=allow_empty_group) @@ -109,9 +111,9 @@ class AdminRepoGroupsView(BaseAppView, D def repo_group_list_data(self): self.load_default_context() column_map = { - 'name_raw': 'group_name_hash', + 'name': 'group_name_hash', 'desc': 'group_description', - 'last_change_raw': 'updated_on', + 'last_change': 'updated_on', 'top_level_repos': 'repos_total', 'owner': 'user_username', } @@ -131,9 +133,10 @@ class AdminRepoGroupsView(BaseAppView, D def last_change(last_change): if isinstance(last_change, datetime.datetime) and not last_change.tzinfo: - delta = datetime.timedelta( - seconds=(datetime.datetime.now() - datetime.datetime.utcnow()).seconds) - last_change = last_change + delta + ts = time.time() + utc_offset = 
(datetime.datetime.fromtimestamp(ts) + - datetime.datetime.utcfromtimestamp(ts)).total_seconds() + last_change = last_change + datetime.timedelta(seconds=utc_offset) return _render("last_change", last_change) def desc(desc, personal): @@ -147,12 +150,8 @@ class AdminRepoGroupsView(BaseAppView, D def user_profile(username): return _render('user_profile', username) - auth_repo_group_list = RepoGroupList( - RepoGroup.query().all(), perm_set=['group.admin']) - - allowed_ids = [-1] - for repo_group in auth_repo_group_list: - allowed_ids.append(repo_group.group_id) + _perms = ['group.admin'] + allowed_ids = [-1] + self._rhodecode_user.repo_group_acl_ids_from_stack(_perms) repo_groups_data_total_count = RepoGroup.query()\ .filter(or_( @@ -180,7 +179,7 @@ class AdminRepoGroupsView(BaseAppView, D # generate multiple IN to fix limitation problems *in_filter_generator(RepoGroup.group_id, allowed_ids) )) \ - .outerjoin(Repository) \ + .outerjoin(Repository, Repository.group_id == RepoGroup.group_id) \ .join(User, User.user_id == RepoGroup.user_id) \ .group_by(RepoGroup, User) @@ -224,9 +223,8 @@ class AdminRepoGroupsView(BaseAppView, D row = { "menu": quick_menu(repo_gr.group_name), "name": repo_group_lnk(repo_gr.group_name), - "name_raw": repo_gr.group_name, + "last_change": last_change(repo_gr.updated_on), - "last_change_raw": datetime_to_time(repo_gr.updated_on), "last_changeset": "", "last_changeset_raw": "", diff --git a/rhodecode/apps/admin/views/repositories.py b/rhodecode/apps/admin/views/repositories.py --- a/rhodecode/apps/admin/views/repositories.py +++ b/rhodecode/apps/admin/views/repositories.py @@ -31,7 +31,6 @@ from rhodecode import events from rhodecode.apps._base import BaseAppView, DataGridAppView from rhodecode.lib.celerylib.utils import get_task_id -from rhodecode.lib.ext_json import json from rhodecode.lib.auth import ( LoginRequired, CSRFRequired, NotAnonymous, HasPermissionAny, HasRepoGroupPermissionAny) @@ -43,7 +42,8 @@ from rhodecode.model.permission import P from rhodecode.model.repo import RepoModel from rhodecode.model.scm import RepoList, RepoGroupList, ScmModel from rhodecode.model.settings import SettingsModel -from rhodecode.model.db import Repository, RepoGroup +from rhodecode.model.db import ( + in_filter_generator, or_, func, Session, Repository, RepoGroup, User) log = logging.getLogger(__name__) @@ -60,8 +60,6 @@ class AdminReposView(BaseAppView, DataGr perm_set=['group.write', 'group.admin']) c.repo_groups = RepoGroup.groups_choices(groups=acl_groups) c.repo_groups_choices = map(lambda k: safe_unicode(k[0]), c.repo_groups) - c.landing_revs_choices, c.landing_revs = \ - ScmModel().get_repo_landing_revs(self.request.translate) c.personal_repo_group = self._rhodecode_user.personal_repo_group @LoginRequired() @@ -72,15 +70,94 @@ class AdminReposView(BaseAppView, DataGr renderer='rhodecode:templates/admin/repos/repos.mako') def repository_list(self): c = self.load_default_context() + return self._get_template_context(c) - repo_list = Repository.get_all_repos() - c.repo_list = RepoList(repo_list, perm_set=['repository.admin']) + @LoginRequired() + @NotAnonymous() + # perms check inside + @view_config( + route_name='repos_data', request_method='GET', + renderer='json_ext', xhr=True) + def repository_list_data(self): + self.load_default_context() + column_map = { + 'name': 'repo_name', + 'desc': 'description', + 'last_change': 'updated_on', + 'owner': 'user_username', + } + draw, start, limit = self._extract_chunk(self.request) + search_q, order_by, order_dir = 
self._extract_ordering( + self.request, column_map=column_map) + + _perms = ['repository.admin'] + allowed_ids = [-1] + self._rhodecode_user.repo_acl_ids_from_stack(_perms) + + repos_data_total_count = Repository.query() \ + .filter(or_( + # generate multiple IN to fix limitation problems + *in_filter_generator(Repository.repo_id, allowed_ids)) + ) \ + .count() + + base_q = Session.query( + Repository.repo_id, + Repository.repo_name, + Repository.description, + Repository.repo_type, + Repository.repo_state, + Repository.private, + Repository.archived, + Repository.fork, + Repository.updated_on, + Repository._changeset_cache, + User, + ) \ + .filter(or_( + # generate multiple IN to fix limitation problems + *in_filter_generator(Repository.repo_id, allowed_ids)) + ) \ + .join(User, User.user_id == Repository.user_id) \ + .group_by(Repository, User) + + if search_q: + like_expression = u'%{}%'.format(safe_unicode(search_q)) + base_q = base_q.filter(or_( + Repository.repo_name.ilike(like_expression), + )) + + repos_data_total_filtered_count = base_q.count() + + sort_defined = False + if order_by == 'repo_name': + sort_col = func.lower(Repository.repo_name) + sort_defined = True + elif order_by == 'user_username': + sort_col = User.username + else: + sort_col = getattr(Repository, order_by, None) + + if sort_defined or sort_col: + if order_dir == 'asc': + sort_col = sort_col.asc() + else: + sort_col = sort_col.desc() + + base_q = base_q.order_by(sort_col) + base_q = base_q.offset(start).limit(limit) + + repos_list = base_q.all() + repos_data = RepoModel().get_repos_as_dict( - repo_list=c.repo_list, admin=True, super_user_actions=True) - # json used to render the grid - c.data = json.dumps(repos_data) + repo_list=repos_list, admin=True, super_user_actions=True) - return self._get_template_context(c) + data = ({ + 'draw': draw, + 'data': repos_data, + 'recordsTotal': repos_data_total_count, + 'recordsFiltered': repos_data_total_filtered_count, + }) + return data @LoginRequired() @NotAnonymous() @@ -151,8 +228,7 @@ class AdminReposView(BaseAppView, DataGr try: # CanWriteToGroup validators checks permissions of this POST form = RepoForm( - self.request.translate, repo_groups=c.repo_groups_choices, - landing_revs=c.landing_revs_choices)() + self.request.translate, repo_groups=c.repo_groups_choices)() form_result = form.to_python(dict(self.request.POST)) copy_permissions = form_result.get('repo_copy_permissions') # create is done sometimes async on celery, db transaction diff --git a/rhodecode/apps/admin/views/settings.py b/rhodecode/apps/admin/views/settings.py --- a/rhodecode/apps/admin/views/settings.py +++ b/rhodecode/apps/admin/views/settings.py @@ -445,7 +445,7 @@ class AdminSettingsView(BaseAppView): def settings_issuetracker(self): c = self.load_default_context() c.active = 'issuetracker' - defaults = SettingsModel().get_all_settings() + defaults = c.rc_config entry_key = 'rhodecode_issuetracker_pat_' @@ -518,7 +518,7 @@ class AdminSettingsView(BaseAppView): @CSRFRequired() @view_config( route_name='admin_settings_issuetracker_delete', request_method='POST', - renderer='rhodecode:templates/admin/settings/settings.mako') + renderer='json_ext', xhr=True) def settings_issuetracker_delete(self): _ = self.request.translate self.load_default_context() @@ -528,8 +528,11 @@ class AdminSettingsView(BaseAppView): except Exception: log.exception('Failed to delete issue tracker setting %s', uid) raise HTTPNotFound() - h.flash(_('Removed issue tracker entry'), category='success') - raise 
HTTPFound(h.route_path('admin_settings_issuetracker')) + + SettingsModel().invalidate_settings_cache() + h.flash(_('Removed issue tracker entry.'), category='success') + + return {'deleted': uid} @LoginRequired() @HasPermissionAllDecorator('hg.admin') @@ -570,8 +573,7 @@ class AdminSettingsView(BaseAppView): email_kwargs = { 'date': datetime.datetime.now(), - 'user': c.rhodecode_user, - 'rhodecode_version': c.rhodecode_version + 'user': c.rhodecode_user } (subject, headers, email_body, diff --git a/rhodecode/apps/admin/views/system_info.py b/rhodecode/apps/admin/views/system_info.py --- a/rhodecode/apps/admin/views/system_info.py +++ b/rhodecode/apps/admin/views/system_info.py @@ -155,6 +155,10 @@ class AdminSystemInfoSettingsView(BaseAp ] + c.vcsserver_data_items = [ + (k, v) for k,v in (val('vcs_server_config') or {}).items() + ] + if snapshot: if c.allowed_to_snapshot: c.data_items.pop(0) # remove server info diff --git a/rhodecode/apps/admin/views/user_groups.py b/rhodecode/apps/admin/views/user_groups.py --- a/rhodecode/apps/admin/views/user_groups.py +++ b/rhodecode/apps/admin/views/user_groups.py @@ -99,12 +99,8 @@ class AdminUserGroupsView(BaseAppView, D def user_profile(username): return _render('user_profile', username) - auth_user_group_list = UserGroupList( - UserGroup.query().all(), perm_set=['usergroup.admin']) - - allowed_ids = [-1] - for user_group in auth_user_group_list: - allowed_ids.append(user_group.users_group_id) + _perms = ['usergroup.admin'] + allowed_ids = [-1] + self._rhodecode_user.user_group_acl_ids_from_stack(_perms) user_groups_data_total_count = UserGroup.query()\ .filter(or_( @@ -134,7 +130,7 @@ class AdminUserGroupsView(BaseAppView, D # generate multiple IN to fix limitation problems *in_filter_generator(UserGroup.users_group_id, allowed_ids) )) \ - .outerjoin(UserGroupMember) \ + .outerjoin(UserGroupMember, UserGroupMember.users_group_id == UserGroup.users_group_id) \ .join(User, User.user_id == UserGroup.user_id) \ .group_by(UserGroup, User) @@ -175,7 +171,6 @@ class AdminUserGroupsView(BaseAppView, D for user_gr in auth_user_group_list: row = { "users_group_name": user_group_name(user_gr.users_group_name), - "name_raw": h.escape(user_gr.users_group_name), "description": h.escape(user_gr.user_group_description), "members": user_gr.member_count, # NOTE(marcink): because of advanced query we diff --git a/rhodecode/apps/admin/views/users.py b/rhodecode/apps/admin/views/users.py --- a/rhodecode/apps/admin/views/users.py +++ b/rhodecode/apps/admin/views/users.py @@ -31,6 +31,7 @@ from pyramid.response import Response from rhodecode import events from rhodecode.apps._base import BaseAppView, DataGridAppView, UserAppView from rhodecode.apps.ssh_support import SshKeyFileChangeEvent +from rhodecode.authentication.base import get_authn_registry, RhodeCodeExternalAuthPlugin from rhodecode.authentication.plugins import auth_rhodecode from rhodecode.events import trigger from rhodecode.model.db import true @@ -43,6 +44,7 @@ from rhodecode.lib.ext_json import json from rhodecode.lib.auth import ( LoginRequired, HasPermissionAllDecorator, CSRFRequired) from rhodecode.lib import helpers as h +from rhodecode.lib.helpers import SqlPage from rhodecode.lib.utils2 import safe_int, safe_unicode, AttributeDict from rhodecode.model.auth_token import AuthTokenModel from rhodecode.model.forms import ( @@ -249,7 +251,32 @@ class UsersView(UserAppView): in there as well. 
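One detail behind the process-management change above: psutil needs a sampling window to report a meaningful CPU percentage, which is why the view now passes `interval=0.1`. A quick illustration, not project code::

    import psutil

    proc = psutil.Process()                 # current process
    print(proc.cpu_percent())               # first non-blocking call reports 0.0
    print(proc.cpu_percent(interval=0.1))   # blocks ~100 ms, returns a real sample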
""" + def get_auth_plugins(self): + valid_plugins = [] + authn_registry = get_authn_registry(self.request.registry) + for plugin in authn_registry.get_plugins_for_authentication(): + if isinstance(plugin, RhodeCodeExternalAuthPlugin): + valid_plugins.append(plugin) + elif plugin.name == 'rhodecode': + valid_plugins.append(plugin) + + # extend our choices if user has set a bound plugin which isn't enabled at the + # moment + extern_type = self.db_user.extern_type + if extern_type not in [x.uid for x in valid_plugins]: + try: + plugin = authn_registry.get_plugin_by_uid(extern_type) + if plugin: + valid_plugins.append(plugin) + + except Exception: + log.exception( + 'Could not extend user plugins with `{}`'.format(extern_type)) + return valid_plugins + def load_default_context(self): + req = self.request + c = self._get_local_tmpl_context() c.allow_scoped_tokens = self.ALLOW_SCOPED_TOKENS c.allowed_languages = [ @@ -263,7 +290,10 @@ class UsersView(UserAppView): ('ru', 'Russian (ru)'), ('zh', 'Chinese (zh)'), ] - req = self.request + + c.allowed_extern_types = [ + (x.uid, x.get_display_name()) for x in self.get_auth_plugins() + ] c.available_permissions = req.registry.settings['available_permissions'] PermissionModel().set_global_permission_choices( @@ -297,7 +327,7 @@ class UsersView(UserAppView): old_values = c.user.get_api_data() try: form_result = _form.to_python(dict(self.request.POST)) - skip_attrs = ['extern_type', 'extern_name'] + skip_attrs = ['extern_name'] # TODO: plugin should define if username can be updated if c.extern_type != "rhodecode": # forbid updating username for external accounts @@ -347,59 +377,69 @@ class UsersView(UserAppView): _repos = c.user.repositories _repo_groups = c.user.repository_groups _user_groups = c.user.user_groups + _artifacts = c.user.artifacts handle_repos = None handle_repo_groups = None handle_user_groups = None - # dummy call for flash of handle - set_handle_flash_repos = lambda: None - set_handle_flash_repo_groups = lambda: None - set_handle_flash_user_groups = lambda: None + handle_artifacts = None + + # calls for flash of handle based on handle case detach or delete + def set_handle_flash_repos(): + handle = handle_repos + if handle == 'detach': + h.flash(_('Detached %s repositories') % len(_repos), + category='success') + elif handle == 'delete': + h.flash(_('Deleted %s repositories') % len(_repos), + category='success') + + def set_handle_flash_repo_groups(): + handle = handle_repo_groups + if handle == 'detach': + h.flash(_('Detached %s repository groups') % len(_repo_groups), + category='success') + elif handle == 'delete': + h.flash(_('Deleted %s repository groups') % len(_repo_groups), + category='success') + + def set_handle_flash_user_groups(): + handle = handle_user_groups + if handle == 'detach': + h.flash(_('Detached %s user groups') % len(_user_groups), + category='success') + elif handle == 'delete': + h.flash(_('Deleted %s user groups') % len(_user_groups), + category='success') + + def set_handle_flash_artifacts(): + handle = handle_artifacts + if handle == 'detach': + h.flash(_('Detached %s artifacts') % len(_artifacts), + category='success') + elif handle == 'delete': + h.flash(_('Deleted %s artifacts') % len(_artifacts), + category='success') if _repos and self.request.POST.get('user_repos'): - do = self.request.POST['user_repos'] - if do == 'detach': - handle_repos = 'detach' - set_handle_flash_repos = lambda: h.flash( - _('Detached %s repositories') % len(_repos), - category='success') - elif do == 'delete': - handle_repos = 
'delete' - set_handle_flash_repos = lambda: h.flash( - _('Deleted %s repositories') % len(_repos), - category='success') + handle_repos = self.request.POST['user_repos'] if _repo_groups and self.request.POST.get('user_repo_groups'): - do = self.request.POST['user_repo_groups'] - if do == 'detach': - handle_repo_groups = 'detach' - set_handle_flash_repo_groups = lambda: h.flash( - _('Detached %s repository groups') % len(_repo_groups), - category='success') - elif do == 'delete': - handle_repo_groups = 'delete' - set_handle_flash_repo_groups = lambda: h.flash( - _('Deleted %s repository groups') % len(_repo_groups), - category='success') + handle_repo_groups = self.request.POST['user_repo_groups'] if _user_groups and self.request.POST.get('user_user_groups'): - do = self.request.POST['user_user_groups'] - if do == 'detach': - handle_user_groups = 'detach' - set_handle_flash_user_groups = lambda: h.flash( - _('Detached %s user groups') % len(_user_groups), - category='success') - elif do == 'delete': - handle_user_groups = 'delete' - set_handle_flash_user_groups = lambda: h.flash( - _('Deleted %s user groups') % len(_user_groups), - category='success') + handle_user_groups = self.request.POST['user_user_groups'] + + if _artifacts and self.request.POST.get('user_artifacts'): + handle_artifacts = self.request.POST['user_artifacts'] old_values = c.user.get_api_data() + try: UserModel().delete(c.user, handle_repos=handle_repos, handle_repo_groups=handle_repo_groups, - handle_user_groups=handle_user_groups) + handle_user_groups=handle_user_groups, + handle_artifacts=handle_artifacts) audit_logger.store_web( 'user.delete', action_data={'old_data': old_values}, @@ -409,7 +449,9 @@ class UsersView(UserAppView): set_handle_flash_repos() set_handle_flash_repo_groups() set_handle_flash_user_groups() - h.flash(_('Successfully deleted user'), category='success') + set_handle_flash_artifacts() + username = h.escape(old_values['username']) + h.flash(_('Successfully deleted user `{}`').format(username), category='success') except (UserOwnsReposException, UserOwnsRepoGroupsException, UserOwnsUserGroupsException, DefaultUserException) as e: h.flash(e, category='warning') @@ -1187,19 +1229,45 @@ class UsersView(UserAppView): filter_term = self.request.GET.get('filter') user_log = UserModel().get_user_log(c.user, filter_term) - def url_generator(**kw): + def url_generator(page_num): + query_params = { + 'page': page_num + } if filter_term: - kw['filter'] = filter_term - return self.request.current_route_path(_query=kw) + query_params['filter'] = filter_term + return self.request.current_route_path(_query=query_params) - c.audit_logs = h.Page( - user_log, page=p, items_per_page=10, url=url_generator) + c.audit_logs = SqlPage( + user_log, page=p, items_per_page=10, url_maker=url_generator) c.filter_term = filter_term return self._get_template_context(c) @LoginRequired() @HasPermissionAllDecorator('hg.admin') @view_config( + route_name='edit_user_audit_logs_download', request_method='GET', + renderer='string') + def user_audit_logs_download(self): + _ = self.request.translate + c = self.load_default_context() + c.user = self.db_user + + user_log = UserModel().get_user_log(c.user, filter_term=None) + + audit_log_data = {} + for entry in user_log: + audit_log_data[entry.user_log_id] = entry.get_dict() + + response = Response(json.dumps(audit_log_data, indent=4)) + response.content_disposition = str( + 'attachment; filename=%s' % 'user_{}_audit_logs.json'.format(c.user.user_id)) + response.content_type = 
'application/json' + + return response + + @LoginRequired() + @HasPermissionAllDecorator('hg.admin') + @view_config( route_name='edit_user_perms_summary', request_method='GET', renderer='rhodecode:templates/admin/users/user_edit.mako') def user_perms_summary(self): diff --git a/rhodecode/apps/debug_style/__init__.py b/rhodecode/apps/debug_style/__init__.py --- a/rhodecode/apps/debug_style/__init__.py +++ b/rhodecode/apps/debug_style/__init__.py @@ -43,6 +43,14 @@ def includeme(config): pattern=ADMIN_PREFIX + '/debug_style', debug_style=True) config.add_route( + name='debug_style_email', + pattern=ADMIN_PREFIX + '/debug_style/email/{email_id}', + debug_style=True) + config.add_route( + name='debug_style_email_plain_rendered', + pattern=ADMIN_PREFIX + '/debug_style/email-rendered/{email_id}', + debug_style=True) + config.add_route( name='debug_style_template', pattern=ADMIN_PREFIX + '/debug_style/t/{t_path}', debug_style=True) diff --git a/rhodecode/apps/debug_style/views.py b/rhodecode/apps/debug_style/views.py --- a/rhodecode/apps/debug_style/views.py +++ b/rhodecode/apps/debug_style/views.py @@ -20,10 +20,15 @@ import os import logging +import datetime from pyramid.view import view_config from pyramid.renderers import render_to_response from rhodecode.apps._base import BaseAppView +from rhodecode.lib.celerylib import run_task, tasks +from rhodecode.lib.utils2 import AttributeDict +from rhodecode.model.db import User +from rhodecode.model.notification import EmailNotificationModel log = logging.getLogger(__name__) @@ -46,6 +51,317 @@ class DebugStyleView(BaseAppView): request=self.request) @view_config( + route_name='debug_style_email', request_method='GET', + renderer=None) + @view_config( + route_name='debug_style_email_plain_rendered', request_method='GET', + renderer=None) + def render_email(self): + c = self.load_default_context() + email_id = self.request.matchdict['email_id'] + c.active = 'emails' + + pr = AttributeDict( + pull_request_id=123, + title='digital_ocean: fix redis, elastic search start on boot, ' + 'fix fd limits on supervisor, set postgres 11 version', + description=''' +Check if we should use full-topic or mini-topic. + +- full topic produces some problems with merge states etc +- server-mini-topic needs probably tweeks. + ''', + repo_name='foobar', + source_ref_parts=AttributeDict(type='branch', name='fix-ticket-2000'), + target_ref_parts=AttributeDict(type='branch', name='master'), + ) + target_repo = AttributeDict(repo_name='repo_group/target_repo') + source_repo = AttributeDict(repo_name='repo_group/source_repo') + user = User.get_by_username(self.request.GET.get('user')) or self._rhodecode_db_user + # file/commit changes for PR update + commit_changes = AttributeDict({ + 'added': ['aaaaaaabbbbb', 'cccccccddddddd'], + 'removed': ['eeeeeeeeeee'], + }) + file_changes = AttributeDict({ + 'added': ['a/file1.md', 'file2.py'], + 'modified': ['b/modified_file.rst'], + 'removed': ['.idea'], + }) + email_kwargs = { + 'test': {}, + 'message': { + 'body': 'message body !' 
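The audit-log download view above serves the serialized log as a JSON attachment by setting the Content-Disposition header on the response. The same pattern in isolation, using a plain WebOb response and made-up data::

    import json
    from webob import Response

    def json_attachment(data, filename):
        response = Response(json.dumps(data, indent=4))
        response.content_type = 'application/json'
        response.content_disposition = str('attachment; filename=%s' % filename)
        return response

    resp = json_attachment({'1': {'action': 'user.login'}}, 'user_2_audit_logs.json')
    assert resp.content_type == 'application/json'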
+ }, + 'email_test': { + 'user': user, + 'date': datetime.datetime.now(), + }, + 'password_reset': { + 'password_reset_url': 'http://example.com/reset-rhodecode-password/token', + + 'user': user, + 'date': datetime.datetime.now(), + 'email': 'test@rhodecode.com', + 'first_admin_email': User.get_first_super_admin().email + }, + 'password_reset_confirmation': { + 'new_password': 'new-password-example', + 'user': user, + 'date': datetime.datetime.now(), + 'email': 'test@rhodecode.com', + 'first_admin_email': User.get_first_super_admin().email + }, + 'registration': { + 'user': user, + 'date': datetime.datetime.now(), + }, + + 'pull_request_comment': { + 'user': user, + + 'status_change': None, + 'status_change_type': None, + + 'pull_request': pr, + 'pull_request_commits': [], + + 'pull_request_target_repo': target_repo, + 'pull_request_target_repo_url': 'http://target-repo/url', + + 'pull_request_source_repo': source_repo, + 'pull_request_source_repo_url': 'http://source-repo/url', + + 'pull_request_url': 'http://localhost/pr1', + 'pr_comment_url': 'http://comment-url', + 'pr_comment_reply_url': 'http://comment-url#reply', + + 'comment_file': None, + 'comment_line': None, + 'comment_type': 'note', + 'comment_body': 'This is my comment body. *I like !*', + 'comment_id': 2048, + 'renderer_type': 'markdown', + 'mention': True, + + }, + 'pull_request_comment+status': { + 'user': user, + + 'status_change': 'approved', + 'status_change_type': 'approved', + + 'pull_request': pr, + 'pull_request_commits': [], + + 'pull_request_target_repo': target_repo, + 'pull_request_target_repo_url': 'http://target-repo/url', + + 'pull_request_source_repo': source_repo, + 'pull_request_source_repo_url': 'http://source-repo/url', + + 'pull_request_url': 'http://localhost/pr1', + 'pr_comment_url': 'http://comment-url', + 'pr_comment_reply_url': 'http://comment-url#reply', + + 'comment_type': 'todo', + 'comment_file': None, + 'comment_line': None, + 'comment_body': ''' +I think something like this would be better + +```py + +def db(): + global connection + return connection + +``` + + ''', + 'comment_id': 2048, + 'renderer_type': 'markdown', + 'mention': True, + + }, + 'pull_request_comment+file': { + 'user': user, + + 'status_change': None, + 'status_change_type': None, + + 'pull_request': pr, + 'pull_request_commits': [], + + 'pull_request_target_repo': target_repo, + 'pull_request_target_repo_url': 'http://target-repo/url', + + 'pull_request_source_repo': source_repo, + 'pull_request_source_repo_url': 'http://source-repo/url', + + 'pull_request_url': 'http://localhost/pr1', + + 'pr_comment_url': 'http://comment-url', + 'pr_comment_reply_url': 'http://comment-url#reply', + + 'comment_file': 'rhodecode/model/db.py', + 'comment_line': 'o1210', + 'comment_type': 'todo', + 'comment_body': ''' +I like this ! + +But please check this code:: + + def main(): + print 'ok' + +This should work better ! 
+ ''', + 'comment_id': 2048, + 'renderer_type': 'rst', + 'mention': True, + + }, + + 'pull_request_update': { + 'updating_user': user, + + 'status_change': None, + 'status_change_type': None, + + 'pull_request': pr, + 'pull_request_commits': [], + + 'pull_request_target_repo': target_repo, + 'pull_request_target_repo_url': 'http://target-repo/url', + + 'pull_request_source_repo': source_repo, + 'pull_request_source_repo_url': 'http://source-repo/url', + + 'pull_request_url': 'http://localhost/pr1', + + # update comment links + 'pr_comment_url': 'http://comment-url', + 'pr_comment_reply_url': 'http://comment-url#reply', + 'ancestor_commit_id': 'f39bd443', + 'added_commits': commit_changes.added, + 'removed_commits': commit_changes.removed, + 'changed_files': (file_changes.added + file_changes.modified + file_changes.removed), + 'added_files': file_changes.added, + 'modified_files': file_changes.modified, + 'removed_files': file_changes.removed, + }, + + 'cs_comment': { + 'user': user, + 'commit': AttributeDict(idx=123, raw_id='a'*40, message='Commit message'), + 'status_change': None, + 'status_change_type': None, + + 'commit_target_repo_url': 'http://foo.example.com/#comment1', + 'repo_name': 'test-repo', + 'comment_type': 'note', + 'comment_file': None, + 'comment_line': None, + 'commit_comment_url': 'http://comment-url', + 'commit_comment_reply_url': 'http://comment-url#reply', + 'comment_body': 'This is my comment body. *I like !*', + 'comment_id': 2048, + 'renderer_type': 'markdown', + 'mention': True, + }, + 'cs_comment+status': { + 'user': user, + 'commit': AttributeDict(idx=123, raw_id='a' * 40, message='Commit message'), + 'status_change': 'approved', + 'status_change_type': 'approved', + + 'commit_target_repo_url': 'http://foo.example.com/#comment1', + 'repo_name': 'test-repo', + 'comment_type': 'note', + 'comment_file': None, + 'comment_line': None, + 'commit_comment_url': 'http://comment-url', + 'commit_comment_reply_url': 'http://comment-url#reply', + 'comment_body': ''' +Hello **world** + +This is a multiline comment :) + +- list +- list2 + ''', + 'comment_id': 2048, + 'renderer_type': 'markdown', + 'mention': True, + }, + 'cs_comment+file': { + 'user': user, + 'commit': AttributeDict(idx=123, raw_id='a' * 40, message='Commit message'), + 'status_change': None, + 'status_change_type': None, + + 'commit_target_repo_url': 'http://foo.example.com/#comment1', + 'repo_name': 'test-repo', + + 'comment_type': 'note', + 'comment_file': 'test-file.py', + 'comment_line': 'n100', + + 'commit_comment_url': 'http://comment-url', + 'commit_comment_reply_url': 'http://comment-url#reply', + 'comment_body': 'This is my comment body. *I like !*', + 'comment_id': 2048, + 'renderer_type': 'markdown', + 'mention': True, + }, + + 'pull_request': { + 'user': user, + 'pull_request': pr, + 'pull_request_commits': [ + ('472d1df03bf7206e278fcedc6ac92b46b01c4e21', '''\ +my-account: moved email closer to profile as it's similar data just moved outside. 
+ '''), + ('cbfa3061b6de2696c7161ed15ba5c6a0045f90a7', '''\ +users: description edit fixes + +- tests +- added metatags info + '''), + ], + + 'pull_request_target_repo': target_repo, + 'pull_request_target_repo_url': 'http://target-repo/url', + + 'pull_request_source_repo': source_repo, + 'pull_request_source_repo_url': 'http://source-repo/url', + + 'pull_request_url': 'http://code.rhodecode.com/_pull-request/123', + } + + } + + template_type = email_id.split('+')[0] + (c.subject, c.headers, c.email_body, + c.email_body_plaintext) = EmailNotificationModel().render_email( + template_type, **email_kwargs.get(email_id, {})) + + test_email = self.request.GET.get('email') + if test_email: + recipients = [test_email] + run_task(tasks.send_email, recipients, c.subject, + c.email_body_plaintext, c.email_body) + + if self.request.matched_route.name == 'debug_style_email_plain_rendered': + template = 'debug_style/email_plain_rendered.mako' + else: + template = 'debug_style/email.mako' + return render_to_response( + template, self._get_template_context(c), + request=self.request) + + @view_config( route_name='debug_style_template', request_method='GET', renderer=None) def template(self): @@ -53,7 +369,18 @@ class DebugStyleView(BaseAppView): c = self.load_default_context() c.active = os.path.splitext(t_path)[0] c.came_from = '' + c.email_types = { + 'cs_comment+file': {}, + 'cs_comment+status': {}, + + 'pull_request_comment+file': {}, + 'pull_request_comment+status': {}, + + 'pull_request_update': {}, + } + c.email_types.update(EmailNotificationModel.email_types) return render_to_response( 'debug_style/' + t_path, self._get_template_context(c), - request=self.request) \ No newline at end of file + request=self.request) + diff --git a/rhodecode/apps/file_store/__init__.py b/rhodecode/apps/file_store/__init__.py --- a/rhodecode/apps/file_store/__init__.py +++ b/rhodecode/apps/file_store/__init__.py @@ -44,6 +44,9 @@ def includeme(config): config.add_route( name='download_file', pattern='/_file_store/download/{fid}') + config.add_route( + name='download_file_by_token', + pattern='/_file_store/token-download/{_auth_token}/{fid}') # Scan module for configuration decorators. config.scan('.views', ignore='.tests') diff --git a/rhodecode/apps/file_store/backends/__init__.py b/rhodecode/apps/file_store/backends/__init__.py new file mode 100644 --- /dev/null +++ b/rhodecode/apps/file_store/backends/__init__.py @@ -0,0 +1,19 @@ +# -*- coding: utf-8 -*- + +# Copyright (C) 2016-2019 RhodeCode GmbH +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License, version 3 +# (only), as published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . +# +# This program is dual-licensed. 
If you wish to learn more about the +# RhodeCode Enterprise Edition, including its added features, Support services, +# and proprietary license terms, please see https://rhodecode.com/licenses/ diff --git a/rhodecode/apps/file_store/local_store.py b/rhodecode/apps/file_store/backends/local_store.py rename from rhodecode/apps/file_store/local_store.py rename to rhodecode/apps/file_store/backends/local_store.py --- a/rhodecode/apps/file_store/local_store.py +++ b/rhodecode/apps/file_store/backends/local_store.py @@ -26,7 +26,8 @@ import hashlib from rhodecode.lib.ext_json import json from rhodecode.apps.file_store import utils from rhodecode.apps.file_store.extensions import resolve_extensions -from rhodecode.apps.file_store.exceptions import FileNotAllowedException +from rhodecode.apps.file_store.exceptions import ( + FileNotAllowedException, FileOverSizeException) METADATA_VER = 'v1' @@ -91,6 +92,9 @@ class LocalFileStorage(object): self.base_path = base_path self.extensions = resolve_extensions([], groups=extension_groups) + def __repr__(self): + return '{}@{}'.format(self.__class__, self.base_path) + def store_path(self, filename): """ Returns absolute file path of the filename, joined to the @@ -140,16 +144,21 @@ class LocalFileStorage(object): :param ext: extension to check :param extensions: iterable of extensions to validate against (or self.extensions) """ + def normalize_ext(_ext): + if _ext.startswith('.'): + _ext = _ext[1:] + return _ext.lower() extensions = extensions or self.extensions if not extensions: return True - if ext.startswith('.'): - ext = ext[1:] - return ext.lower() in extensions + + ext = normalize_ext(ext) + + return ext in [normalize_ext(x) for x in extensions] def save_file(self, file_obj, filename, directory=None, extensions=None, - extra_metadata=None, **kwargs): + extra_metadata=None, max_filesize=None, **kwargs): """ Saves a file object to the uploads location. Returns the resolved filename, i.e. 
the directory + @@ -159,7 +168,9 @@ class LocalFileStorage(object): :param filename: original filename :param directory: relative path of sub-directory :param extensions: iterable of allowed extensions, if not default + :param max_filesize: maximum size of file that should be allowed :param extra_metadata: extra JSON metadata to store next to the file with .meta suffix + """ extensions = extensions or self.extensions @@ -191,6 +202,12 @@ class LocalFileStorage(object): metadata = extra_metadata size = os.stat(path).st_size + + if max_filesize and size > max_filesize: + # free up the copied file, and raise exc + os.remove(path) + raise FileOverSizeException() + file_hash = self.calculate_path_hash(path) metadata.update( diff --git a/rhodecode/apps/file_store/tests/test_upload_file.py b/rhodecode/apps/file_store/tests/test_upload_file.py --- a/rhodecode/apps/file_store/tests/test_upload_file.py +++ b/rhodecode/apps/file_store/tests/test_upload_file.py @@ -21,7 +21,8 @@ import os import pytest from rhodecode.lib.ext_json import json -from rhodecode.model.db import Session, FileStore +from rhodecode.model.auth_token import AuthTokenModel +from rhodecode.model.db import Session, FileStore, Repository, User from rhodecode.tests import TestController from rhodecode.apps.file_store import utils, config_keys @@ -32,6 +33,7 @@ def route_path(name, params=None, **kwar base_url = { 'upload_file': '/_file_store/upload', 'download_file': '/_file_store/download/{fid}', + 'download_file_by_token': '/_file_store/token-download/{_auth_token}/{fid}' }[name].format(**kwargs) @@ -124,3 +126,136 @@ class TestFileStoreViews(TestController) status=200) assert response.json['store_fid'] + + @pytest.fixture() + def create_artifact_factory(self, tmpdir): + def factory(user_id, content): + store_path = self.app._pyramid_settings[config_keys.store_path] + store = utils.get_file_storage({config_keys.store_path: store_path}) + fid = 'example.txt' + + filesystem_file = os.path.join(str(tmpdir), fid) + with open(filesystem_file, 'wb') as f: + f.write(content) + + with open(filesystem_file, 'rb') as f: + store_uid, metadata = store.save_file(f, fid, extra_metadata={'filename': fid}) + + entry = FileStore.create( + file_uid=store_uid, filename=metadata["filename"], + file_hash=metadata["sha256"], file_size=metadata["size"], + file_display_name='file_display_name', + file_description='repo artifact `{}`'.format(metadata["filename"]), + check_acl=True, user_id=user_id, + ) + Session().add(entry) + Session().commit() + return entry + return factory + + def test_download_file_non_scoped(self, user_util, create_artifact_factory): + user = self.log_user() + user_id = user['user_id'] + content = 'HELLO MY NAME IS ARTIFACT !' + + artifact = create_artifact_factory(user_id, content) + file_uid = artifact.file_uid + response = self.app.get(route_path('download_file', fid=file_uid), status=200) + assert response.text == content + + # log-in to new user and test download again + user = user_util.create_user(password='qweqwe') + self.log_user(user.username, 'qweqwe') + response = self.app.get(route_path('download_file', fid=file_uid), status=200) + assert response.text == content + + def test_download_file_scoped_to_repo(self, user_util, create_artifact_factory): + user = self.log_user() + user_id = user['user_id'] + content = 'HELLO MY NAME IS ARTIFACT !' 
+ + artifact = create_artifact_factory(user_id, content) + # bind to repo + repo = user_util.create_repo() + repo_id = repo.repo_id + artifact.scope_repo_id = repo_id + Session().add(artifact) + Session().commit() + + file_uid = artifact.file_uid + response = self.app.get(route_path('download_file', fid=file_uid), status=200) + assert response.text == content + + # log-in to new user and test download again + user = user_util.create_user(password='qweqwe') + self.log_user(user.username, 'qweqwe') + response = self.app.get(route_path('download_file', fid=file_uid), status=200) + assert response.text == content + + # forbid user the rights to repo + repo = Repository.get(repo_id) + user_util.grant_user_permission_to_repo(repo, user, 'repository.none') + self.app.get(route_path('download_file', fid=file_uid), status=404) + + def test_download_file_scoped_to_user(self, user_util, create_artifact_factory): + user = self.log_user() + user_id = user['user_id'] + content = 'HELLO MY NAME IS ARTIFACT !' + + artifact = create_artifact_factory(user_id, content) + # bind to user + user = user_util.create_user(password='qweqwe') + + artifact.scope_user_id = user.user_id + Session().add(artifact) + Session().commit() + + # artifact creator doesn't have access since it's bind to another user + file_uid = artifact.file_uid + self.app.get(route_path('download_file', fid=file_uid), status=404) + + # log-in to new user and test download again, should be ok since we're bind to this artifact + self.log_user(user.username, 'qweqwe') + response = self.app.get(route_path('download_file', fid=file_uid), status=200) + assert response.text == content + + def test_download_file_scoped_to_repo_with_bad_token(self, user_util, create_artifact_factory): + user_id = User.get_first_super_admin().user_id + content = 'HELLO MY NAME IS ARTIFACT !' + + artifact = create_artifact_factory(user_id, content) + # bind to repo + repo = user_util.create_repo() + repo_id = repo.repo_id + artifact.scope_repo_id = repo_id + Session().add(artifact) + Session().commit() + + file_uid = artifact.file_uid + self.app.get(route_path('download_file_by_token', + _auth_token='bogus', fid=file_uid), status=302) + + def test_download_file_scoped_to_repo_with_token(self, user_util, create_artifact_factory): + user = User.get_first_super_admin() + AuthTokenModel().create(user, 'test artifact token', + role=AuthTokenModel.cls.ROLE_ARTIFACT_DOWNLOAD) + + user = User.get_first_super_admin() + artifact_token = user.artifact_token + + user_id = User.get_first_super_admin().user_id + content = 'HELLO MY NAME IS ARTIFACT !' 
+ + artifact = create_artifact_factory(user_id, content) + # bind to repo + repo = user_util.create_repo() + repo_id = repo.repo_id + artifact.scope_repo_id = repo_id + Session().add(artifact) + Session().commit() + + file_uid = artifact.file_uid + response = self.app.get( + route_path('download_file_by_token', + _auth_token=artifact_token, fid=file_uid), status=200) + assert response.text == content diff --git a/rhodecode/apps/file_store/utils.py b/rhodecode/apps/file_store/utils.py --- a/rhodecode/apps/file_store/utils.py +++ b/rhodecode/apps/file_store/utils.py @@ -25,7 +25,7 @@ import pathlib2 def get_file_storage(settings): - from rhodecode.apps.file_store.local_store import LocalFileStorage + from rhodecode.apps.file_store.backends.local_store import LocalFileStorage from rhodecode.apps.file_store import config_keys store_path = settings.get(config_keys.store_path) return LocalFileStorage(base_path=store_path) diff --git a/rhodecode/apps/file_store/views.py b/rhodecode/apps/file_store/views.py --- a/rhodecode/apps/file_store/views.py +++ b/rhodecode/apps/file_store/views.py @@ -30,8 +30,10 @@ from rhodecode.apps.file_store.exception from rhodecode.lib import helpers as h from rhodecode.lib import audit_logger -from rhodecode.lib.auth import (CSRFRequired, NotAnonymous, HasRepoPermissionAny, HasRepoGroupPermissionAny) -from rhodecode.model.db import Session, FileStore +from rhodecode.lib.auth import ( + CSRFRequired, NotAnonymous, HasRepoPermissionAny, HasRepoGroupPermissionAny, + LoginRequired) +from rhodecode.model.db import Session, FileStore, UserApiKeys log = logging.getLogger(__name__) @@ -44,6 +46,55 @@ class FileStoreView(BaseAppView): self.storage = utils.get_file_storage(self.request.registry.settings) return c + def _serve_file(self, file_uid): + + if not self.storage.exists(file_uid): + store_path = self.storage.store_path(file_uid) + log.debug('File with FID:%s not found in the store under `%s`', + file_uid, store_path) + raise HTTPNotFound() + + db_obj = FileStore().query().filter(FileStore.file_uid == file_uid).scalar() + if not db_obj: + raise HTTPNotFound() + + # private upload for user + if db_obj.check_acl and db_obj.scope_user_id: + log.debug('Artifact: checking scope access for bound artifact user: `%s`', + db_obj.scope_user_id) + user = db_obj.user + if self._rhodecode_db_user.user_id != user.user_id: + log.warning('Access to file store object forbidden') + raise HTTPNotFound() + + # scoped to repository permissions + if db_obj.check_acl and db_obj.scope_repo_id: + log.debug('Artifact: checking scope access for bound artifact repo: `%s`', + db_obj.scope_repo_id) + repo = db_obj.repo + perm_set = ['repository.read', 'repository.write', 'repository.admin'] + has_perm = HasRepoPermissionAny(*perm_set)(repo.repo_name, 'FileStore check') + if not has_perm: + log.warning('Access to file store object `%s` forbidden', file_uid) + raise HTTPNotFound() + + # scoped to repository group permissions + if db_obj.check_acl and db_obj.scope_repo_group_id: + log.debug('Artifact: checking scope access for bound artifact repo group: `%s`', + db_obj.scope_repo_group_id) + repo_group = db_obj.repo_group + perm_set = ['group.read', 'group.write', 'group.admin'] + has_perm = HasRepoGroupPermissionAny(*perm_set)(repo_group.group_name, 'FileStore check') + if not has_perm: + log.warning('Access to file store object `%s` forbidden', file_uid) + raise HTTPNotFound() + + FileStore.bump_access_counter(file_uid) + + file_path = self.storage.store_path(file_uid) + return 
FileResponse(file_path) + + @LoginRequired() @NotAnonymous() @CSRFRequired() @view_config(route_name='upload_file', request_method='POST', renderer='json_ext') @@ -84,7 +135,7 @@ class FileStoreView(BaseAppView): entry = FileStore.create( file_uid=store_uid, filename=metadata["filename"], file_hash=metadata["sha256"], file_size=metadata["size"], - file_description='upload attachment', + file_description=u'upload attachment', check_acl=False, user_id=self._rhodecode_user.user_id ) Session().add(entry) @@ -99,46 +150,25 @@ class FileStoreView(BaseAppView): return {'store_fid': store_uid, 'access_path': h.route_path('download_file', fid=store_uid)} + # ACL is checked by scopes, if no scope the file is accessible to all @view_config(route_name='download_file') def download_file(self): self.load_default_context() file_uid = self.request.matchdict['fid'] log.debug('Requesting FID:%s from store %s', file_uid, self.storage) - - if not self.storage.exists(file_uid): - log.debug('File with FID:%s not found in the store', file_uid) - raise HTTPNotFound() - - db_obj = FileStore().query().filter(FileStore.file_uid == file_uid).scalar() - if not db_obj: - raise HTTPNotFound() - - # private upload for user - if db_obj.check_acl and db_obj.scope_user_id: - user = db_obj.user - if self._rhodecode_db_user.user_id != user.user_id: - log.warning('Access to file store object forbidden') - raise HTTPNotFound() + return self._serve_file(file_uid) - # scoped to repository permissions - if db_obj.check_acl and db_obj.scope_repo_id: - repo = db_obj.repo - perm_set = ['repository.read', 'repository.write', 'repository.admin'] - has_perm = HasRepoPermissionAny(*perm_set)(repo.repo_name, 'FileStore check') - if not has_perm: - log.warning('Access to file store object forbidden') - raise HTTPNotFound() + # in addition to @LoginRequired ACL is checked by scopes + @LoginRequired(auth_token_access=[UserApiKeys.ROLE_ARTIFACT_DOWNLOAD]) + @NotAnonymous() + @view_config(route_name='download_file_by_token') + def download_file_by_token(self): + """ + Special view that allows to access the download file by special URL that + is stored inside the URL. 
- # scoped to repository group permissions - if db_obj.check_acl and db_obj.scope_repo_group_id: - repo_group = db_obj.repo_group - perm_set = ['group.read', 'group.write', 'group.admin'] - has_perm = HasRepoGroupPermissionAny(*perm_set)(repo_group.group_name, 'FileStore check') - if not has_perm: - log.warning('Access to file store object forbidden') - raise HTTPNotFound() - - FileStore.bump_access_counter(file_uid) - - file_path = self.storage.store_path(file_uid) - return FileResponse(file_path) + http://example.com/_file_store/token-download/TOKEN/FILE_UID + """ + self.load_default_context() + file_uid = self.request.matchdict['fid'] + return self._serve_file(file_uid) diff --git a/rhodecode/apps/gist/tests/test_admin_gists.py b/rhodecode/apps/gist/tests/test_admin_gists.py --- a/rhodecode/apps/gist/tests/test_admin_gists.py +++ b/rhodecode/apps/gist/tests/test_admin_gists.py @@ -83,7 +83,7 @@ class GistUtility(object): Session().commit() -@pytest.fixture +@pytest.fixture() def create_gist(request): gist_utility = GistUtility() request.addfinalizer(gist_utility.cleanup) @@ -159,7 +159,7 @@ class TestGistsController(TestController params={'lifetime': -1, 'content': 'gist test', 'filename': 'foo', - 'public': 'public', + 'gist_type': 'public', 'gist_acl_level': Gist.ACL_LEVEL_PUBLIC, 'csrf_token': self.csrf_token}, status=302) @@ -174,7 +174,7 @@ class TestGistsController(TestController params={'lifetime': -1, 'content': 'gist test', 'filename': '/home/foo', - 'public': 'public', + 'gist_type': 'public', 'gist_acl_level': Gist.ACL_LEVEL_PUBLIC, 'csrf_token': self.csrf_token}, status=200) @@ -197,7 +197,7 @@ class TestGistsController(TestController params={'lifetime': -1, 'content': 'private gist test', 'filename': 'private-foo', - 'private': 'private', + 'gist_type': 'private', 'gist_acl_level': Gist.ACL_LEVEL_PUBLIC, 'csrf_token': self.csrf_token}, status=302) @@ -216,7 +216,7 @@ class TestGistsController(TestController params={'lifetime': -1, 'content': 'private gist test', 'filename': 'private-foo', - 'private': 'private', + 'gist_type': 'private', 'gist_acl_level': Gist.ACL_LEVEL_PRIVATE, 'csrf_token': self.csrf_token}, status=302) @@ -236,7 +236,7 @@ class TestGistsController(TestController 'content': 'gist test', 'filename': 'foo-desc', 'description': 'gist-desc', - 'public': 'public', + 'gist_type': 'public', 'gist_acl_level': Gist.ACL_LEVEL_PUBLIC, 'csrf_token': self.csrf_token}, status=302) @@ -252,7 +252,7 @@ class TestGistsController(TestController 'content': 'gist test', 'filename': 'foo-desc', 'description': 'gist-desc', - 'public': 'public', + 'gist_type': 'public', 'gist_acl_level': Gist.ACL_LEVEL_PUBLIC, 'csrf_token': self.csrf_token } diff --git a/rhodecode/apps/gist/views.py b/rhodecode/apps/gist/views.py --- a/rhodecode/apps/gist/views.py +++ b/rhodecode/apps/gist/views.py @@ -72,7 +72,7 @@ class GistView(BaseAppView): @LoginRequired() @view_config( route_name='gists_show', request_method='GET', - renderer='rhodecode:templates/admin/gists/index.mako') + renderer='rhodecode:templates/admin/gists/gist_index.mako') def gist_show_all(self): c = self.load_default_context() @@ -136,7 +136,7 @@ class GistView(BaseAppView): @NotAnonymous() @view_config( route_name='gists_new', request_method='GET', - renderer='rhodecode:templates/admin/gists/new.mako') + renderer='rhodecode:templates/admin/gists/gist_new.mako') def gist_new(self): c = self.load_default_context() return self._get_template_context(c) @@ -146,21 +146,26 @@ class GistView(BaseAppView): @CSRFRequired() 
@view_config( route_name='gists_create', request_method='POST', - renderer='rhodecode:templates/admin/gists/new.mako') + renderer='rhodecode:templates/admin/gists/gist_new.mako') def gist_create(self): _ = self.request.translate c = self.load_default_context() data = dict(self.request.POST) data['filename'] = data.get('filename') or Gist.DEFAULT_FILENAME + data['nodes'] = [{ 'filename': data['filename'], 'content': data.get('content'), 'mimetype': data.get('mimetype') # None is autodetect }] + gist_type = { + 'public': Gist.GIST_PUBLIC, + 'private': Gist.GIST_PRIVATE + }.get(data.get('gist_type')) or Gist.GIST_PRIVATE - data['gist_type'] = ( - Gist.GIST_PUBLIC if data.get('public') else Gist.GIST_PRIVATE) + data['gist_type'] = gist_type + data['gist_acl_level'] = ( data.get('gist_acl_level') or Gist.ACL_LEVEL_PRIVATE) @@ -196,7 +201,7 @@ class GistView(BaseAppView): errors['filename'] = errors['nodes.0.filename'] del errors['nodes.0.filename'] - data = render('rhodecode:templates/admin/gists/new.mako', + data = render('rhodecode:templates/admin/gists/gist_new.mako', self._get_template_context(c), self.request) html = formencode.htmlfill.render( data, @@ -260,10 +265,10 @@ class GistView(BaseAppView): @LoginRequired() @view_config( route_name='gist_show', request_method='GET', - renderer='rhodecode:templates/admin/gists/show.mako') + renderer='rhodecode:templates/admin/gists/gist_show.mako') @view_config( route_name='gist_show_rev', request_method='GET', - renderer='rhodecode:templates/admin/gists/show.mako') + renderer='rhodecode:templates/admin/gists/gist_show.mako') @view_config( route_name='gist_show_formatted', request_method='GET', renderer=None) @@ -304,7 +309,7 @@ class GistView(BaseAppView): @NotAnonymous() @view_config( route_name='gist_edit', request_method='GET', - renderer='rhodecode:templates/admin/gists/edit.mako') + renderer='rhodecode:templates/admin/gists/gist_edit.mako') def gist_edit(self): _ = self.request.translate gist_id = self.request.matchdict['gist_id'] @@ -338,7 +343,7 @@ class GistView(BaseAppView): @CSRFRequired() @view_config( route_name='gist_update', request_method='POST', - renderer='rhodecode:templates/admin/gists/edit.mako') + renderer='rhodecode:templates/admin/gists/gist_edit.mako') def gist_update(self): _ = self.request.translate gist_id = self.request.matchdict['gist_id'] diff --git a/rhodecode/apps/home/__init__.py b/rhodecode/apps/home/__init__.py --- a/rhodecode/apps/home/__init__.py +++ b/rhodecode/apps/home/__init__.py @@ -44,6 +44,14 @@ def includeme(config): pattern='/') config.add_route( + name='main_page_repos_data', + pattern='/_home_repos') + + config.add_route( + name='main_page_repo_groups_data', + pattern='/_home_repo_groups') + + config.add_route( name='user_autocomplete_data', pattern='/_users') diff --git a/rhodecode/apps/home/tests/test_home.py b/rhodecode/apps/home/tests/test_home.py --- a/rhodecode/apps/home/tests/test_home.py +++ b/rhodecode/apps/home/tests/test_home.py @@ -22,7 +22,7 @@ import pytest import rhodecode -from rhodecode.model.db import Repository +from rhodecode.model.db import Repository, RepoGroup, User from rhodecode.model.meta import Session from rhodecode.model.repo import RepoModel from rhodecode.model.repo_group import RepoGroupModel @@ -37,6 +37,8 @@ fixture = Fixture() def route_path(name, **kwargs): return { 'home': '/', + 'main_page_repos_data': '/_home_repos', + 'main_page_repo_groups_data': '/_home_repo_groups', 'repo_group_home': '/{repo_group_name}' }[name].format(**kwargs) @@ -47,11 +49,42 @@ class 
TestHomeController(TestController) self.log_user() response = self.app.get(route_path('home')) # if global permission is set - response.mustcontain('Add Repository') + response.mustcontain('New Repository') + + def test_index_grid_repos(self, xhr_header): + self.log_user() + response = self.app.get(route_path('main_page_repos_data'), extra_environ=xhr_header) + # search for objects inside the JavaScript JSON + for obj in Repository.getAll(): + response.mustcontain(''.format(obj.repo_name)) + + def test_index_grid_repo_groups(self, xhr_header): + self.log_user() + response = self.app.get(route_path('main_page_repo_groups_data'), + extra_environ=xhr_header,) # search for objects inside the JavaScript JSON - for repo in Repository.getAll(): - response.mustcontain('"name_raw": "%s"' % repo.repo_name) + for obj in RepoGroup.getAll(): + response.mustcontain(''.format(obj.group_name)) + + def test_index_grid_repo_groups_without_access(self, xhr_header, user_util): + user = user_util.create_user(password='qweqwe') + group_ok = user_util.create_repo_group(owner=user) + group_id_ok = group_ok.group_id + + group_forbidden = user_util.create_repo_group(owner=User.get_first_super_admin()) + group_id_forbidden = group_forbidden.group_id + + user_util.grant_user_permission_to_repo_group(group_forbidden, user, 'group.none') + self.log_user(user.username, 'qweqwe') + + self.app.get(route_path('main_page_repo_groups_data'), + extra_environ=xhr_header, + params={'repo_group_id': group_id_ok}, status=200) + + self.app.get(route_path('main_page_repo_groups_data'), + extra_environ=xhr_header, + params={'repo_group_id': group_id_forbidden}, status=404) def test_index_contains_statics_with_ver(self): from rhodecode.lib.base import calculate_version_hash @@ -62,11 +95,11 @@ class TestHomeController(TestController) rhodecode_version_hash = calculate_version_hash( {'beaker.session.secret': 'test-rc-uytcxaz'}) response.mustcontain('style.css?ver={0}'.format(rhodecode_version_hash)) - response.mustcontain('scripts.js?ver={0}'.format(rhodecode_version_hash)) + response.mustcontain('scripts.min.js?ver={0}'.format(rhodecode_version_hash)) - def test_index_contains_backend_specific_details(self, backend): + def test_index_contains_backend_specific_details(self, backend, xhr_header): self.log_user() - response = self.app.get(route_path('home')) + response = self.app.get(route_path('main_page_repos_data'), extra_environ=xhr_header) tip = backend.repo.get_commit().raw_id # html in javascript variable: @@ -81,39 +114,44 @@ class TestHomeController(TestController) response = self.app.get(route_path('home'), status=302) assert 'login' in response.location - def test_index_page_on_groups(self, autologin_user, repo_group): - response = self.app.get(route_path('repo_group_home', repo_group_name='gr1')) - response.mustcontain("gr1/repo_in_group") + def test_index_page_on_groups_with_wrong_group_id(self, autologin_user, xhr_header): + group_id = 918123 + self.app.get( + route_path('main_page_repo_groups_data'), + params={'repo_group_id': group_id}, + status=404, extra_environ=xhr_header) - def test_index_page_on_group_with_trailing_slash( - self, autologin_user, repo_group): - response = self.app.get(route_path('repo_group_home', repo_group_name='gr1') + '/') - response.mustcontain("gr1/repo_in_group") + def test_index_page_on_groups(self, autologin_user, user_util, xhr_header): + gr = user_util.create_repo_group() + repo = user_util.create_repo(parent=gr) + repo_name = repo.repo_name + group_id = gr.group_id - 
@pytest.fixture(scope='class') - def repo_group(self, request): - gr = fixture.create_repo_group('gr1') - fixture.create_repo(name='gr1/repo_in_group', repo_group=gr) + response = self.app.get(route_path( + 'repo_group_home', repo_group_name=gr.group_name)) + response.mustcontain('d.repo_group_id = {}'.format(group_id)) - @request.addfinalizer - def cleanup(): - RepoModel().delete('gr1/repo_in_group') - RepoGroupModel().delete(repo_group='gr1', force_delete=True) - Session().commit() + response = self.app.get( + route_path('main_page_repos_data'), + params={'repo_group_id': group_id}, + extra_environ=xhr_header,) + response.mustcontain(repo_name) - def test_index_with_name_with_tags(self, user_util, autologin_user): - user = user_util.create_user() - username = user.username - user.name = '' - user.lastname = '#">' + def test_index_page_on_group_with_trailing_slash(self, autologin_user, user_util, xhr_header): + gr = user_util.create_repo_group() + repo = user_util.create_repo(parent=gr) + repo_name = repo.repo_name + group_id = gr.group_id - Session().add(user) - Session().commit() - user_util.create_repo(owner=username) + response = self.app.get(route_path( + 'repo_group_home', repo_group_name=gr.group_name+'/')) + response.mustcontain('d.repo_group_id = {}'.format(group_id)) - response = self.app.get(route_path('home')) - response.mustcontain(h.html_escape(user.first_name)) - response.mustcontain(h.html_escape(user.last_name)) + response = self.app.get( + route_path('main_page_repos_data'), + params={'repo_group_id': group_id}, + extra_environ=xhr_header, ) + response.mustcontain(repo_name) @pytest.mark.parametrize("name, state", [ ('Disabled', False), @@ -137,5 +175,5 @@ class TestHomeController(TestController) def test_logout_form_contains_csrf(self, autologin_user, csrf_token): response = self.app.get(route_path('home')) assert_response = response.assert_response() - element = assert_response.get_element('.logout #csrf_token') + element = assert_response.get_element('.logout [name=csrf_token]') assert element.value == csrf_token diff --git a/rhodecode/apps/home/views.py b/rhodecode/apps/home/views.py --- a/rhodecode/apps/home/views.py +++ b/rhodecode/apps/home/views.py @@ -22,29 +22,30 @@ import re import logging import collections +from pyramid.httpexceptions import HTTPNotFound from pyramid.view import view_config -from rhodecode.apps._base import BaseAppView +from rhodecode.apps._base import BaseAppView, DataGridAppView from rhodecode.lib import helpers as h from rhodecode.lib.auth import ( - LoginRequired, NotAnonymous, HasRepoGroupPermissionAnyDecorator, CSRFRequired) + LoginRequired, NotAnonymous, HasRepoGroupPermissionAnyDecorator, CSRFRequired, + HasRepoGroupPermissionAny, AuthUser) from rhodecode.lib.codeblocks import filenode_as_lines_tokens from rhodecode.lib.index import searcher_from_config from rhodecode.lib.utils2 import safe_unicode, str2bool, safe_int -from rhodecode.lib.ext_json import json from rhodecode.lib.vcs.nodes import FileNode from rhodecode.model.db import ( - func, true, or_, case, in_filter_generator, Repository, RepoGroup, User, UserGroup) + func, true, or_, case, in_filter_generator, Session, + Repository, RepoGroup, User, UserGroup) from rhodecode.model.repo import RepoModel from rhodecode.model.repo_group import RepoGroupModel -from rhodecode.model.scm import RepoGroupList, RepoList from rhodecode.model.user import UserModel from rhodecode.model.user_group import UserGroupModel log = logging.getLogger(__name__) -class HomeView(BaseAppView): +class 
HomeView(BaseAppView, DataGridAppView): def load_default_context(self): c = self._get_local_tmpl_context() @@ -112,7 +113,12 @@ class HomeView(BaseAppView): ['repository.read', 'repository.write', 'repository.admin'], cache=False, name_filter=name_contains) or [-1] - query = Repository.query()\ + query = Session().query( + Repository.repo_name, + Repository.repo_id, + Repository.repo_type, + Repository.private, + )\ .filter(Repository.archived.isnot(true()))\ .filter(or_( # generate multiple IN to fix limitation problems @@ -158,7 +164,10 @@ class HomeView(BaseAppView): ['group.read', 'group.write', 'group.admin'], cache=False, name_filter=name_contains) or [-1] - query = RepoGroup.query()\ + query = Session().query( + RepoGroup.group_id, + RepoGroup.group_name, + )\ .filter(or_( # generate multiple IN to fix limitation problems *in_filter_generator(RepoGroup.group_id, allowed_ids) @@ -449,6 +458,7 @@ class HomeView(BaseAppView): 'id': -10, 'value': query, 'value_display': label, + 'value_icon': '', 'type': 'search', 'subtype': 'repo', 'url': h.route_path('search_repo', @@ -466,6 +476,7 @@ class HomeView(BaseAppView): 'id': -20, 'value': query, 'value_display': label, + 'value_icon': '', 'type': 'search', 'subtype': 'repo', 'url': h.route_path('search_repo', @@ -491,6 +502,7 @@ class HomeView(BaseAppView): 'id': -30, 'value': query, 'value_display': label, + 'value_icon': '', 'type': 'search', 'subtype': 'repo_group', 'url': h.route_path('search_repo_group', @@ -508,6 +520,7 @@ class HomeView(BaseAppView): 'id': -40, 'value': query, 'value_display': label, + 'value_icon': '', 'type': 'search', 'subtype': 'repo_group', 'url': h.route_path('search_repo_group', @@ -529,6 +542,7 @@ class HomeView(BaseAppView): 'id': -1, 'value': query, 'value_display': u'File search for: `{}`'.format(query), + 'value_icon': '', 'type': 'search', 'subtype': 'global', 'url': h.route_path('search', @@ -539,6 +553,7 @@ class HomeView(BaseAppView): 'id': -2, 'value': query, 'value_display': u'Commit search for: `{}`'.format(query), + 'value_icon': '', 'type': 'search', 'subtype': 'global', 'url': h.route_path('search', @@ -667,23 +682,6 @@ class HomeView(BaseAppView): return {'suggestions': res} - def _get_groups_and_repos(self, repo_group_id=None): - # repo groups groups - repo_group_list = RepoGroup.get_all_repo_groups(group_id=repo_group_id) - _perms = ['group.read', 'group.write', 'group.admin'] - repo_group_list_acl = RepoGroupList(repo_group_list, perm_set=_perms) - repo_group_data = RepoGroupModel().get_repo_groups_as_dict( - repo_group_list=repo_group_list_acl, admin=False) - - # repositories - repo_list = Repository.get_all_repos(group_id=repo_group_id) - _perms = ['repository.read', 'repository.write', 'repository.admin'] - repo_list_acl = RepoList(repo_list, perm_set=_perms) - repo_data = RepoModel().get_repos_as_dict( - repo_list=repo_list_acl, admin=False) - - return repo_data, repo_group_data - @LoginRequired() @view_config( route_name='home', request_method='GET', @@ -691,17 +689,74 @@ class HomeView(BaseAppView): def main_page(self): c = self.load_default_context() c.repo_group = None - - repo_data, repo_group_data = self._get_groups_and_repos() - # json used to render the grids - c.repos_data = json.dumps(repo_data) - c.repo_groups_data = json.dumps(repo_group_data) - return self._get_template_context(c) + def _main_page_repo_groups_data(self, repo_group_id): + column_map = { + 'name': 'group_name_hash', + 'desc': 'group_description', + 'last_change': 'updated_on', + 'owner': 'user_username', + } + 
draw, start, limit = self._extract_chunk(self.request) + search_q, order_by, order_dir = self._extract_ordering( + self.request, column_map=column_map) + return RepoGroupModel().get_repo_groups_data_table( + draw, start, limit, + search_q, order_by, order_dir, + self._rhodecode_user, repo_group_id) + + def _main_page_repos_data(self, repo_group_id): + column_map = { + 'name': 'repo_name', + 'desc': 'description', + 'last_change': 'updated_on', + 'owner': 'user_username', + } + draw, start, limit = self._extract_chunk(self.request) + search_q, order_by, order_dir = self._extract_ordering( + self.request, column_map=column_map) + return RepoModel().get_repos_data_table( + draw, start, limit, + search_q, order_by, order_dir, + self._rhodecode_user, repo_group_id) + @LoginRequired() - @HasRepoGroupPermissionAnyDecorator( - 'group.read', 'group.write', 'group.admin') + @view_config( + route_name='main_page_repo_groups_data', + request_method='GET', renderer='json_ext', xhr=True) + def main_page_repo_groups_data(self): + self.load_default_context() + repo_group_id = safe_int(self.request.GET.get('repo_group_id')) + + if repo_group_id: + group = RepoGroup.get_or_404(repo_group_id) + _perms = AuthUser.repo_group_read_perms + if not HasRepoGroupPermissionAny(*_perms)( + group.group_name, 'user is allowed to list repo group children'): + raise HTTPNotFound() + + return self._main_page_repo_groups_data(repo_group_id) + + @LoginRequired() + @view_config( + route_name='main_page_repos_data', + request_method='GET', renderer='json_ext', xhr=True) + def main_page_repos_data(self): + self.load_default_context() + repo_group_id = safe_int(self.request.GET.get('repo_group_id')) + + if repo_group_id: + group = RepoGroup.get_or_404(repo_group_id) + _perms = AuthUser.repo_group_read_perms + if not HasRepoGroupPermissionAny(*_perms)( + group.group_name, 'user is allowed to list repo group children'): + raise HTTPNotFound() + + return self._main_page_repos_data(repo_group_id) + + @LoginRequired() + @HasRepoGroupPermissionAnyDecorator(*AuthUser.repo_group_read_perms) @view_config( route_name='repo_group_home', request_method='GET', renderer='rhodecode:templates/index_repo_group.mako') @@ -711,16 +766,6 @@ class HomeView(BaseAppView): def repo_group_main_page(self): c = self.load_default_context() c.repo_group = self.request.db_repo_group - repo_data, repo_group_data = self._get_groups_and_repos(c.repo_group.group_id) - - # update every 5 min - if self.request.db_repo_group.last_commit_cache_update_diff > 60 * 5: - self.request.db_repo_group.update_commit_cache() - - # json used to render the grids - c.repos_data = json.dumps(repo_data) - c.repo_groups_data = json.dumps(repo_group_data) - return self._get_template_context(c) @LoginRequired() diff --git a/rhodecode/apps/hovercards/__init__.py b/rhodecode/apps/hovercards/__init__.py new file mode 100644 --- /dev/null +++ b/rhodecode/apps/hovercards/__init__.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- + +# Copyright (C) 2018-2019 RhodeCode GmbH +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License, version 3 +# (only), as published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. 
+# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . +# +# This program is dual-licensed. If you wish to learn more about the +# RhodeCode Enterprise Edition, including its added features, Support services, +# and proprietary license terms, please see https://rhodecode.com/licenses/ + + +def includeme(config): + + config.add_route( + name='hovercard_user', + pattern='/_hovercard/user/{user_id}') + + config.add_route( + name='hovercard_user_group', + pattern='/_hovercard/user_group/{user_group_id}') + + config.add_route( + name='hovercard_pull_request', + pattern='/_hovercard/pull_request/{pull_request_id}') + + config.add_route( + name='hovercard_repo_commit', + pattern='/_hovercard/commit/{repo_name:.*?[^/]}/{commit_id}', repo_route=True) + + # Scan module for configuration decorators. + config.scan('.views', ignore='.tests') diff --git a/rhodecode/apps/hovercards/views.py b/rhodecode/apps/hovercards/views.py new file mode 100644 --- /dev/null +++ b/rhodecode/apps/hovercards/views.py @@ -0,0 +1,110 @@ +# -*- coding: utf-8 -*- + +# Copyright (C) 2016-2019 RhodeCode GmbH +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License, version 3 +# (only), as published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . +# +# This program is dual-licensed. If you wish to learn more about the +# RhodeCode Enterprise Edition, including its added features, Support services, +# and proprietary license terms, please see https://rhodecode.com/licenses/ + +import re +import logging +import collections + +from pyramid.httpexceptions import HTTPNotFound +from pyramid.view import view_config + +from rhodecode.apps._base import BaseAppView, RepoAppView +from rhodecode.lib import helpers as h +from rhodecode.lib.auth import ( + LoginRequired, NotAnonymous, HasRepoGroupPermissionAnyDecorator, CSRFRequired, + HasRepoPermissionAnyDecorator) +from rhodecode.lib.codeblocks import filenode_as_lines_tokens +from rhodecode.lib.index import searcher_from_config +from rhodecode.lib.utils2 import safe_unicode, str2bool, safe_int +from rhodecode.lib.ext_json import json +from rhodecode.lib.vcs.exceptions import CommitDoesNotExistError, EmptyRepositoryError +from rhodecode.lib.vcs.nodes import FileNode +from rhodecode.model.db import ( + func, true, or_, case, in_filter_generator, Repository, RepoGroup, User, UserGroup, PullRequest) +from rhodecode.model.repo import RepoModel +from rhodecode.model.repo_group import RepoGroupModel +from rhodecode.model.scm import RepoGroupList, RepoList +from rhodecode.model.user import UserModel +from rhodecode.model.user_group import UserGroupModel + +log = logging.getLogger(__name__) + + +class HoverCardsView(BaseAppView): + + def load_default_context(self): + c = self._get_local_tmpl_context() + return c + + @LoginRequired() + @view_config( + route_name='hovercard_user', request_method='GET', xhr=True, + renderer='rhodecode:templates/hovercards/hovercard_user.mako') + def hovercard_user(self): + c = self.load_default_context() + user_id = self.request.matchdict['user_id'] + c.user = 
User.get_or_404(user_id) + return self._get_template_context(c) + + @LoginRequired() + @view_config( + route_name='hovercard_user_group', request_method='GET', xhr=True, + renderer='rhodecode:templates/hovercards/hovercard_user_group.mako') + def hovercard_user_group(self): + c = self.load_default_context() + user_group_id = self.request.matchdict['user_group_id'] + c.user_group = UserGroup.get_or_404(user_group_id) + return self._get_template_context(c) + + @LoginRequired() + @view_config( + route_name='hovercard_pull_request', request_method='GET', xhr=True, + renderer='rhodecode:templates/hovercards/hovercard_pull_request.mako') + def hovercard_pull_request(self): + c = self.load_default_context() + c.pull_request = PullRequest.get_or_404( + self.request.matchdict['pull_request_id']) + perms = ['repository.read', 'repository.write', 'repository.admin'] + c.can_view_pr = h.HasRepoPermissionAny(*perms)( + c.pull_request.target_repo.repo_name) + return self._get_template_context(c) + + +class HoverCardsRepoView(RepoAppView): + def load_default_context(self): + c = self._get_local_tmpl_context() + return c + + @LoginRequired() + @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', 'repository.admin') + @view_config( + route_name='hovercard_repo_commit', request_method='GET', xhr=True, + renderer='rhodecode:templates/hovercards/hovercard_repo_commit.mako') + def hovercard_repo_commit(self): + c = self.load_default_context() + commit_id = self.request.matchdict['commit_id'] + pre_load = ['author', 'branch', 'date', 'message'] + try: + c.commit = self.rhodecode_vcs_repo.get_commit( + commit_id=commit_id, pre_load=pre_load) + except (CommitDoesNotExistError, EmptyRepositoryError): + raise HTTPNotFound() + + return self._get_template_context(c) diff --git a/rhodecode/apps/journal/views.py b/rhodecode/apps/journal/views.py --- a/rhodecode/apps/journal/views.py +++ b/rhodecode/apps/journal/views.py @@ -22,7 +22,7 @@ import logging import itertools -from webhelpers.feedgenerator import Atom1Feed, Rss201rev2Feed + from pyramid.view import view_config from pyramid.httpexceptions import HTTPBadRequest @@ -34,10 +34,11 @@ from rhodecode.model.db import ( or_, joinedload, Repository, UserLog, UserFollowing, User, UserApiKeys) from rhodecode.model.meta import Session import rhodecode.lib.helpers as h -from rhodecode.lib.helpers import Page +from rhodecode.lib.helpers import SqlPage from rhodecode.lib.user_log_filter import user_log_filter from rhodecode.lib.auth import LoginRequired, NotAnonymous, CSRFRequired, HasRepoPermissionAny from rhodecode.lib.utils2 import safe_int, AttributeDict, md5_safe +from rhodecode.lib.feedgenerator.feedgenerator import Atom1Feed, Rss201rev2Feed from rhodecode.model.scm import ScmModel log = logging.getLogger(__name__) @@ -166,7 +167,7 @@ class JournalView(BaseAppView): description=desc) response = Response(feed.writeString('utf-8')) - response.content_type = feed.mime_type + response.content_type = feed.content_type return response def _rss_feed(self, repos, search_term, public=True): @@ -212,7 +213,7 @@ class JournalView(BaseAppView): description=desc) response = Response(feed.writeString('utf-8')) - response.content_type = feed.mime_type + response.content_type = feed.content_type return response @LoginRequired() @@ -232,15 +233,15 @@ class JournalView(BaseAppView): journal = self._get_journal_data(following, c.search_term) - def url_generator(**kw): + def url_generator(page_num): query_params = { + 'page': page_num, 'filter': c.search_term } - 
query_params.update(kw) return self.request.current_route_path(_query=query_params) - c.journal_pager = Page( - journal, page=p, items_per_page=20, url=url_generator) + c.journal_pager = SqlPage( + journal, page=p, items_per_page=20, url_maker=url_generator) c.journal_day_aggreagate = self._get_daily_aggregate(c.journal_pager) c.journal_data = render( @@ -333,13 +334,14 @@ class JournalView(BaseAppView): journal = self._get_journal_data(c.following, c.search_term) - def url_generator(**kw): - query_params = {} - query_params.update(kw) + def url_generator(page_num): + query_params = { + 'page': page_num + } return self.request.current_route_path(_query=query_params) - c.journal_pager = Page( - journal, page=p, items_per_page=20, url=url_generator) + c.journal_pager = SqlPage( + journal, page=p, items_per_page=20, url_maker=url_generator) c.journal_day_aggreagate = self._get_daily_aggregate(c.journal_pager) c.journal_data = render( diff --git a/rhodecode/apps/login/tests/test_login.py b/rhodecode/apps/login/tests/test_login.py --- a/rhodecode/apps/login/tests/test_login.py +++ b/rhodecode/apps/login/tests/test_login.py @@ -93,7 +93,7 @@ class TestLoginController(object): session = response.get_session_from_response() username = session['rhodecode_user'].get('username') assert username == 'test_admin' - response.mustcontain('/%s' % HG_REPO) + response.mustcontain('logout') def test_login_regular_ok(self): response = self.app.post(route_path('login'), @@ -104,8 +104,7 @@ class TestLoginController(object): session = response.get_session_from_response() username = session['rhodecode_user'].get('username') assert username == 'test_regular' - - response.mustcontain('/%s' % HG_REPO) + response.mustcontain('logout') def test_login_regular_forbidden_when_super_admin_restriction(self): from rhodecode.authentication.plugins.auth_rhodecode import RhodeCodeAuthPlugin @@ -225,7 +224,7 @@ class TestLoginController(object): session = response.get_session_from_response() username = session['rhodecode_user'].get('username') assert username == temp_user - response.mustcontain('/%s' % HG_REPO) + response.mustcontain('logout') # new password should be bcrypted, after log-in and transfer user = User.get_by_username(temp_user) @@ -401,7 +400,7 @@ class TestLoginController(object): ) # This should be overridden assert_session_flash( - response, 'You have successfully registered with RhodeCode') + response, 'You have successfully registered with RhodeCode. 
You can log-in now.') ret = Session().query(User).filter( User.username == 'test_regular4').one() diff --git a/rhodecode/apps/login/tests/test_password_reset.py b/rhodecode/apps/login/tests/test_password_reset.py --- a/rhodecode/apps/login/tests/test_password_reset.py +++ b/rhodecode/apps/login/tests/test_password_reset.py @@ -88,7 +88,7 @@ class TestPasswordReset(TestController): response = self.app.get(route_path('reset_password')) - assert_response = AssertResponse(response) + assert_response = response.assert_response() if show_reset: response.mustcontain('Send password reset email') assert_response.one_element_exists('#email') diff --git a/rhodecode/apps/login/tests/test_register_captcha.py b/rhodecode/apps/login/tests/test_register_captcha.py --- a/rhodecode/apps/login/tests/test_register_captcha.py +++ b/rhodecode/apps/login/tests/test_register_captcha.py @@ -90,7 +90,7 @@ class TestRegisterCaptcha(object): response = app.get(ADMIN_PREFIX + '/register') - assertr = AssertResponse(response) + assertr = response.assert_response() if active: assertr.one_element_exists('#recaptcha_field') else: @@ -128,6 +128,6 @@ class TestRegisterCaptcha(object): else: # If captche input is invalid we expect to stay on the registration # page with an error message displayed. - assertr = AssertResponse(response) + assertr = response.assert_response() assert response.status_int == 200 assertr.one_element_exists('#recaptcha_field ~ span.error-message') diff --git a/rhodecode/apps/login/views.py b/rhodecode/apps/login/views.py --- a/rhodecode/apps/login/views.py +++ b/rhodecode/apps/login/views.py @@ -312,8 +312,6 @@ class LoginView(BaseAppView): action_data = {'data': new_user.get_api_data(), 'user_agent': self.request.user_agent} - - if external_identity: action_data['external_identity'] = external_identity @@ -329,8 +327,13 @@ class LoginView(BaseAppView): event = UserRegistered(user=new_user, session=self.session) trigger(event) h.flash( - _('You have successfully registered with RhodeCode'), + _('You have successfully registered with RhodeCode. 
You can log-in now.'), category='success') + if external_identity: + h.flash( + _('Please use the {identity} button to log-in').format( + identity=external_identity), + category='success') Session().commit() redirect_ro = self.request.route_path('login') diff --git a/rhodecode/apps/my_account/views/my_account.py b/rhodecode/apps/my_account/views/my_account.py --- a/rhodecode/apps/my_account/views/my_account.py +++ b/rhodecode/apps/my_account/views/my_account.py @@ -33,22 +33,21 @@ from rhodecode import forms from rhodecode.lib import helpers as h from rhodecode.lib import audit_logger from rhodecode.lib.ext_json import json -from rhodecode.lib.auth import LoginRequired, NotAnonymous, CSRFRequired, \ - HasRepoPermissionAny, HasRepoGroupPermissionAny +from rhodecode.lib.auth import ( + LoginRequired, NotAnonymous, CSRFRequired, + HasRepoPermissionAny, HasRepoGroupPermissionAny, AuthUser) from rhodecode.lib.channelstream import ( channelstream_request, ChannelstreamException) from rhodecode.lib.utils2 import safe_int, md5, str2bool from rhodecode.model.auth_token import AuthTokenModel from rhodecode.model.comment import CommentsModel from rhodecode.model.db import ( - IntegrityError, joinedload, + IntegrityError, or_, in_filter_generator, Repository, UserEmailMap, UserApiKeys, UserFollowing, PullRequest, UserBookmark, RepoGroup) from rhodecode.model.meta import Session from rhodecode.model.pull_request import PullRequestModel -from rhodecode.model.scm import RepoList from rhodecode.model.user import UserModel -from rhodecode.model.repo import RepoModel from rhodecode.model.user_group import UserGroupModel from rhodecode.model.validation_schema.schemas import user_schema @@ -345,22 +344,59 @@ class MyAccountView(BaseAppView, DataGri 'You should see a new live message now.'} def _load_my_repos_data(self, watched=False): + + allowed_ids = [-1] + self._rhodecode_user.repo_acl_ids_from_stack(AuthUser.repo_read_perms) + if watched: - admin = False - follows_repos = Session().query(UserFollowing)\ - .filter(UserFollowing.user_id == self._rhodecode_user.user_id)\ - .options(joinedload(UserFollowing.follows_repository))\ + # repos user watch + repo_list = Session().query( + Repository + ) \ + .join( + (UserFollowing, UserFollowing.follows_repo_id == Repository.repo_id) + ) \ + .filter( + UserFollowing.user_id == self._rhodecode_user.user_id + ) \ + .filter(or_( + # generate multiple IN to fix limitation problems + *in_filter_generator(Repository.repo_id, allowed_ids)) + ) \ + .order_by(Repository.repo_name) \ .all() - repo_list = [x.follows_repository for x in follows_repos] + else: - admin = True - repo_list = Repository.get_all_repos( - user_id=self._rhodecode_user.user_id) - repo_list = RepoList(repo_list, perm_set=[ - 'repository.read', 'repository.write', 'repository.admin']) + # repos user is owner of + repo_list = Session().query( + Repository + ) \ + .filter( + Repository.user_id == self._rhodecode_user.user_id + ) \ + .filter(or_( + # generate multiple IN to fix limitation problems + *in_filter_generator(Repository.repo_id, allowed_ids)) + ) \ + .order_by(Repository.repo_name) \ + .all() - repos_data = RepoModel().get_repos_as_dict( - repo_list=repo_list, admin=admin, short_name=False) + _render = self.request.get_partial_renderer( + 'rhodecode:templates/data_table/_dt_elements.mako') + + def repo_lnk(name, rtype, rstate, private, archived, fork_of): + return _render('repo_name', name, rtype, rstate, private, archived, fork_of, + short_name=False, admin=False) + + repos_data = [] + for repo 
in repo_list: + row = { + "name": repo_lnk(repo.repo_name, repo.repo_type, repo.repo_state, + repo.private, repo.archived, repo.fork), + "name_raw": repo.repo_name.lower(), + } + + repos_data.append(row) + # json used to render the grid return json.dumps(repos_data) @@ -398,16 +434,19 @@ class MyAccountView(BaseAppView, DataGri def my_account_bookmarks(self): c = self.load_default_context() c.active = 'bookmarks' + c.bookmark_items = UserBookmark.get_bookmarks_for_user( + self._rhodecode_db_user.user_id, cache=False) return self._get_template_context(c) - def _process_entry(self, entry, user_id): + def _process_bookmark_entry(self, entry, user_id): position = safe_int(entry.get('position')) + cur_position = safe_int(entry.get('cur_position')) if position is None: return # check if this is an existing entry is_new = False - db_entry = UserBookmark().get_by_position_for_user(position, user_id) + db_entry = UserBookmark().get_by_position_for_user(cur_position, user_id) if db_entry and str2bool(entry.get('remove')): log.debug('Marked bookmark %s for deletion', db_entry) @@ -446,12 +485,12 @@ class MyAccountView(BaseAppView, DataGri should_save = True if should_save: - log.debug('Saving bookmark %s, new:%s', db_entry, is_new) # mark user and position db_entry.user_id = user_id db_entry.position = position db_entry.title = entry.get('title') db_entry.redirect_url = entry.get('redirect_url') or default_redirect_url + log.debug('Saving bookmark %s, new:%s', db_entry, is_new) Session().add(db_entry) @@ -468,15 +507,31 @@ class MyAccountView(BaseAppView, DataGri controls = peppercorn.parse(self.request.POST.items()) user_id = c.user.user_id + # validate positions + positions = {} + for entry in controls.get('bookmarks', []): + position = safe_int(entry['position']) + if position is None: + continue + + if position in positions: + h.flash(_("Position {} is defined twice. " + "Please correct this error.").format(position), category='error') + return HTTPFound(h.route_path('my_account_bookmarks')) + + entry['position'] = position + entry['cur_position'] = safe_int(entry.get('cur_position')) + positions[position] = entry + try: - for entry in controls.get('bookmarks', []): - self._process_entry(entry, user_id) + for entry in positions.values(): + self._process_bookmark_entry(entry, user_id) Session().commit() h.flash(_("Update Bookmarks"), category='success') except IntegrityError: h.flash(_("Failed to update bookmarks. 
" - "Make sure an unique position is used"), category='error') + "Make sure an unique position is used."), category='error') return HTTPFound(h.route_path('my_account_bookmarks')) @@ -582,6 +637,7 @@ class MyAccountView(BaseAppView, DataGri 'email': c.user.email, 'firstname': c.user.firstname, 'lastname': c.user.lastname, + 'description': c.user.description, } c.form = forms.RcForm( schema, appstruct=appstruct, @@ -664,7 +720,8 @@ class MyAccountView(BaseAppView, DataGri 'target_repo': _render('pullrequest_target_repo', pr.target_repo.repo_name), 'name': _render('pullrequest_name', - pr.pull_request_id, pr.target_repo.repo_name, + pr.pull_request_id, pr.pull_request_state, + pr.work_in_progress, pr.target_repo.repo_name, short=True), 'name_raw': pr.pull_request_id, 'status': _render('pullrequest_status', diff --git a/rhodecode/apps/my_account/views/my_account_notifications.py b/rhodecode/apps/my_account/views/my_account_notifications.py --- a/rhodecode/apps/my_account/views/my_account_notifications.py +++ b/rhodecode/apps/my_account/views/my_account_notifications.py @@ -28,7 +28,7 @@ from rhodecode.apps._base import BaseApp from rhodecode.lib.auth import LoginRequired, NotAnonymous, CSRFRequired from rhodecode.lib import helpers as h -from rhodecode.lib.helpers import Page +from rhodecode.lib.helpers import SqlPage from rhodecode.lib.utils2 import safe_int from rhodecode.model.db import Notification from rhodecode.model.notification import NotificationModel @@ -74,13 +74,16 @@ class MyAccountNotificationsView(BaseApp p = safe_int(self.request.GET.get('page', 1), 1) - def url_generator(**kw): + def url_generator(page_num): + query_params = { + 'page': page_num + } _query = self.request.GET.mixed() - _query.update(kw) - return self.request.current_route_path(_query=_query) + query_params.update(_query) + return self.request.current_route_path(_query=query_params) - c.notifications = Page(notifications, page=p, items_per_page=10, - url=url_generator) + c.notifications = SqlPage(notifications, page=p, items_per_page=10, + url_maker=url_generator) c.unread_type = 'unread' c.all_type = 'all' diff --git a/rhodecode/apps/repo_group/views/repo_group_advanced.py b/rhodecode/apps/repo_group/views/repo_group_advanced.py --- a/rhodecode/apps/repo_group/views/repo_group_advanced.py +++ b/rhodecode/apps/repo_group/views/repo_group_advanced.py @@ -46,9 +46,16 @@ class RepoGroupSettingsView(RepoGroupApp route_name='edit_repo_group_advanced', request_method='GET', renderer='rhodecode:templates/admin/repo_groups/repo_group_edit.mako') def edit_repo_group_advanced(self): + _ = self.request.translate c = self.load_default_context() c.active = 'advanced' c.repo_group = self.db_repo_group + + # update commit cache if GET flag is present + if self.request.GET.get('update_commit_cache'): + self.db_repo_group.update_commit_cache() + h.flash(_('updated commit cache'), category='success') + return self._get_template_context(c) @LoginRequired() diff --git a/rhodecode/apps/repository/__init__.py b/rhodecode/apps/repository/__init__.py --- a/rhodecode/apps/repository/__init__.py +++ b/rhodecode/apps/repository/__init__.py @@ -79,6 +79,10 @@ def includeme(config): pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/preview', repo_route=True) config.add_route( + name='repo_commit_comment_attachment_upload', + pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/attachment_upload', repo_route=True) + + config.add_route( name='repo_commit_comment_delete', 
pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/{comment_id}/delete', repo_route=True) diff --git a/rhodecode/apps/repository/tests/test_repo_commit_comments.py b/rhodecode/apps/repository/tests/test_repo_commit_comments.py --- a/rhodecode/apps/repository/tests/test_repo_commit_comments.py +++ b/rhodecode/apps/repository/tests/test_repo_commit_comments.py @@ -98,7 +98,7 @@ class TestRepoCommitCommentsView(TestCon assert notification.type_ == Notification.TYPE_CHANGESET_COMMENT author = notification.created_by_user.username_and_name - sbj = '{0} left a {1} on commit `{2}` in the {3} repository'.format( + sbj = '@{0} left a {1} on commit `{2}` in the `{3}` repository'.format( author, comment_type, h.show_id(commit), backend.repo_name) assert sbj == notification.subject @@ -159,7 +159,7 @@ class TestRepoCommitCommentsView(TestCon assert comment.revision == commit_id author = notification.created_by_user.username_and_name - sbj = '{0} left a {1} on file `{2}` in commit `{3}` in the {4} repository'.format( + sbj = '@{0} left a {1} on file `{2}` in commit `{3}` in the `{4}` repository'.format( author, comment_type, f_path, h.show_id(commit), backend.repo_name) assert sbj == notification.subject @@ -230,7 +230,7 @@ class TestRepoCommitCommentsView(TestCon assert notification.type_ == Notification.TYPE_CHANGESET_COMMENT author = notification.created_by_user.username_and_name - sbj = '[status: Approved] {0} left a note on commit `{1}` in the {2} repository'.format( + sbj = '[status: Approved] @{0} left a note on commit `{1}` in the `{2}` repository'.format( author, h.show_id(commit), backend.repo_name) assert sbj == notification.subject @@ -299,14 +299,14 @@ class TestRepoCommitCommentsView(TestCon def assert_comment_links(response, comments, inline_comments): if comments == 1: - comments_text = "%d Commit comment" % comments + comments_text = "%d General" % comments else: - comments_text = "%d Commit comments" % comments + comments_text = "%d General" % comments if inline_comments == 1: - inline_comments_text = "%d Inline Comment" % inline_comments + inline_comments_text = "%d Inline" % inline_comments else: - inline_comments_text = "%d Inline Comments" % inline_comments + inline_comments_text = "%d Inline" % inline_comments if comments: response.mustcontain('%s,' % comments_text) @@ -315,6 +315,6 @@ def assert_comment_links(response, comme if inline_comments: response.mustcontain( - 'id="inline-comments-counter">%s%s' % inline_comments_text) else: response.mustcontain(inline_comments_text) diff --git a/rhodecode/apps/repository/tests/test_repo_commits.py b/rhodecode/apps/repository/tests/test_repo_commits.py --- a/rhodecode/apps/repository/tests/test_repo_commits.py +++ b/rhodecode/apps/repository/tests/test_repo_commits.py @@ -20,6 +20,7 @@ import pytest +from rhodecode.apps.repository.tests.test_repo_compare import ComparePage from rhodecode.lib.helpers import _shorten_commit_id @@ -79,13 +80,22 @@ class TestRepoCommitView(object): 'git': '03fa803d7e9fb14daa9a3089e0d1494eda75d986', 'svn': '337', } + diff_stat = { + 'hg': (21, 943, 288), + 'git': (20, 941, 286), + 'svn': (21, 943, 288), + } + commit_id = commit_id[backend.alias] response = self.app.get(route_path( 'repo_commit', repo_name=backend.repo_name, commit_id=commit_id)) response.mustcontain(_shorten_commit_id(commit_id)) - response.mustcontain('21 files changed: 943 inserted, 288 deleted') + + compare_page = ComparePage(response) + file_changes = diff_stat[backend.alias] + compare_page.contains_change_summary(*file_changes) 
# files op files response.mustcontain('File not present at commit: %s' % @@ -121,20 +131,24 @@ class TestRepoCommitView(object): response.mustcontain(_shorten_commit_id(commit_ids[0])) response.mustcontain(_shorten_commit_id(commit_ids[1])) + compare_page = ComparePage(response) + # svn is special if backend.alias == 'svn': response.mustcontain('new file 10644') - response.mustcontain('1 file changed: 5 inserted, 1 deleted') - response.mustcontain('12 files changed: 236 inserted, 22 deleted') - response.mustcontain('21 files changed: 943 inserted, 288 deleted') + for file_changes in [(1, 5, 1), (12, 236, 22), (21, 943, 288)]: + compare_page.contains_change_summary(*file_changes) + elif backend.alias == 'git': + response.mustcontain('new file 100644') + for file_changes in [(12, 222, 20), (20, 941, 286)]: + compare_page.contains_change_summary(*file_changes) else: response.mustcontain('new file 100644') - response.mustcontain('12 files changed: 222 inserted, 20 deleted') - response.mustcontain('21 files changed: 943 inserted, 288 deleted') + for file_changes in [(12, 222, 20), (21, 943, 288)]: + compare_page.contains_change_summary(*file_changes) # files op files - response.mustcontain('File not present at commit: %s' % - _shorten_commit_id(commit_ids[1])) + response.mustcontain('File not present at commit: %s' % _shorten_commit_id(commit_ids[1])) response.mustcontain('Added docstrings to vcs.cli') # commit msg response.mustcontain('Changed theme to ADC theme') # commit msg @@ -166,13 +180,21 @@ class TestRepoCommitView(object): response.mustcontain('File not present at commit: %s' % _shorten_commit_id(commit_ids[1])) + compare_page = ComparePage(response) + # svn is special if backend.alias == 'svn': response.mustcontain('new file 10644') - response.mustcontain('32 files changed: 1179 inserted, 310 deleted') + file_changes = (32, 1179, 310) + compare_page.contains_change_summary(*file_changes) + elif backend.alias == 'git': + response.mustcontain('new file 100644') + file_changes = (31, 1163, 306) + compare_page.contains_change_summary(*file_changes) else: response.mustcontain('new file 100644') - response.mustcontain('32 files changed: 1165 inserted, 308 deleted') + file_changes = (32, 1165, 308) + compare_page.contains_change_summary(*file_changes) response.mustcontain('Added docstrings to vcs.cli') # commit msg response.mustcontain('Changed theme to ADC theme') # commit msg @@ -246,7 +268,7 @@ new file mode 120000 """, 'git': r"""diff --git a/README b/README new file mode 120000 -index 0000000000000000000000000000000000000000..92cacd285355271487b7e379dba6ca60f9a554a4 +index 0000000..92cacd2 --- /dev/null +++ b/README @@ -0,0 +1 @@ @@ -300,6 +322,6 @@ Added a symlink # right pane diff menus if right_menu: - for elem in ['Hide whitespace changes', 'Toggle Wide Mode diff', + for elem in ['Hide whitespace changes', 'Toggle wide diff', 'Show full context diff']: response.mustcontain(elem) diff --git a/rhodecode/apps/repository/tests/test_repo_compare.py b/rhodecode/apps/repository/tests/test_repo_compare.py --- a/rhodecode/apps/repository/tests/test_repo_compare.py +++ b/rhodecode/apps/repository/tests/test_repo_compare.py @@ -623,8 +623,8 @@ class ComparePage(AssertResponse): def contains_change_summary(self, files_changed, inserted, deleted): template = ( - "{files_changed} file{plural} changed: " - "{inserted} inserted, {deleted} deleted") + '{files_changed} file{plural} changed: ' + '{inserted} inserted, {deleted} deleted') self.response.mustcontain(template.format( 
files_changed=files_changed, plural="s" if files_changed > 1 else "", diff --git a/rhodecode/apps/repository/tests/test_repo_compare_local.py b/rhodecode/apps/repository/tests/test_repo_compare_local.py --- a/rhodecode/apps/repository/tests/test_repo_compare_local.py +++ b/rhodecode/apps/repository/tests/test_repo_compare_local.py @@ -102,7 +102,7 @@ class TestCompareView(object): 'git': { 'tag': 'v0.2.2', 'branch': 'master', - 'response': (71, 2269, 3416) + 'response': (70, 1855, 3002) }, } diff --git a/rhodecode/apps/repository/tests/test_repo_compare_on_single_file.py b/rhodecode/apps/repository/tests/test_repo_compare_on_single_file.py --- a/rhodecode/apps/repository/tests/test_repo_compare_on_single_file.py +++ b/rhodecode/apps/repository/tests/test_repo_compare_on_single_file.py @@ -20,6 +20,7 @@ import pytest +from rhodecode.apps.repository.tests.test_repo_compare import ComparePage from rhodecode.lib.vcs import nodes from rhodecode.lib.vcs.backends.base import EmptyCommit from rhodecode.tests.fixture import Fixture @@ -49,18 +50,18 @@ class TestSideBySideDiff(object): 'hg': { 'commits': ['25d7e49c18b159446cadfa506a5cf8ad1cb04067', '603d6c72c46d953420c89d36372f08d9f305f5dd'], - 'changes': '21 files changed: 943 inserted, 288 deleted' + 'changes': (21, 943, 288), }, 'git': { 'commits': ['6fc9270775aaf5544c1deb014f4ddd60c952fcbb', '03fa803d7e9fb14daa9a3089e0d1494eda75d986'], - 'changes': '21 files changed: 943 inserted, 288 deleted' + 'changes': (20, 941, 286), }, 'svn': { 'commits': ['336', '337'], - 'changes': '21 files changed: 943 inserted, 288 deleted' + 'changes': (21, 943, 288), }, } @@ -79,26 +80,27 @@ class TestSideBySideDiff(object): params=dict(target_repo=backend.repo_name, diffmode='sidebyside') )) - response.mustcontain(file_changes) - response.mustcontain('Expand 1 commit') + compare_page = ComparePage(response) + compare_page.contains_change_summary(*file_changes) + response.mustcontain('Collapse 1 commit') def test_diff_sidebyside_two_commits(self, app, backend): commit_id_range = { 'hg': { 'commits': ['4fdd71e9427417b2e904e0464c634fdee85ec5a7', '603d6c72c46d953420c89d36372f08d9f305f5dd'], - 'changes': '32 files changed: 1165 inserted, 308 deleted' + 'changes': (32, 1165, 308), }, 'git': { 'commits': ['f5fbf9cfd5f1f1be146f6d3b38bcd791a7480c13', '03fa803d7e9fb14daa9a3089e0d1494eda75d986'], - 'changes': '32 files changed: 1165 inserted, 308 deleted' + 'changes': (31, 1163, 306), }, 'svn': { 'commits': ['335', '337'], - 'changes': '32 files changed: 1179 inserted, 310 deleted' + 'changes': (32, 1179, 310), }, } @@ -117,8 +119,36 @@ class TestSideBySideDiff(object): params=dict(target_repo=backend.repo_name, diffmode='sidebyside') )) - response.mustcontain(file_changes) - response.mustcontain('Expand 2 commits') + compare_page = ComparePage(response) + compare_page.contains_change_summary(*file_changes) + + response.mustcontain('Collapse 2 commits') + + def test_diff_sidebyside_collapsed_commits(self, app, backend_svn): + commit_id_range = { + + 'svn': { + 'commits': ['330', + '337'], + + }, + } + + commit_info = commit_id_range['svn'] + commit2, commit1 = commit_info['commits'] + + response = self.app.get(route_path( + 'repo_compare', + repo_name=backend_svn.repo_name, + source_ref_type='rev', + source_ref=commit2, + target_repo=backend_svn.repo_name, + target_ref_type='rev', + target_ref=commit1, + params=dict(target_repo=backend_svn.repo_name, diffmode='sidebyside') + )) + + response.mustcontain('Expand 7 commits') @pytest.mark.xfail(reason='GIT does not handle empty 
commit compare correct (missing 1 commit)') def test_diff_side_by_side_from_0_commit(self, app, backend, backend_stub): @@ -145,14 +175,14 @@ class TestSideBySideDiff(object): params=dict(diffmode='sidebyside') )) - response.mustcontain('Expand 2 commits') + response.mustcontain('Collapse 2 commits') response.mustcontain('123 file changed') response.mustcontain( 'r%s:%s...r%s:%s' % ( commit1.idx, commit1.short_id, commit2.idx, commit2.short_id)) - response.mustcontain('{}'.format(f_path)) + response.mustcontain(f_path) @pytest.mark.xfail(reason='GIT does not handle empty commit compare correct (missing 1 commit)') def test_diff_side_by_side_from_0_commit_with_file_filter(self, app, backend, backend_stub): @@ -179,14 +209,14 @@ class TestSideBySideDiff(object): params=dict(f_path=f_path, target_repo=repo.repo_name, diffmode='sidebyside') )) - response.mustcontain('Expand 2 commits') + response.mustcontain('Collapse 2 commits') response.mustcontain('1 file changed') response.mustcontain( 'r%s:%s...r%s:%s' % ( commit1.idx, commit1.short_id, commit2.idx, commit2.short_id)) - response.mustcontain('{}'.format(f_path)) + response.mustcontain(f_path) def test_diff_side_by_side_with_empty_file(self, app, backend, backend_stub): commits = [ @@ -211,32 +241,32 @@ class TestSideBySideDiff(object): params=dict(f_path=f_path, target_repo=repo.repo_name, diffmode='sidebyside') )) - response.mustcontain('Expand 2 commits') + response.mustcontain('Collapse 2 commits') response.mustcontain('1 file changed') response.mustcontain( 'r%s:%s...r%s:%s' % ( commit2.idx, commit2.short_id, commit3.idx, commit3.short_id)) - response.mustcontain('{}'.format(f_path)) + response.mustcontain(f_path) def test_diff_sidebyside_two_commits_with_file_filter(self, app, backend): commit_id_range = { 'hg': { 'commits': ['4fdd71e9427417b2e904e0464c634fdee85ec5a7', '603d6c72c46d953420c89d36372f08d9f305f5dd'], - 'changes': '1 file changed: 3 inserted, 3 deleted' + 'changes': (1, 3, 3) }, 'git': { 'commits': ['f5fbf9cfd5f1f1be146f6d3b38bcd791a7480c13', '03fa803d7e9fb14daa9a3089e0d1494eda75d986'], - 'changes': '1 file changed: 3 inserted, 3 deleted' + 'changes': (1, 3, 3) }, 'svn': { 'commits': ['335', '337'], - 'changes': '1 file changed: 3 inserted, 3 deleted' + 'changes': (1, 3, 3) }, } f_path = 'docs/conf.py' @@ -255,5 +285,7 @@ class TestSideBySideDiff(object): params=dict(f_path=f_path, target_repo=backend.repo_name, diffmode='sidebyside') )) - response.mustcontain('Expand 2 commits') - response.mustcontain(file_changes) + response.mustcontain('Collapse 2 commits') + + compare_page = ComparePage(response) + compare_page.contains_change_summary(*file_changes) diff --git a/rhodecode/apps/repository/tests/test_repo_feed.py b/rhodecode/apps/repository/tests/test_repo_feed.py --- a/rhodecode/apps/repository/tests/test_repo_feed.py +++ b/rhodecode/apps/repository/tests/test_repo_feed.py @@ -41,7 +41,7 @@ def route_path(name, params=None, **kwar class TestFeedView(TestController): @pytest.mark.parametrize("feed_type,response_types,content_type",[ - ('rss', [''], + ('rss', ['%s' - % (repo_name, fork_name, fork_name)) + 'Forked repository %s as %s' % ( + repo_name, fork_name, fork_name)) # test if the fork was created in the database fork_repo = Session().query(Repository)\ @@ -205,7 +203,6 @@ class TestRepoForkViewTests(TestControll 'repo_type': backend.alias, 'description': description, 'private': 'False', - 'landing_rev': 'rev:tip', 'csrf_token': csrf_token, } self.app.post( @@ -218,8 +215,8 @@ class 
TestRepoForkViewTests(TestControll route_path('repo_creating_check', repo_name=fork_name_full)) # test if we have a message that fork is ok assert_session_flash(response, - 'Forked repository %s as %s' - % (repo_name, fork_name_full, fork_name_full)) + 'Forked repository %s as %s' % ( + repo_name, fork_name_full, fork_name_full)) # test if the fork was created in the database fork_repo = Session().query(Repository)\ diff --git a/rhodecode/apps/repository/tests/test_repo_issue_tracker.py b/rhodecode/apps/repository/tests/test_repo_issue_tracker.py --- a/rhodecode/apps/repository/tests/test_repo_issue_tracker.py +++ b/rhodecode/apps/repository/tests/test_repo_issue_tracker.py @@ -84,7 +84,7 @@ class TestRepoIssueTracker(object): extra_environ=xhr_header, params=data) assert response.body == \ - 'example of prefix replacement' + 'example of prefix replacement' @request.addfinalizer def cleanup(): @@ -125,7 +125,7 @@ class TestRepoIssueTracker(object): self.settings_model.delete_entries(self.uid) def test_delete_issuetracker_pattern( - self, autologin_user, backend, csrf_token, settings_util): + self, autologin_user, backend, csrf_token, settings_util, xhr_header): repo = backend.create_repo() repo_name = repo.repo_name entry_key = 'issuetracker_pat_' @@ -141,8 +141,9 @@ class TestRepoIssueTracker(object): repo_name=backend.repo.repo_name), { 'uid': uid, - 'csrf_token': csrf_token - }, status=302) + 'csrf_token': csrf_token, + '': '' + }, extra_environ=xhr_header, status=200) settings = IssueTrackerSettingsModel( repo=Repository.get_by_repo_name(repo_name)).get_repo_settings() assert 'rhodecode_%s%s' % (entry_key, uid) not in settings diff --git a/rhodecode/apps/repository/tests/test_repo_permissions.py b/rhodecode/apps/repository/tests/test_repo_permissions.py --- a/rhodecode/apps/repository/tests/test_repo_permissions.py +++ b/rhodecode/apps/repository/tests/test_repo_permissions.py @@ -62,7 +62,7 @@ class TestRepoPermissionsView(object): route_path('edit_repo_perms', repo_name=repo_name), form_data).follow() - assert 'Repository permissions updated' in response + assert 'Repository access permissions updated' in response # revoke given form_data = permission_update_data_generator( @@ -74,4 +74,4 @@ class TestRepoPermissionsView(object): route_path('edit_repo_perms', repo_name=repo_name), form_data).follow() - assert 'Repository permissions updated' in response + assert 'Repository access permissions updated' in response diff --git a/rhodecode/apps/repository/tests/test_repo_pullrequests.py b/rhodecode/apps/repository/tests/test_repo_pullrequests.py --- a/rhodecode/apps/repository/tests/test_repo_pullrequests.py +++ b/rhodecode/apps/repository/tests/test_repo_pullrequests.py @@ -101,12 +101,11 @@ class TestPullrequestsView(object): for commit_id in pull_request.revisions: response.mustcontain(commit_id) - assert pull_request.target_ref_parts.type in response - assert pull_request.target_ref_parts.name in response - target_clone_url = pull_request.target_repo.clone_url() - assert target_clone_url in response + response.mustcontain(pull_request.target_ref_parts.type) + response.mustcontain(pull_request.target_ref_parts.name) - assert 'class="pull-request-merge"' in response + response.mustcontain('class="pull-request-merge"') + if pr_merge_enabled: response.mustcontain('Pull request reviewer approval is pending') else: @@ -261,7 +260,7 @@ class TestPullrequestsView(object): True, True, '', MergeFailureReason.MISSING_TARGET_REF, metadata={'target_ref': 
PullRequest.unicode_to_reference(unicode_reference)}) response.assert_response().element_contains( - 'span[data-role="merge-message"]', merge_resp.merge_status_message) + 'div[data-role="merge-message"]', merge_resp.merge_status_message) def test_comment_and_close_pull_request_custom_message_approved( self, pr_util, csrf_token, xhr_header): @@ -284,7 +283,7 @@ class TestPullrequestsView(object): journal = UserLog.query()\ .filter(UserLog.user_id == author)\ .filter(UserLog.repository_id == repo) \ - .order_by('user_log_id') \ + .order_by(UserLog.user_log_id.asc()) \ .all() assert journal[-1].action == 'repo.pull_request.close' @@ -323,7 +322,7 @@ class TestPullrequestsView(object): journal = UserLog.query()\ .filter(UserLog.user_id == author, UserLog.repository_id == repo) \ - .order_by('user_log_id') \ + .order_by(UserLog.user_log_id.asc()) \ .all() assert journal[-1].action == 'repo.pull_request.close' @@ -467,7 +466,7 @@ class TestPullrequestsView(object): .filter(Notification.created_by == pull_request.author.user_id, Notification.type_ == Notification.TYPE_PULL_REQUEST, Notification.subject.contains( - "wants you to review pull request #%s" % pull_request_id)) + "requested a pull request review. !%s" % pull_request_id)) assert len(notifications.all()) == 1 # Change reviewers and check that a notification was made @@ -536,9 +535,9 @@ class TestPullrequestsView(object): # Check generated diff contents response = response.follow() - assert 'content_of_ancestor' not in response.body - assert 'content_of_ancestor-child' not in response.body - assert 'content_of_change' in response.body + response.mustcontain(no=['content_of_ancestor']) + response.mustcontain(no=['content_of_ancestor-child']) + response.mustcontain('content_of_change') def test_merge_pull_request_enabled(self, pr_util, csrf_token): # Clear any previous calls to rcextensions @@ -549,11 +548,10 @@ class TestPullrequestsView(object): pull_request_id = pull_request.pull_request_id repo_name = pull_request.target_repo.scm_instance().name, - response = self.app.post( - route_path('pullrequest_merge', - repo_name=str(repo_name[0]), - pull_request_id=pull_request_id), - params={'csrf_token': csrf_token}).follow() + url = route_path('pullrequest_merge', + repo_name=str(repo_name[0]), + pull_request_id=pull_request_id) + response = self.app.post(url, params={'csrf_token': csrf_token}).follow() pull_request = PullRequest.get(pull_request_id) @@ -563,7 +561,7 @@ class TestPullrequestsView(object): pull_request, ChangesetStatus.STATUS_APPROVED) # Check the relevant log entries were added - user_logs = UserLog.query().order_by('-user_log_id').limit(3) + user_logs = UserLog.query().order_by(UserLog.user_log_id.desc()).limit(3) actions = [log.action for log in user_logs] pr_commit_ids = PullRequestModel()._get_commit_ids(pull_request) expected_actions = [ @@ -573,7 +571,7 @@ class TestPullrequestsView(object): ] assert actions == expected_actions - user_logs = UserLog.query().order_by('-user_log_id').limit(4) + user_logs = UserLog.query().order_by(UserLog.user_log_id.desc()).limit(4) actions = [log for log in user_logs] assert actions[-1].action == 'user.push' assert actions[-1].action_data['commit_ids'] == pr_commit_ids @@ -690,8 +688,8 @@ class TestPullrequestsView(object): pull_request_id=pull_request.pull_request_id)) assert response.status_int == 200 - assert 'Pull request updated to' in response.body - assert 'with 1 added, 0 removed commits.' 
in response.body + response.mustcontain('Pull request updated to') + response.mustcontain('with 1 added, 0 removed commits.') # check that we have now both revisions pull_request = PullRequest.get(pull_request_id) @@ -735,12 +733,12 @@ class TestPullrequestsView(object): backend.pull_heads(source, heads=['change-rebased']) # update PR - self.app.post( - route_path('pullrequest_update', - repo_name=target.repo_name, - pull_request_id=pull_request_id), - params={'update_commits': 'true', 'csrf_token': csrf_token}, - status=200) + url = route_path('pullrequest_update', + repo_name=target.repo_name, + pull_request_id=pull_request_id) + self.app.post(url, + params={'update_commits': 'true', 'csrf_token': csrf_token}, + status=200) # check that we have now both revisions pull_request = PullRequest.get(pull_request_id) @@ -753,8 +751,8 @@ class TestPullrequestsView(object): repo_name=target.repo_name, pull_request_id=pull_request.pull_request_id)) assert response.status_int == 200 - assert 'Pull request updated to' in response.body - assert 'with 1 added, 1 removed commits.' in response.body + response.mustcontain('Pull request updated to') + response.mustcontain('with 1 added, 1 removed commits.') def test_update_target_revision_with_removal_of_1_commit_git(self, backend_git, csrf_token): backend = backend_git @@ -801,12 +799,12 @@ class TestPullrequestsView(object): vcsrepo.run_git_command(['reset', '--soft', 'HEAD~2']) # update PR - self.app.post( - route_path('pullrequest_update', - repo_name=target.repo_name, - pull_request_id=pull_request_id), - params={'update_commits': 'true', 'csrf_token': csrf_token}, - status=200) + url = route_path('pullrequest_update', + repo_name=target.repo_name, + pull_request_id=pull_request_id) + self.app.post(url, + params={'update_commits': 'true', 'csrf_token': csrf_token}, + status=200) response = self.app.get(route_path('pullrequest_new', repo_name=target.repo_name)) assert response.status_int == 200 @@ -871,6 +869,7 @@ class TestPullrequestsView(object): {'message': 'new-feature', 'branch': branch_name}, ] repo = backend_git.create_repo(commits) + repo_name = repo.repo_name commit_ids = backend_git.commit_ids pull_request = PullRequest() @@ -888,13 +887,15 @@ class TestPullrequestsView(object): Session().add(pull_request) Session().commit() + pull_request_id = pull_request.pull_request_id + vcs = repo.scm_instance() vcs.remove_ref('refs/heads/{}'.format(branch_name)) response = self.app.get(route_path( 'pullrequest_show', - repo_name=repo.repo_name, - pull_request_id=pull_request.pull_request_id)) + repo_name=repo_name, + pull_request_id=pull_request_id)) assert response.status_int == 200 @@ -958,15 +959,15 @@ class TestPullrequestsView(object): else: vcs.strip(pr_util.commit_ids['new-feature']) - response = self.app.post( - route_path('pullrequest_update', - repo_name=pull_request.target_repo.repo_name, - pull_request_id=pull_request.pull_request_id), - params={'update_commits': 'true', - 'csrf_token': csrf_token}) + url = route_path('pullrequest_update', + repo_name=pull_request.target_repo.repo_name, + pull_request_id=pull_request.pull_request_id) + response = self.app.post(url, + params={'update_commits': 'true', + 'csrf_token': csrf_token}) assert response.status_int == 200 - assert response.body == 'true' + assert response.body == '{"response": true, "redirect_url": null}' # Make sure that after update, it won't raise 500 errors response = self.app.get(route_path( @@ -992,12 +993,13 @@ class TestPullrequestsView(object): 
pull_request_id=pull_request.pull_request_id)) assert response.status_int == 200 - origin = response.assert_response().get_element('.pr-origininfo .tag') - origin_children = origin.getchildren() - assert len(origin_children) == 1 - target = response.assert_response().get_element('.pr-targetinfo .tag') - target_children = target.getchildren() - assert len(target_children) == 1 + source = response.assert_response().get_element('.pr-source-info') + source_parent = source.getparent() + assert len(source_parent) == 1 + + target = response.assert_response().get_element('.pr-target-info') + target_parent = target.getparent() + assert len(target_parent) == 1 expected_origin_link = route_path( 'repo_commits', @@ -1007,10 +1009,8 @@ class TestPullrequestsView(object): 'repo_commits', repo_name=pull_request.target_repo.scm_instance().name, params=dict(branch='target')) - assert origin_children[0].attrib['href'] == expected_origin_link - assert origin_children[0].text == 'branch: origin' - assert target_children[0].attrib['href'] == expected_target_link - assert target_children[0].text == 'branch: target' + assert source_parent.attrib['href'] == expected_origin_link + assert target_parent.attrib['href'] == expected_target_link def test_bookmark_is_not_a_link(self, pr_util): pull_request = pr_util.create_pull_request() @@ -1025,13 +1025,13 @@ class TestPullrequestsView(object): pull_request_id=pull_request.pull_request_id)) assert response.status_int == 200 - origin = response.assert_response().get_element('.pr-origininfo .tag') - assert origin.text.strip() == 'bookmark: origin' - assert origin.getchildren() == [] + source = response.assert_response().get_element('.pr-source-info') + assert source.text.strip() == 'bookmark:origin' + assert source.getparent().attrib.get('href') is None - target = response.assert_response().get_element('.pr-targetinfo .tag') - assert target.text.strip() == 'bookmark: target' - assert target.getchildren() == [] + target = response.assert_response().get_element('.pr-target-info') + assert target.text.strip() == 'bookmark:target' + assert target.getparent().attrib.get('href') is None def test_tag_is_not_a_link(self, pr_util): pull_request = pr_util.create_pull_request() @@ -1046,13 +1046,13 @@ class TestPullrequestsView(object): pull_request_id=pull_request.pull_request_id)) assert response.status_int == 200 - origin = response.assert_response().get_element('.pr-origininfo .tag') - assert origin.text.strip() == 'tag: origin' - assert origin.getchildren() == [] + source = response.assert_response().get_element('.pr-source-info') + assert source.text.strip() == 'tag:origin' + assert source.getparent().attrib.get('href') is None - target = response.assert_response().get_element('.pr-targetinfo .tag') - assert target.text.strip() == 'tag: target' - assert target.getchildren() == [] + target = response.assert_response().get_element('.pr-target-info') + assert target.text.strip() == 'tag:target' + assert target.getparent().attrib.get('href') is None @pytest.mark.parametrize('mergeable', [True, False]) def test_shadow_repository_link( @@ -1205,14 +1205,11 @@ class TestPullrequestsControllerDelete(o def assert_pull_request_status(pull_request, expected_status): - status = ChangesetStatusModel().calculated_review_status( - pull_request=pull_request) + status = ChangesetStatusModel().calculated_review_status(pull_request=pull_request) assert status == expected_status @pytest.mark.parametrize('route', ['pullrequest_new', 'pullrequest_create']) 
@pytest.mark.usefixtures("autologin_user") def test_forbidde_to_repo_summary_for_svn_repositories(backend_svn, app, route): - response = app.get( - route_path(route, repo_name=backend_svn.repo_name), status=404) - + app.get(route_path(route, repo_name=backend_svn.repo_name), status=404) diff --git a/rhodecode/apps/repository/tests/test_repo_summary.py b/rhodecode/apps/repository/tests/test_repo_summary.py --- a/rhodecode/apps/repository/tests/test_repo_summary.py +++ b/rhodecode/apps/repository/tests/test_repo_summary.py @@ -236,7 +236,7 @@ class TestSummaryView(object): with scm_patcher: response = self.app.get( route_path('repo_summary', repo_name=repo_name)) - assert_response = AssertResponse(response) + assert_response = response.assert_response() assert_response.element_contains( '.main .alert-warning strong', 'Missing requirements') assert_response.element_contains( @@ -327,7 +327,7 @@ def summary_view(context_stub, request_s @pytest.mark.usefixtures('app') class TestCreateReferenceData(object): - @pytest.fixture + @pytest.fixture() def example_refs(self): section_1_refs = OrderedDict((('a', 'a_id'), ('b', 'b_id'))) example_refs = [ diff --git a/rhodecode/apps/repository/tests/test_repo_vcs_settings.py b/rhodecode/apps/repository/tests/test_repo_vcs_settings.py --- a/rhodecode/apps/repository/tests/test_repo_vcs_settings.py +++ b/rhodecode/apps/repository/tests/test_repo_vcs_settings.py @@ -98,7 +98,7 @@ class TestVcsSettings(object): repo_name = backend.repo_name response = self.app.get(route_path('edit_repo_vcs', repo_name=repo_name)) - assert_response = AssertResponse(response) + assert_response = response.assert_response() element = assert_response.get_element('#inherit_global_settings') assert element.checked @@ -111,7 +111,7 @@ class TestVcsSettings(object): repo, 'inherit_vcs_settings', checked_value, 'bool') response = self.app.get(route_path('edit_repo_vcs', repo_name=repo_name)) - assert_response = AssertResponse(response) + assert_response = response.assert_response() element = assert_response.get_element('#inherit_global_settings') assert element.checked == checked_value @@ -491,7 +491,7 @@ class TestVcsSettings(object): response = self.app.get( route_path('edit_repo_vcs', repo_name=repo_name), status=200) - assert_response = AssertResponse(response) + assert_response = response.assert_response() for branch in branches: css_selector = '[name=branch_value_{}]'.format(branch.ui_id) element = assert_response.get_element(css_selector) @@ -668,7 +668,7 @@ class TestVcsSettings(object): Session().commit() def assert_repo_value_equals_global_value(self, response, setting): - assert_response = AssertResponse(response) + assert_response = response.assert_response() global_css_selector = '[name={}_inherited]'.format(setting) repo_css_selector = '[name={}]'.format(setting) repo_element = assert_response.get_element(repo_css_selector) diff --git a/rhodecode/apps/repository/tests/test_vcs_settings.py b/rhodecode/apps/repository/tests/test_vcs_settings.py --- a/rhodecode/apps/repository/tests/test_vcs_settings.py +++ b/rhodecode/apps/repository/tests/test_vcs_settings.py @@ -59,7 +59,7 @@ class TestAdminRepoVcsSettings(object): rhodecode.CONFIG, {'labs_settings_active': 'true'}): response = self.app.get(vcs_settings_url) - assertr = AssertResponse(response) + assertr = response.assert_response() assertr.one_element_exists('#rhodecode_{}'.format(setting_name)) @pytest.mark.parametrize('setting_name, setting_backends', [ diff --git a/rhodecode/apps/repository/views/repo_audit_logs.py 
b/rhodecode/apps/repository/views/repo_audit_logs.py --- a/rhodecode/apps/repository/views/repo_audit_logs.py +++ b/rhodecode/apps/repository/views/repo_audit_logs.py @@ -22,6 +22,7 @@ import logging from pyramid.view import view_config from rhodecode.apps._base import RepoAppView +from rhodecode.lib.helpers import SqlPage from rhodecode.lib import helpers as h from rhodecode.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator from rhodecode.lib.utils2 import safe_int @@ -33,8 +34,6 @@ log = logging.getLogger(__name__) class AuditLogsView(RepoAppView): def load_default_context(self): c = self._get_local_tmpl_context() - - return c @LoginRequired() @@ -54,12 +53,15 @@ class AuditLogsView(RepoAppView): filter_term = self.request.GET.get('filter') user_log = RepoModel().get_repo_log(c.db_repo, filter_term) - def url_generator(**kw): + def url_generator(page_num): + query_params = { + 'page': page_num + } if filter_term: - kw['filter'] = filter_term - return self.request.current_route_path(_query=kw) + query_params['filter'] = filter_term + return self.request.current_route_path(_query=query_params) - c.audit_logs = h.Page( - user_log, page=p, items_per_page=10, url=url_generator) + c.audit_logs = SqlPage( + user_log, page=p, items_per_page=10, url_maker=url_generator) c.filter_term = filter_term return self._get_template_context(c) diff --git a/rhodecode/apps/repository/views/repo_changelog.py b/rhodecode/apps/repository/views/repo_changelog.py --- a/rhodecode/apps/repository/views/repo_changelog.py +++ b/rhodecode/apps/repository/views/repo_changelog.py @@ -89,7 +89,7 @@ class RepoChangelogView(RepoAppView): data = dict( raw_id=commit.raw_id, idx=commit.idx, - branch=h.escape(commit.branch), + branch=None, ) if parents: data['parents'] = [ @@ -121,9 +121,16 @@ class RepoChangelogView(RepoAppView): self, c, collection, page, chunk_size, branch_name=None, dynamic=False, f_path=None, commit_id=None): - def url_generator(**kw): - query_params = {} - query_params.update(kw) + def url_generator(page_num): + query_params = { + 'page': page_num + } + + if branch_name: + query_params.update({ + 'branch': branch_name + }) + if f_path: # changelog for file return h.route_path( @@ -139,8 +146,7 @@ class RepoChangelogView(RepoAppView): c.total_cs = len(collection) c.showing_commits = min(chunk_size, c.total_cs) c.pagination = RepoPage(collection, page=page, item_count=c.total_cs, - items_per_page=chunk_size, branch=branch_name, - url=url_generator) + items_per_page=chunk_size, url_maker=url_generator) c.next_page = c.pagination.next_page c.prev_page = c.pagination.previous_page diff --git a/rhodecode/apps/repository/views/repo_commits.py b/rhodecode/apps/repository/views/repo_commits.py --- a/rhodecode/apps/repository/views/repo_commits.py +++ b/rhodecode/apps/repository/views/repo_commits.py @@ -28,6 +28,8 @@ from pyramid.renderers import render from pyramid.response import Response from rhodecode.apps._base import RepoAppView +from rhodecode.apps.file_store import utils as store_utils +from rhodecode.apps.file_store.exceptions import FileNotAllowedException, FileOverSizeException from rhodecode.lib import diffs, codeblocks from rhodecode.lib.auth import ( @@ -43,7 +45,7 @@ from rhodecode.lib.utils2 import safe_un from rhodecode.lib.vcs.backends.base import EmptyCommit from rhodecode.lib.vcs.exceptions import ( RepositoryError, CommitDoesNotExistError) -from rhodecode.model.db import ChangesetComment, ChangesetStatus +from rhodecode.model.db import ChangesetComment, ChangesetStatus, FileStore 
from rhodecode.model.changeset_status import ChangesetStatusModel from rhodecode.model.comment import CommentsModel from rhodecode.model.meta import Session @@ -57,9 +59,6 @@ def _update_with_GET(params, request): params[k] += request.GET.getall(k) - - - class RepoCommitsView(RepoAppView): def load_default_context(self): c = self._get_local_tmpl_context(include_app_defaults=True) @@ -93,6 +92,8 @@ class RepoCommitsView(RepoAppView): try: pre_load = ['affected_files', 'author', 'branch', 'date', 'message', 'parents'] + if self.rhodecode_vcs_repo.alias == 'hg': + pre_load += ['hidden', 'obsolete', 'phase'] if len(commit_range) == 2: commits = self.rhodecode_vcs_repo.get_commits( @@ -129,6 +130,7 @@ class RepoCommitsView(RepoAppView): c.statuses = [] c.comments = [] c.unresolved_comments = [] + c.resolved_comments = [] if len(c.commit_ranges) == 1: commit = c.commit_ranges[0] c.comments = CommentsModel().get_comments( @@ -149,6 +151,8 @@ class RepoCommitsView(RepoAppView): c.unresolved_comments = CommentsModel()\ .get_commit_unresolved_todos(commit.raw_id) + c.resolved_comments = CommentsModel()\ + .get_commit_resolved_todos(commit.raw_id) diff = None # Iterate over ranges (default commit view is always one commit) @@ -412,7 +416,8 @@ class RepoCommitsView(RepoAppView): text = self.request.POST.get('text') renderer = self.request.POST.get('renderer') or 'rst' if text: - return h.render(text, renderer=renderer, mentions=True) + return h.render(text, renderer=renderer, mentions=True, + repo_name=self.db_repo_name) return '' @LoginRequired() @@ -421,6 +426,101 @@ class RepoCommitsView(RepoAppView): 'repository.read', 'repository.write', 'repository.admin') @CSRFRequired() @view_config( + route_name='repo_commit_comment_attachment_upload', request_method='POST', + renderer='json_ext', xhr=True) + def repo_commit_comment_attachment_upload(self): + c = self.load_default_context() + upload_key = 'attachment' + + file_obj = self.request.POST.get(upload_key) + + if file_obj is None: + self.request.response.status = 400 + return {'store_fid': None, + 'access_path': None, + 'error': '{} data field is missing'.format(upload_key)} + + if not hasattr(file_obj, 'filename'): + self.request.response.status = 400 + return {'store_fid': None, + 'access_path': None, + 'error': 'filename cannot be read from the data field'} + + filename = file_obj.filename + file_display_name = filename + + metadata = { + 'user_uploaded': {'username': self._rhodecode_user.username, + 'user_id': self._rhodecode_user.user_id, + 'ip': self._rhodecode_user.ip_addr}} + + # TODO(marcink): allow .ini configuration for allowed_extensions, and file-size + allowed_extensions = [ + 'gif', '.jpeg', '.jpg', '.png', '.docx', '.gz', '.log', '.pdf', + '.pptx', '.txt', '.xlsx', '.zip'] + max_file_size = 10 * 1024 * 1024 # 10MB, also validated via dropzone.js + + try: + storage = store_utils.get_file_storage(self.request.registry.settings) + store_uid, metadata = storage.save_file( + file_obj.file, filename, extra_metadata=metadata, + extensions=allowed_extensions, max_filesize=max_file_size) + except FileNotAllowedException: + self.request.response.status = 400 + permitted_extensions = ', '.join(allowed_extensions) + error_msg = 'File `{}` is not allowed. 
' \ + 'Only following extensions are permitted: {}'.format( + filename, permitted_extensions) + return {'store_fid': None, + 'access_path': None, + 'error': error_msg} + except FileOverSizeException: + self.request.response.status = 400 + limit_mb = h.format_byte_size_binary(max_file_size) + return {'store_fid': None, + 'access_path': None, + 'error': 'File {} is exceeding allowed limit of {}.'.format( + filename, limit_mb)} + + try: + entry = FileStore.create( + file_uid=store_uid, filename=metadata["filename"], + file_hash=metadata["sha256"], file_size=metadata["size"], + file_display_name=file_display_name, + file_description=u'comment attachment `{}`'.format(safe_unicode(filename)), + hidden=True, check_acl=True, user_id=self._rhodecode_user.user_id, + scope_repo_id=self.db_repo.repo_id + ) + Session().add(entry) + Session().commit() + log.debug('Stored upload in DB as %s', entry) + except Exception: + log.exception('Failed to store file %s', filename) + self.request.response.status = 400 + return {'store_fid': None, + 'access_path': None, + 'error': 'File {} failed to store in DB.'.format(filename)} + + Session().commit() + + return { + 'store_fid': store_uid, + 'access_path': h.route_path( + 'download_file', fid=store_uid), + 'fqn_access_path': h.route_url( + 'download_file', fid=store_uid), + 'repo_access_path': h.route_path( + 'repo_artifacts_get', repo_name=self.db_repo_name, uid=store_uid), + 'repo_fqn_access_path': h.route_url( + 'repo_artifacts_get', repo_name=self.db_repo_name, uid=store_uid), + } + + @LoginRequired() + @NotAnonymous() + @HasRepoPermissionAnyDecorator( + 'repository.read', 'repository.write', 'repository.admin') + @CSRFRequired() + @view_config( route_name='repo_commit_comment_delete', request_method='POST', renderer='json_ext') def repo_commit_comment_delete(self): diff --git a/rhodecode/apps/repository/views/repo_compare.py b/rhodecode/apps/repository/views/repo_compare.py --- a/rhodecode/apps/repository/views/repo_compare.py +++ b/rhodecode/apps/repository/views/repo_compare.py @@ -211,7 +211,7 @@ class RepoCompareView(RepoAppView): c.source_ref_type = source_ref_type c.target_ref_type = target_ref_type - pre_load = ["author", "branch", "date", "message"] + pre_load = ["author", "date", "message", "branch"] c.ancestor = None try: diff --git a/rhodecode/apps/repository/views/repo_feed.py b/rhodecode/apps/repository/views/repo_feed.py --- a/rhodecode/apps/repository/views/repo_feed.py +++ b/rhodecode/apps/repository/views/repo_feed.py @@ -22,9 +22,10 @@ import logging from pyramid.view import view_config from pyramid.response import Response -from webhelpers.feedgenerator import Rss201rev2Feed, Atom1Feed + from rhodecode.apps._base import RepoAppView +from rhodecode.lib.feedgenerator import Rss201rev2Feed, Atom1Feed from rhodecode.lib import audit_logger from rhodecode.lib import rc_cache from rhodecode.lib import helpers as h @@ -65,7 +66,7 @@ class RepoFeedView(RepoAppView): config = self._get_config() # common values for feeds self.description = _('Changes on %s repository') - self.title = self.title = _('%s %s feed') % (self.db_repo_name, '%s') + self.title = _('%s %s feed') % (self.db_repo_name, '%s') self.language = config["language"] self.ttl = config["feed_ttl"] self.feed_include_diff = config['feed_include_diff'] @@ -81,7 +82,7 @@ class RepoFeedView(RepoAppView): return diff_processor, _parsed, limited_diff def _get_title(self, commit): - return h.shorter(commit.message, 160) + return h.chop_at_smart(commit.message, '\n', suffix_if_chopped='...') def 
_get_description(self, commit): _renderer = self.request.get_partial_renderer( @@ -104,7 +105,12 @@ class RepoFeedView(RepoAppView): return date def _get_commits(self): - return list(self.rhodecode_vcs_repo[-self.feed_items_per_page:]) + pre_load = ['author', 'branch', 'date', 'message', 'parents'] + collection = self.rhodecode_vcs_repo.get_commits( + branch_name=None, show_hidden=False, pre_load=pre_load, + translate_tags=False) + + return list(collection[-self.feed_items_per_page:]) def uid(self, repo_id, commit_id): return '{}:{}'.format(md5_safe(repo_id), md5_safe(commit_id)) @@ -112,33 +118,72 @@ class RepoFeedView(RepoAppView): @LoginRequired(auth_token_access=[UserApiKeys.ROLE_FEED]) @HasRepoPermissionAnyDecorator( 'repository.read', 'repository.write', 'repository.admin') - @view_config( - route_name='atom_feed_home', request_method='GET', - renderer=None) - @view_config( - route_name='atom_feed_home_old', request_method='GET', - renderer=None) + @view_config(route_name='atom_feed_home', request_method='GET', renderer=None) + @view_config(route_name='atom_feed_home_old', request_method='GET', renderer=None) def atom(self): """ Produce an atom-1.0 feed via feedgenerator module """ self.load_default_context() + force_recache = self.get_recache_flag() - cache_namespace_uid = 'cache_repo_instance.{}_{}'.format( - self.db_repo.repo_id, CacheKey.CACHE_TYPE_FEED) - invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format( - repo_id=self.db_repo.repo_id) - - region = rc_cache.get_or_create_region('cache_repo_longterm', - cache_namespace_uid) - - condition = not self.path_filter.is_enabled + cache_namespace_uid = 'cache_repo_feed.{}'.format(self.db_repo.repo_id) + condition = not (self.path_filter.is_enabled or force_recache) + region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid) @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=condition) - def generate_atom_feed(repo_id, _repo_name, _feed_type): + def generate_atom_feed(repo_id, _repo_name, _commit_id, _feed_type): feed = Atom1Feed( - title=self.title % _repo_name, + title=self.title % 'atom', + link=h.route_url('repo_summary', repo_name=_repo_name), + description=self.description % _repo_name, + language=self.language, + ttl=self.ttl + ) + for commit in reversed(self._get_commits()): + date = self._set_timezone(commit.date) + feed.add_item( + unique_id=self.uid(repo_id, commit.raw_id), + title=self._get_title(commit), + author_name=commit.author, + description=self._get_description(commit), + link=h.route_url( + 'repo_commit', repo_name=_repo_name, + commit_id=commit.raw_id), + pubdate=date,) + + return feed.content_type, feed.writeString('utf-8') + + commit_id = self.db_repo.changeset_cache.get('raw_id') + content_type, feed = generate_atom_feed( + self.db_repo.repo_id, self.db_repo.repo_name, commit_id, 'atom') + + response = Response(feed) + response.content_type = content_type + return response + + @LoginRequired(auth_token_access=[UserApiKeys.ROLE_FEED]) + @HasRepoPermissionAnyDecorator( + 'repository.read', 'repository.write', 'repository.admin') + @view_config(route_name='rss_feed_home', request_method='GET', renderer=None) + @view_config(route_name='rss_feed_home_old', request_method='GET', renderer=None) + def rss(self): + """ + Produce an rss2 feed via feedgenerator module + """ + self.load_default_context() + force_recache = self.get_recache_flag() + + cache_namespace_uid = 'cache_repo_feed.{}'.format(self.db_repo.repo_id) + condition = not 
(self.path_filter.is_enabled or force_recache) + region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid) + + @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, + condition=condition) + def generate_rss_feed(repo_id, _repo_name, _commit_id, _feed_type): + feed = Rss201rev2Feed( + title=self.title % 'rss', link=h.route_url('repo_summary', repo_name=_repo_name), description=self.description % _repo_name, language=self.language, @@ -156,87 +201,12 @@ class RepoFeedView(RepoAppView): 'repo_commit', repo_name=_repo_name, commit_id=commit.raw_id), pubdate=date,) - - return feed.mime_type, feed.writeString('utf-8') + return feed.content_type, feed.writeString('utf-8') - inv_context_manager = rc_cache.InvalidationContext( - uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace) - with inv_context_manager as invalidation_context: - args = (self.db_repo.repo_id, self.db_repo.repo_name, 'atom',) - # re-compute and store cache if we get invalidate signal - if invalidation_context.should_invalidate(): - mime_type, feed = generate_atom_feed.refresh(*args) - else: - mime_type, feed = generate_atom_feed(*args) - - log.debug('Repo ATOM feed computed in %.3fs', - inv_context_manager.compute_time) + commit_id = self.db_repo.changeset_cache.get('raw_id') + content_type, feed = generate_rss_feed( + self.db_repo.repo_id, self.db_repo.repo_name, commit_id, 'rss') response = Response(feed) - response.content_type = mime_type + response.content_type = content_type return response - - @LoginRequired(auth_token_access=[UserApiKeys.ROLE_FEED]) - @HasRepoPermissionAnyDecorator( - 'repository.read', 'repository.write', 'repository.admin') - @view_config( - route_name='rss_feed_home', request_method='GET', - renderer=None) - @view_config( - route_name='rss_feed_home_old', request_method='GET', - renderer=None) - def rss(self): - """ - Produce an rss2 feed via feedgenerator module - """ - self.load_default_context() - - cache_namespace_uid = 'cache_repo_instance.{}_{}'.format( - self.db_repo.repo_id, CacheKey.CACHE_TYPE_FEED) - invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format( - repo_id=self.db_repo.repo_id) - region = rc_cache.get_or_create_region('cache_repo_longterm', - cache_namespace_uid) - - condition = not self.path_filter.is_enabled - - @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, - condition=condition) - def generate_rss_feed(repo_id, _repo_name, _feed_type): - feed = Rss201rev2Feed( - title=self.title % _repo_name, - link=h.route_url('repo_summary', repo_name=_repo_name), - description=self.description % _repo_name, - language=self.language, - ttl=self.ttl - ) - - for commit in reversed(self._get_commits()): - date = self._set_timezone(commit.date) - feed.add_item( - unique_id=self.uid(repo_id, commit.raw_id), - title=self._get_title(commit), - author_name=commit.author, - description=self._get_description(commit), - link=h.route_url( - 'repo_commit', repo_name=_repo_name, - commit_id=commit.raw_id), - pubdate=date,) - - return feed.mime_type, feed.writeString('utf-8') - - inv_context_manager = rc_cache.InvalidationContext( - uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace) - with inv_context_manager as invalidation_context: - args = (self.db_repo.repo_id, self.db_repo.repo_name, 'rss',) - # re-compute and store cache if we get invalidate signal - if invalidation_context.should_invalidate(): - mime_type, feed = generate_rss_feed.refresh(*args) - else: - mime_type, feed = 
generate_rss_feed(*args) - log.debug( - 'Repo RSS feed computed in %.3fs', inv_context_manager.compute_time) - - response = Response(feed) - response.content_type = mime_type - return response diff --git a/rhodecode/apps/repository/views/repo_files.py b/rhodecode/apps/repository/views/repo_files.py --- a/rhodecode/apps/repository/views/repo_files.py +++ b/rhodecode/apps/repository/views/repo_files.py @@ -272,7 +272,8 @@ class RepoFilesView(RepoAppView): 'rhodecode:templates/files/files_browser_tree.mako', self._get_template_context(c), self.request) - return compute_file_tree('v1', self.db_repo.repo_id, commit_id, f_path, full_load) + return compute_file_tree( + rc_cache.FILE_TREE_CACHE_VER, self.db_repo.repo_id, commit_id, f_path, full_load) def _get_archive_spec(self, fname): log.debug('Detecting archive spec for: `%s`', fname) @@ -305,6 +306,21 @@ class RepoFilesView(RepoAppView): pure_path = pathlib2.PurePath(*sanitized_path) return pure_path + def _is_lf_enabled(self, target_repo): + lf_enabled = False + + lf_key_for_vcs_map = { + 'hg': 'extensions_largefiles', + 'git': 'vcs_git_lfs_enabled' + } + + lf_key_for_vcs = lf_key_for_vcs_map.get(target_repo.repo_type) + + if lf_key_for_vcs: + lf_enabled = self._get_repo_setting(target_repo, lf_key_for_vcs) + + return lf_enabled + @LoginRequired() @HasRepoPermissionAnyDecorator( 'repository.read', 'repository.write', 'repository.admin') @@ -645,11 +661,18 @@ class RepoFilesView(RepoAppView): # load file content if c.file.is_file(): - c.lf_node = c.file.get_largefile_node() + c.lf_node = {} + + has_lf_enabled = self._is_lf_enabled(self.db_repo) + if has_lf_enabled: + c.lf_node = c.file.get_largefile_node() c.file_source_page = 'true' c.file_last_commit = c.file.last_commit - if c.file.size < c.visual.cut_off_limit_diff: + + c.file_size_too_big = c.file.size > c.visual.cut_off_limit_file + + if not (c.file_size_too_big or c.file.is_binary): if c.annotate: # annotation has precedence over renderer c.annotated_lines = filenode_as_annotated_lines_tokens( c.file @@ -684,6 +707,10 @@ class RepoFilesView(RepoAppView): # later via ajax we call repo_nodetree_full and fetch whole c.file_tree = self._get_tree_at_commit(c, c.commit.raw_id, f_path) + c.readme_data, c.readme_file = \ + self._get_readme_data(self.db_repo, c.visual.default_renderer, + c.commit.raw_id, f_path) + except RepositoryError as e: h.flash(safe_str(h.escape(e)), category='error') raise HTTPNotFound() @@ -825,10 +852,9 @@ class RepoFilesView(RepoAppView): if disposition == 'attachment': disposition = self._get_attachement_headers(f_path) - def stream_node(): - yield file_node.raw_bytes + stream_content = file_node.stream_bytes() - response = Response(app_iter=stream_node()) + response = Response(app_iter=stream_content) response.content_disposition = disposition response.content_type = mimetype @@ -864,10 +890,9 @@ class RepoFilesView(RepoAppView): disposition = self._get_attachement_headers(f_path) - def stream_node(): - yield file_node.raw_bytes + stream_content = file_node.stream_bytes() - response = Response(app_iter=stream_node()) + response = Response(app_iter=stream_content) response.content_disposition = disposition response.content_type = file_node.mimetype @@ -896,8 +921,7 @@ class RepoFilesView(RepoAppView): log.debug('Generating cached nodelist for repo_id:%s, %s, %s', repo_id, commit_id, f_path) try: - _d, _f = ScmModel().get_nodes( - repo_name, commit_id, f_path, flat=False) + _d, _f = ScmModel().get_quick_filter_nodes(repo_name, commit_id, f_path) except 
(RepositoryError, CommitDoesNotExistError, Exception) as e: log.exception(safe_str(e)) h.flash(safe_str(h.escape(e)), category='error') diff --git a/rhodecode/apps/repository/views/repo_forks.py b/rhodecode/apps/repository/views/repo_forks.py --- a/rhodecode/apps/repository/views/repo_forks.py +++ b/rhodecode/apps/repository/views/repo_forks.py @@ -56,9 +56,7 @@ class RepoForksView(RepoAppView, DataGri perm_set=['group.write', 'group.admin']) c.repo_groups = RepoGroup.groups_choices(groups=acl_groups) c.repo_groups_choices = map(lambda k: safe_unicode(k[0]), c.repo_groups) - choices, c.landing_revs = ScmModel().get_repo_landing_revs( - self.request.translate) - c.landing_revs_choices = choices + c.personal_repo_group = c.rhodecode_user.personal_repo_group return c @@ -213,13 +211,14 @@ class RepoForksView(RepoAppView, DataGri _ = self.request.translate c = self.load_default_context() - _form = RepoForkForm(self.request.translate, old_data={'repo_type': self.db_repo.repo_type}, - repo_groups=c.repo_groups_choices, - landing_revs=c.landing_revs_choices)() + _form = RepoForkForm(self.request.translate, + old_data={'repo_type': self.db_repo.repo_type}, + repo_groups=c.repo_groups_choices)() post_data = dict(self.request.POST) # forbid injecting other repo by forging a request post_data['fork_parent_id'] = self.db_repo.repo_id + post_data['landing_rev'] = self.db_repo._landing_revision form_result = {} task_id = None diff --git a/rhodecode/apps/repository/views/repo_permissions.py b/rhodecode/apps/repository/views/repo_permissions.py --- a/rhodecode/apps/repository/views/repo_permissions.py +++ b/rhodecode/apps/repository/views/repo_permissions.py @@ -87,7 +87,7 @@ class RepoSettingsPermissionsView(RepoAp user=self._rhodecode_user, repo=self.db_repo) Session().commit() - h.flash(_('Repository permissions updated'), category='success') + h.flash(_('Repository access permissions updated'), category='success') PermissionModel().flush_user_permission_caches(changes) diff --git a/rhodecode/apps/repository/views/repo_pull_requests.py b/rhodecode/apps/repository/views/repo_pull_requests.py --- a/rhodecode/apps/repository/views/repo_pull_requests.py +++ b/rhodecode/apps/repository/views/repo_pull_requests.py @@ -76,28 +76,28 @@ class RepoPullRequestsView(RepoAppView, if filter_type == 'awaiting_review': pull_requests = PullRequestModel().get_awaiting_review( - repo_name, source=source, opened_by=opened_by, + repo_name, search_q=search_q, source=source, opened_by=opened_by, statuses=statuses, offset=start, length=limit, order_by=order_by, order_dir=order_dir) pull_requests_total_count = PullRequestModel().count_awaiting_review( - repo_name, source=source, statuses=statuses, + repo_name, search_q=search_q, source=source, statuses=statuses, opened_by=opened_by) elif filter_type == 'awaiting_my_review': pull_requests = PullRequestModel().get_awaiting_my_review( - repo_name, source=source, opened_by=opened_by, + repo_name, search_q=search_q, source=source, opened_by=opened_by, user_id=self._rhodecode_user.user_id, statuses=statuses, offset=start, length=limit, order_by=order_by, order_dir=order_dir) pull_requests_total_count = PullRequestModel().count_awaiting_my_review( - repo_name, source=source, user_id=self._rhodecode_user.user_id, + repo_name, search_q=search_q, source=source, user_id=self._rhodecode_user.user_id, statuses=statuses, opened_by=opened_by) else: pull_requests = PullRequestModel().get_all( - repo_name, source=source, opened_by=opened_by, + repo_name, search_q=search_q, source=source, 
opened_by=opened_by, statuses=statuses, offset=start, length=limit, order_by=order_by, order_dir=order_dir) pull_requests_total_count = PullRequestModel().count_all( - repo_name, source=source, statuses=statuses, + repo_name, search_q=search_q, source=source, statuses=statuses, opened_by=opened_by) data = [] @@ -108,7 +108,8 @@ class RepoPullRequestsView(RepoAppView, data.append({ 'name': _render('pullrequest_name', - pr.pull_request_id, pr.target_repo.repo_name), + pr.pull_request_id, pr.pull_request_state, + pr.work_in_progress, pr.target_repo.repo_name), 'name_raw': pr.pull_request_id, 'status': _render('pullrequest_status', pr.calculated_review_status()), @@ -272,15 +273,7 @@ class RepoPullRequestsView(RepoAppView, self.request.matchdict['pull_request_id']) pull_request_id = pull_request.pull_request_id - if pull_request.pull_request_state != PullRequest.STATE_CREATED: - log.debug('show: forbidden because pull request is in state %s', - pull_request.pull_request_state) - msg = _(u'Cannot show pull requests in state other than `{}`. ' - u'Current state is: `{}`').format(PullRequest.STATE_CREATED, - pull_request.pull_request_state) - h.flash(msg, category='error') - raise HTTPFound(h.route_path('pullrequest_show_all', - repo_name=self.db_repo_name)) + c.state_progressing = pull_request.is_state_changing() version = self.request.GET.get('version') from_version = self.request.GET.get('from_version') or version @@ -426,6 +419,12 @@ class RepoPullRequestsView(RepoAppView, c.inline_versions = comments_model.aggregate_comments( inline_comments, versions, c.at_version_num, inline=True) + # TODOs + c.unresolved_comments = CommentsModel() \ + .get_pull_request_unresolved_todos(pull_request) + c.resolved_comments = CommentsModel() \ + .get_pull_request_resolved_todos(pull_request) + # inject latest version latest_ver = PullRequest.get_pr_display_object( pull_request_latest, pull_request_latest) @@ -488,7 +487,9 @@ class RepoPullRequestsView(RepoAppView, log.debug('Failed to get shadow repo', exc_info=True) # try first the existing source_repo, and then shadow # repo if we can obtain one - commits_source_repo = source_scm or shadow_scm + commits_source_repo = source_scm + if shadow_scm: + commits_source_repo = shadow_scm c.commits_source_repo = commits_source_repo c.ancestor = None # set it to None, to hide it from PR view @@ -618,7 +619,7 @@ class RepoPullRequestsView(RepoAppView, diffset = cached_diff['diff'] else: diffset = self._get_range_diffset( - source_scm, source_repo, + commits_source_repo, source_repo, commit1, commit2, diff_limit, file_limit, c.fulldiff, ign_whitespace_lcl, context_lcl ) @@ -687,7 +688,7 @@ class RepoPullRequestsView(RepoAppView, commit_cache = collections.OrderedDict() missing_requirements = False try: - pre_load = ["author", "branch", "date", "message", "parents"] + pre_load = ["author", "date", "message", "branch", "parents"] show_revs = pull_request_at_ver.revisions for rev in show_revs: comm = commits_source_repo.get_commit( @@ -1045,39 +1046,52 @@ class RepoPullRequestsView(RepoAppView, _ = self.request.translate self.load_default_context() + redirect_url = None if pull_request.is_closed(): log.debug('update: forbidden because pull request is closed') msg = _(u'Cannot update closed pull requests.') h.flash(msg, category='error') - return True + return {'response': True, + 'redirect_url': redirect_url} - if pull_request.pull_request_state != PullRequest.STATE_CREATED: - log.debug('update: forbidden because pull request is in state %s', - 
pull_request.pull_request_state) - msg = _(u'Cannot update pull requests in state other than `{}`. ' - u'Current state is: `{}`').format(PullRequest.STATE_CREATED, - pull_request.pull_request_state) - h.flash(msg, category='error') - return True + is_state_changing = pull_request.is_state_changing() # only owner or admin can update it allowed_to_update = PullRequestModel().check_user_update( pull_request, self._rhodecode_user) if allowed_to_update: controls = peppercorn.parse(self.request.POST.items()) + force_refresh = str2bool(self.request.POST.get('force_refresh')) if 'review_members' in controls: self._update_reviewers( pull_request, controls['review_members'], pull_request.reviewer_data) elif str2bool(self.request.POST.get('update_commits', 'false')): + if is_state_changing: + log.debug('commits update: forbidden because pull request is in state %s', + pull_request.pull_request_state) + msg = _(u'Cannot update pull requests commits in state other than `{}`. ' + u'Current state is: `{}`').format( + PullRequest.STATE_CREATED, pull_request.pull_request_state) + h.flash(msg, category='error') + return {'response': True, + 'redirect_url': redirect_url} + self._update_commits(pull_request) + if force_refresh: + redirect_url = h.route_path( + 'pullrequest_show', repo_name=self.db_repo_name, + pull_request_id=pull_request.pull_request_id, + _query={"force_refresh": 1}) elif str2bool(self.request.POST.get('edit_pull_request', 'false')): self._edit_pull_request(pull_request) else: raise HTTPBadRequest() - return True + + return {'response': True, + 'redirect_url': redirect_url} raise HTTPForbidden() def _edit_pull_request(self, pull_request): @@ -1105,7 +1119,8 @@ class RepoPullRequestsView(RepoAppView, _ = self.request.translate with pull_request.set_state(PullRequest.STATE_UPDATING): - resp = PullRequestModel().update_commits(pull_request) + resp = PullRequestModel().update_commits( + pull_request, self._rhodecode_db_user) if resp.executed: @@ -1164,7 +1179,7 @@ class RepoPullRequestsView(RepoAppView, self.request.matchdict['pull_request_id']) _ = self.request.translate - if pull_request.pull_request_state != PullRequest.STATE_CREATED: + if pull_request.is_state_changing(): log.debug('show: forbidden because pull request is in state %s', pull_request.pull_request_state) msg = _(u'Cannot merge pull requests in state other than `{}`. 
' diff --git a/rhodecode/apps/repository/views/repo_settings_advanced.py b/rhodecode/apps/repository/views/repo_settings_advanced.py --- a/rhodecode/apps/repository/views/repo_settings_advanced.py +++ b/rhodecode/apps/repository/views/repo_settings_advanced.py @@ -60,6 +60,7 @@ class RepoSettingsView(RepoAppView): route_name='edit_repo_advanced', request_method='GET', renderer='rhodecode:templates/admin/repos/repo_edit.mako') def edit_advanced(self): + _ = self.request.translate c = self.load_default_context() c.active = 'advanced' @@ -70,6 +71,11 @@ class RepoSettingsView(RepoAppView): c.ver_info_dict = self.rhodecode_vcs_repo.get_hooks_info() + # update commit cache if GET flag is present + if self.request.GET.get('update_commit_cache'): + self.db_repo.update_commit_cache() + h.flash(_('updated commit cache'), category='success') + return self._get_template_context(c) @LoginRequired() diff --git a/rhodecode/apps/repository/views/repo_settings_issue_trackers.py b/rhodecode/apps/repository/views/repo_settings_issue_trackers.py --- a/rhodecode/apps/repository/views/repo_settings_issue_trackers.py +++ b/rhodecode/apps/repository/views/repo_settings_issue_trackers.py @@ -20,7 +20,7 @@ import logging -from pyramid.httpexceptions import HTTPFound +from pyramid.httpexceptions import HTTPFound, HTTPNotFound from pyramid.view import view_config import formencode @@ -31,7 +31,7 @@ from rhodecode.lib.auth import ( LoginRequired, HasRepoPermissionAnyDecorator, CSRFRequired) from rhodecode.model.forms import IssueTrackerPatternsForm from rhodecode.model.meta import Session -from rhodecode.model.settings import IssueTrackerSettingsModel +from rhodecode.model.settings import IssueTrackerSettingsModel, SettingsModel log = logging.getLogger(__name__) @@ -64,7 +64,7 @@ class RepoSettingsIssueTrackersView(Repo @CSRFRequired() @view_config( route_name='edit_repo_issuetracker_test', request_method='POST', - xhr=True, renderer='string') + renderer='string', xhr=True) def repo_issuetracker_test(self): return h.urlify_commit_message( self.request.POST.get('test_text', ''), @@ -75,7 +75,7 @@ class RepoSettingsIssueTrackersView(Repo @CSRFRequired() @view_config( route_name='edit_repo_issuetracker_delete', request_method='POST', - renderer='rhodecode:templates/admin/repos/repo_edit.mako') + renderer='json_ext', xhr=True) def repo_issuetracker_delete(self): _ = self.request.translate uid = self.request.POST.get('uid') @@ -85,10 +85,12 @@ class RepoSettingsIssueTrackersView(Repo except Exception: h.flash(_('Error occurred during deleting issue tracker entry'), category='error') - else: - h.flash(_('Removed issue tracker entry'), category='success') - raise HTTPFound( - h.route_path('edit_repo_issuetracker', repo_name=self.db_repo_name)) + raise HTTPNotFound() + + SettingsModel().invalidate_settings_cache() + h.flash(_('Removed issue tracker entry.'), category='success') + + return {'deleted': uid} def _update_patterns(self, form, repo_settings): for uid in form['delete_patterns']: diff --git a/rhodecode/apps/repository/views/repo_summary.py b/rhodecode/apps/repository/views/repo_summary.py --- a/rhodecode/apps/repository/views/repo_summary.py +++ b/rhodecode/apps/repository/views/repo_summary.py @@ -20,6 +20,8 @@ import logging import string +import time + import rhodecode from pyramid.view import view_config @@ -30,14 +32,12 @@ from rhodecode.config.conf import (LANGU from rhodecode.lib import helpers as h, rc_cache from rhodecode.lib.utils2 import safe_str, safe_int from rhodecode.lib.auth import LoginRequired, 
HasRepoPermissionAnyDecorator -from rhodecode.lib.markup_renderer import MarkupRenderer, relative_links from rhodecode.lib.ext_json import json from rhodecode.lib.vcs.backends.base import EmptyCommit from rhodecode.lib.vcs.exceptions import ( CommitError, EmptyRepositoryError, CommitDoesNotExistError) from rhodecode.model.db import Statistics, CacheKey, User from rhodecode.model.meta import Session -from rhodecode.model.repo import ReadmeFinder from rhodecode.model.scm import ScmModel log = logging.getLogger(__name__) @@ -52,91 +52,15 @@ class RepoSummaryView(RepoAppView): c.rhodecode_repo = self.rhodecode_vcs_repo return c - def _get_readme_data(self, db_repo, renderer_type): - - log.debug('Looking for README file') - - cache_namespace_uid = 'cache_repo_instance.{}_{}'.format( - db_repo.repo_id, CacheKey.CACHE_TYPE_README) - invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format( - repo_id=self.db_repo.repo_id) - region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid) - - @region.conditional_cache_on_arguments(namespace=cache_namespace_uid) - def generate_repo_readme(repo_id, _repo_name, _renderer_type): - readme_data = None - readme_node = None - readme_filename = None - commit = self._get_landing_commit_or_none(db_repo) - if commit: - log.debug("Searching for a README file.") - readme_node = ReadmeFinder(_renderer_type).search(commit) - if readme_node: - log.debug('Found README node: %s', readme_node) - relative_urls = { - 'raw': h.route_path( - 'repo_file_raw', repo_name=_repo_name, - commit_id=commit.raw_id, f_path=readme_node.path), - 'standard': h.route_path( - 'repo_files', repo_name=_repo_name, - commit_id=commit.raw_id, f_path=readme_node.path), - } - readme_data = self._render_readme_or_none( - commit, readme_node, relative_urls) - readme_filename = readme_node.unicode_path - - return readme_data, readme_filename - - inv_context_manager = rc_cache.InvalidationContext( - uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace) - with inv_context_manager as invalidation_context: - args = (db_repo.repo_id, db_repo.repo_name, renderer_type,) - # re-compute and store cache if we get invalidate signal - if invalidation_context.should_invalidate(): - instance = generate_repo_readme.refresh(*args) - else: - instance = generate_repo_readme(*args) - - log.debug( - 'Repo readme generated and computed in %.3fs', - inv_context_manager.compute_time) - return instance - - def _get_landing_commit_or_none(self, db_repo): - log.debug("Getting the landing commit.") - try: - commit = db_repo.get_landing_commit() - if not isinstance(commit, EmptyCommit): - return commit - else: - log.debug("Repository is empty, no README to render.") - except CommitError: - log.exception( - "Problem getting commit when trying to render the README.") - - def _render_readme_or_none(self, commit, readme_node, relative_urls): - log.debug( - 'Found README file `%s` rendering...', readme_node.path) - renderer = MarkupRenderer() - try: - html_source = renderer.render( - readme_node.content, filename=readme_node.path) - if relative_urls: - return relative_links(html_source, relative_urls) - return html_source - except Exception: - log.exception( - "Exception while trying to render the README") - def _load_commits_context(self, c): p = safe_int(self.request.GET.get('page'), 1) size = safe_int(self.request.GET.get('size'), 10) - def url_generator(**kw): + def url_generator(page_num): query_params = { + 'page': page_num, 'size': size } - query_params.update(kw) return 
h.route_path( 'repo_summary_commits', repo_name=c.rhodecode_db_repo.repo_name, _query=query_params) @@ -149,7 +73,7 @@ class RepoSummaryView(RepoAppView): collection = self.rhodecode_vcs_repo c.repo_commits = h.RepoPage( - collection, page=p, items_per_page=size, url=url_generator) + collection, page=p, items_per_page=size, url_maker=url_generator) page_ids = [x.raw_id for x in c.repo_commits] c.comments = self.db_repo.get_comments(page_ids) c.statuses = self.db_repo.statuses(page_ids) @@ -204,12 +128,7 @@ class RepoSummaryView(RepoAppView): # Prepare the clone URL self._prepare_and_set_clone_url(c) - # update every 5 min - if self.db_repo.last_commit_cache_update_diff > 60 * 5: - self.db_repo.update_commit_cache() - # If enabled, get statistics data - c.show_stats = bool(self.db_repo.enable_statistics) stats = Session().query(Statistics) \ @@ -254,9 +173,6 @@ class RepoSummaryView(RepoAppView): return self._get_template_context(c) - def get_request_commit_id(self): - return self.request.matchdict['commit_id'] - @LoginRequired() @HasRepoPermissionAnyDecorator( 'repository.read', 'repository.write', 'repository.admin') @@ -264,33 +180,35 @@ class RepoSummaryView(RepoAppView): route_name='repo_stats', request_method='GET', renderer='json_ext') def repo_stats(self): - commit_id = self.get_request_commit_id() show_stats = bool(self.db_repo.enable_statistics) repo_id = self.db_repo.repo_id - cache_seconds = safe_int( - rhodecode.CONFIG.get('rc_cache.cache_repo.expiration_time')) + landing_commit = self.db_repo.get_landing_commit() + if isinstance(landing_commit, EmptyCommit): + return {'size': 0, 'code_stats': {}} + + cache_seconds = safe_int(rhodecode.CONFIG.get('rc_cache.cache_repo.expiration_time')) cache_on = cache_seconds > 0 + log.debug( - 'Computing REPO TREE for repo_id %s commit_id `%s` ' + 'Computing REPO STATS for repo_id %s commit_id `%s` ' 'with caching: %s[TTL: %ss]' % ( - repo_id, commit_id, cache_on, cache_seconds or 0)) + repo_id, landing_commit, cache_on, cache_seconds or 0)) cache_namespace_uid = 'cache_repo.{}'.format(repo_id) region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid) @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=cache_on) - def compute_stats(repo_id, commit_id, show_stats): + def compute_stats(repo_id, commit_id, _show_stats): code_stats = {} size = 0 try: - scm_instance = self.db_repo.scm_instance() - commit = scm_instance.get_commit(commit_id) + commit = self.db_repo.get_commit(commit_id) for node in commit.get_filenodes_generator(): size += node.size - if not show_stats: + if not _show_stats: continue ext = string.lower(node.extension) ext_info = LANGUAGES_EXTENSIONS_MAP.get(ext) @@ -304,7 +222,7 @@ class RepoSummaryView(RepoAppView): return {'size': h.format_byte_size_binary(size), 'code_stats': code_stats} - stats = compute_stats(self.db_repo.repo_id, commit_id, show_stats) + stats = compute_stats(self.db_repo.repo_id, landing_commit.raw_id, show_stats) return stats @LoginRequired() diff --git a/rhodecode/apps/search/tests/test_search.py b/rhodecode/apps/search/tests/test_search.py --- a/rhodecode/apps/search/tests/test_search.py +++ b/rhodecode/apps/search/tests/test_search.py @@ -46,7 +46,7 @@ class TestSearchController(TestControlle def test_index(self): self.log_user() response = self.app.get(route_path('search')) - assert_response = AssertResponse(response) + assert_response = response.assert_response() assert_response.one_element_exists('input#q') def test_search_files_empty_search(self): diff 
--git a/rhodecode/apps/search/views.py b/rhodecode/apps/search/views.py --- a/rhodecode/apps/search/views.py +++ b/rhodecode/apps/search/views.py @@ -21,7 +21,7 @@ import logging import urllib from pyramid.view import view_config -from webhelpers.util import update_params +from webhelpers2.html.tools import update_params from rhodecode.apps._base import BaseAppView, RepoAppView, RepoGroupAppView from rhodecode.lib.auth import ( @@ -44,6 +44,7 @@ def perform_search(request, tmpl_context search_tags = [] search_params = {} errors = [] + try: search_params = schema.deserialize( dict( @@ -58,11 +59,19 @@ def perform_search(request, tmpl_context except validation_schema.Invalid as e: errors = e.children - def url_generator(**kw): + def url_generator(page_num): q = urllib.quote(safe_str(search_query)) - return update_params( - "?q=%s&type=%s&max_lines=%s" % ( - q, safe_str(search_type), search_max_lines), **kw) + + query_params = { + 'page': page_num, + 'q': q, + 'type': safe_str(search_type), + 'max_lines': search_max_lines, + 'sort': search_sort + } + + return '?' + urllib.urlencode(query_params) + c = tmpl_context search_query = search_params.get('search_query') @@ -81,14 +90,14 @@ def perform_search(request, tmpl_context formatted_results = Page( search_result['results'], page=requested_page, item_count=search_result['count'], - items_per_page=page_limit, url=url_generator) + items_per_page=page_limit, url_maker=url_generator) finally: searcher.cleanup() search_tags = searcher.extract_search_tags(search_query) if not search_result['error']: - execution_time = '%s results (%.3f seconds)' % ( + execution_time = '%s results (%.4f seconds)' % ( search_result['count'], search_result['runtime']) elif not errors: @@ -99,8 +108,6 @@ def perform_search(request, tmpl_context c.perm_user = c.auth_user c.repo_name = repo_name c.repo_group_name = repo_group_name - c.sort = search_sort - c.url_generator = url_generator c.errors = errors c.formatted_results = formatted_results c.runtime = execution_time @@ -109,6 +116,15 @@ def perform_search(request, tmpl_context c.searcher = searcher c.search_tags = search_tags + direction, sort_field = searcher.get_sort(search_type, search_sort) + sort_definition = searcher.sort_def(search_type, direction, sort_field) + c.sort = '' + c.sort_tag = None + c.sort_tag_dir = direction + if sort_definition: + c.sort = '{}:{}'.format(direction, sort_field) + c.sort_tag = sort_field + class SearchView(BaseAppView): def load_default_context(self): diff --git a/rhodecode/apps/ssh_support/tests/conftest.py b/rhodecode/apps/ssh_support/tests/conftest.py --- a/rhodecode/apps/ssh_support/tests/conftest.py +++ b/rhodecode/apps/ssh_support/tests/conftest.py @@ -26,7 +26,7 @@ from rhodecode.apps.ssh_support.lib.ssh_ from rhodecode.lib.utils2 import AttributeDict -@pytest.fixture +@pytest.fixture() def dummy_conf_file(tmpdir): conf = configparser.ConfigParser() conf.add_section('app:main') @@ -48,7 +48,7 @@ def plain_dummy_env(): } -@pytest.fixture +@pytest.fixture() def dummy_env(): return plain_dummy_env() @@ -57,12 +57,12 @@ def plain_dummy_user(): return AttributeDict(username='test_user') -@pytest.fixture +@pytest.fixture() def dummy_user(): return plain_dummy_user() -@pytest.fixture +@pytest.fixture() def ssh_wrapper(app, dummy_conf_file, dummy_env): conn_info = '127.0.0.1 22 10.0.0.1 443' return SshWrapper( diff --git a/rhodecode/apps/ssh_support/tests/test_server_git.py b/rhodecode/apps/ssh_support/tests/test_server_git.py --- a/rhodecode/apps/ssh_support/tests/test_server_git.py 
+++ b/rhodecode/apps/ssh_support/tests/test_server_git.py @@ -63,7 +63,7 @@ class GitServerCreator(object): return server -@pytest.fixture +@pytest.fixture() def git_server(app): return GitServerCreator() diff --git a/rhodecode/apps/ssh_support/tests/test_server_hg.py b/rhodecode/apps/ssh_support/tests/test_server_hg.py --- a/rhodecode/apps/ssh_support/tests/test_server_hg.py +++ b/rhodecode/apps/ssh_support/tests/test_server_hg.py @@ -62,7 +62,7 @@ class MercurialServerCreator(object): return server -@pytest.fixture +@pytest.fixture() def hg_server(app): return MercurialServerCreator() diff --git a/rhodecode/apps/ssh_support/tests/test_server_svn.py b/rhodecode/apps/ssh_support/tests/test_server_svn.py --- a/rhodecode/apps/ssh_support/tests/test_server_svn.py +++ b/rhodecode/apps/ssh_support/tests/test_server_svn.py @@ -61,7 +61,7 @@ class SubversionServerCreator(object): return server -@pytest.fixture +@pytest.fixture() def svn_server(app): return SubversionServerCreator() diff --git a/rhodecode/apps/user_profile/tests/test_users.py b/rhodecode/apps/user_profile/tests/test_users.py --- a/rhodecode/apps/user_profile/tests/test_users.py +++ b/rhodecode/apps/user_profile/tests/test_users.py @@ -46,14 +46,14 @@ class TestUsersController(TestController response = self.app.get(route_path('user_profile', username=username)) response.mustcontain('testme') response.mustcontain('testme@rhodecode.org') - assert_response = AssertResponse(response) + assert_response = response.assert_response() assert_response.no_element_exists(edit_link_css) # edit should be available to superadmin users self.logout_user() self.log_user(TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS) response = self.app.get(route_path('user_profile', username=username)) - assert_response = AssertResponse(response) + assert_response = response.assert_response() assert_response.element_contains(edit_link_css, 'Edit') def test_user_profile_not_available(self, user_util): diff --git a/rhodecode/authentication/base.py b/rhodecode/authentication/base.py --- a/rhodecode/authentication/base.py +++ b/rhodecode/authentication/base.py @@ -762,7 +762,7 @@ def authenticate(username, password, env plugin_user = compute_auth('auth', plugin.name, username, (password or '')) auth_time = time.time() - start - log.debug('Authentication for plugin `%s` completed in %.3fs, ' + log.debug('Authentication for plugin `%s` completed in %.4fs, ' 'expiration time of fetched cache %.1fs.', plugin.get_id(), auth_time, cache_ttl) diff --git a/rhodecode/authentication/plugins/auth_headers.py b/rhodecode/authentication/plugins/auth_headers.py --- a/rhodecode/authentication/plugins/auth_headers.py +++ b/rhodecode/authentication/plugins/auth_headers.py @@ -76,6 +76,7 @@ class HeadersSettingsSchema(AuthnPluginS class RhodeCodeAuthPlugin(RhodeCodeExternalAuthPlugin): uid = 'headers' + def includeme(self, config): config.add_authn_plugin(self) config.add_authn_resource(self.get_id(), HeadersAuthnResource(self)) diff --git a/rhodecode/authentication/registry.py b/rhodecode/authentication/registry.py --- a/rhodecode/authentication/registry.py +++ b/rhodecode/authentication/registry.py @@ -58,6 +58,11 @@ class AuthenticationPluginRegistry(objec def get_plugin(self, plugin_id): return self._plugins.get(plugin_id, None) + def get_plugin_by_uid(self, plugin_uid): + for plugin in self._plugins.values(): + if plugin.uid == plugin_uid: + return plugin + def get_plugins_for_authentication(self): """ Returns a list of plugins which should be consulted when authenticating diff --git 
a/rhodecode/config/licenses.json b/rhodecode/config/licenses.json --- a/rhodecode/config/licenses.json +++ b/rhodecode/config/licenses.json @@ -49,7 +49,7 @@ "url": "http://spdx.org/licenses/MIT.html" } ], - "name": "python2.7-webtest-2.0.32" + "name": "python2.7-webtest-2.0.33" }, { "license": [ @@ -60,7 +60,7 @@ "url": "http://spdx.org/licenses/ZPL-2.1.html" } ], - "name": "python2.7-waitress-1.1.0" + "name": "python2.7-waitress-1.3.1" }, { "license": [ @@ -71,7 +71,7 @@ "url": "http://spdx.org/licenses/MIT.html" } ], - "name": "python2.7-webob-1.8.4" + "name": "python2.7-webob-1.8.5" }, { "license": [ @@ -93,7 +93,7 @@ "url": "http://spdx.org/licenses/Apache-2.0.html" } ], - "name": "python2.7-coverage-4.5.1" + "name": "python2.7-coverage-4.5.4" }, { "license": [ @@ -113,9 +113,26 @@ "shortName": "bsdOriginal", "spdxId": "BSD-4-Clause", "url": "http://spdx.org/licenses/BSD-4-Clause.html" + }, + { + "fullName": "OSI Approved :: BSD License" } ], - "name": "python2.7-mock-1.0.1" + "name": "python2.7-mock-3.0.5" + }, + { + "license": [ + { + "fullName": "ASL" + }, + { + "fullName": "Apache License 2.0", + "shortName": "asl20", + "spdxId": "Apache-2.0", + "url": "http://spdx.org/licenses/Apache-2.0.html" + } + ], + "name": "python2.7-funcsigs-1.0.2" }, { "license": [ @@ -140,18 +157,7 @@ "fullName": "DFSG approved" } ], - "name": "python2.7-pytest-timeout-1.3.2" - }, - { - "license": [ - { - "fullName": "MIT License", - "shortName": "mit", - "spdxId": "MIT", - "url": "http://spdx.org/licenses/MIT.html" - } - ], - "name": "python2.7-pytest-3.8.2" + "name": "python2.7-pytest-timeout-1.3.3" }, { "license": [ @@ -162,57 +168,7 @@ "url": "http://spdx.org/licenses/MIT.html" } ], - "name": "python2.7-pathlib2-2.3.3" - }, - { - "license": [ - { - "fullName": "BSD 4-clause \"Original\" or \"Old\" License", - "shortName": "bsdOriginal", - "spdxId": "BSD-4-Clause", - "url": "http://spdx.org/licenses/BSD-4-Clause.html" - }, - { - "fullName": "New BSD License" - } - ], - "name": "python2.7-scandir-1.9.0" - }, - { - "license": [ - { - "fullName": "ASL" - }, - { - "fullName": "Apache License 2.0", - "shortName": "asl20", - "spdxId": "Apache-2.0", - "url": "http://spdx.org/licenses/Apache-2.0.html" - } - ], - "name": "python2.7-funcsigs-1.0.2" - }, - { - "license": [ - { - "fullName": "MIT License", - "shortName": "mit", - "spdxId": "MIT", - "url": "http://spdx.org/licenses/MIT.html" - } - ], - "name": "python2.7-pluggy-0.9.0" - }, - { - "license": [ - { - "fullName": "MIT License", - "shortName": "mit", - "spdxId": "MIT", - "url": "http://spdx.org/licenses/MIT.html" - } - ], - "name": "python2.7-atomicwrites-1.2.1" + "name": "python2.7-pytest-4.6.5" }, { "license": [ @@ -234,7 +190,21 @@ "url": "http://spdx.org/licenses/MIT.html" } ], - "name": "python2.7-attrs-18.2.0" + "name": "python2.7-pathlib2-2.3.5" + }, + { + "license": [ + { + "fullName": "BSD 4-clause \"Original\" or \"Old\" License", + "shortName": "bsdOriginal", + "spdxId": "BSD-4-Clause", + "url": "http://spdx.org/licenses/BSD-4-Clause.html" + }, + { + "fullName": "New BSD License" + } + ], + "name": "python2.7-scandir-1.10.0" }, { "license": [ @@ -245,7 +215,51 @@ "url": "http://spdx.org/licenses/MIT.html" } ], - "name": "python2.7-py-1.6.0" + "name": "python2.7-wcwidth-0.1.7" + }, + { + "license": [ + { + "fullName": "Apache License 2.0", + "shortName": "asl20", + "spdxId": "Apache-2.0", + "url": "http://spdx.org/licenses/Apache-2.0.html" + } + ], + "name": "python2.7-importlib-metadata-0.23" + }, + { + "license": [ + { + "fullName": "MIT 
License", + "shortName": "mit", + "spdxId": "MIT", + "url": "http://spdx.org/licenses/MIT.html" + } + ], + "name": "python2.7-configparser-4.0.2" + }, + { + "license": [ + { + "fullName": "Python Software Foundation License version 2", + "shortName": "psfl", + "spdxId": "Python-2.0", + "url": "http://spdx.org/licenses/Python-2.0.html" + } + ], + "name": "python2.7-contextlib2-0.6.0" + }, + { + "license": [ + { + "fullName": "MIT License", + "shortName": "mit", + "spdxId": "MIT", + "url": "http://spdx.org/licenses/MIT.html" + } + ], + "name": "python2.7-zipp-0.6.0" }, { "license": [ @@ -256,7 +270,18 @@ "url": "http://spdx.org/licenses/MIT.html" } ], - "name": "python2.7-pytest-profiling-1.3.0" + "name": "python2.7-pluggy-0.13.0" + }, + { + "license": [ + { + "fullName": "MIT License", + "shortName": "mit", + "spdxId": "MIT", + "url": "http://spdx.org/licenses/MIT.html" + } + ], + "name": "python2.7-atomicwrites-1.3.0" }, { "license": [ @@ -267,7 +292,71 @@ "url": "http://spdx.org/licenses/MIT.html" } ], - "name": "python2.7-pytest-runner-4.2" + "name": "python2.7-attrs-19.1.0" + }, + { + "license": [ + { + "fullName": "BSD 4-clause \"Original\" or \"Old\" License", + "shortName": "bsdOriginal", + "spdxId": "BSD-4-Clause", + "url": "http://spdx.org/licenses/BSD-4-Clause.html" + }, + { + "fullName": "BSD or Apache License, Version 2.0" + }, + { + "fullName": "Apache License 2.0", + "shortName": "asl20", + "spdxId": "Apache-2.0", + "url": "http://spdx.org/licenses/Apache-2.0.html" + } + ], + "name": "python2.7-packaging-19.2" + }, + { + "license": [ + { + "fullName": "MIT License", + "shortName": "mit", + "spdxId": "MIT", + "url": "http://spdx.org/licenses/MIT.html" + } + ], + "name": "python2.7-pyparsing-2.4.2" + }, + { + "license": [ + { + "fullName": "MIT License", + "shortName": "mit", + "spdxId": "MIT", + "url": "http://spdx.org/licenses/MIT.html" + } + ], + "name": "python2.7-py-1.8.0" + }, + { + "license": [ + { + "fullName": "MIT License", + "shortName": "mit", + "spdxId": "MIT", + "url": "http://spdx.org/licenses/MIT.html" + } + ], + "name": "python2.7-pytest-profiling-1.7.0" + }, + { + "license": [ + { + "fullName": "MIT License", + "shortName": "mit", + "spdxId": "MIT", + "url": "http://spdx.org/licenses/MIT.html" + } + ], + "name": "python2.7-pytest-runner-5.1" }, { "license": [ @@ -278,7 +367,7 @@ "url": "http://spdx.org/licenses/BSD-4-Clause.html" } ], - "name": "python2.7-pytest-sugar-0.9.1" + "name": "python2.7-pytest-sugar-0.9.2" }, { "license": [ @@ -306,7 +395,7 @@ "url": "http://spdx.org/licenses/MIT.html" } ], - "name": "python2.7-pytest-cov-2.6.0" + "name": "python2.7-pytest-cov-2.7.1" }, { "license": [ @@ -336,7 +425,7 @@ "fullName": "Apache 2.0 and Proprietary" } ], - "name": "python2.7-rhodecode-tools-1.2.1" + "name": "python2.7-rhodecode-tools-1.3.0" }, { "license": [ @@ -389,7 +478,7 @@ "fullName": "Dual License" } ], - "name": "python2.7-python-dateutil-2.8.0" + "name": "python2.7-python-dateutil-2.8.1" }, { "license": [ @@ -400,7 +489,7 @@ "url": "http://spdx.org/licenses/Apache-2.0.html" } ], - "name": "python2.7-elasticsearch2-2.5.0" + "name": "python2.7-elasticsearch2-2.5.1" }, { "license": [ @@ -422,7 +511,7 @@ "url": "http://spdx.org/licenses/Python-2.0.html" } ], - "name": "python2.7-ipaddress-1.0.22" + "name": "python2.7-ipaddress-1.0.23" }, { "license": [ @@ -459,9 +548,15 @@ "shortName": "bsdOriginal", "spdxId": "BSD-4-Clause", "url": "http://spdx.org/licenses/BSD-4-Clause.html" + }, + { + "fullName": "BSD 3-clause \"New\" or \"Revised\" License", + 
"shortName": "bsd3", + "spdxId": "BSD-3-Clause", + "url": "http://spdx.org/licenses/BSD-3-Clause.html" } ], - "name": "python2.7-markupsafe-1.1.0" + "name": "python2.7-markupsafe-1.1.1" }, { "license": [ @@ -472,7 +567,7 @@ "url": "http://spdx.org/licenses/MIT.html" } ], - "name": "python2.7-mako-1.0.7" + "name": "python2.7-mako-1.1.0" }, { "license": [ @@ -533,7 +628,7 @@ "fullName": "ISC License (ISCL)" } ], - "name": "python2.7-pexpect-4.6.0" + "name": "python2.7-pexpect-4.7.0" }, { "license": [], @@ -559,7 +654,7 @@ "url": "http://spdx.org/licenses/BSD-4-Clause.html" } ], - "name": "python2.7-pygments-2.3.1" + "name": "python2.7-pygments-2.4.2" }, { "license": [ @@ -570,18 +665,7 @@ "url": "http://spdx.org/licenses/BSD-4-Clause.html" } ], - "name": "python2.7-prompt-toolkit-1.0.15" - }, - { - "license": [ - { - "fullName": "MIT License", - "shortName": "mit", - "spdxId": "MIT", - "url": "http://spdx.org/licenses/MIT.html" - } - ], - "name": "python2.7-wcwidth-0.1.7" + "name": "python2.7-prompt-toolkit-1.0.18" }, { "license": [ @@ -592,7 +676,7 @@ "url": "http://spdx.org/licenses/BSD-4-Clause.html" } ], - "name": "python2.7-traitlets-4.3.2" + "name": "python2.7-traitlets-4.3.3" }, { "license": [ @@ -661,7 +745,7 @@ "url": "http://spdx.org/licenses/BSD-4-Clause.html" } ], - "name": "python2.7-ipdb-0.11" + "name": "python2.7-ipdb-0.12" }, { "license": [ @@ -716,7 +800,7 @@ "url": "http://spdx.org/licenses/BSD-4-Clause.html" } ], - "name": "python2.7-psutil-5.5.1" + "name": "python2.7-psutil-5.6.5" }, { "license": [ @@ -749,7 +833,7 @@ "url": "http://spdx.org/licenses/MIT.html" } ], - "name": "python2.7-alembic-1.0.5" + "name": "python2.7-alembic-1.3.1" }, { "license": { @@ -769,7 +853,18 @@ "url": "http://spdx.org/licenses/MIT.html" } ], - "name": "python2.7-sqlalchemy-1.1.18" + "name": "python2.7-sqlalchemy-1.3.11" + }, + { + "license": [ + { + "fullName": "BSD 4-clause \"Original\" or \"Old\" License", + "shortName": "bsdOriginal", + "spdxId": "BSD-4-Clause", + "url": "http://spdx.org/licenses/BSD-4-Clause.html" + } + ], + "name": "python2.7-jupyter-core-4.5.0" }, { "license": [ @@ -808,17 +903,6 @@ "url": "http://spdx.org/licenses/BSD-4-Clause.html" } ], - "name": "python2.7-jupyter-core-4.4.0" - }, - { - "license": [ - { - "fullName": "BSD 4-clause \"Original\" or \"Old\" License", - "shortName": "bsdOriginal", - "spdxId": "BSD-4-Clause", - "url": "http://spdx.org/licenses/BSD-4-Clause.html" - } - ], "name": "python2.7-nbformat-4.4.0" }, { @@ -863,7 +947,7 @@ "url": "http://spdx.org/licenses/MIT.html" } ], - "name": "python2.7-testpath-0.4.2" + "name": "python2.7-testpath-0.4.4" }, { "license": [ @@ -912,17 +996,6 @@ { "license": [ { - "fullName": "MIT License", - "shortName": "mit", - "spdxId": "MIT", - "url": "http://spdx.org/licenses/MIT.html" - } - ], - "name": "python2.7-configparser-3.7.3" - }, - { - "license": [ - { "fullName": "BSD 4-clause \"Original\" or \"Old\" License", "shortName": "bsdOriginal", "spdxId": "BSD-4-Clause", @@ -949,7 +1022,7 @@ "spdxId": "LGPL-3.0+", "url": "http://spdx.org/licenses/LGPL-3.0+.html" }, - "name": "python2.7-psycopg2-2.7.7" + "name": "python2.7-psycopg2-2.8.4" }, { "license": { @@ -1102,7 +1175,7 @@ "url": "http://spdx.org/licenses/MIT.html" } ], - "name": "python2.7-paste-3.0.5" + "name": "python2.7-paste-3.2.1" }, { "license": [ @@ -1143,15 +1216,7 @@ "fullName": "BSD-derived (http://www.repoze.org/LICENSE.txt)" } ], - "name": "python2.7-supervisor-3.3.5" - }, - { - "license": [ - { - "fullName": "BSD-derived 
(http://www.repoze.org/LICENSE.txt)" - } - ], - "name": "python2.7-meld3-1.0.2" + "name": "python2.7-supervisor-4.1.0" }, { "license": [ @@ -1162,7 +1227,7 @@ "url": "http://spdx.org/licenses/Python-2.0.html" } ], - "name": "python2.7-subprocess32-3.5.3" + "name": "python2.7-subprocess32-3.5.4" }, { "license": [ @@ -1184,7 +1249,7 @@ "url": "http://spdx.org/licenses/MIT.html" } ], - "name": "python2.7-ecdsa-0.13" + "name": "python2.7-ecdsa-0.13.2" }, { "license": [ @@ -1204,7 +1269,7 @@ "url": "http://spdx.org/licenses/Apache-2.0.html" } ], - "name": "python2.7-cryptography-2.5" + "name": "python2.7-cryptography-2.6.1" }, { "license": [ @@ -1215,7 +1280,7 @@ "url": "http://spdx.org/licenses/MIT.html" } ], - "name": "python2.7-cffi-1.12.1" + "name": "python2.7-cffi-1.12.3" }, { "license": [ @@ -1280,7 +1345,7 @@ "url": "http://spdx.org/licenses/MIT.html" } ], - "name": "python2.7-redis-2.10.6" + "name": "python2.7-redis-3.3.11" }, { "license": [ @@ -1324,7 +1389,7 @@ "url": "http://spdx.org/licenses/MIT.html" } ], - "name": "python2.7-pytz-2018.4" + "name": "python2.7-pytz-2019.2" }, { "license": [ @@ -1346,7 +1411,7 @@ "url": "http://spdx.org/licenses/Python-2.0.html" } ], - "name": "python2.7-defusedxml-0.5.0" + "name": "python2.7-defusedxml-0.6.0" }, { "license": [ @@ -1452,9 +1517,15 @@ "shortName": "bsdOriginal", "spdxId": "BSD-4-Clause", "url": "http://spdx.org/licenses/BSD-4-Clause.html" + }, + { + "fullName": "BSD 2-clause \"Simplified\" License", + "shortName": "bsd2", + "spdxId": "BSD-2-Clause", + "url": "http://spdx.org/licenses/BSD-2-Clause.html" } ], - "name": "python2.7-pyasn1-modules-0.2.4" + "name": "python2.7-pyasn1-modules-0.2.6" }, { "license": [ @@ -1465,7 +1536,7 @@ "url": "http://spdx.org/licenses/BSD-4-Clause.html" } ], - "name": "python2.7-pyasn1-0.4.5" + "name": "python2.7-pyasn1-0.4.7" }, { "license": [ @@ -1505,7 +1576,7 @@ "fullName": "Repoze License", "url": "http://www.repoze.org/LICENSE.txt" }, - "name": "python2.7-pyramid-1.10.1" + "name": "python2.7-pyramid-1.10.4" }, { "license": [ @@ -1516,7 +1587,7 @@ "url": "http://spdx.org/licenses/MIT.html" } ], - "name": "python2.7-plaster-pastedeploy-0.6" + "name": "python2.7-plaster-pastedeploy-0.7" }, { "license": [ @@ -1549,25 +1620,14 @@ "url": "http://spdx.org/licenses/MIT.html" } ], - "name": "python2.7-hupper-1.5" + "name": "python2.7-hupper-1.9.1" }, { "license": { "fullName": "Repoze License", "url": "http://www.repoze.org/LICENSE.txt" }, - "name": "python2.7-pyramid-mako-1.0.2" - }, - { - "license": [ - { - "fullName": "Repoze Public License" - }, - { - "fullName": "BSD-derived (http://www.repoze.org/LICENSE.txt)" - } - ], - "name": "python2.7-pyramid-jinja2-2.7" + "name": "python2.7-pyramid-mako-1.1.0" }, { "license": [ @@ -1582,36 +1642,7 @@ "url": "http://www.repoze.org/LICENSE.txt" } ], - "name": "python2.7-pyramid-debugtoolbar-4.5" - }, - { - "license": { - "fullName": "Repoze License", - "url": "http://www.repoze.org/LICENSE.txt" - }, - "name": "python2.7-pyramid-beaker-0.8" - }, - { - "license": [ - { - "fullName": "BSD 4-clause \"Original\" or \"Old\" License", - "shortName": "bsdOriginal", - "spdxId": "BSD-4-Clause", - "url": "http://spdx.org/licenses/BSD-4-Clause.html" - } - ], - "name": "python2.7-beaker-1.9.1" - }, - { - "license": [ - { - "fullName": "MIT License", - "shortName": "mit", - "spdxId": "MIT", - "url": "http://spdx.org/licenses/MIT.html" - } - ], - "name": "python2.7-pyparsing-2.3.0" + "name": "python2.7-pyramid-debugtoolbar-4.5.1" }, { "license": [ @@ -1629,7 +1660,7 @@ "spdxId": 
"MIT", "url": "http://spdx.org/licenses/MIT.html" }, - "name": "python2.7-pycurl-7.43.0.2" + "name": "python2.7-pycurl-7.43.0.3" }, { "license": [ @@ -1659,18 +1690,7 @@ "url": "http://spdx.org/licenses/MIT.html" } ], - "name": "python2.7-pastescript-3.0.0" - }, - { - "license": [ - { - "fullName": "Apache License 2.0", - "shortName": "asl20", - "spdxId": "Apache-2.0", - "url": "http://spdx.org/licenses/Apache-2.0.html" - } - ], - "name": "python2.7-packaging-15.2" + "name": "python2.7-pastescript-3.2.0" }, { "license": [ @@ -1681,7 +1701,7 @@ "url": "http://spdx.org/licenses/MIT.html" } ], - "name": "python2.7-pyotp-2.2.7" + "name": "python2.7-pyotp-2.3.0" }, { "license": [ @@ -1703,7 +1723,7 @@ "url": "http://spdx.org/licenses/BSD-4-Clause.html" } ], - "name": "python2.7-kombu-4.2.1" + "name": "python2.7-kombu-4.6.6" }, { "license": [ @@ -1714,7 +1734,7 @@ "url": "http://spdx.org/licenses/BSD-4-Clause.html" } ], - "name": "python2.7-amqp-2.3.1" + "name": "python2.7-amqp-2.5.2" }, { "license": [ @@ -1725,7 +1745,7 @@ "url": "http://spdx.org/licenses/BSD-4-Clause.html" } ], - "name": "python2.7-vine-1.2.0" + "name": "python2.7-vine-1.3.0" }, { "license": [ @@ -1763,6 +1783,17 @@ { "license": [ { + "fullName": "BSD 4-clause \"Original\" or \"Old\" License", + "shortName": "bsdOriginal", + "spdxId": "BSD-4-Clause", + "url": "http://spdx.org/licenses/BSD-4-Clause.html" + } + ], + "name": "python2.7-beaker-1.9.1" + }, + { + "license": [ + { "fullName": "Python Software Foundation License version 2", "shortName": "psfl", "spdxId": "Python-2.0", @@ -1791,7 +1822,7 @@ "url": "http://spdx.org/licenses/BSD-4-Clause.html" } ], - "name": "python2.7-dogpile.cache-0.7.1" + "name": "python2.7-dogpile.cache-0.9.0" }, { "license": { @@ -1811,7 +1842,7 @@ "fullName": "BSD-derived (http://www.repoze.org/LICENSE.txt)" } ], - "name": "python2.7-deform-2.0.7" + "name": "python2.7-deform-2.0.8" }, { "license": { @@ -1864,6 +1895,17 @@ { "license": [ { + "fullName": "Repoze Public License" + }, + { + "fullName": "BSD-derived (http://www.repoze.org/LICENSE.txt)" + } + ], + "name": "python2.7-pyramid-jinja2-2.7" + }, + { + "license": [ + { "fullName": "BSD 4-clause \"Original\" or \"Old\" License", "shortName": "bsdOriginal", "spdxId": "BSD-4-Clause", @@ -1881,7 +1923,7 @@ "url": "http://spdx.org/licenses/BSD-4-Clause.html" } ], - "name": "python2.7-celery-4.1.1" + "name": "python2.7-celery-4.3.0" }, { "license": [ @@ -1892,7 +1934,7 @@ "url": "http://spdx.org/licenses/BSD-4-Clause.html" } ], - "name": "python2.7-billiard-3.5.0.3" + "name": "python2.7-billiard-3.6.1.0" }, { "license": [ @@ -1906,17 +1948,6 @@ "name": "python2.7-babel-1.3" }, { - "license": [ - { - "fullName": "MIT License", - "shortName": "mit", - "spdxId": "MIT", - "url": "http://spdx.org/licenses/MIT.html" - } - ], - "name": "python2.7-authomatic-0.1.0.post1" - }, - { "license": "UNKNOWN", "name": "python2.7-rhodecode-testdata-0.10.0" } diff --git a/rhodecode/config/middleware.py b/rhodecode/config/middleware.py --- a/rhodecode/config/middleware.py +++ b/rhodecode/config/middleware.py @@ -69,6 +69,10 @@ def should_load_all(): Returns if all application components should be loaded. 
In some cases it's desired to skip apps loading for faster shell script execution """ + ssh_cmd = os.environ.get('RC_CMD_SSH_WRAPPER') + if ssh_cmd: + return False + return True @@ -256,12 +260,12 @@ def includeme(config): config.include('rhodecode.apps._base.navigation') config.include('rhodecode.apps._base.subscribers') config.include('rhodecode.tweens') - - config.include('rhodecode.integrations') config.include('rhodecode.authentication') if load_all: - from rhodecode.authentication import discover_legacy_plugins + config.include('rhodecode.integrations') + + if load_all: # load CE authentication plugins config.include('rhodecode.authentication.plugins.auth_crowd') config.include('rhodecode.authentication.plugins.auth_headers') @@ -272,12 +276,14 @@ def includeme(config): config.include('rhodecode.authentication.plugins.auth_token') # Auto discover authentication plugins and include their configuration. - discover_legacy_plugins(config) + if asbool(settings.get('auth_plugin.import_legacy_plugins', 'true')): + from rhodecode.authentication import discover_legacy_plugins + discover_legacy_plugins(config) # apps - config.include('rhodecode.apps._base') - if load_all: + config.include('rhodecode.apps._base') + config.include('rhodecode.apps.hovercards') config.include('rhodecode.apps.ops') config.include('rhodecode.apps.admin') config.include('rhodecode.apps.channelstream') @@ -303,20 +309,25 @@ def includeme(config): settings['default_locale_name'] = settings.get('lang', 'en') # Add subscribers. - config.add_subscriber(inject_app_settings, - pyramid.events.ApplicationCreated) - config.add_subscriber(scan_repositories_if_enabled, - pyramid.events.ApplicationCreated) - config.add_subscriber(write_metadata_if_needed, - pyramid.events.ApplicationCreated) - config.add_subscriber(write_js_routes_if_enabled, - pyramid.events.ApplicationCreated) + if load_all: + config.add_subscriber(inject_app_settings, + pyramid.events.ApplicationCreated) + config.add_subscriber(scan_repositories_if_enabled, + pyramid.events.ApplicationCreated) + config.add_subscriber(write_metadata_if_needed, + pyramid.events.ApplicationCreated) + config.add_subscriber(write_js_routes_if_enabled, + pyramid.events.ApplicationCreated) # request custom methods config.add_request_method( 'rhodecode.lib.partial_renderer.get_partial_renderer', 'get_partial_renderer') + config.add_request_method( + 'rhodecode.lib.request_counter.get_request_counter', + 'request_count') + # Set the authorization policy. authz_policy = ACLAuthorizationPolicy() config.set_authorization_policy(authz_policy) @@ -410,10 +421,14 @@ def sanitize_settings_and_apply_defaults "Using the following Mako template directories: %s", mako_directories) + # NOTE(marcink): fix redis requirement for schema of connection since 3.X + if 'beaker.session.type' in settings and settings['beaker.session.type'] == 'ext:redis': + raw_url = settings['beaker.session.url'] + if not raw_url.startswith(('redis://', 'rediss://', 'unix://')): + settings['beaker.session.url'] = 'redis://' + raw_url + # Default includes, possible to change as a user - pyramid_includes = settings.setdefault('pyramid.includes', [ - 'rhodecode.lib.middleware.request_wrapper', - ]) + pyramid_includes = settings.setdefault('pyramid.includes', []) log.debug( "Using the following pyramid.includes: %s", pyramid_includes) @@ -557,12 +572,10 @@ def _sanitize_vcs_settings(settings): settings. 
""" _string_setting(settings, 'vcs.svn.compatible_version', '') - _string_setting(settings, 'git_rev_filter', '--all') _string_setting(settings, 'vcs.hooks.protocol', 'http') _string_setting(settings, 'vcs.hooks.host', '127.0.0.1') _string_setting(settings, 'vcs.scm_app_implementation', 'http') _string_setting(settings, 'vcs.server', '') - _string_setting(settings, 'vcs.server.log_level', 'debug') _string_setting(settings, 'vcs.server.protocol', 'http') _bool_setting(settings, 'startup.import_repos', 'false') _bool_setting(settings, 'vcs.hooks.direct_calls', 'false') diff --git a/rhodecode/config/patches.py b/rhodecode/config/patches.py --- a/rhodecode/config/patches.py +++ b/rhodecode/config/patches.py @@ -32,7 +32,6 @@ Please keep the following principles in """ - def inspect_getargspec(): """ Pyramid rely on inspect.getargspec to lookup the signature of diff --git a/rhodecode/config/rcextensions/helpers/http_call.py b/rhodecode/config/rcextensions/helpers/http_call.py --- a/rhodecode/config/rcextensions/helpers/http_call.py +++ b/rhodecode/config/rcextensions/helpers/http_call.py @@ -41,7 +41,7 @@ def run(url, json_data=None, params=None if json_data: resp = method_caller(url, json=json_data, timeout=timeout) elif params: - resp = method_caller(url, params=json_data, timeout=timeout) + resp = method_caller(url, params=params, timeout=timeout) else: raise AttributeError('Provide json_data= or params= in function call') resp.raise_for_status() # raise exception on a failed request diff --git a/rhodecode/config/rcextensions/hooks.py b/rhodecode/config/rcextensions/hooks.py --- a/rhodecode/config/rcextensions/hooks.py +++ b/rhodecode/config/rcextensions/hooks.py @@ -15,8 +15,9 @@ # This program is dual-licensed. If you wish to learn more about the # RhodeCode Enterprise Edition, including its added features, Support services, # and proprietary license terms, please see https://rhodecode.com/licenses/ - +import logging from .utils import DotDict, HookResponse, has_kwargs +log = logging.getLogger('rhodecode.' + __name__) # Config shortcut to keep, all configuration in one place diff --git a/rhodecode/config/rcextensions/utils.py b/rhodecode/config/rcextensions/utils.py --- a/rhodecode/config/rcextensions/utils.py +++ b/rhodecode/config/rcextensions/utils.py @@ -16,10 +16,13 @@ # RhodeCode Enterprise Edition, including its added features, Support services, # and proprietary license terms, please see https://rhodecode.com/licenses/ +import logging import os import functools import collections +log = logging.getLogger('rhodecode.' 
+ __name__) + class HookResponse(object): def __init__(self, status, output): @@ -135,6 +138,7 @@ def has_kwargs(required_args): _verify_kwargs(func.func_name, required_args.keys(), kwargs) # in case there's `calls` defined on module we store the data maybe_log_call(func.func_name, args, kwargs) + log.debug('Calling rcextensions function %s', func.func_name) return func(*args, **kwargs) return wrapper return wrap diff --git a/rhodecode/config/routing_links.py b/rhodecode/config/routing_links.py --- a/rhodecode/config/routing_links.py +++ b/rhodecode/config/routing_links.py @@ -75,7 +75,7 @@ link_config = [ { "name": "rst_help", "target": "http://docutils.sourceforge.net/docs/user/rst/quickref.html", - "external_target": "http://docutils.sourceforge.net/docs/user/rst/quickref.html", + "external_target": "https://docutils.sourceforge.io/docs/user/rst/quickref.html", }, { "name": "markdown_help", diff --git a/rhodecode/config/utils.py b/rhodecode/config/utils.py --- a/rhodecode/config/utils.py +++ b/rhodecode/config/utils.py @@ -41,7 +41,6 @@ def configure_vcs(config): conf.settings.HOOKS_PROTOCOL = config['vcs.hooks.protocol'] conf.settings.HOOKS_HOST = config['vcs.hooks.host'] conf.settings.HOOKS_DIRECT_CALLS = config['vcs.hooks.direct_calls'] - conf.settings.GIT_REV_FILTER = shlex.split(config['git_rev_filter']) conf.settings.DEFAULT_ENCODINGS = config['default_encoding'] conf.settings.ALIASES[:] = config['vcs.backends'] conf.settings.SVN_COMPATIBLE_VERSION = config['vcs.svn.compatible_version'] diff --git a/rhodecode/integrations/views.py b/rhodecode/integrations/views.py --- a/rhodecode/integrations/views.py +++ b/rhodecode/integrations/views.py @@ -21,7 +21,6 @@ import deform import logging import peppercorn -import webhelpers.paginate from pyramid.httpexceptions import HTTPFound, HTTPForbidden, HTTPNotFound @@ -32,6 +31,7 @@ from rhodecode.lib.auth import ( LoginRequired, CSRFRequired, HasPermissionAnyDecorator, HasRepoPermissionAnyDecorator, HasRepoGroupPermissionAnyDecorator) from rhodecode.lib.utils2 import safe_int +from rhodecode.lib.helpers import Page from rhodecode.lib import helpers as h from rhodecode.model.db import Repository, RepoGroup, Session, Integration from rhodecode.model.scm import ScmModel @@ -219,12 +219,16 @@ class IntegrationSettingsViewBase(BaseAp key=lambda x: getattr(x[1], sort_field), reverse=(sort_dir == 'desc')) - page_url = webhelpers.paginate.PageURL( - self.request.path, self.request.GET) + def url_generator(page_num): + query_params = { + 'page': page_num + } + return self.request.current_route_path(_query=query_params) + page = safe_int(self.request.GET.get('page', 1), 1) - integrations = h.Page( - integrations, page=page, items_per_page=10, url=page_url) + integrations = Page( + integrations, page=page, items_per_page=10, url_maker=url_generator) c.rev_sort_dir = sort_dir != 'desc' and 'desc' or 'asc' @@ -402,6 +406,7 @@ class RepoIntegrationsView(IntegrationSe c.rhodecode_db_repo = self.repo c.repo_name = self.db_repo.repo_name c.repository_pull_requests = ScmModel().get_pull_requests(self.repo) + c.repository_artifacts = ScmModel().get_artifacts(self.repo) c.repository_is_user_following = ScmModel().is_following_repo( c.repo_name, self._rhodecode_user.user_id) c.has_origin_repo_read_perm = False diff --git a/rhodecode/lib/_vendor/__init__.py b/rhodecode/lib/_vendor/__init__.py new file mode 100644 --- /dev/null +++ b/rhodecode/lib/_vendor/__init__.py @@ -0,0 +1,29 @@ +# -*- coding: utf-8 -*- + +# Copyright (C) 2012-2019 RhodeCode GmbH +# +# This 
program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License, version 3 +# (only), as published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . +# +# This program is dual-licensed. If you wish to learn more about the +# RhodeCode Enterprise Edition, including its added features, Support services, +# and proprietary license terms, please see https://rhodecode.com/licenses/ + +# This package contains non rhodecode licensed packages that are +# vendored for various reasons + +import os +import sys + +vendor_dir = os.path.abspath(os.path.dirname(__file__)) + +sys.path.append(vendor_dir) diff --git a/rhodecode/lib/_vendor/authomatic/__init__.py b/rhodecode/lib/_vendor/authomatic/__init__.py new file mode 100755 --- /dev/null +++ b/rhodecode/lib/_vendor/authomatic/__init__.py @@ -0,0 +1,14 @@ +# -*- coding: utf-8 -*- +""" +Helper functions for use with :class:`Authomatic`. + +.. autosummary:: + :nosignatures: + + authomatic.provider_id + +""" + +from . import six +from .core import Authomatic +from .core import provider_id diff --git a/rhodecode/lib/_vendor/authomatic/adapters.py b/rhodecode/lib/_vendor/authomatic/adapters.py new file mode 100755 --- /dev/null +++ b/rhodecode/lib/_vendor/authomatic/adapters.py @@ -0,0 +1,282 @@ +# -*- coding: utf-8 -*- +""" +Adapters +-------- + +.. contents:: + :backlinks: none + +The :func:`authomatic.login` function needs access to functionality like +getting the **URL** of the handler where it is being called, getting the +**request params** and **cookies** and **writing the body**, **headers** +and **status** to the response. + +Since implementation of these features varies across Python web frameworks, +the Authomatic library uses **adapters** to unify these differences into a +single interface. + +Available Adapters +^^^^^^^^^^^^^^^^^^ + +If you are missing an adapter for the framework of your choice, please +open an `enhancement issue `_ +or consider a contribution to this module by +:ref:`implementing ` one by yourself. +Its very easy and shouldn't take you more than a few minutes. + +.. autoclass:: DjangoAdapter + :members: + +.. autoclass:: Webapp2Adapter + :members: + +.. autoclass:: WebObAdapter + :members: + +.. autoclass:: WerkzeugAdapter + :members: + +.. _implement_adapters: + +Implementing an Adapter +^^^^^^^^^^^^^^^^^^^^^^^ + +Implementing an adapter for a Python web framework is pretty easy. + +Do it by subclassing the :class:`.BaseAdapter` abstract class. +There are only **six** members that you need to implement. + +Moreover if your framework is based on the |webob|_ or |werkzeug|_ package +you can subclass the :class:`.WebObAdapter` or :class:`.WerkzeugAdapter` +respectively. + +.. autoclass:: BaseAdapter + :members: + +""" + +import abc +from authomatic.core import Response + + +class BaseAdapter(object): + """ + Base class for platform adapters. + + Defines common interface for WSGI framework specific functionality. + + """ + + __metaclass__ = abc.ABCMeta + + @abc.abstractproperty + def params(self): + """ + Must return a :class:`dict` of all request parameters of any HTTP + method. 
+ + :returns: + :class:`dict` + + """ + + @abc.abstractproperty + def url(self): + """ + Must return the url of the actual request including path but without + query and fragment. + + :returns: + :class:`str` + + """ + + @abc.abstractproperty + def cookies(self): + """ + Must return cookies as a :class:`dict`. + + :returns: + :class:`dict` + + """ + + @abc.abstractmethod + def write(self, value): + """ + Must write specified value to response. + + :param str value: + String to be written to response. + + """ + + @abc.abstractmethod + def set_header(self, key, value): + """ + Must set response headers to ``Key: value``. + + :param str key: + Header name. + + :param str value: + Header value. + + """ + + @abc.abstractmethod + def set_status(self, status): + """ + Must set the response status e.g. ``'302 Found'``. + + :param str status: + The HTTP response status. + + """ + + +class DjangoAdapter(BaseAdapter): + """ + Adapter for the |django|_ framework. + """ + + def __init__(self, request, response): + """ + :param request: + An instance of the :class:`django.http.HttpRequest` class. + + :param response: + An instance of the :class:`django.http.HttpResponse` class. + """ + self.request = request + self.response = response + + @property + def params(self): + params = {} + params.update(self.request.GET.dict()) + params.update(self.request.POST.dict()) + return params + + @property + def url(self): + return self.request.build_absolute_uri(self.request.path) + + @property + def cookies(self): + return dict(self.request.COOKIES) + + def write(self, value): + self.response.write(value) + + def set_header(self, key, value): + self.response[key] = value + + def set_status(self, status): + status_code, reason = status.split(' ', 1) + self.response.status_code = int(status_code) + + +class WebObAdapter(BaseAdapter): + """ + Adapter for the |webob|_ package. + """ + + def __init__(self, request, response): + """ + :param request: + A |webob|_ :class:`Request` instance. + + :param response: + A |webob|_ :class:`Response` instance. + """ + self.request = request + self.response = response + + # ========================================================================= + # Request + # ========================================================================= + + @property + def url(self): + return self.request.path_url + + @property + def params(self): + return dict(self.request.params) + + @property + def cookies(self): + return dict(self.request.cookies) + + # ========================================================================= + # Response + # ========================================================================= + + def write(self, value): + self.response.write(value) + + def set_header(self, key, value): + self.response.headers[key] = str(value) + + def set_status(self, status): + self.response.status = status + + +class Webapp2Adapter(WebObAdapter): + """ + Adapter for the |webapp2|_ framework. + + Inherits from the :class:`.WebObAdapter`. + + """ + + def __init__(self, handler): + """ + :param handler: + A :class:`webapp2.RequestHandler` instance. + """ + self.request = handler.request + self.response = handler.response + + +class WerkzeugAdapter(BaseAdapter): + """ + Adapter for |flask|_ and other |werkzeug|_ based frameworks. + + Thanks to `Mark Steve Samson `_. 
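
The adapters module above states that only six members (params, url, cookies, write, set_header, set_status) are needed to adapt a new framework. As a rough, illustrative sketch and not part of the vendored sources, an adapter for a hypothetical framework whose request object exposes dict-like GET/POST/cookies and whose response object has a writable body and a headers mapping might look like this; every attribute name on `request` and `response` here is an assumption:

    # Illustrative sketch of a custom adapter; the wrapped request/response
    # attribute names are assumptions about a hypothetical framework.
    from authomatic.adapters import BaseAdapter


    class MyFrameworkAdapter(BaseAdapter):
        def __init__(self, request, response):
            self.request = request
            self.response = response

        @property
        def params(self):
            # Merge query-string and form parameters into one dict.
            params = dict(self.request.GET)
            params.update(self.request.POST)
            return params

        @property
        def url(self):
            # URL of the current request without query string or fragment.
            return self.request.base_url

        @property
        def cookies(self):
            return dict(self.request.cookies)

        def write(self, value):
            self.response.body += value

        def set_header(self, key, value):
            self.response.headers[key] = value

        def set_status(self, status):
            # e.g. '302 Found'
            self.response.status = status
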
+ + """ + + @property + def params(self): + return self.request.args + + @property + def url(self): + return self.request.base_url + + @property + def cookies(self): + return self.request.cookies + + def __init__(self, request, response): + """ + :param request: + Instance of the :class:`werkzeug.wrappers.Request` class. + + :param response: + Instance of the :class:`werkzeug.wrappers.Response` class. + """ + + self.request = request + self.response = response + + def write(self, value): + self.response.data = self.response.data.decode('utf-8') + value + + def set_header(self, key, value): + self.response.headers[key] = value + + def set_status(self, status): + self.response.status = status diff --git a/rhodecode/lib/_vendor/authomatic/core.py b/rhodecode/lib/_vendor/authomatic/core.py new file mode 100755 --- /dev/null +++ b/rhodecode/lib/_vendor/authomatic/core.py @@ -0,0 +1,1764 @@ +# -*- coding: utf-8 -*- + +import collections +import copy +import datetime +import hashlib +import hmac +import json +import logging +try: + import cPickle as pickle +except ImportError: + import pickle +import sys +import threading +import time +from xml.etree import ElementTree + +from authomatic.exceptions import ( + ConfigError, + CredentialsError, + ImportStringError, + RequestElementsError, + SessionError, +) +from authomatic import six +from authomatic.six.moves import urllib_parse as parse + + +# ========================================================================= +# Global variables !!! +# ========================================================================= + +_logger = logging.getLogger(__name__) +_logger.addHandler(logging.StreamHandler(sys.stdout)) + +_counter = None + + +def normalize_dict(dict_): + """ + Replaces all values that are single-item iterables with the value of its + index 0. + + :param dict dict_: + Dictionary to normalize. + + :returns: + Normalized dictionary. + + """ + + return dict([(k, v[0] if not isinstance(v, str) and len(v) == 1 else v) + for k, v in list(dict_.items())]) + + +def items_to_dict(items): + """ + Converts list of tuples to dictionary with duplicate keys converted to + lists. + + :param list items: + List of tuples. + + :returns: + :class:`dict` + + """ + + res = collections.defaultdict(list) + + for k, v in items: + res[k].append(v) + + return normalize_dict(dict(res)) + + +class Counter(object): + """ + A simple counter to be used in the config to generate unique `id` values. + """ + + def __init__(self, start=0): + self._count = start + + def count(self): + self._count += 1 + return self._count + + +_counter = Counter() + + +def provider_id(): + """ + A simple counter to be used in the config to generate unique `IDs`. + + :returns: + :class:`int`. 
+ + Use it in the :doc:`config` like this: + :: + + import authomatic + + CONFIG = { + 'facebook': { + 'class_': authomatic.providers.oauth2.Facebook, + 'id': authomatic.provider_id(), # returns 1 + 'consumer_key': '##########', + 'consumer_secret': '##########', + 'scope': ['user_about_me', 'email'] + }, + 'google': { + 'class_': 'authomatic.providers.oauth2.Google', + 'id': authomatic.provider_id(), # returns 2 + 'consumer_key': '##########', + 'consumer_secret': '##########', + 'scope': ['https://www.googleapis.com/auth/userinfo.profile', + 'https://www.googleapis.com/auth/userinfo.email'] + }, + 'windows_live': { + 'class_': 'oauth2.WindowsLive', + 'id': authomatic.provider_id(), # returns 3 + 'consumer_key': '##########', + 'consumer_secret': '##########', + 'scope': ['wl.basic', 'wl.emails', 'wl.photos'] + }, + } + + """ + + return _counter.count() + + +def escape(s): + """ + Escape a URL including any /. + """ + return parse.quote(s.encode('utf-8'), safe='~') + + +def json_qs_parser(body): + """ + Parses response body from JSON, XML or query string. + + :param body: + string + + :returns: + :class:`dict`, :class:`list` if input is JSON or query string, + :class:`xml.etree.ElementTree.Element` if XML. + + """ + try: + # Try JSON first. + return json.loads(body) + except (OverflowError, TypeError, ValueError): + pass + + try: + # Then XML. + return ElementTree.fromstring(body) + except (ElementTree.ParseError, TypeError, ValueError): + pass + + # Finally query string. + return dict(parse.parse_qsl(body)) + + +def import_string(import_name, silent=False): + """ + Imports an object by string in dotted notation. + + taken `from webapp2.import_string() `_ + + """ + + try: + if '.' in import_name: + module, obj = import_name.rsplit('.', 1) + return getattr(__import__(module, None, None, [obj]), obj) + else: + return __import__(import_name) + except (ImportError, AttributeError) as e: + if not silent: + raise ImportStringError('Import from string failed for path {0}' + .format(import_name), str(e)) + + +def resolve_provider_class(class_): + """ + Returns a provider class. + + :param class_name: :class:`string` or + :class:`authomatic.providers.BaseProvider` subclass. + + """ + + if isinstance(class_, str): + # prepare path for authomatic.providers package + path = '.'.join([__package__, 'providers', class_]) + + # try to import class by string from providers module or by fully + # qualified path + return import_string(class_, True) or import_string(path) + else: + return class_ + + +def id_to_name(config, short_name): + """ + Returns the provider :doc:`config` key based on it's ``id`` value. + + :param dict config: + :doc:`config`. + :param id: + Value of the id parameter in the :ref:`config` to search for. + + """ + + for k, v in list(config.items()): + if v.get('id') == short_name: + return k + + raise Exception( + 'No provider with id={0} found in the config!'.format(short_name)) + + +class ReprMixin(object): + """ + Provides __repr__() method with output *ClassName(arg1=value, arg2=value)*. + + Ignored are attributes + + * which values are considered false. + * with leading underscore. + * listed in _repr_ignore. + + Values of attributes listed in _repr_sensitive will be replaced by *###*. + Values which repr() string is longer than _repr_length_limit will be + represented as *ClassName(...)* + + """ + + #: Iterable of attributes to be ignored. + _repr_ignore = [] + #: Iterable of attributes which value should not be visible. 
+ _repr_sensitive = [] + #: `int` Values longer than this will be truncated to *ClassName(...)*. + _repr_length_limit = 20 + + def __repr__(self): + + # get class name + name = self.__class__.__name__ + + # construct keyword arguments + args = [] + + for k, v in list(self.__dict__.items()): + + # ignore attributes with leading underscores and those listed in + # _repr_ignore + if v and not k.startswith('_') and k not in self._repr_ignore: + + # replace sensitive values + if k in self._repr_sensitive: + v = '###' + + # if repr is too long + if len(repr(v)) > self._repr_length_limit: + # Truncate to ClassName(...) + v = '{0}(...)'.format(v.__class__.__name__) + else: + v = repr(v) + + args.append('{0}={1}'.format(k, v)) + + return '{0}({1})'.format(name, ', '.join(args)) + + +class Future(threading.Thread): + """ + Represents an activity run in a separate thread. Subclasses the standard + library :class:`threading.Thread` and adds :attr:`.get_result` method. + + .. warning:: + + |async| + + """ + + def __init__(self, func, *args, **kwargs): + """ + :param callable func: + The function to be run in separate thread. + + Calls :data:`func` in separate thread and returns immediately. + Accepts arbitrary positional and keyword arguments which will be + passed to :data:`func`. + """ + + super(Future, self).__init__() + self._func = func + self._args = args + self._kwargs = kwargs + self._result = None + + self.start() + + def run(self): + self._result = self._func(*self._args, **self._kwargs) + + def get_result(self, timeout=None): + """ + Waits for the wrapped :data:`func` to finish and returns its result. + + .. note:: + + This will block the **calling thread** until the :data:`func` + returns. + + :param timeout: + :class:`float` or ``None`` A timeout for the :data:`func` to + return in seconds. + + :returns: + The result of the wrapped :data:`func`. + + """ + + self.join(timeout) + return self._result + + +class Session(object): + """ + A dictionary-like secure cookie session implementation. + """ + + def __init__(self, adapter, secret, name='authomatic', max_age=600, + secure=False): + """ + :param str secret: + Session secret used to sign the session cookie. + :param str name: + Session cookie name. + :param int max_age: + Maximum allowed age of session cookie nonce in seconds. + :param bool secure: + If ``True`` the session cookie will be saved with ``Secure`` + attribute. + """ + + self.adapter = adapter + self.name = name + self.secret = secret + self.max_age = max_age + self.secure = secure + self._data = {} + + def create_cookie(self, delete=None): + """ + Creates the value for ``Set-Cookie`` HTTP header. + + :param bool delete: + If ``True`` the cookie value will be ``deleted`` and the + Expires value will be ``Thu, 01-Jan-1970 00:00:01 GMT``. + + """ + value = 'deleted' if delete else self._serialize(self.data) + split_url = parse.urlsplit(self.adapter.url) + domain = split_url.netloc.split(':')[0] + + # Work-around for issue #11, failure of WebKit-based browsers to accept + # cookies set as part of a redirect response in some circumstances. + if '.' 
not in domain: + template = '{name}={value}; Path={path}; HttpOnly{secure}{expires}' + else: + template = ('{name}={value}; Domain={domain}; Path={path}; ' + 'HttpOnly{secure}{expires}') + + return template.format( + name=self.name, + value=value, + domain=domain, + path=split_url.path, + secure='; Secure' if self.secure else '', + expires='; Expires=Thu, 01-Jan-1970 00:00:01 GMT' if delete else '' + ) + + def save(self): + """ + Adds the session cookie to headers. + """ + if self.data: + cookie = self.create_cookie() + cookie_len = len(cookie) + + if cookie_len > 4093: + raise SessionError('Cookie too long! The cookie size {0} ' + 'is more than 4093 bytes.' + .format(cookie_len)) + + self.adapter.set_header('Set-Cookie', cookie) + + # Reset data + self._data = {} + + def delete(self): + self.adapter.set_header('Set-Cookie', self.create_cookie(delete=True)) + + def _get_data(self): + """ + Extracts the session data from cookie. + """ + cookie = self.adapter.cookies.get(self.name) + return self._deserialize(cookie) if cookie else {} + + @property + def data(self): + """ + Gets session data lazily. + """ + if not self._data: + self._data = self._get_data() + # Always return a dict, even if deserialization returned nothing + if self._data is None: + self._data = {} + return self._data + + def _signature(self, *parts): + """ + Creates signature for the session. + """ + signature = hmac.new(six.b(self.secret), digestmod=hashlib.sha1) + signature.update(six.b('|'.join(parts))) + return signature.hexdigest() + + def _serialize(self, value): + """ + Converts the value to a signed string with timestamp. + + :param value: + Object to be serialized. + + :returns: + Serialized value. + + """ + + # data = copy.deepcopy(value) + data = value + + # 1. Serialize + serialized = pickle.dumps(data).decode('latin-1') + + # 2. Encode + # Percent encoding produces smaller result then urlsafe base64. + encoded = parse.quote(serialized, '') + + # 3. Concatenate + timestamp = str(int(time.time())) + signature = self._signature(self.name, encoded, timestamp) + concatenated = '|'.join([encoded, timestamp, signature]) + + return concatenated + + def _deserialize(self, value): + """ + Deserializes and verifies the value created by :meth:`._serialize`. + + :param str value: + The serialized value. + + :returns: + Deserialized object. + + """ + + # 3. Split + encoded, timestamp, signature = value.split('|') + + # Verify signature + if not signature == self._signature(self.name, encoded, timestamp): + raise SessionError('Invalid signature "{0}"!'.format(signature)) + + # Verify timestamp + if int(timestamp) < int(time.time()) - self.max_age: + return None + + # 2. Decode + decoded = parse.unquote(encoded) + + # 1. Deserialize + deserialized = pickle.loads(decoded.encode('latin-1')) + + return deserialized + + def __setitem__(self, key, value): + self._data[key] = value + + def __getitem__(self, key): + return self.data.__getitem__(key) + + def __delitem__(self, key): + return self._data.__delitem__(key) + + def get(self, key, default=None): + return self.data.get(key, default) + + +class User(ReprMixin): + """ + Provides unified interface to selected **user** info returned by different + **providers**. + + .. note:: The value format may vary across providers. + + """ + + def __init__(self, provider, **kwargs): + #: A :doc:`provider ` instance. + self.provider = provider + + #: An :class:`.Credentials` instance. 
+ self.credentials = kwargs.get('credentials') + + #: A :class:`dict` containing all the **user** information returned + #: by the **provider**. + #: The structure differs across **providers**. + self.data = kwargs.get('data') + + #: The :attr:`.Response.content` of the request made to update + #: the user. + self.content = kwargs.get('content') + + #: :class:`str` ID assigned to the **user** by the **provider**. + self.id = kwargs.get('id') + #: :class:`str` User name e.g. *andrewpipkin*. + self.username = kwargs.get('username') + #: :class:`str` Name e.g. *Andrew Pipkin*. + self.name = kwargs.get('name') + #: :class:`str` First name e.g. *Andrew*. + self.first_name = kwargs.get('first_name') + #: :class:`str` Last name e.g. *Pipkin*. + self.last_name = kwargs.get('last_name') + #: :class:`str` Nickname e.g. *Andy*. + self.nickname = kwargs.get('nickname') + #: :class:`str` Link URL. + self.link = kwargs.get('link') + #: :class:`str` Gender. + self.gender = kwargs.get('gender') + #: :class:`str` Timezone. + self.timezone = kwargs.get('timezone') + #: :class:`str` Locale. + self.locale = kwargs.get('locale') + #: :class:`str` E-mail. + self.email = kwargs.get('email') + #: :class:`str` phone. + self.phone = kwargs.get('phone') + #: :class:`str` Picture URL. + self.picture = kwargs.get('picture') + #: Birth date as :class:`datetime.datetime()` or :class:`str` + # if parsing failed or ``None``. + self.birth_date = kwargs.get('birth_date') + #: :class:`str` Country. + self.country = kwargs.get('country') + #: :class:`str` City. + self.city = kwargs.get('city') + #: :class:`str` Geographical location. + self.location = kwargs.get('location') + #: :class:`str` Postal code. + self.postal_code = kwargs.get('postal_code') + #: Instance of the Google App Engine Users API + #: `User `_ class. + #: Only present when using the :class:`authomatic.providers.gaeopenid.GAEOpenID` provider. + self.gae_user = kwargs.get('gae_user') + + def update(self): + """ + Updates the user info by fetching the **provider's** user info URL. + + :returns: + Updated instance of this class. + + """ + + return self.provider.update_user() + + def async_update(self): + """ + Same as :meth:`.update` but runs asynchronously in a separate thread. + + .. warning:: + + |async| + + :returns: + :class:`.Future` instance representing the separate thread. + + """ + + return Future(self.update) + + def to_dict(self): + """ + Converts the :class:`.User` instance to a :class:`dict`. 
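
The User class above unifies profile data across providers. As a small sketch, assuming `user` comes from an already completed login procedure, a consumer might refresh and read it like this:

    # Illustrative sketch only; `user` is assumed to be a User instance
    # obtained from a finished login procedure.
    def describe_user(user):
        # Many providers return only a minimal profile at first; update()
        # fetches the full user info from the provider's user info URL.
        user.update()

        # Unified attributes, populated as far as the provider supports them.
        summary = {
            'id': user.id,
            'name': user.name,
            'email': user.email,
            'picture': user.picture,
        }

        # to_dict() gives a serializable snapshot: the provider is reduced
        # to its name, credentials are serialized and raw content is dropped.
        return summary, user.to_dict()
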
+ + :returns: + :class:`dict` + + """ + + # copy the dictionary + d = copy.copy(self.__dict__) + + # Keep only the provider name to avoid circular reference + d['provider'] = self.provider.name + d['credentials'] = self.credentials.serialize( + ) if self.credentials else None + d['birth_date'] = str(d['birth_date']) + + # Remove content + d.pop('content') + + if isinstance(self.data, ElementTree.Element): + d['data'] = None + + return d + + +SupportedUserAttributesNT = collections.namedtuple( + typename='SupportedUserAttributesNT', + field_names=['birth_date', 'city', 'country', 'email', 'first_name', + 'gender', 'id', 'last_name', 'link', 'locale', 'location', + 'name', 'nickname', 'phone', 'picture', 'postal_code', + 'timezone', 'username', ] +) + + +class SupportedUserAttributes(SupportedUserAttributesNT): + def __new__(cls, **kwargs): + defaults = dict((i, False) for i in SupportedUserAttributes._fields) # pylint:disable=no-member + defaults.update(**kwargs) + return super(SupportedUserAttributes, cls).__new__(cls, **defaults) + + +class Credentials(ReprMixin): + """ + Contains all necessary information to fetch **user's protected resources**. + """ + + _repr_sensitive = ('token', 'refresh_token', 'token_secret', + 'consumer_key', 'consumer_secret') + + def __init__(self, config, **kwargs): + + #: :class:`dict` :doc:`config`. + self.config = config + + #: :class:`str` User **access token**. + self.token = kwargs.get('token', '') + + #: :class:`str` Access token type. + self.token_type = kwargs.get('token_type', '') + + #: :class:`str` Refresh token. + self.refresh_token = kwargs.get('refresh_token', '') + + #: :class:`str` Access token secret. + self.token_secret = kwargs.get('token_secret', '') + + #: :class:`int` Expiration date as UNIX timestamp. + self.expiration_time = int(kwargs.get('expiration_time', 0)) + + #: A :doc:`Provider ` instance**. + provider = kwargs.get('provider') + + self.expire_in = int(kwargs.get('expire_in', 0)) + + if provider: + #: :class:`str` Provider name specified in the :doc:`config`. + self.provider_name = provider.name + + #: :class:`str` Provider type e.g. + # ``"authomatic.providers.oauth2.OAuth2"``. + self.provider_type = provider.get_type() + + #: :class:`str` Provider type e.g. + # ``"authomatic.providers.oauth2.OAuth2"``. + self.provider_type_id = provider.type_id + + #: :class:`str` Provider short name specified in the :doc:`config`. + self.provider_id = int(provider.id) if provider.id else None + + #: :class:`class` Provider class. + self.provider_class = provider.__class__ + + #: :class:`str` Consumer key specified in the :doc:`config`. + self.consumer_key = provider.consumer_key + + #: :class:`str` Consumer secret specified in the :doc:`config`. + self.consumer_secret = provider.consumer_secret + + else: + self.provider_name = kwargs.get('provider_name', '') + self.provider_type = kwargs.get('provider_type', '') + self.provider_type_id = kwargs.get('provider_type_id') + self.provider_id = kwargs.get('provider_id') + self.provider_class = kwargs.get('provider_class') + + self.consumer_key = kwargs.get('consumer_key', '') + self.consumer_secret = kwargs.get('consumer_secret', '') + + @property + def expire_in(self): + """ + + """ + + return self._expire_in + + @expire_in.setter + def expire_in(self, value): + """ + Computes :attr:`.expiration_time` when the value is set. 
+ """ + + # pylint:disable=attribute-defined-outside-init + if value: + self._expiration_time = int(time.time()) + int(value) + self._expire_in = value + + @property + def expiration_time(self): + return self._expiration_time + + @expiration_time.setter + def expiration_time(self, value): + + # pylint:disable=attribute-defined-outside-init + self._expiration_time = int(value) + self._expire_in = self._expiration_time - int(time.time()) + + @property + def expiration_date(self): + """ + Expiration date as :class:`datetime.datetime` or ``None`` if + credentials never expire. + """ + + if self.expire_in < 0: + return None + else: + return datetime.datetime.fromtimestamp(self.expiration_time) + + @property + def valid(self): + """ + ``True`` if credentials are valid, ``False`` if expired. + """ + + if self.expiration_time: + return self.expiration_time > int(time.time()) + else: + return True + + def expire_soon(self, seconds): + """ + Returns ``True`` if credentials expire sooner than specified. + + :param int seconds: + Number of seconds. + + :returns: + ``True`` if credentials expire sooner than specified, + else ``False``. + + """ + + if self.expiration_time: + return self.expiration_time < int(time.time()) + int(seconds) + else: + return False + + def refresh(self, force=False, soon=86400): + """ + Refreshes the credentials only if the **provider** supports it and if + it will expire in less than one day. It does nothing in other cases. + + .. note:: + + The credentials will be refreshed only if it gives sense + i.e. only |oauth2|_ has the notion of credentials + *refreshment/extension*. + And there are also differences across providers e.g. Google + supports refreshment only if there is a ``refresh_token`` in + the credentials and that in turn is present only if the + ``access_type`` parameter was set to ``offline`` in the + **user authorization request**. + + :param bool force: + If ``True`` the credentials will be refreshed even if they + won't expire soon. + + :param int soon: + Number of seconds specifying what means *soon*. + + """ + + if hasattr(self.provider_class, 'refresh_credentials'): + if force or self.expire_soon(soon): + logging.info('PROVIDER NAME: {0}'.format(self.provider_name)) + return self.provider_class( + self, None, self.provider_name).refresh_credentials(self) + + def async_refresh(self, *args, **kwargs): + """ + Same as :meth:`.refresh` but runs asynchronously in a separate thread. + + .. warning:: + + |async| + + :returns: + :class:`.Future` instance representing the separate thread. + + """ + + return Future(self.refresh, *args, **kwargs) + + def provider_type_class(self): + """ + Returns the :doc:`provider ` class specified in the + :doc:`config`. + + :returns: + :class:`authomatic.providers.BaseProvider` subclass. + + """ + + return resolve_provider_class(self.provider_type) + + def serialize(self): + """ + Converts the credentials to a percent encoded string to be stored for + later use. + + :returns: + :class:`string` + + """ + + if self.provider_id is None: + raise ConfigError( + 'To serialize credentials you need to specify a ' + 'unique integer under the "id" key in the config ' + 'for each provider!') + + # Get the provider type specific items. + rest = self.provider_type_class().to_tuple(self) + + # Provider ID and provider type ID are always the first two items. + result = (self.provider_id, self.provider_type_id) + rest + + # Make sure that all items are strings. + stringified = [str(i) for i in result] + + # Concatenate by newline. 
+ concatenated = '\n'.join(stringified) + + # Percent encode. + return parse.quote(concatenated, '') + + @classmethod + def deserialize(cls, config, credentials): + """ + A *class method* which reconstructs credentials created by + :meth:`serialize`. You can also pass it a :class:`.Credentials` + instance. + + :param dict config: + The same :doc:`config` used in the :func:`.login` to get the + credentials. + :param str credentials: + :class:`string` The serialized credentials or + :class:`.Credentials` instance. + + :returns: + :class:`.Credentials` + + """ + + # Accept both serialized and normal. + if isinstance(credentials, Credentials): + return credentials + + decoded = parse.unquote(credentials) + + split = decoded.split('\n') + + # We need the provider ID to move forward. + if split[0] is None: + raise CredentialsError( + 'To deserialize credentials you need to specify a unique ' + 'integer under the "id" key in the config for each provider!') + + # Get provider config by short name. + provider_name = id_to_name(config, int(split[0])) + cfg = config.get(provider_name) + + # Get the provider class. + ProviderClass = resolve_provider_class(cfg.get('class_')) + + deserialized = Credentials(config) + + deserialized.provider_id = provider_id + deserialized.provider_type = ProviderClass.get_type() + deserialized.provider_type_id = split[1] + deserialized.provider_class = ProviderClass + deserialized.provider_name = provider_name + deserialized.provider_class = ProviderClass + + # Add provider type specific properties. + return ProviderClass.reconstruct(split[2:], deserialized, cfg) + + +class LoginResult(ReprMixin): + """ + Result of the :func:`authomatic.login` function. + """ + + def __init__(self, provider): + #: A :doc:`provider ` instance. + self.provider = provider + + #: An instance of the :exc:`authomatic.exceptions.BaseError` subclass. + self.error = None + + def popup_js(self, callback_name=None, indent=None, + custom=None, stay_open=False): + """ + Returns JavaScript that: + + #. Triggers the ``options.onLoginComplete(result, closer)`` + handler set with the :ref:`authomatic.setup() ` + function of :ref:`javascript.js `. + #. Calls the JavasScript callback specified by :data:`callback_name` + on the opener of the *login handler popup* and passes it the + *login result* JSON object as first argument and the `closer` + function which you should call in your callback to close the popup. + + :param str callback_name: + The name of the javascript callback e.g ``foo.bar.loginCallback`` + will result in ``window.opener.foo.bar.loginCallback(result);`` + in the HTML. + + :param int indent: + The number of spaces to indent the JSON result object. + If ``0`` or negative, only newlines are added. + If ``None``, no newlines are added. + + :param custom: + Any JSON serializable object that will be passed to the + ``result.custom`` attribute. + + :param str stay_open: + If ``True``, the popup will stay open. + + :returns: + :class:`str` with JavaScript. 
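
A LoginResult is what the login procedure eventually hands back. A hedged sketch of handling it follows; the `result` and `response` names are placeholders standing in for the return value of a login call and a framework response object:

    # Illustrative sketch of consuming a LoginResult.
    def handle_login_result(result, response):
        if result:
            if result.error:
                # The login procedure failed; the error carries a message.
                response.write('Login failed: {0}'.format(result.error.message))
            elif result.user:
                # Some providers need an extra round trip for the full profile.
                if not (result.user.name and result.user.id):
                    result.user.update()
                response.write('Hi {0}!'.format(result.user.name))
        # If result is falsy the login procedure is typically still in
        # progress (redirects between the app and the provider).
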
+ + """ + + custom_callback = """ + try {{ window.opener.{cb}(result, closer); }} catch(e) {{}} + """.format(cb=callback_name) if callback_name else '' + + # TODO: Move the window.close() to the opener + return """ + (function(){{ + + closer = function(){{ + window.close(); + }}; + + var result = {result}; + result.custom = {custom}; + + {custom_callback} + + try {{ + window.opener.authomatic.loginComplete(result, closer); + }} catch(e) {{}} + + }})(); + + """.format(result=self.to_json(indent), + custom=json.dumps(custom), + custom_callback=custom_callback, + stay_open='// ' if stay_open else '') + + def popup_html(self, callback_name=None, indent=None, + title='Login | {0}', custom=None, stay_open=False): + """ + Returns a HTML with JavaScript that: + + #. Triggers the ``options.onLoginComplete(result, closer)`` handler + set with the :ref:`authomatic.setup() ` function of + :ref:`javascript.js `. + #. Calls the JavasScript callback specified by :data:`callback_name` + on the opener of the *login handler popup* and passes it the + *login result* JSON object as first argument and the `closer` + function which you should call in your callback to close the popup. + + :param str callback_name: + The name of the javascript callback e.g ``foo.bar.loginCallback`` + will result in ``window.opener.foo.bar.loginCallback(result);`` + in the HTML. + + :param int indent: + The number of spaces to indent the JSON result object. + If ``0`` or negative, only newlines are added. + If ``None``, no newlines are added. + + :param str title: + The text of the HTML title. You can use ``{0}`` tag inside, + which will be replaced by the provider name. + + :param custom: + Any JSON serializable object that will be passed to the + ``result.custom`` attribute. + + :param str stay_open: + If ``True``, the popup will stay open. + + :returns: + :class:`str` with HTML. + + """ + + return """ + + + {title} + + + + + """.format( + title=title.format(self.provider.name if self.provider else ''), + js=self.popup_js(callback_name, indent, custom, stay_open) + ) + + @property + def user(self): + """ + A :class:`.User` instance. + """ + + return self.provider.user if self.provider else None + + def to_dict(self): + return dict(provider=self.provider, user=self.user, error=self.error) + + def to_json(self, indent=4): + return json.dumps(self, default=lambda obj: obj.to_dict( + ) if hasattr(obj, 'to_dict') else '', indent=indent) + + +class Response(ReprMixin): + """ + Wraps :class:`httplib.HTTPResponse` and adds. + + :attr:`.content` and :attr:`.data` attributes. + + """ + + def __init__(self, httplib_response, content_parser=None): + """ + :param httplib_response: + The wrapped :class:`httplib.HTTPResponse` instance. + + :param function content_parser: + Callable which accepts :attr:`.content` as argument, + parses it and returns the parsed data as :class:`dict`. + """ + + self.httplib_response = httplib_response + self.content_parser = content_parser or json_qs_parser + self._data = None + self._content = None + + #: Same as :attr:`httplib.HTTPResponse.msg`. + self.msg = httplib_response.msg + #: Same as :attr:`httplib.HTTPResponse.version`. + self.version = httplib_response.version + #: Same as :attr:`httplib.HTTPResponse.status`. + self.status = httplib_response.status + #: Same as :attr:`httplib.HTTPResponse.reason`. + self.reason = httplib_response.reason + + def read(self, amt=None): + """ + Same as :meth:`httplib.HTTPResponse.read`. 
+ + :param amt: + + """ + + return self.httplib_response.read(amt) + + def getheader(self, name, default=None): + """ + Same as :meth:`httplib.HTTPResponse.getheader`. + + :param name: + :param default: + + """ + + return self.httplib_response.getheader(name, default) + + def fileno(self): + """ + Same as :meth:`httplib.HTTPResponse.fileno`. + """ + return self.httplib_response.fileno() + + def getheaders(self): + """ + Same as :meth:`httplib.HTTPResponse.getheaders`. + """ + return self.httplib_response.getheaders() + + @staticmethod + def is_binary_string(content): + """ + Return true if string is binary data. + """ + + textchars = (bytearray([7, 8, 9, 10, 12, 13, 27]) + + bytearray(range(0x20, 0x100))) + return bool(content.translate(None, textchars)) + + @property + def content(self): + """ + The whole response content. + """ + + if not self._content: + content = self.httplib_response.read() + if self.is_binary_string(content): + self._content = content + else: + self._content = content.decode('utf-8') + return self._content + + @property + def data(self): + """ + A :class:`dict` of data parsed from :attr:`.content`. + """ + + if not self._data: + self._data = self.content_parser(self.content) + return self._data + + +class UserInfoResponse(Response): + """ + Inherits from :class:`.Response`, adds :attr:`~UserInfoResponse.user` + attribute. + """ + + def __init__(self, user, *args, **kwargs): + super(UserInfoResponse, self).__init__(*args, **kwargs) + + #: :class:`.User` instance. + self.user = user + + +class RequestElements(tuple): + """ + A tuple of ``(url, method, params, headers, body)`` request elements. + + With some additional properties. + + """ + + def __new__(cls, url, method, params, headers, body): + return tuple.__new__(cls, (url, method, params, headers, body)) + + @property + def url(self): + """ + Request URL. + """ + + return self[0] + + @property + def method(self): + """ + HTTP method of the request. + """ + + return self[1] + + @property + def params(self): + """ + Dictionary of request parameters. + """ + + return self[2] + + @property + def headers(self): + """ + Dictionary of request headers. + """ + + return self[3] + + @property + def body(self): + """ + :class:`str` Body of ``POST``, ``PUT`` and ``PATCH`` requests. + """ + + return self[4] + + @property + def query_string(self): + """ + Query string of the request. + """ + + return parse.urlencode(self.params) + + @property + def full_url(self): + """ + URL with query string. + """ + + return self.url + '?' + self.query_string + + def to_json(self): + return json.dumps(dict(url=self.url, + method=self.method, + params=self.params, + headers=self.headers, + body=self.body)) + + +class Authomatic(object): + def __init__( + self, config, secret, session_max_age=600, secure_cookie=False, + session=None, session_save_method=None, report_errors=True, + debug=False, logging_level=logging.INFO, prefix='authomatic', + logger=None + ): + """ + Encapsulates all the functionality of this package. + + :param dict config: + :doc:`config` + + :param str secret: + A secret string that will be used as the key for signing + :class:`.Session` cookie and as a salt by *CSRF* token generation. + + :param session_max_age: + Maximum allowed age of :class:`.Session` cookie nonce in seconds. + + :param bool secure_cookie: + If ``True`` the :class:`.Session` cookie will be saved wit + ``Secure`` attribute. + + :param session: + Custom dictionary-like session implementation. 
+ + :param callable session_save_method: + A method of the supplied session or any mechanism that saves the + session data and cookie. + + :param bool report_errors: + If ``True`` exceptions encountered during the **login procedure** + will be caught and reported in the :attr:`.LoginResult.error` + attribute. + Default is ``True``. + + :param bool debug: + If ``True`` traceback of exceptions will be written to response. + Default is ``False``. + + :param int logging_level: + The logging level threshold for the default logger as specified in + the standard Python + `logging library `_. + This setting is ignored when :data:`logger` is set. + Default is ``logging.INFO``. + + :param str prefix: + Prefix used as the :class:`.Session` cookie name. + + :param logger: + A :class:`logging.logger` instance. + + """ + + self.config = config + self.secret = secret + self.session_max_age = session_max_age + self.secure_cookie = secure_cookie + self.session = session + self.session_save_method = session_save_method + self.report_errors = report_errors + self.debug = debug + self.logging_level = logging_level + self.prefix = prefix + self._logger = logger or logging.getLogger(str(id(self))) + + # Set logging level. + if logger is None: + self._logger.setLevel(logging_level) + + def login(self, adapter, provider_name, callback=None, + session=None, session_saver=None, **kwargs): + """ + If :data:`provider_name` specified, launches the login procedure for + corresponding :doc:`provider ` and returns + :class:`.LoginResult`. + + If :data:`provider_name` is empty, acts like + :meth:`.Authomatic.backend`. + + .. warning:: + + The method redirects the **user** to the **provider** which in + turn redirects **him/her** back to the *request handler* where + it has been called. + + :param str provider_name: + Name of the provider as specified in the keys of the :doc:`config`. + + :param callable callback: + If specified the method will call the callback with + :class:`.LoginResult` passed as argument and will return nothing. + + :param bool report_errors: + + .. note:: + + Accepts additional keyword arguments that will be passed to + :doc:`provider ` constructor. + + :returns: + :class:`.LoginResult` + + """ + + if provider_name: + # retrieve required settings for current provider and raise + # exceptions if missing + provider_settings = self.config.get(provider_name) + if not provider_settings: + raise ConfigError('Provider name "{0}" not specified!' + .format(provider_name)) + + if not (session is None or session_saver is None): + session = session + session_saver = session_saver + else: + session = Session(adapter=adapter, + secret=self.secret, + max_age=self.session_max_age, + name=self.prefix, + secure=self.secure_cookie) + + session_saver = session.save + + # Resolve provider class. + class_ = provider_settings.get('class_') + if not class_: + raise ConfigError( + 'The "class_" key not specified in the config' + ' for provider {0}!'.format(provider_name)) + ProviderClass = resolve_provider_class(class_) + + # FIXME: Find a nicer solution + ProviderClass._logger = self._logger + + # instantiate provider class + provider = ProviderClass(self, + adapter=adapter, + provider_name=provider_name, + callback=callback, + session=session, + session_saver=session_saver, + **kwargs) + + # return login result + return provider.login() + + else: + # Act like backend. + self.backend(adapter) + + def credentials(self, credentials): + """ + Deserializes credentials. 
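
Putting the pieces together, a minimal wiring of Authomatic into a request handler might look like the sketch below. The configuration mirrors the provider_id() example from earlier in core.py; the secret, keys and the handler name are placeholders, not values from this patch:

    # Illustrative sketch of setting up Authomatic and starting a login.
    import authomatic
    from authomatic.adapters import WerkzeugAdapter

    # Hypothetical provider configuration; keys and secrets are placeholders.
    CONFIG = {
        'facebook': {
            'class_': 'authomatic.providers.oauth2.Facebook',
            'id': authomatic.provider_id(),
            'consumer_key': '##########',
            'consumer_secret': '##########',
            'scope': ['user_about_me', 'email'],
        },
    }

    auth = authomatic.Authomatic(config=CONFIG, secret='change-this-secret')


    def login_view(request, response, provider_name):
        # Runs one step of the login procedure; the user is redirected to
        # the provider and back until a LoginResult is finally returned.
        result = auth.login(WerkzeugAdapter(request, response), provider_name)
        if result:
            # Handle result.error / result.user as in the LoginResult sketch.
            pass
        return response
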
+ + :param credentials: + Credentials serialized with :meth:`.Credentials.serialize` or + :class:`.Credentials` instance. + + :returns: + :class:`.Credentials` + + """ + + return Credentials.deserialize(self.config, credentials) + + def access(self, credentials, url, params=None, method='GET', + headers=None, body='', max_redirects=5, content_parser=None): + """ + Accesses **protected resource** on behalf of the **user**. + + :param credentials: + The **user's** :class:`.Credentials` (serialized or normal). + + :param str url: + The **protected resource** URL. + + :param str method: + HTTP method of the request. + + :param dict headers: + HTTP headers of the request. + + :param str body: + Body of ``POST``, ``PUT`` and ``PATCH`` requests. + + :param int max_redirects: + Maximum number of HTTP redirects to follow. + + :param function content_parser: + A function to be used to parse the :attr:`.Response.data` + from :attr:`.Response.content`. + + :returns: + :class:`.Response` + + """ + + # Deserialize credentials. + credentials = Credentials.deserialize(self.config, credentials) + + # Resolve provider class. + ProviderClass = credentials.provider_class + logging.info('ACCESS HEADERS: {0}'.format(headers)) + # Access resource and return response. + + provider = ProviderClass( + self, adapter=None, provider_name=credentials.provider_name) + provider.credentials = credentials + + return provider.access(url=url, + params=params, + method=method, + headers=headers, + body=body, + max_redirects=max_redirects, + content_parser=content_parser) + + def async_access(self, *args, **kwargs): + """ + Same as :meth:`.Authomatic.access` but runs asynchronously in a + separate thread. + + .. warning:: + + |async| + + :returns: + :class:`.Future` instance representing the separate thread. + + """ + + return Future(self.access, *args, **kwargs) + + def request_elements( + self, credentials=None, url=None, method='GET', params=None, + headers=None, body='', json_input=None, return_json=False + ): + """ + Creates request elements for accessing **protected resource of a + user**. Required arguments are :data:`credentials` and :data:`url`. You + can pass :data:`credentials`, :data:`url`, :data:`method`, and + :data:`params` as a JSON object. + + :param credentials: + The **user's** credentials (can be serialized). + + :param str url: + The url of the protected resource. + + :param str method: + The HTTP method of the request. + + :param dict params: + Dictionary of request parameters. + + :param dict headers: + Dictionary of request headers. + + :param str body: + Body of ``POST``, ``PUT`` and ``PATCH`` requests. + + :param str json_input: + you can pass :data:`credentials`, :data:`url`, :data:`method`, + :data:`params` and :data:`headers` in a JSON object. + Values from arguments will be used for missing properties. + + :: + + { + "credentials": "###", + "url": "https://example.com/api", + "method": "POST", + "params": { + "foo": "bar" + }, + "headers": { + "baz": "bing", + "Authorization": "Bearer ###" + }, + "body": "Foo bar baz bing." + } + + :param bool return_json: + if ``True`` the function returns a json object. + + :: + + { + "url": "https://example.com/api", + "method": "POST", + "params": { + "access_token": "###", + "foo": "bar" + }, + "headers": { + "baz": "bing", + "Authorization": "Bearer ###" + }, + "body": "Foo bar baz bing." + } + + :returns: + :class:`.RequestElements` or JSON string. 
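
For later calls on behalf of the user, stored credentials can be fed back into access(). The sketch below assumes `auth` is a configured Authomatic instance and `serialized` is a string previously produced by Credentials.serialize(); the URL is an example only:

    # Illustrative sketch of accessing a protected resource with stored
    # credentials; `auth` and `serialized` are assumptions.
    def fetch_profile(auth, serialized):
        credentials = auth.credentials(serialized)

        # Refresh if the provider supports it and expiry is near (one day).
        if credentials.expire_soon(60 * 60 * 24):
            credentials.refresh()

        response = auth.access(
            credentials,
            url='https://example.com/api/me',
            method='GET',
        )
        # `data` is parsed from the content (JSON, XML or query string).
        return response.status, response.data
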
+ + """ + + # Parse values from JSON + if json_input: + parsed_input = json.loads(json_input) + + credentials = parsed_input.get('credentials', credentials) + url = parsed_input.get('url', url) + method = parsed_input.get('method', method) + params = parsed_input.get('params', params) + headers = parsed_input.get('headers', headers) + body = parsed_input.get('body', body) + + if not credentials and url: + raise RequestElementsError( + 'To create request elements, you must provide credentials ' + 'and URL either as keyword arguments or in the JSON object!') + + # Get the provider class + credentials = Credentials.deserialize(self.config, credentials) + ProviderClass = credentials.provider_class + + # Create request elements + request_elements = ProviderClass.create_request_elements( + ProviderClass.PROTECTED_RESOURCE_REQUEST_TYPE, + credentials=credentials, + url=url, + method=method, + params=params, + headers=headers, + body=body) + + if return_json: + return request_elements.to_json() + + else: + return request_elements + + def backend(self, adapter): + """ + Converts a *request handler* to a JSON backend which you can use with + :ref:`authomatic.js `. + + Just call it inside a *request handler* like this: + + :: + + class JSONHandler(webapp2.RequestHandler): + def get(self): + authomatic.backend(Webapp2Adapter(self)) + + :param adapter: + The only argument is an :doc:`adapter `. + + The *request handler* will now accept these request parameters: + + :param str type: + Type of the request. Either ``auto``, ``fetch`` or ``elements``. + Default is ``auto``. + + :param str credentials: + Serialized :class:`.Credentials`. + + :param str url: + URL of the **protected resource** request. + + :param str method: + HTTP method of the **protected resource** request. + + :param str body: + HTTP body of the **protected resource** request. + + :param JSON params: + HTTP params of the **protected resource** request as a JSON object. + + :param JSON headers: + HTTP headers of the **protected resource** request as a + JSON object. + + :param JSON json: + You can pass all of the aforementioned params except ``type`` + in a JSON object. + + .. code-block:: javascript + + { + "credentials": "######", + "url": "https://example.com", + "method": "POST", + "params": {"foo": "bar"}, + "headers": {"baz": "bing"}, + "body": "the body of the request" + } + + Depending on the ``type`` param, the handler will either write + a JSON object with *request elements* to the response, + and add an ``Authomatic-Response-To: elements`` response header, ... + + .. code-block:: javascript + + { + "url": "https://example.com/api", + "method": "POST", + "params": { + "access_token": "###", + "foo": "bar" + }, + "headers": { + "baz": "bing", + "Authorization": "Bearer ###" + } + } + + ... or make a fetch to the **protected resource** and forward + it's response content, status and headers with an additional + ``Authomatic-Response-To: fetch`` header to the response. + + .. warning:: + + The backend will not work if you write anything to the + response in the handler! 
+ + """ + + AUTHOMATIC_HEADER = 'Authomatic-Response-To' + + # Collect request params + request_type = adapter.params.get('type', 'auto') + json_input = adapter.params.get('json') + credentials = adapter.params.get('credentials') + url = adapter.params.get('url') + method = adapter.params.get('method', 'GET') + body = adapter.params.get('body', '') + + params = adapter.params.get('params') + params = json.loads(params) if params else {} + + headers = adapter.params.get('headers') + headers = json.loads(headers) if headers else {} + + ProviderClass = Credentials.deserialize( + self.config, credentials).provider_class + + if request_type == 'auto': + # If there is a "callback" param, it's a JSONP request. + jsonp = params.get('callback') + + # JSONP is possible only with GET method. + if ProviderClass.supports_jsonp and method is 'GET': + request_type = 'elements' + else: + # Remove the JSONP callback + if jsonp: + params.pop('callback') + request_type = 'fetch' + + if request_type == 'fetch': + # Access protected resource + response = self.access( + credentials, url, params, method, headers, body) + result = response.content + + # Forward status + adapter.status = str(response.status) + ' ' + str(response.reason) + + # Forward headers + for k, v in response.getheaders(): + logging.info(' {0}: {1}'.format(k, v)) + adapter.set_header(k, v) + + elif request_type == 'elements': + # Create request elements + if json_input: + result = self.request_elements( + json_input=json_input, return_json=True) + else: + result = self.request_elements(credentials=credentials, + url=url, + method=method, + params=params, + headers=headers, + body=body, + return_json=True) + + adapter.set_header('Content-Type', 'application/json') + else: + result = '{"error": "Bad Request!"}' + + # Add the authomatic header + adapter.set_header(AUTHOMATIC_HEADER, request_type) + + # Write result to response + adapter.write(result) diff --git a/rhodecode/lib/_vendor/authomatic/exceptions.py b/rhodecode/lib/_vendor/authomatic/exceptions.py new file mode 100755 --- /dev/null +++ b/rhodecode/lib/_vendor/authomatic/exceptions.py @@ -0,0 +1,84 @@ +# -*- coding: utf-8 -*- +""" +Provides various exception types for the library. +""" + + +class BaseError(Exception): + """ + Base error for all errors. + """ + + def __init__(self, message, original_message='', url='', status=None): + super(BaseError, self).__init__(message) + + #: Error message. + self.message = message + + #: Original message. + self.original_message = original_message + + #: URL related with the error. + self.url = url + + #: HTTP status code related with the error. 
+ self.status = status + + def to_dict(self): + return self.__dict__ + + +class ConfigError(BaseError): + pass + + +class SessionError(BaseError): + pass + + +class CredentialsError(BaseError): + pass + + +class HTTPError(BaseError): + pass + + +class CSRFError(BaseError): + pass + + +class ImportStringError(BaseError): + pass + + +class AuthenticationError(BaseError): + pass + + +class OAuth1Error(BaseError): + pass + + +class OAuth2Error(BaseError): + pass + + +class OpenIDError(BaseError): + pass + + +class CancellationError(BaseError): + pass + + +class FailureError(BaseError): + pass + + +class FetchError(BaseError): + pass + + +class RequestElementsError(BaseError): + pass diff --git a/rhodecode/lib/_vendor/authomatic/extras/__init__.py b/rhodecode/lib/_vendor/authomatic/extras/__init__.py new file mode 100755 diff --git a/rhodecode/lib/_vendor/authomatic/extras/flask.py b/rhodecode/lib/_vendor/authomatic/extras/flask.py new file mode 100755 --- /dev/null +++ b/rhodecode/lib/_vendor/authomatic/extras/flask.py @@ -0,0 +1,47 @@ +# -*- coding: utf-8 -*- +""" +|flask| Extras +-------------- + +Utilities you can use when using this library with the |flask|_ framework. + +Thanks to `Mark Steve Samson `_. +""" + +from __future__ import absolute_import +from functools import wraps + +from authomatic.adapters import WerkzeugAdapter +from authomatic import Authomatic +from flask import make_response, request, session + + +class FlaskAuthomatic(Authomatic): + """ + Flask Plugin for authomatic support. + """ + + result = None + + def login(self, *login_args, **login_kwargs): + """ + Decorator for Flask view functions. + """ + + def decorator(f): + @wraps(f) + def decorated(*args, **kwargs): + self.response = make_response() + adapter = WerkzeugAdapter(request, self.response) + login_kwargs.setdefault('session', session) + login_kwargs.setdefault('session_saver', self.session_saver) + self.result = super(FlaskAuthomatic, self).login( + adapter, + *login_args, + **login_kwargs) + return f(*args, **kwargs) + return decorated + return decorator + + def session_saver(self): + session.modified = True diff --git a/rhodecode/lib/_vendor/authomatic/extras/gae/__init__.py b/rhodecode/lib/_vendor/authomatic/extras/gae/__init__.py new file mode 100755 --- /dev/null +++ b/rhodecode/lib/_vendor/authomatic/extras/gae/__init__.py @@ -0,0 +1,241 @@ +# -*- coding: utf-8 -*- +""" +|gae| Extras +------------ + +Utilities you can use when using this library on |gae|_. +""" + +from google.appengine.ext import ndb +from webapp2_extras import sessions + +from authomatic import exceptions +from authomatic.extras import interfaces +from authomatic.extras.gae.openid import NDBOpenIDStore + + +__all__ = ['ndb_config', 'Webapp2Session'] + + +class GAEError(exceptions.BaseError): + pass + + +class Webapp2Session(interfaces.BaseSession): + """ + A simple wrapper for |webapp2|_ sessions. If you provide a session it wraps + it and adds the :meth:`.save` method. + + If you don't provide a session it creates a new one but you must provide + the :data:`.secret`. + + For more about |webapp2| sessions see: + http://webapp-improved.appspot.com/api/webapp2_extras/sessions.html. + + """ + + def __init__(self, handler, session=None, secret=None, + cookie_name='webapp2authomatic', backend='memcache', + config=None): + """ + .. warning:: + + Do not use the ``'securecookie'`` backend with + :class:`.providers.OpenID` provider. 
The + `python-openid`_ library saves **non json serializable** objects + to session which the ``'securecookie'`` backend cannot cope with. + + :param handler: + A :class:`webapp2.RequestHandler` instance. + + :param session: + A :class:`webapp2_extras.session.SessionDict` instance. + + :param str secret: + The session secret. + + :param str cookie_name: + The name of the session cookie. + + :param backend: + The session backend. One of ``'memcache'`` or ``'datastore'``. + + :param config: + The session config. + + """ + + self.handler = handler + + if session is None: + if not secret: + raise GAEError('Either session or secret must be specified!') + else: + # Create new session. + cfg = config or dict( + secret_key=secret, cookie_name=cookie_name) + session_store = sessions.SessionStore(handler.request, cfg) + self.session_dict = session_store.get_session(backend=backend) + else: + # Use supplied session. + self.session_dict = session + + def save(self): + return self.session_dict.container.save_session(self.handler.response) + + def __setitem__(self, key, value): + return self.session_dict.__setitem__(key, value) + + def __getitem__(self, key): + return self.session_dict.__getitem__(key) + + def __delitem__(self, key): + return self.session_dict.__delitem__(key) + + def get(self, key): + return self.session_dict.get(key) + + +class NDBConfig(ndb.Model): + """ + |gae| `NDB `_ + based :doc:`config`. + + .. note:: + + By :class:`.OpenID` provider uses :class:`.NDBOpenIDStore` + as default :attr:`.OpenID.store`. + + """ + + # General properties + provider_name = ndb.StringProperty() + class_ = ndb.StringProperty() + + # AuthorizationProvider properties + provider_id = ndb.IntegerProperty() + consumer_key = ndb.StringProperty() + consumer_secret = ndb.StringProperty() + + # OAuth2 properties + scope = ndb.StringProperty() + offline = ndb.BooleanProperty() + + # AuthenticationProvider properties + identifier_param = ndb.StringProperty() + + @classmethod + def get(cls, key, default=None): + """ + Resembles the :meth:`dict.get` method. + + :returns: + A configuration dictionary for specified provider. + + """ + + # Query datastore. + result = cls.query(cls.provider_name == key).get() + + if result: + result_dict = result.to_dict() + + # Use NDBOpenIDStore by default + result_dict['store'] = NDBOpenIDStore + + # Convert coma-separated values to list. Currently only scope is + # csv. + for i in ('scope', ): + prop = result_dict.get(i) + if prop: + result_dict[i] = [s.strip() for s in prop.split(',')] + else: + result_dict[i] = None + + return result_dict + else: + return default + + @classmethod + def values(cls): + """ + Resembles the :meth:`dict.values` method. + """ + + # get all items + results = cls.query().fetch() + # return list of dictionaries + return [result.to_dict() for result in results] + + @classmethod + def initialize(cls): + """ + Creates an **"Example"** entity of kind **"NDBConfig"** in the + datastore if the model is empty and raises and error to inform you that + you should populate the model with data. + + .. note:: + + The *Datastore Viewer* in the ``_ah/admin/`` won't let you add + properties to a model if there is not an entity with that + property already. Therefore it is a good idea to keep the + **"Example"** entity (which has all possible properties set) in + the datastore. + + """ + + if not len(cls.query().fetch()): + + example = cls.get_or_insert('Example') + + example.class_ = 'Provider class e.g. ' + \ + '"authomatic.providers.oauth2.Facebook".' 
+ example.provider_name = 'Your custom provider name e.g. "fb".' + + # AuthorizationProvider + example.consumer_key = 'Consumer key.' + example.consumer_secret = 'Consumer secret' + example.provider_id = 1 + + # OAuth2 + example.scope = 'coma, separated, list, of, scopes' + + # AuthenticationProvider + example.identifier_param = 'Querystring parameter for claimed ' + \ + 'id. default is "id"' + + # Save the example + example.put() + + # Raise an information error. + raise GAEError( + 'A NDBConfig data model was created! Go to Datastore Viewer ' + 'in your dashboard and populate it with data!') + + +def ndb_config(): + """ + Allows you to have a **datastore** :doc:`config` instead of a hardcoded + one. + + This function creates an **"Example"** entity of kind **"NDBConfig"** in + the datastore if the model is empty and raises and error to inform you + that you should populate the model with data. + + .. note:: + + The *Datastore Viewer* of the |gae|_ admin won't let you add + properties to a model if there is not an entity with that property + already. Therefore it is a good idea to keep the **"Example"** + entity (which has all properties set) in the datastore. + + :raises: + :exc:`.GAEError` + + :returns: + :class:`.NDBConfig` + + """ + + NDBConfig.initialize() + return NDBConfig diff --git a/rhodecode/lib/_vendor/authomatic/extras/gae/openid.py b/rhodecode/lib/_vendor/authomatic/extras/gae/openid.py new file mode 100755 --- /dev/null +++ b/rhodecode/lib/_vendor/authomatic/extras/gae/openid.py @@ -0,0 +1,156 @@ +# -*- coding: utf-8 -*- + +# We need absolute import to import from openid library which has the same +# name as this module +from __future__ import absolute_import +import logging +import datetime + +from google.appengine.ext import ndb +import openid.store.interface + + +class NDBOpenIDStore(ndb.Expando, openid.store.interface.OpenIDStore): + """ + |gae| `NDB `_ + based implementation of the :class:`openid.store.interface.OpenIDStore` + interface of the `python-openid`_ library. 
+ """ + + serialized = ndb.StringProperty() + expiration_date = ndb.DateTimeProperty() + # we need issued to sort by most recently issued + issued = ndb.IntegerProperty() + + @staticmethod + def _log(*args, **kwargs): + pass + + @classmethod + def storeAssociation(cls, server_url, association): + # store an entity with key = server_url + + issued = datetime.datetime.fromtimestamp(association.issued) + lifetime = datetime.timedelta(0, association.lifetime) + + expiration_date = issued + lifetime + entity = cls.get_or_insert( + association.handle, parent=ndb.Key( + 'ServerUrl', server_url)) + + entity.serialized = association.serialize() + entity.expiration_date = expiration_date + entity.issued = association.issued + + cls._log( + logging.DEBUG, + u'NDBOpenIDStore: Putting OpenID association to datastore.') + + entity.put() + + @classmethod + def cleanupAssociations(cls): + # query for all expired + cls._log( + logging.DEBUG, + u'NDBOpenIDStore: Querying datastore for OpenID associations.') + query = cls.query(cls.expiration_date <= datetime.datetime.now()) + + # fetch keys only + expired = query.fetch(keys_only=True) + + # delete all expired + cls._log( + logging.DEBUG, + u'NDBOpenIDStore: Deleting expired OpenID associations from datastore.') + ndb.delete_multi(expired) + + return len(expired) + + @classmethod + def getAssociation(cls, server_url, handle=None): + cls.cleanupAssociations() + + if handle: + key = ndb.Key('ServerUrl', server_url, cls, handle) + cls._log( + logging.DEBUG, + u'NDBOpenIDStore: Getting OpenID association from datastore by key.') + entity = key.get() + else: + # return most recently issued association + cls._log( + logging.DEBUG, + u'NDBOpenIDStore: Querying datastore for OpenID associations by ancestor.') + entity = cls.query(ancestor=ndb.Key( + 'ServerUrl', server_url)).order(-cls.issued).get() + + if entity and entity.serialized: + return openid.association.Association.deserialize( + entity.serialized) + + @classmethod + def removeAssociation(cls, server_url, handle): + key = ndb.Key('ServerUrl', server_url, cls, handle) + cls._log( + logging.DEBUG, + u'NDBOpenIDStore: Getting OpenID association from datastore by key.') + if key.get(): + cls._log( + logging.DEBUG, + u'NDBOpenIDStore: Deleting OpenID association from datastore.') + key.delete() + return True + + @classmethod + def useNonce(cls, server_url, timestamp, salt): + + # check whether there is already an entity with the same ancestor path + # in the datastore + key = ndb.Key( + 'ServerUrl', + str(server_url) or 'x', + 'TimeStamp', + str(timestamp), + cls, + str(salt)) + + cls._log( + logging.DEBUG, + u'NDBOpenIDStore: Getting OpenID nonce from datastore by key.') + result = key.get() + + if result: + # if so, the nonce is not valid so return False + cls._log( + logging.WARNING, + u'NDBOpenIDStore: Nonce was already used!') + return False + else: + # if not, store the key to datastore and return True + nonce = cls(key=key) + nonce.expiration_date = datetime.datetime.fromtimestamp( + timestamp) + datetime.timedelta(0, openid.store.nonce.SKEW) + cls._log( + logging.DEBUG, + u'NDBOpenIDStore: Putting new nonce to datastore.') + nonce.put() + return True + + @classmethod + def cleanupNonces(cls): + # get all expired nonces + cls._log( + logging.DEBUG, + u'NDBOpenIDStore: Querying datastore for OpenID nonces ordered by expiration date.') + expired = cls.query().filter( + cls.expiration_date <= datetime.datetime.now()).fetch( + keys_only=True) + + # delete all expired + cls._log( + logging.DEBUG, + 
u'NDBOpenIDStore: Deleting expired OpenID nonces from datastore.') + ndb.delete_multi(expired) + + return len(expired) diff --git a/rhodecode/lib/_vendor/authomatic/extras/interfaces.py b/rhodecode/lib/_vendor/authomatic/extras/interfaces.py new file mode 100755 --- /dev/null +++ b/rhodecode/lib/_vendor/authomatic/extras/interfaces.py @@ -0,0 +1,73 @@ +# -*- coding: utf-8 -*- +""" +Interfaces +^^^^^^^^^^ + +If you want to implement framework specific extras, use these abstract +classes as bases: + +""" + +import abc + + +class BaseSession(object): + """ + Abstract class for custom session implementations. + """ + + __metaclass__ = abc.ABCMeta + + @abc.abstractmethod + def save(self): + """ + Called only once per request. + + Should implement a mechanism for setting the the session + **cookie** and saving the session **data** to storage. + + """ + + @abc.abstractmethod + def __setitem__(self, key, value): + """ + Same as :meth:`dict.__setitem__`. + """ + + @abc.abstractmethod + def __getitem__(self, key): + """ + Same as :meth:`dict.__getitem__`. + """ + + @abc.abstractmethod + def __delitem__(self, key): + """ + Same as :meth:`dict.__delitem__`. + """ + + @abc.abstractmethod + def get(self, key): + """ + Same as :meth:`dict.get`. + """ + + +class BaseConfig(object): + """ + Abstract class for :doc:`config` implementations. + """ + + __metaclass__ = abc.ABCMeta + + @abc.abstractmethod + def get(self, key): + """ + Same as :attr:`dict.get`. + """ + + @abc.abstractmethod + def values(self): + """ + Same as :meth:`dict.values`. + """ diff --git a/rhodecode/lib/_vendor/authomatic/providers/__init__.py b/rhodecode/lib/_vendor/authomatic/providers/__init__.py new file mode 100755 --- /dev/null +++ b/rhodecode/lib/_vendor/authomatic/providers/__init__.py @@ -0,0 +1,1012 @@ +# -*- coding: utf-8 -*- +""" +Abstract Classes for Providers +------------------------------ + +Abstract base classes for implementation of protocol specific providers. + +.. note:: + + Attributes prefixed with ``_x_`` serve the purpose of unification + of differences across providers. + +.. autosummary:: + + login_decorator + BaseProvider + AuthorizationProvider + AuthenticationProvider + +""" + +import abc +import base64 +import hashlib +import logging +import random +import sys +import traceback +import uuid + +import authomatic.core +from authomatic.exceptions import ( + ConfigError, + FetchError, + CredentialsError, +) +from authomatic import six +from authomatic.six.moves import urllib_parse as parse +from authomatic.six.moves import http_client +from authomatic.exceptions import CancellationError + +__all__ = [ + 'BaseProvider', + 'AuthorizationProvider', + 'AuthenticationProvider', + 'login_decorator'] + + +def _error_traceback_html(exc_info, traceback_): + """ + Generates error traceback HTML. + + :param tuple exc_info: + Output of :func:`sys.exc_info` function. + + :param traceback: + Output of :func:`traceback.format_exc` function. + + """ + + html = """ + + + ERROR: {error} + + +

+            <h4>The Authomatic library encountered an error!</h4>
+            <h1>{error}</h1>
+            <pre>{traceback}</pre>
+ + + """ + + return html.format(error=exc_info[1], traceback=traceback_) + + +def login_decorator(func): + """ + Decorate the :meth:`.BaseProvider.login` implementations with this + decorator. + + Provides mechanism for error reporting and returning result which + makes the :meth:`.BaseProvider.login` implementation cleaner. + + """ + + def wrap(provider, *args, **kwargs): + error = None + result = authomatic.core.LoginResult(provider) + + try: + func(provider, *args, **kwargs) + except Exception as e: # pylint:disable=broad-except + if provider.settings.report_errors: + error = e + if not isinstance(error, CancellationError): + provider._log( + logging.ERROR, + u'Reported suppressed exception: {0}!'.format( + repr(error)), + exc_info=1) + else: + if provider.settings.debug: + # TODO: Check whether it actually works without middleware + provider.write( + _error_traceback_html( + sys.exc_info(), + traceback.format_exc())) + raise + + # If there is user or error the login procedure has finished + if provider.user or error: + result = authomatic.core.LoginResult(provider) + # Add error to result + result.error = error + + # delete session cookie + if isinstance(provider.session, authomatic.core.Session): + provider.session.delete() + + provider._log(logging.INFO, u'Procedure finished.') + + if provider.callback: + provider.callback(result) + return result + else: + # Save session + provider.save_session() + + return wrap + + +class BaseProvider(object): + """ + Abstract base class for all providers. + """ + + PROVIDER_TYPE_ID = 0 + + _repr_ignore = ('user',) + + __metaclass__ = abc.ABCMeta + + supported_user_attributes = authomatic.core.SupportedUserAttributes() + + def __init__(self, settings, adapter, provider_name, session=None, + session_saver=None, callback=None, js_callback=None, + prefix='authomatic', **kwargs): + + self.settings = settings + self.adapter = adapter + + self.session = session + self.save_session = session_saver + + #: :class:`str` The provider name as specified in the :doc:`config`. + self.name = provider_name + + #: :class:`callable` An optional callback called when the login + #: procedure is finished with :class:`.core.LoginResult` passed as + #: argument. + self.callback = callback + + #: :class:`str` Name of an optional javascript callback. + self.js_callback = js_callback + + #: :class:`.core.User`. + self.user = None + + #: :class:`bool` If ``True``, the + #: :attr:`.BaseProvider.user_authorization_url` will be displayed + #: in a *popup mode*, if the **provider** supports it. + self.popup = self._kwarg(kwargs, 'popup') + + @property + def url(self): + return self.adapter.url + + @property + def params(self): + return self.adapter.params + + def write(self, value): + self.adapter.write(value) + + def set_header(self, key, value): + self.adapter.set_header(key, value) + + def set_status(self, status): + self.adapter.set_status(status) + + def redirect(self, url): + self.set_status('302 Found') + self.set_header('Location', url) + + # ======================================================================== + # Abstract methods + # ======================================================================== + + @abc.abstractmethod + def login(self): + """ + Launches the *login procedure* to get **user's credentials** from + **provider**. + + Should be decorated with :func:`.login_decorator`. The *login + procedure* is considered finished when the :attr:`.user` + attribute is not empty when the method runs out of it's flow or + when there are errors. 
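+
+        A minimal sketch of a subclass implementation (the ``'token'``
+        request parameter and the redirect URL are illustrative
+        assumptions):
+
+        .. code-block:: python
+
+            @login_decorator
+            def login(self):
+                if self.params.get('token'):
+                    # Phase 2 after redirect: create the user.
+                    self.user = authomatic.core.User(self)
+                else:
+                    # Phase 1: send the user to the provider.
+                    self.redirect('https://provider.example.com/authorize')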
+ + """ + + # ======================================================================== + # Exposed methods + # ======================================================================== + + def to_dict(self): + """ + Converts the provider instance to a :class:`dict`. + + :returns: + :class:`dict` + + """ + + return dict(name=self.name, + id=getattr(self, 'id', None), + type_id=self.type_id, + type=self.get_type(), + scope=getattr(self, 'scope', None), + user=self.user.id if self.user else None) + + @classmethod + def get_type(cls): + """ + Returns the provider type. + + :returns: + :class:`str` The full dotted path to base class e.g. + :literal:`"authomatic.providers.oauth2.OAuth2"`. + + """ + + return cls.__module__ + '.' + cls.__bases__[0].__name__ + + def update_user(self): + """ + Updates and returns :attr:`.user`. + + :returns: + :class:`.User` + + """ + + # ======================================================================== + # Internal methods + # ======================================================================== + + @property + def type_id(self): + pass + + def _kwarg(self, kwargs, kwname, default=None): + """ + Resolves keyword arguments from constructor or :doc:`config`. + + .. note:: + + The keyword arguments take this order of precedence: + + 1. Arguments passed to constructor through the + :func:`authomatic.login`. + 2. Provider specific arguments from :doc:`config`. + 3. Arguments from :doc:`config` set in the ``__defaults__`` key. + 2. The value from :data:`default` argument. + + :param dict kwargs: + Keyword arguments dictionary. + :param str kwname: + Name of the desired keyword argument. + + """ + + return kwargs.get(kwname) or \ + self.settings.config.get(self.name, {}).get(kwname) or \ + self.settings.config.get('__defaults__', {}).get(kwname) or \ + default + + def _session_key(self, key): + """ + Generates session key string. + + :param str key: + e.g. ``"authomatic:facebook:key"`` + + """ + + return '{0}:{1}:{2}'.format(self.settings.prefix, self.name, key) + + def _session_set(self, key, value): + """ + Saves a value to session. + """ + + self.session[self._session_key(key)] = value + + def _session_get(self, key): + """ + Retrieves a value from session. + """ + + return self.session.get(self._session_key(key)) + + @staticmethod + def csrf_generator(secret): + """ + Generates CSRF token. + + Inspired by this article: + http://blog.ptsecurity.com/2012/10/random-number-security-in-python.html + + :returns: + :class:`str` Random unguessable string. + + """ + + # Create hash from random string plus salt. + hashed = hashlib.md5(uuid.uuid4().bytes + six.b(secret)).hexdigest() + + # Each time return random portion of the hash. + span = 5 + shift = random.randint(0, span) + return hashed[shift:shift - span - 1] + + @classmethod + def _log(cls, level, msg, **kwargs): + """ + Logs a message with pre-formatted prefix. + + :param int level: + Logging level as specified in the + `login module `_ of + Python standard library. + + :param str msg: + The actual message. + + """ + + logger = getattr(cls, '_logger', None) or authomatic.core._logger + logger.log( + level, ': '.join( + ('authomatic', cls.__name__, msg)), **kwargs) + + def _fetch(self, url, method='GET', params=None, headers=None, + body='', max_redirects=5, content_parser=None): + """ + Fetches a URL. + + :param str url: + The URL to fetch. + + :param str method: + HTTP method of the request. + + :param dict params: + Dictionary of request parameters. + + :param dict headers: + HTTP headers of the request. 
+ + :param str body: + Body of ``POST``, ``PUT`` and ``PATCH`` requests. + + :param int max_redirects: + Number of maximum HTTP redirects to follow. + + :param function content_parser: + A callable to be used to parse the :attr:`.Response.data` + from :attr:`.Response.content`. + + """ + # 'magic' using _kwarg method + # pylint:disable=no-member + params = params or {} + params.update(self.access_params) + + headers = headers or {} + headers.update(self.access_headers) + + scheme, host, path, query, fragment = parse.urlsplit(url) + query = parse.urlencode(params) + + if method in ('POST', 'PUT', 'PATCH'): + if not body: + # Put querystring to body + body = query + query = '' + headers.update( + {'Content-Type': 'application/x-www-form-urlencoded'}) + request_path = parse.urlunsplit(('', '', path or '', query or '', '')) + + self._log(logging.DEBUG, u' \u251C\u2500 host: {0}'.format(host)) + self._log( + logging.DEBUG, + u' \u251C\u2500 path: {0}'.format(request_path)) + self._log(logging.DEBUG, u' \u251C\u2500 method: {0}'.format(method)) + self._log(logging.DEBUG, u' \u251C\u2500 body: {0}'.format(body)) + self._log(logging.DEBUG, u' \u251C\u2500 params: {0}'.format(params)) + self._log(logging.DEBUG, u' \u2514\u2500 headers: {0}'.format(headers)) + + # Connect + if scheme.lower() == 'https': + connection = http_client.HTTPSConnection(host) + else: + connection = http_client.HTTPConnection(host) + + try: + connection.request(method, request_path, body, headers) + except Exception as e: + raise FetchError('Fetching URL failed', + original_message=str(e), + url=request_path) + + response = connection.getresponse() + location = response.getheader('Location') + + if response.status in (300, 301, 302, 303, 307) and location: + if location == url: + raise FetchError('Url redirects to itself!', + url=location, + status=response.status) + + elif max_redirects > 0: + remaining_redirects = max_redirects - 1 + + self._log(logging.DEBUG, u'Redirecting to {0}'.format(url)) + self._log(logging.DEBUG, u'Remaining redirects: {0}' + .format(remaining_redirects)) + + # Call this method again. + response = self._fetch(url=location, + params=params, + method=method, + headers=headers, + max_redirects=remaining_redirects) + + else: + raise FetchError('Max redirects reached!', + url=location, + status=response.status) + else: + self._log(logging.DEBUG, u'Got response:') + self._log(logging.DEBUG, u' \u251C\u2500 url: {0}'.format(url)) + self._log( + logging.DEBUG, + u' \u251C\u2500 status: {0}'.format( + response.status)) + self._log( + logging.DEBUG, + u' \u2514\u2500 headers: {0}'.format( + response.getheaders())) + + return authomatic.core.Response(response, content_parser) + + def _update_or_create_user(self, data, credentials=None, content=None): + """ + Updates or creates :attr:`.user`. + + :returns: + :class:`.User` + + """ + + if not self.user: + self.user = authomatic.core.User(self, credentials=credentials) + + self.user.content = content + self.user.data = data + + # Update. + for key in self.user.__dict__: + # Exclude data. + if key not in ('data', 'content'): + # Extract every data item whose key matches the user + # property name, but only if it has a value. + value = data.get(key) + if value: + setattr(self.user, key, value) + + # Handle different structure of data by different providers. 
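+            # Subclasses override the _x_user_parser() hook to map provider
+            # specific response keys onto the common User attributes.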
+ self.user = self._x_user_parser(self.user, data) + + if self.user.id: + self.user.id = str(self.user.id) + + # TODO: Move to User + # If there is no user.name, + if not self.user.name: + if self.user.first_name and self.user.last_name: + # Create it from first name and last name if available. + self.user.name = ' '.join((self.user.first_name, + self.user.last_name)) + else: + # Or use one of these. + self.user.name = (self.user.username or self.user.nickname or + self.user.first_name or self.user.last_name) + + if not self.user.location: + if self.user.city and self.user.country: + self.user.location = '{0}, {1}'.format(self.user.city, + self.user.country) + else: + self.user.location = self.user.city or self.user.country + + return self.user + + @staticmethod + def _x_user_parser(user, data): + """ + Handles different structure of user info data by different providers. + + :param user: + :class:`.User` + :param dict data: + User info data returned by provider. + + """ + + return user + + @staticmethod + def _http_status_in_category(status, category): + """ + Checks whether a HTTP status code is in the category denoted by the + hundreds digit. + """ + + assert category < 10, 'HTTP status category must be a one-digit int!' + cat = category * 100 + return status >= cat and status < cat + 100 + + +class AuthorizationProvider(BaseProvider): + """ + Base provider for *authorization protocols* i.e. protocols which allow a + **provider** to authorize a **consumer** to access **protected resources** + of a **user**. + + e.g. `OAuth 2.0 `_ or `OAuth 1.0a + `_. + + """ + + USER_AUTHORIZATION_REQUEST_TYPE = 2 + ACCESS_TOKEN_REQUEST_TYPE = 3 + PROTECTED_RESOURCE_REQUEST_TYPE = 4 + REFRESH_TOKEN_REQUEST_TYPE = 5 + + BEARER = 'Bearer' + + _x_term_dict = {} + + #: If ``True`` the provider doesn't support Cross-site HTTP requests. + same_origin = True + + #: :class:`bool` Whether the provider supports JSONP requests. + supports_jsonp = False + + # Whether to use the HTTP Authorization header. + _x_use_authorization_header = True + + def __init__(self, *args, **kwargs): + """ + Accepts additional keyword arguments: + + :arg str consumer_key: + The *key* assigned to our application (**consumer**) by the + **provider**. + + :arg str consumer_secret: + The *secret* assigned to our application (**consumer**) by the + **provider**. + + :arg int id: + A unique numeric ID used to serialize :class:`.Credentials`. + + :arg dict user_authorization_params: + A dictionary of additional request parameters for + **user authorization request**. + + :arg dict access_token_params: + A dictionary of additional request parameters for + **access_with_credentials token request**. + + :arg dict access_headers: + A dictionary of default HTTP headers that will be used when + accessing **user's** protected resources. + Applied by :meth:`.access()`, :meth:`.update_user()` and + :meth:`.User.update()` + + :arg dict access_params: + A dictionary of default query string parameters that will be used + when accessing **user's** protected resources. 
+ Applied by :meth:`.access()`, :meth:`.update_user()` and + :meth:`.User.update()` + + """ + + super(AuthorizationProvider, self).__init__(*args, **kwargs) + + self.consumer_key = self._kwarg(kwargs, 'consumer_key') + self.consumer_secret = self._kwarg(kwargs, 'consumer_secret') + + self.user_authorization_params = self._kwarg( + kwargs, 'user_authorization_params', {}) + + self.access_token_headers = self._kwarg( + kwargs, 'user_authorization_headers', {}) + self.access_token_params = self._kwarg( + kwargs, 'access_token_params', {}) + + self.id = self._kwarg(kwargs, 'id') + + self.access_headers = self._kwarg(kwargs, 'access_headers', {}) + self.access_params = self._kwarg(kwargs, 'access_params', {}) + + #: :class:`.Credentials` to access **user's protected resources**. + self.credentials = authomatic.core.Credentials( + self.settings.config, provider=self) + + #: Response of the *access token request*. + self.access_token_response = None + + # ======================================================================== + # Abstract properties + # ======================================================================== + + @abc.abstractproperty + def user_authorization_url(self): + """ + :class:`str` URL to which we redirect the **user** to grant our app + i.e. the **consumer** an **authorization** to access his + **protected resources**. See + http://tools.ietf.org/html/rfc6749#section-4.1.1 and + http://oauth.net/core/1.0a/#auth_step2. + """ + + @abc.abstractproperty + def access_token_url(self): + """ + :class:`str` URL where we can get the *access token* to access + **protected resources** of a **user**. See + http://tools.ietf.org/html/rfc6749#section-4.1.3 and + http://oauth.net/core/1.0a/#auth_step3. + """ + + @abc.abstractproperty + def user_info_url(self): + """ + :class:`str` URL where we can get the **user** info. + see http://tools.ietf.org/html/rfc6749#section-7 and + http://oauth.net/core/1.0a/#anchor12. + """ + + # ======================================================================== + # Abstract methods + # ======================================================================== + + @abc.abstractmethod + def to_tuple(self, credentials): + """ + Must convert :data:`credentials` to a :class:`tuple` to be used by + :meth:`.Credentials.serialize`. + + .. warning:: + + |classmethod| + + :param credentials: + :class:`.Credentials` + + :returns: + :class:`tuple` + + """ + + @abc.abstractmethod + def reconstruct(self, deserialized_tuple, credentials, cfg): + """ + Must convert the :data:`deserialized_tuple` back to + :class:`.Credentials`. + + .. warning:: + + |classmethod| + + :param tuple deserialized_tuple: + A tuple whose first index is the :attr:`.id` and the rest + are all the items of the :class:`tuple` created by + :meth:`.to_tuple`. + + :param credentials: + A :class:`.Credentials` instance. + + :param dict cfg: + Provider configuration from :doc:`config`. + + """ + + @abc.abstractmethod + def create_request_elements(self, request_type, credentials, + url, method='GET', params=None, headers=None, + body=''): + """ + Must return :class:`.RequestElements`. + + .. warning:: + + |classmethod| + + :param int request_type: + Type of the request specified by one of the class's constants. + + :param credentials: + :class:`.Credentials` of the **user** whose + **protected resource** we want to access. + + :param str url: + URL of the request. + + :param str method: + HTTP method of the request. + + :param dict params: + Dictionary of request parameters. 
+ + :param dict headers: + Dictionary of request headers. + + :param str body: + Body of ``POST``, ``PUT`` and ``PATCH`` requests. + + :returns: + :class:`.RequestElements` + + """ + + # ======================================================================== + # Exposed methods + # ======================================================================== + + @property + def type_id(self): + """ + A short string representing the provider implementation id used for + serialization of :class:`.Credentials` and to identify the type of + provider in JavaScript. + + The part before hyphen denotes the type of the provider, the part + after hyphen denotes the class id e.g. + ``oauth2.Facebook.type_id = '2-5'``, + ``oauth1.Twitter.type_id = '1-5'``. + + """ + + cls = self.__class__ + mod = sys.modules.get(cls.__module__) + + return str(self.PROVIDER_TYPE_ID) + '-' + \ + str(mod.PROVIDER_ID_MAP.index(cls)) + + def access(self, url, params=None, method='GET', headers=None, + body='', max_redirects=5, content_parser=None): + """ + Fetches the **protected resource** of an authenticated **user**. + + :param credentials: + The **user's** :class:`.Credentials` (serialized or normal). + + :param str url: + The URL of the **protected resource**. + + :param str method: + HTTP method of the request. + + :param dict headers: + HTTP headers of the request. + + :param str body: + Body of ``POST``, ``PUT`` and ``PATCH`` requests. + + :param int max_redirects: + Maximum number of HTTP redirects to follow. + + :param function content_parser: + A function to be used to parse the :attr:`.Response.data` + from :attr:`.Response.content`. + + :returns: + :class:`.Response` + + """ + + if not self.user and not self.credentials: + raise CredentialsError(u'There is no authenticated user!') + + headers = headers or {} + + self._log( + logging.INFO, + u'Accessing protected resource {0}.'.format(url)) + + request_elements = self.create_request_elements( + request_type=self.PROTECTED_RESOURCE_REQUEST_TYPE, + credentials=self.credentials, + url=url, + body=body, + params=params, + headers=headers, + method=method + ) + + response = self._fetch(*request_elements, + max_redirects=max_redirects, + content_parser=content_parser) + + self._log( + logging.INFO, + u'Got response. HTTP status = {0}.'.format( + response.status)) + return response + + def async_access(self, *args, **kwargs): + """ + Same as :meth:`.access` but runs asynchronously in a separate thread. + + .. warning:: + + |async| + + :returns: + :class:`.Future` instance representing the separate thread. + + """ + + return authomatic.core.Future(self.access, *args, **kwargs) + + def update_user(self): + """ + Updates the :attr:`.BaseProvider.user`. + + .. warning:: + Fetches the :attr:`.user_info_url`! + + :returns: + :class:`.UserInfoResponse` + + """ + if self.user_info_url: + response = self._access_user_info() + self.user = self._update_or_create_user(response.data, + content=response.content) + return authomatic.core.UserInfoResponse(self.user, + response.httplib_response) + + # ======================================================================== + # Internal methods + # ======================================================================== + + @classmethod + def _authorization_header(cls, credentials): + """ + Creates authorization headers if the provider supports it. See: + http://en.wikipedia.org/wiki/Basic_access_authentication. + + :param credentials: + :class:`.Credentials` + + :returns: + Headers as :class:`dict`. 
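+
+        For example, a consumer key ``'key'`` with secret ``'secret'``
+        (illustrative values) produces:
+
+        .. code-block:: python
+
+            {'Authorization': 'Basic a2V5OnNlY3JldA=='}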
+ + """ + + if cls._x_use_authorization_header: + res = ':'.join( + (credentials.consumer_key, + credentials.consumer_secret)) + res = base64.b64encode(six.b(res)).decode() + return {'Authorization': 'Basic {0}'.format(res)} + else: + return {} + + def _check_consumer(self): + """ + Validates the :attr:`.consumer`. + """ + + # 'magic' using _kwarg method + # pylint:disable=no-member + if not self.consumer.key: + raise ConfigError( + 'Consumer key not specified for provider {0}!'.format( + self.name)) + + if not self.consumer.secret: + raise ConfigError( + 'Consumer secret not specified for provider {0}!'.format( + self.name)) + + @staticmethod + def _split_url(url): + """ + Splits given url to url base and params converted to list of tuples. + """ + + split = parse.urlsplit(url) + base = parse.urlunsplit((split.scheme, split.netloc, split.path, 0, 0)) + params = parse.parse_qsl(split.query, True) + + return base, params + + @classmethod + def _x_request_elements_filter( + cls, request_type, request_elements, credentials): + """ + Override this to handle special request requirements of zealous + providers. + + .. warning:: + + |classmethod| + + :param int request_type: + Type of request. + + :param request_elements: + :class:`.RequestElements` + + :param credentials: + :class:`.Credentials` + + :returns: + :class:`.RequestElements` + + """ + + return request_elements + + @staticmethod + def _x_credentials_parser(credentials, data): + """ + Override this to handle differences in naming conventions across + providers. + + :param credentials: + :class:`.Credentials` + + :param dict data: + Response data dictionary. + + :returns: + :class:`.Credentials` + + """ + return credentials + + def _access_user_info(self): + """ + Accesses the :attr:`.user_info_url`. + + :returns: + :class:`.UserInfoResponse` + + """ + url = self.user_info_url.format(**self.user.__dict__) + return self.access(url) + + +class AuthenticationProvider(BaseProvider): + """ + Base provider for *authentication protocols* i.e. protocols which allow a + **provider** to authenticate a *claimed identity* of a **user**. + + e.g. `OpenID `_. + + """ + + #: Indicates whether the **provider** supports access_with_credentials to + #: **user's** protected resources. + # TODO: Useless + has_protected_resources = False + + def __init__(self, *args, **kwargs): + super(AuthenticationProvider, self).__init__(*args, **kwargs) + + # Lookup default identifier, if available in provider + default_identifier = getattr(self, 'identifier', None) + + # Allow for custom name for the "id" querystring parameter. + self.identifier_param = kwargs.get('identifier_param', 'id') + + # Get the identifier from request params, or use default as fallback. + self.identifier = self.params.get( + self.identifier_param, default_identifier) + + +PROVIDER_ID_MAP = [ + AuthenticationProvider, + AuthorizationProvider, + BaseProvider, +] diff --git a/rhodecode/lib/_vendor/authomatic/providers/gaeopenid.py b/rhodecode/lib/_vendor/authomatic/providers/gaeopenid.py new file mode 100755 --- /dev/null +++ b/rhodecode/lib/_vendor/authomatic/providers/gaeopenid.py @@ -0,0 +1,112 @@ +# -*- coding: utf-8 -*- +""" +Google App Engine OpenID Providers +---------------------------------- + +|openid|_ provider implementations based on the |gae_users_api|_. + +.. note:: + + When using the :class:`GAEOpenID` provider, the :class:`.User` object + will always have only the + :attr:`.User.user_id`, + :attr:`.User.email`, + :attr:`.User.gae_user` + attributes populated with data. 
+ Moreover the :attr:`.User.user_id` will always be empty on the + `GAE Development Server + `_. + +.. autosummary:: + + GAEOpenID + Yahoo + Google + +""" + +import logging + +from google.appengine.api import users + +import authomatic.core as core +from authomatic import providers +from authomatic.exceptions import FailureError + + +__all__ = ['GAEOpenID', 'Yahoo', 'Google'] + + +class GAEOpenID(providers.AuthenticationProvider): + """ + |openid|_ provider based on the |gae_users_api|_. + + Accepts additional keyword arguments inherited from + :class:`.AuthenticationProvider`. + + """ + + @providers.login_decorator + def login(self): + """ + Launches the OpenID authentication procedure. + """ + + if self.params.get(self.identifier_param): + # ================================================================= + # Phase 1 before redirect. + # ================================================================= + self._log( + logging.INFO, + u'Starting OpenID authentication procedure.') + + url = users.create_login_url( + dest_url=self.url, federated_identity=self.identifier) + + self._log(logging.INFO, u'Redirecting user to {0}.'.format(url)) + + self.redirect(url) + else: + # ================================================================= + # Phase 2 after redirect. + # ================================================================= + + self._log( + logging.INFO, + u'Continuing OpenID authentication procedure after redirect.') + + user = users.get_current_user() + + if user: + self._log(logging.INFO, u'Authentication successful.') + self._log(logging.INFO, u'Creating user.') + self.user = core.User(self, + id=user.federated_identity(), + email=user.email(), + gae_user=user) + + # ============================================================= + # We're done + # ============================================================= + else: + raise FailureError( + 'Unable to authenticate identifier "{0}"!'.format( + self.identifier)) + + +class Yahoo(GAEOpenID): + """ + :class:`.GAEOpenID` provider with the :attr:`.identifier` set to + ``"me.yahoo.com"``. + """ + + identifier = 'me.yahoo.com' + + +class Google(GAEOpenID): + """ + :class:`.GAEOpenID` provider with the :attr:`.identifier` set to + ``"https://www.google.com/accounts/o8/id"``. + """ + + identifier = 'https://www.google.com/accounts/o8/id' diff --git a/rhodecode/lib/_vendor/authomatic/providers/oauth1.py b/rhodecode/lib/_vendor/authomatic/providers/oauth1.py new file mode 100755 --- /dev/null +++ b/rhodecode/lib/_vendor/authomatic/providers/oauth1.py @@ -0,0 +1,1377 @@ +# -*- coding: utf-8 -*- +""" +|oauth1| Providers +-------------------- + +Providers which implement the |oauth1|_ protocol. + +.. 
autosummary:: + + OAuth1 + Bitbucket + Flickr + Meetup + Plurk + Twitter + Tumblr + UbuntuOne + Vimeo + Xero + Xing + Yahoo + +""" + +import abc +import binascii +import datetime +import hashlib +import hmac +import logging +import time +import uuid + +import authomatic.core as core +from authomatic import providers +from authomatic.exceptions import ( + CancellationError, + FailureError, + OAuth1Error, +) +from authomatic import six +from authomatic.six.moves import urllib_parse as parse + + +__all__ = [ + 'OAuth1', + 'Bitbucket', + 'Flickr', + 'Meetup', + 'Plurk', + 'Twitter', + 'Tumblr', + 'UbuntuOne', + 'Vimeo', + 'Xero', + 'Xing', + 'Yahoo' +] + + +def _normalize_params(params): + """ + Returns a normalized query string sorted first by key, then by value + excluding the ``realm`` and ``oauth_signature`` parameters as specified + here: http://oauth.net/core/1.0a/#rfc.section.9.1.1. + + :param params: + :class:`dict` or :class:`list` of tuples. + + """ + + if isinstance(params, dict): + params = list(params.items()) + + # remove "realm" and "oauth_signature" + params = sorted([ + (k, v) for k, v in params + if k not in ('oauth_signature', 'realm') + ]) + # sort + # convert to query string + qs = parse.urlencode(params) + # replace "+" to "%20" + qs = qs.replace('+', '%20') + # replace "%7E" to "%20" + qs = qs.replace('%7E', '~') + + return qs + + +def _join_by_ampersand(*args): + return '&'.join([core.escape(i) for i in args]) + + +def _create_base_string(method, base, params): + """ + Returns base string for HMAC-SHA1 signature as specified in: + http://oauth.net/core/1.0a/#rfc.section.9.1.3. + """ + + normalized_qs = _normalize_params(params) + return _join_by_ampersand(method, base, normalized_qs) + + +class BaseSignatureGenerator(object): + """ + Abstract base class for all signature generators. + """ + + __metaclass__ = abc.ABCMeta + + #: :class:`str` The name of the signature method. + method = '' + + @abc.abstractmethod + def create_signature(self, method, base, params, + consumer_secret, token_secret=''): + """ + Must create signature based on the parameters as specified in + http://oauth.net/core/1.0a/#signing_process. + + .. warning:: + + |classmethod| + + :param str method: + HTTP method of the request to be signed. + + :param str base: + Base URL of the request without query string an fragment. + + :param dict params: + Dictionary or list of tuples of the request parameters. + + :param str consumer_secret: + :attr:`.core.Consumer.secret` + + :param str token_secret: + Access token secret as specified in + http://oauth.net/core/1.0a/#anchor3. + + :returns: + The signature string. + + """ + + +class HMACSHA1SignatureGenerator(BaseSignatureGenerator): + """ + HMAC-SHA1 signature generator. + + See: http://oauth.net/core/1.0a/#anchor15 + + """ + + method = 'HMAC-SHA1' + + @classmethod + def _create_key(cls, consumer_secret, token_secret=''): + """ + Returns a key for HMAC-SHA1 signature as specified at: + http://oauth.net/core/1.0a/#rfc.section.9.2. + + :param str consumer_secret: + :attr:`.core.Consumer.secret` + + :param str token_secret: + Access token secret as specified in + http://oauth.net/core/1.0a/#anchor3. + + :returns: + Key to sign the request with. + + """ + + return _join_by_ampersand(consumer_secret, token_secret or '') + + @classmethod + def create_signature(cls, method, base, params, + consumer_secret, token_secret=''): + """ + Returns HMAC-SHA1 signature as specified at: + http://oauth.net/core/1.0a/#rfc.section.9.2. 
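+
+        For example (illustrative values), the call below builds the
+        signature base string that is fed into HMAC-SHA1:
+
+        .. code-block:: python
+
+            _create_base_string('GET', 'http://example.com/',
+                                {'b': '2', 'a': '1'})
+            # -> 'GET&http%3A%2F%2Fexample.com%2F&a%3D1%26b%3D2'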
+ + :param str method: + HTTP method of the request to be signed. + + :param str base: + Base URL of the request without query string an fragment. + + :param dict params: + Dictionary or list of tuples of the request parameters. + + :param str consumer_secret: + :attr:`.core.Consumer.secret` + + :param str token_secret: + Access token secret as specified in + http://oauth.net/core/1.0a/#anchor3. + + :returns: + The signature string. + + """ + + base_string = _create_base_string(method, base, params) + key = cls._create_key(consumer_secret, token_secret) + + hashed = hmac.new( + six.b(key), + base_string.encode('utf-8'), + hashlib.sha1) + + base64_encoded = binascii.b2a_base64(hashed.digest())[:-1] + + return base64_encoded + + +class PLAINTEXTSignatureGenerator(BaseSignatureGenerator): + """ + PLAINTEXT signature generator. + + See: http://oauth.net/core/1.0a/#anchor21 + + """ + + method = 'PLAINTEXT' + + @classmethod + def create_signature(cls, method, base, params, + consumer_secret, token_secret=''): + + consumer_secret = parse.quote(consumer_secret, '') + token_secret = parse.quote(token_secret, '') + + return parse.quote('&'.join((consumer_secret, token_secret)), '') + + +class OAuth1(providers.AuthorizationProvider): + """ + Base class for |oauth1|_ providers. + """ + + _signature_generator = HMACSHA1SignatureGenerator + + PROVIDER_TYPE_ID = 1 + REQUEST_TOKEN_REQUEST_TYPE = 1 + + def __init__(self, *args, **kwargs): + """ + Accepts additional keyword arguments: + + :param str consumer_key: + The *key* assigned to our application (**consumer**) by + the **provider**. + + :param str consumer_secret: + The *secret* assigned to our application (**consumer**) by + the **provider**. + + :param id: + A unique short name used to serialize :class:`.Credentials`. + + :param dict user_authorization_params: + A dictionary of additional request parameters for + **user authorization request**. + + :param dict access_token_params: + A dictionary of additional request parameters for + **access token request**. + + :param dict request_token_params: + A dictionary of additional request parameters for + **request token request**. + + """ + + super(OAuth1, self).__init__(*args, **kwargs) + + self.request_token_params = self._kwarg( + kwargs, 'request_token_params', {}) + + # ======================================================================== + # Abstract properties + # ======================================================================== + + @abc.abstractproperty + def request_token_url(self): + """ + :class:`str` URL where we can get the |oauth1| request token. + see http://oauth.net/core/1.0a/#auth_step1. + """ + + # ======================================================================== + # Internal methods + # ======================================================================== + + @classmethod + def create_request_elements( + cls, request_type, credentials, url, params=None, headers=None, + body='', method='GET', verifier='', callback='' + ): + """ + Creates |oauth1| request elements. 
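+
+        The result is typically unpacked straight into
+        :meth:`.BaseProvider._fetch`, e.g. (illustrative sketch):
+
+        .. code-block:: python
+
+            request_elements = self.create_request_elements(
+                request_type=self.PROTECTED_RESOURCE_REQUEST_TYPE,
+                credentials=self.credentials,
+                url='https://api.example.com/resource',
+                method='GET')
+            response = self._fetch(*request_elements)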
+ """ + + params = params or {} + headers = headers or {} + + consumer_key = credentials.consumer_key or '' + consumer_secret = credentials.consumer_secret or '' + token = credentials.token or '' + token_secret = credentials.token_secret or '' + + # separate url base and query parameters + url, base_params = cls._split_url(url) + + # add extracted params to future params + params.update(dict(base_params)) + + if request_type == cls.USER_AUTHORIZATION_REQUEST_TYPE: + # no need for signature + if token: + params['oauth_token'] = token + else: + raise OAuth1Error( + 'Credentials with valid token are required to create ' + 'User Authorization URL!') + else: + # signature needed + if request_type == cls.REQUEST_TOKEN_REQUEST_TYPE: + # Request Token URL + if consumer_key and consumer_secret and callback: + params['oauth_consumer_key'] = consumer_key + params['oauth_callback'] = callback + else: + raise OAuth1Error( + 'Credentials with valid consumer_key, consumer_secret ' + 'and callback are required to create Request Token ' + 'URL!') + + elif request_type == cls.ACCESS_TOKEN_REQUEST_TYPE: + # Access Token URL + if consumer_key and consumer_secret and token and verifier: + params['oauth_token'] = token + params['oauth_consumer_key'] = consumer_key + params['oauth_verifier'] = verifier + else: + raise OAuth1Error( + 'Credentials with valid consumer_key, ' + 'consumer_secret, token and argument verifier' + ' are required to create Access Token URL!') + + elif request_type == cls.PROTECTED_RESOURCE_REQUEST_TYPE: + # Protected Resources URL + if consumer_key and consumer_secret and token and token_secret: + params['oauth_token'] = token + params['oauth_consumer_key'] = consumer_key + else: + raise OAuth1Error( + 'Credentials with valid consumer_key, ' + + 'consumer_secret, token and token_secret are required ' + 'to create Protected Resources URL!') + + # Sign request. 
+ # http://oauth.net/core/1.0a/#anchor13 + + # Prepare parameters for signature base string + # http://oauth.net/core/1.0a/#rfc.section.9.1 + params['oauth_signature_method'] = cls._signature_generator.method + params['oauth_timestamp'] = str(int(time.time())) + params['oauth_nonce'] = cls.csrf_generator(str(uuid.uuid4())) + params['oauth_version'] = '1.0' + + # add signature to params + params['oauth_signature'] = cls._signature_generator.create_signature( # noqa + method, url, params, consumer_secret, token_secret) + + request_elements = core.RequestElements( + url, method, params, headers, body) + + return cls._x_request_elements_filter( + request_type, request_elements, credentials) + + # ======================================================================== + # Exposed methods + # ======================================================================== + + @staticmethod + def to_tuple(credentials): + return (credentials.token, credentials.token_secret) + + @classmethod + def reconstruct(cls, deserialized_tuple, credentials, cfg): + + token, token_secret = deserialized_tuple + + credentials.token = token + credentials.token_secret = token_secret + credentials.consumer_key = cfg.get('consumer_key', '') + credentials.consumer_secret = cfg.get('consumer_secret', '') + + return credentials + + @providers.login_decorator + def login(self): + # get request parameters from which we can determine the login phase + denied = self.params.get('denied') + verifier = self.params.get('oauth_verifier', '') + request_token = self.params.get('oauth_token', '') + + if request_token and verifier: + # Phase 2 after redirect with success + self._log( + logging.INFO, + u'Continuing OAuth 1.0a authorization procedure after ' + u'redirect.') + token_secret = self._session_get('token_secret') + if not token_secret: + raise FailureError( + u'Unable to retrieve token secret from storage!') + + # Get Access Token + self._log( + logging.INFO, + u'Fetching for access token from {0}.'.format( + self.access_token_url)) + + self.credentials.token = request_token + self.credentials.token_secret = token_secret + + request_elements = self.create_request_elements( + request_type=self.ACCESS_TOKEN_REQUEST_TYPE, + url=self.access_token_url, + credentials=self.credentials, + verifier=verifier, + params=self.access_token_params + ) + + response = self._fetch(*request_elements) + self.access_token_response = response + + if not self._http_status_in_category(response.status, 2): + raise FailureError( + 'Failed to obtain OAuth 1.0a oauth_token from {0}! ' + 'HTTP status code: {1}.' + .format(self.access_token_url, response.status), + original_message=response.content, + status=response.status, + url=self.access_token_url + ) + + self._log(logging.INFO, u'Got access token.') + self.credentials.token = response.data.get('oauth_token', '') + self.credentials.token_secret = response.data.get( + 'oauth_token_secret', '' + ) + + self.credentials = self._x_credentials_parser(self.credentials, + response.data) + self._update_or_create_user(response.data, self.credentials) + + # ================================================================= + # We're done! 
+ # ================================================================= + + elif denied: + # Phase 2 after redirect denied + raise CancellationError( + 'User denied the request token {0} during a redirect' + 'to {1}!'.format(denied, self.user_authorization_url), + original_message=denied, + url=self.user_authorization_url) + else: + # Phase 1 before redirect + self._log( + logging.INFO, + u'Starting OAuth 1.0a authorization procedure.') + + # Fetch for request token + request_elements = self.create_request_elements( + request_type=self.REQUEST_TOKEN_REQUEST_TYPE, + credentials=self.credentials, + url=self.request_token_url, + callback=self.url, + params=self.request_token_params + ) + + self._log( + logging.INFO, + u'Fetching for request token and token secret.') + response = self._fetch(*request_elements) + + # check if response status is OK + if not self._http_status_in_category(response.status, 2): + raise FailureError( + u'Failed to obtain request token from {0}! HTTP status ' + u'code: {1} content: {2}'.format( + self.request_token_url, + response.status, + response.content + ), + original_message=response.content, + status=response.status, + url=self.request_token_url) + + # extract request token + request_token = response.data.get('oauth_token') + if not request_token: + raise FailureError( + 'Response from {0} doesn\'t contain oauth_token ' + 'parameter!'.format(self.request_token_url), + original_message=response.content, + url=self.request_token_url) + + # we need request token for user authorization redirect + self.credentials.token = request_token + + # extract token secret and save it to storage + token_secret = response.data.get('oauth_token_secret') + if token_secret: + # we need token secret after user authorization redirect to get + # access token + self._session_set('token_secret', token_secret) + else: + raise FailureError( + u'Failed to obtain token secret from {0}!'.format( + self.request_token_url), + original_message=response.content, + url=self.request_token_url) + + self._log(logging.INFO, u'Got request token and token secret') + + # Create User Authorization URL + request_elements = self.create_request_elements( + request_type=self.USER_AUTHORIZATION_REQUEST_TYPE, + credentials=self.credentials, + url=self.user_authorization_url, + params=self.user_authorization_params + ) + + self._log( + logging.INFO, + u'Redirecting user to {0}.'.format( + request_elements.full_url)) + + self.redirect(request_elements.full_url) + + +class Bitbucket(OAuth1): + """ + Bitbucket |oauth1| provider. + + * Dashboard: https://bitbucket.org/account/user/peterhudec/api + * Docs: https://confluence.atlassian.com/display/BITBUCKET/oauth+Endpoint + * API reference: + https://confluence.atlassian.com/display/BITBUCKET/Using+the+Bitbucket+REST+APIs + + Supported :class:`.User` properties: + + * first_name + * id + * last_name + * link + * name + * picture + * username + * email + + Unsupported :class:`.User` properties: + + * birth_date + * city + * country + * gender + * locale + * location + * nickname + * phone + * postal_code + * timezone + + .. note:: + + To get the full user info, you need to select both the *Account Read* + and the *Repositories Read* permission in the Bitbucket application + edit form. 
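+
+    A minimal :doc:`config` entry, mirroring the Flickr example below
+    (consumer values are placeholders):
+
+    .. code-block:: python
+
+        CONFIG = {
+            'bitbucket': {
+                'class_': oauth1.Bitbucket,
+                'consumer_key': '##########',
+                'consumer_secret': '##########',
+            },
+        }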
+ + """ + + supported_user_attributes = core.SupportedUserAttributes( + first_name=True, + id=True, + last_name=True, + link=True, + name=True, + picture=True, + username=True, + email=True + ) + + request_token_url = 'https://bitbucket.org/!api/1.0/oauth/request_token' + user_authorization_url = 'https://bitbucket.org/!api/1.0/oauth/' + \ + 'authenticate' + access_token_url = 'https://bitbucket.org/!api/1.0/oauth/access_token' + user_info_url = 'https://api.bitbucket.org/1.0/user' + user_email_url = 'https://api.bitbucket.org/1.0/emails' + + @staticmethod + def _x_user_parser(user, data): + _user = data.get('user', {}) + user.username = user.id = _user.get('username') + user.name = _user.get('display_name') + user.first_name = _user.get('first_name') + user.last_name = _user.get('last_name') + user.picture = _user.get('avatar') + user.link = 'https://bitbucket.org/api{0}'\ + .format(_user.get('resource_uri')) + return user + + def _access_user_info(self): + """ + Email is available in separate method so second request is needed. + """ + response = super(Bitbucket, self)._access_user_info() + + response.data.setdefault("email", None) + + email_response = self.access(self.user_email_url) + if email_response.data: + for item in email_response.data: + if item.get("primary", False): + response.data.update(email=item.get("email", None)) + + return response + + +class Flickr(OAuth1): + """ + Flickr |oauth1| provider. + + * Dashboard: https://www.flickr.com/services/apps/ + * Docs: https://www.flickr.com/services/api/auth.oauth.html + * API reference: https://www.flickr.com/services/api/ + + Supported :class:`.User` properties: + + * id + * name + * username + + Unsupported :class:`.User` properties: + + * birth_date + * city + * country + * email + * first_name + * gender + * last_name + * link + * locale + * location + * nickname + * phone + * picture + * postal_code + * timezone + + .. note:: + + If you encounter the "Oops! Flickr doesn't recognise the + permission set." message, you need to add the ``perms=read`` or + ``perms=write`` parameter to the *user authorization request*. + You can do it by adding the ``user_authorization_params`` + key to the :doc:`config`: + + .. code-block:: python + :emphasize-lines: 6 + + CONFIG = { + 'flickr': { + 'class_': oauth1.Flickr, + 'consumer_key': '##########', + 'consumer_secret': '##########', + 'user_authorization_params': dict(perms='read'), + }, + } + + """ + + supported_user_attributes = core.SupportedUserAttributes( + id=True, + name=True, + username=True + ) + + request_token_url = 'http://www.flickr.com/services/oauth/request_token' + user_authorization_url = 'http://www.flickr.com/services/oauth/authorize' + access_token_url = 'http://www.flickr.com/services/oauth/access_token' + user_info_url = None + + supports_jsonp = True + + @staticmethod + def _x_user_parser(user, data): + _user = data.get('user', {}) + + user.name = data.get('fullname') or _user.get( + 'username', {}).get('_content') + user.id = data.get('user_nsid') or _user.get('id') + + return user + + +class Meetup(OAuth1): + """ + Meetup |oauth1| provider. + + .. note:: + + Meetup also supports |oauth2| but you need the **user ID** to update + the **user** info, which they don't provide in the |oauth2| access + token response. 
+ + * Dashboard: http://www.meetup.com/meetup_api/oauth_consumers/ + * Docs: http://www.meetup.com/meetup_api/auth/#oauth + * API: http://www.meetup.com/meetup_api/docs/ + + Supported :class:`.User` properties: + + * city + * country + * id + * link + * locale + * location + * name + * picture + + Unsupported :class:`.User` properties: + + * birth_date + * email + * first_name + * gender + * last_name + * nickname + * phone + * postal_code + * timezone + * username + + """ + + supported_user_attributes = core.SupportedUserAttributes( + city=True, + country=True, + id=True, + link=True, + locale=True, + location=True, + name=True, + picture=True + ) + + request_token_url = 'https://api.meetup.com/oauth/request/' + user_authorization_url = 'http://www.meetup.com/authorize/' + access_token_url = 'https://api.meetup.com/oauth/access/' + user_info_url = 'https://api.meetup.com/2/member/{id}' + + @staticmethod + def _x_user_parser(user, data): + + user.id = data.get('id') or data.get('member_id') + user.locale = data.get('lang') + user.picture = data.get('photo', {}).get('photo_link') + + return user + + +class Plurk(OAuth1): + """ + Plurk |oauth1| provider. + + * Dashboard: http://www.plurk.com/PlurkApp/ + * Docs: + * API: http://www.plurk.com/API + * API explorer: http://www.plurk.com/OAuth/test/ + + Supported :class:`.User` properties: + + * birth_date + * city + * country + * email + * gender + * id + * link + * locale + * location + * name + * nickname + * picture + * timezone + * username + + Unsupported :class:`.User` properties: + + * first_name + * last_name + * phone + * postal_code + + """ + + supported_user_attributes = core.SupportedUserAttributes( + birth_date=True, + city=True, + country=True, + email=True, + gender=True, + id=True, + link=True, + locale=True, + location=True, + name=True, + nickname=True, + picture=True, + timezone=True, + username=True + ) + + request_token_url = 'http://www.plurk.com/OAuth/request_token' + user_authorization_url = 'http://www.plurk.com/OAuth/authorize' + access_token_url = 'http://www.plurk.com/OAuth/access_token' + user_info_url = 'http://www.plurk.com/APP/Profile/getOwnProfile' + + @staticmethod + def _x_user_parser(user, data): + + _user = data.get('user_info', {}) + + user.email = _user.get('email') + user.gender = _user.get('gender') + user.id = _user.get('id') or _user.get('uid') + user.locale = _user.get('default_lang') + user.name = _user.get('full_name') + user.nickname = _user.get('nick_name') + user.picture = 'http://avatars.plurk.com/{0}-big2.jpg'.format(user.id) + user.timezone = _user.get('timezone') + user.username = _user.get('display_name') + + user.link = 'http://www.plurk.com/{0}/'.format(user.username) + + user.city, user.country = _user.get('location', ',').split(',') + user.city = user.city.strip() + user.country = user.country.strip() + + _bd = _user.get('date_of_birth') + if _bd: + try: + user.birth_date = datetime.datetime.strptime( + _bd, + "%a, %d %b %Y %H:%M:%S %Z" + ) + except ValueError: + pass + + return user + + +class Twitter(OAuth1): + """ + Twitter |oauth1| provider. + + * Dashboard: https://dev.twitter.com/apps + * Docs: https://dev.twitter.com/docs + * API reference: https://dev.twitter.com/docs/api + + .. note:: To prevent multiple authorization attempts, you should enable + the option: + ``Allow this application to be used to Sign in with Twitter`` + in the Twitter 'Application Management' page. 
(http://apps.twitter.com) + + Supported :class:`.User` properties: + + * email + * city + * country + * id + * link + * locale + * location + * name + * picture + * username + + Unsupported :class:`.User` properties: + + * birth_date + * email + * gender + * first_name + * last_name + * locale + * nickname + * phone + * postal_code + * timezone + + """ + + supported_user_attributes = core.SupportedUserAttributes( + city=True, + country=True, + id=True, + email=False, + link=True, + locale=False, + location=True, + name=True, + picture=True, + username=True + ) + + request_token_url = 'https://api.twitter.com/oauth/request_token' + user_authorization_url = 'https://api.twitter.com/oauth/authenticate' + access_token_url = 'https://api.twitter.com/oauth/access_token' + user_info_url = ( + 'https://api.twitter.com/1.1/account/verify_credentials.json?' + 'include_entities=true&include_email=true' + ) + supports_jsonp = True + + @staticmethod + def _x_user_parser(user, data): + user.username = data.get('screen_name') + user.id = data.get('id') or data.get('user_id') + user.picture = data.get('profile_image_url') + user.locale = data.get('lang') + user.link = data.get('url') + _location = data.get('location', '') + if _location: + user.location = _location.strip() + _split_location = _location.split(',') + if len(_split_location) > 1: + _city, _country = _split_location + user.country = _country.strip() + else: + _city = _split_location[0] + user.city = _city.strip() + return user + + +class Tumblr(OAuth1): + """ + Tumblr |oauth1| provider. + + * Dashboard: http://www.tumblr.com/oauth/apps + * Docs: http://www.tumblr.com/docs/en/api/v2#auth + * API reference: http://www.tumblr.com/docs/en/api/v2 + + Supported :class:`.User` properties: + + * id + * name + * username + + Unsupported :class:`.User` properties: + + * birth_date + * city + * country + * email + * gender + * first_name + * last_name + * link + * locale + * location + * nickname + * phone + * picture + * postal_code + * timezone + + """ + + supported_user_attributes = core.SupportedUserAttributes( + id=True, + name=True, + username=True + ) + + request_token_url = 'http://www.tumblr.com/oauth/request_token' + user_authorization_url = 'http://www.tumblr.com/oauth/authorize' + access_token_url = 'http://www.tumblr.com/oauth/access_token' + user_info_url = 'http://api.tumblr.com/v2/user/info' + + supports_jsonp = True + + @staticmethod + def _x_user_parser(user, data): + _user = data.get('response', {}).get('user', {}) + user.username = user.id = _user.get('name') + return user + + +class UbuntuOne(OAuth1): + """ + Ubuntu One |oauth1| provider. + + .. note:: + + The UbuntuOne service + `has been shut down `__. + + .. warning:: + + Uses the `PLAINTEXT `_ + Signature method! + + * Dashboard: https://one.ubuntu.com/developer/account_admin/auth/web + * Docs: https://one.ubuntu.com/developer/account_admin/auth/web + * API reference: https://one.ubuntu.com/developer/contents + + """ + + _signature_generator = PLAINTEXTSignatureGenerator + + request_token_url = 'https://one.ubuntu.com/oauth/request/' + user_authorization_url = 'https://one.ubuntu.com/oauth/authorize/' + access_token_url = 'https://one.ubuntu.com/oauth/access/' + user_info_url = 'https://one.ubuntu.com/api/account/' + + +class Vimeo(OAuth1): + """ + Vimeo |oauth1| provider. + + .. warning:: + + Vimeo needs one more fetch to get rich user info! 
+ + * Dashboard: https://developer.vimeo.com/apps + * Docs: https://developer.vimeo.com/apis/advanced#oauth-endpoints + * API reference: https://developer.vimeo.com/apis + + Supported :class:`.User` properties: + + * id + * link + * location + * name + * picture + + Unsupported :class:`.User` properties: + + * birth_date + * city + * country + * email + * gender + * first_name + * last_name + * locale + * nickname + * phone + * postal_code + * timezone + * username + + """ + + supported_user_attributes = core.SupportedUserAttributes( + id=True, + link=True, + location=True, + name=True, + picture=True + ) + + request_token_url = 'https://vimeo.com/oauth/request_token' + user_authorization_url = 'https://vimeo.com/oauth/authorize' + access_token_url = 'https://vimeo.com/oauth/access_token' + user_info_url = ('http://vimeo.com/api/rest/v2?' + 'format=json&method=vimeo.oauth.checkAccessToken') + + def _access_user_info(self): + """ + Vimeo requires the user ID to access the user info endpoint, so we need + to make two requests: one to get user ID and second to get user info. + """ + response = super(Vimeo, self)._access_user_info() + uid = response.data.get('oauth', {}).get('user', {}).get('id') + if uid: + return self.access('http://vimeo.com/api/v2/{0}/info.json' + .format(uid)) + return response + + @staticmethod + def _x_user_parser(user, data): + user.name = data.get('display_name') + user.link = data.get('profile_url') + user.picture = data.get('portrait_huge') + return user + + +class Xero(OAuth1): + """ + Xero |oauth1| provider. + + .. note:: + + API returns XML! + + * Dashboard: https://api.xero.com/Application + * Docs: http://blog.xero.com/developer/api-overview/public-applications/ + * API reference: http://blog.xero.com/developer/api/ + + Supported :class:`.User` properties: + + * email + * first_name + * id + * last_name + * name + + Unsupported :class:`.User` properties: + + * birth_date + * city + * country + * gender + * link + * locale + * location + * nickname + * phone + * picture + * postal_code + * timezone + * username + + """ + + supported_user_attributes = core.SupportedUserAttributes( + email=True, + first_name=True, + id=True, + last_name=True, + name=True + ) + + request_token_url = 'https://api.xero.com/oauth/RequestToken' + user_authorization_url = 'https://api.xero.com/oauth/Authorize' + access_token_url = 'https://api.xero.com/oauth/AccessToken' + user_info_url = 'https://api.xero.com/api.xro/2.0/Users' + + @staticmethod + def _x_user_parser(user, data): + # Data is xml.etree.ElementTree.Element object. + if not isinstance(data, dict): + # But only on user.update() + _user = data.find('Users/User') + user.id = _user.find('UserID').text + user.first_name = _user.find('FirstName').text + user.last_name = _user.find('LastName').text + user.email = _user.find('EmailAddress').text + + return user + + +class Yahoo(OAuth1): + """ + Yahoo |oauth1| provider. 
+ + * Dashboard: https://developer.apps.yahoo.com/dashboard/ + * Docs: http://developer.yahoo.com/oauth/guide/oauth-auth-flow.html + * API: http://developer.yahoo.com/everything.html + * API explorer: http://developer.yahoo.com/yql/console/ + + Supported :class:`.User` properties: + + * city + * country + * id + * link + * location + * name + * nickname + * picture + + Unsupported :class:`.User` properties: + + * birth_date + * gender + * locale + * phone + * postal_code + * timezone + * username + + """ + + supported_user_attributes = core.SupportedUserAttributes( + city=True, + country=True, + id=True, + link=True, + location=True, + name=True, + nickname=True, + picture=True + ) + + request_token_url = 'https://api.login.yahoo.com/oauth/v2/' + \ + 'get_request_token' + user_authorization_url = 'https://api.login.yahoo.com/oauth/v2/' + \ + 'request_auth' + access_token_url = 'https://api.login.yahoo.com/oauth/v2/get_token' + user_info_url = ( + 'https://query.yahooapis.com/v1/yql?q=select%20*%20from%20' + 'social.profile%20where%20guid%3Dme%3B&format=json' + ) + + same_origin = False + supports_jsonp = True + + @staticmethod + def _x_user_parser(user, data): + + _user = data.get('query', {}).get('results', {}).get('profile', {}) + + user.id = _user.get('guid') + user.gender = _user.get('gender') + user.nickname = _user.get('nickname') + user.link = _user.get('profileUrl') + + emails = _user.get('emails') + if isinstance(emails, list): + for email in emails: + if 'primary' in list(email.keys()): + user.email = email.get('handle') + elif isinstance(emails, dict): + user.email = emails.get('handle') + + user.picture = _user.get('image', {}).get('imageUrl') + + try: + user.city, user.country = _user.get('location', ',').split(',') + user.city = user.city.strip() + user.country = user.country.strip() + except ValueError: + # probably user hasn't activated Yahoo Profile + user.city = None + user.country = None + return user + + +class Xing(OAuth1): + """ + Xing |oauth1| provider. 
+ + * Dashboard: https://dev.xing.com/applications + * Docs: https://dev.xing.com/docs/authentication + * API reference: https://dev.xing.com/docs/resources + + Supported :class:`.User` properties: + + * birth_date + * city + * country + * email + * first_name + * gender + * id + * last_name + * link + * locale + * location + * name + * phone + * picture + * postal_code + * timezone + * username + + Unsupported :class:`.User` properties: + + * nickname + + """ + + request_token_url = 'https://api.xing.com/v1/request_token' + user_authorization_url = 'https://api.xing.com/v1/authorize' + access_token_url = 'https://api.xing.com/v1/access_token' + user_info_url = 'https://api.xing.com/v1/users/me' + + supported_user_attributes = core.SupportedUserAttributes( + birth_date=True, + city=True, + country=True, + email=True, + first_name=True, + gender=True, + id=True, + last_name=True, + link=True, + locale=True, + location=True, + name=True, + phone=True, + picture=True, + postal_code=True, + timezone=True, + username=True, + ) + + @staticmethod + def _x_user_parser(user, data): + _users = data.get('users', []) + if _users and _users[0]: + _user = _users[0] + user.id = _user.get('id') + user.name = _user.get('display_name') + user.first_name = _user.get('first_name') + user.last_name = _user.get('last_name') + user.gender = _user.get('gender') + user.timezone = _user.get('time_zone', {}).get('name') + user.email = _user.get('active_email') + user.link = _user.get('permalink') + user.username = _user.get('page_name') + user.picture = _user.get('photo_urls', {}).get('large') + + _address = _user.get('business_address', {}) + if _address: + user.city = _address.get('city') + user.country = _address.get('country') + user.postal_code = _address.get('zip_code') + user.phone = ( + _address.get('phone', '') or + _address.get('mobile_phone', '')).replace('|', '') + + _languages = list(_user.get('languages', {}).keys()) + if _languages and _languages[0]: + user.locale = _languages[0] + + _birth_date = _user.get('birth_date', {}) + _year = _birth_date.get('year') + _month = _birth_date.get('month') + _day = _birth_date.get('day') + if _year and _month and _day: + user.birth_date = datetime.datetime(_year, _month, _day) + + return user + + +# The provider type ID is generated from this list's indexes! +# Always append new providers at the end so that ids of existing providers +# don't change! +PROVIDER_ID_MAP = [ + Bitbucket, + Flickr, + Meetup, + OAuth1, + Plurk, + Tumblr, + Twitter, + UbuntuOne, + Vimeo, + Xero, + Xing, + Yahoo, +] diff --git a/rhodecode/lib/_vendor/authomatic/providers/oauth2.py b/rhodecode/lib/_vendor/authomatic/providers/oauth2.py new file mode 100755 --- /dev/null +++ b/rhodecode/lib/_vendor/authomatic/providers/oauth2.py @@ -0,0 +1,2053 @@ +# -*- coding: utf-8 -*- +""" +|oauth2| Providers +------------------- + +Providers which implement the |oauth2|_ protocol. + +.. 
autosummary:: + + OAuth2 + Amazon + Behance + Bitly + Bitbucket + Cosm + DeviantART + Eventbrite + Facebook + Foursquare + GitHub + Google + LinkedIn + PayPal + Reddit + Viadeo + VK + WindowsLive + Yammer + Yandex + +""" + +import base64 +import datetime +import json +import logging + +from authomatic.six.moves.urllib.parse import unquote +from authomatic import providers +from authomatic.exceptions import CancellationError, FailureError, OAuth2Error +import authomatic.core as core + + +__all__ = [ + 'OAuth2', + 'Amazon', + 'Behance', + 'Bitly', + 'Bitbucket', + 'Cosm', + 'DeviantART', + 'Eventbrite', + 'Facebook', + 'Foursquare', + 'GitHub', + 'Google', + 'LinkedIn', + 'PayPal', + 'Reddit', + 'Viadeo', + 'VK', + 'WindowsLive', + 'Yammer', + 'Yandex' +] + + +class OAuth2(providers.AuthorizationProvider): + """ + Base class for |oauth2|_ providers. + """ + + PROVIDER_TYPE_ID = 2 + TOKEN_TYPES = ['', 'Bearer'] + + #: A scope preset to get most of the **user** info. + #: Use it in the :doc:`config` like + #: ``{'scope': oauth2.Facebook.user_info_scope}``. + user_info_scope = [] + + #: :class:`bool` If ``False``, the provider doesn't support CSRF + #: protection. + supports_csrf_protection = True + + #: :class:`bool` If ``False``, the provider doesn't support user_state. + supports_user_state = True + + token_request_method = 'POST' # method for requesting an access token + + def __init__(self, *args, **kwargs): + """ + Accepts additional keyword arguments: + + :param list scope: + List of strings specifying requested permissions as described + in the + `OAuth 2.0 spec `_. + + :param bool offline: + If ``True`` the **provider** will be set up to request an + *offline access token*. + Default is ``False``. + + As well as those inherited from :class:`.AuthorizationProvider` + constructor. + + """ + + super(OAuth2, self).__init__(*args, **kwargs) + + self.scope = self._kwarg(kwargs, 'scope', []) + self.offline = self._kwarg(kwargs, 'offline', False) + + # ======================================================================== + # Internal methods + # ======================================================================== + + def _x_scope_parser(self, scope): + """ + Override this to handle differences between accepted format of scope + across providers. + + :attr list scope: + List of scopes. + + """ + + # pylint:disable=no-self-use + + # Most providers accept csv scope. + return ','.join(scope) if scope else '' + + @classmethod + def create_request_elements( + cls, request_type, credentials, url, method='GET', params=None, + headers=None, body='', secret=None, redirect_uri='', scope='', + csrf='', user_state='' + ): + """ + Creates |oauth2| request elements. + """ + + headers = headers or {} + params = params or {} + + consumer_key = credentials.consumer_key or '' + consumer_secret = credentials.consumer_secret or '' + token = credentials.token or '' + refresh_token = credentials.refresh_token or credentials.token or '' + + # Separate url base and query parameters. + url, base_params = cls._split_url(url) + + # Add params extracted from URL. + params.update(dict(base_params)) + + if request_type == cls.USER_AUTHORIZATION_REQUEST_TYPE: + # User authorization request. + # TODO: Raise error for specific message for each missing argument. 
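+            # This branch assembles the query string for the redirect that
+            # sends the user to the provider's authorization page:
+            # client_id, redirect_uri, scope, response_type=code and state
+            # (either the bare CSRF token, or a urlsafe-base64 JSON blob
+            # carrying both the CSRF token and an optional user_state).
+            # Illustrative result, with made-up values:
+            #   {user_authorization_url}?client_id=XYZ
+            #       &redirect_uri=https%3A%2F%2Fexample.com%2Flogin
+            #       &scope=email&state=abc123&response_type=code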
+ if consumer_key and redirect_uri and ( + csrf or not cls.supports_csrf_protection): + params['client_id'] = consumer_key + params['redirect_uri'] = redirect_uri + params['scope'] = scope + if cls.supports_user_state: + params['state'] = base64.urlsafe_b64encode( + json.dumps( + {"csrf": csrf, "user_state": user_state} + ).encode('utf-8') + ) + else: + params['state'] = csrf + params['response_type'] = 'code' + + # Add authorization header + headers.update(cls._authorization_header(credentials)) + else: + raise OAuth2Error( + 'Credentials with valid consumer_key and arguments ' + 'redirect_uri, scope and state are required to create ' + 'OAuth 2.0 user authorization request elements!') + + elif request_type == cls.ACCESS_TOKEN_REQUEST_TYPE: + # Access token request. + if consumer_key and consumer_secret: + params['code'] = token + params['client_id'] = consumer_key + params['client_secret'] = consumer_secret + params['redirect_uri'] = redirect_uri + params['grant_type'] = 'authorization_code' + + # TODO: Check whether all providers accept it + headers.update(cls._authorization_header(credentials)) + else: + raise OAuth2Error( + 'Credentials with valid token, consumer_key, ' + 'consumer_secret and argument redirect_uri are required ' + 'to create OAuth 2.0 access token request elements!') + + elif request_type == cls.REFRESH_TOKEN_REQUEST_TYPE: + # Refresh access token request. + if refresh_token and consumer_key and consumer_secret: + params['refresh_token'] = refresh_token + params['client_id'] = consumer_key + params['client_secret'] = consumer_secret + params['grant_type'] = 'refresh_token' + else: + raise OAuth2Error( + 'Credentials with valid refresh_token, consumer_key, ' + 'consumer_secret are required to create OAuth 2.0 ' + 'refresh token request elements!') + + elif request_type == cls.PROTECTED_RESOURCE_REQUEST_TYPE: + # Protected resource request. + + # Add Authorization header. See: + # http://tools.ietf.org/html/rfc6749#section-7.1 + if credentials.token_type == cls.BEARER: + # http://tools.ietf.org/html/rfc6750#section-2.1 + headers.update( + {'Authorization': 'Bearer {0}'.format(credentials.token)}) + + elif token: + params['access_token'] = token + else: + raise OAuth2Error( + 'Credentials with valid token are required to create ' + 'OAuth 2.0 protected resources request elements!') + + request_elements = core.RequestElements( + url, method, params, headers, body) + + return cls._x_request_elements_filter( + request_type, request_elements, credentials) + + @staticmethod + def _x_refresh_credentials_if(credentials): + """ + Override this to specify conditions when it gives sense to refresh + credentials. + + .. 
warning:: + + |classmethod| + + :param credentials: + :class:`.Credentials` + + :returns: + ``True`` or ``False`` + + """ + + if credentials.refresh_token: + return True + + # ======================================================================== + # Exposed methods + # ======================================================================== + + @classmethod + def to_tuple(cls, credentials): + return (credentials.token, + credentials.refresh_token, + credentials.expiration_time, + cls.TOKEN_TYPES.index(credentials.token_type)) + + @classmethod + def reconstruct(cls, deserialized_tuple, credentials, cfg): + + token, refresh_token, expiration_time, token_type = deserialized_tuple + + credentials.token = token + credentials.refresh_token = refresh_token + credentials.expiration_time = expiration_time + credentials.token_type = cls.TOKEN_TYPES[int(token_type)] + + return credentials + + @classmethod + def decode_state(cls, state, param='user_state'): + """ + Decode state and return param. + + :param str state: + state parameter passed through by provider + + :param str param: + key to query from decoded state variable. Options include 'csrf' + and 'user_state'. + + :returns: + string value from decoded state + + """ + if state and cls.supports_user_state: + # urlsafe_b64 may include = which the browser quotes so must + # unquote Cast to str to void b64decode translation error. Base64 + # should be str compatible. + return json.loads(base64.urlsafe_b64decode( + unquote(str(state))).decode('utf-8'))[param] + else: + return state if param == 'csrf' else '' + + def refresh_credentials(self, credentials): + """ + Refreshes :class:`.Credentials` if it gives sense. + + :param credentials: + :class:`.Credentials` to be refreshed. + + :returns: + :class:`.Response`. + + """ + + if not self._x_refresh_credentials_if(credentials): + return + + # We need consumer key and secret to make this kind of request. + cfg = credentials.config.get(credentials.provider_name) + credentials.consumer_key = cfg.get('consumer_key') + credentials.consumer_secret = cfg.get('consumer_secret') + + request_elements = self.create_request_elements( + request_type=self.REFRESH_TOKEN_REQUEST_TYPE, + credentials=credentials, + url=self.access_token_url, + method='POST' + ) + + self._log(logging.INFO, u'Refreshing credentials.') + response = self._fetch(*request_elements) + + # We no longer need consumer info. + credentials.consumer_key = None + credentials.consumer_secret = None + + # Extract the refreshed data. + access_token = response.data.get('access_token') + refresh_token = response.data.get('refresh_token') + + # Update credentials only if there is access token. + if access_token: + credentials.token = access_token + credentials.expire_in = response.data.get('expires_in') + + # Update refresh token only if there is a new one. + if refresh_token: + credentials.refresh_token = refresh_token + + # Handle different naming conventions across providers. 
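+        # Subclasses override _x_credentials_parser() to translate provider
+        # specific field names onto the Credentials object; Facebook, for
+        # example (see below), returns "expires" instead of "expires_in"
+        # and its parser maps that onto credentials.expire_in.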
+ credentials = self._x_credentials_parser( + credentials, response.data) + + return response + + @providers.login_decorator + def login(self): + + # get request parameters from which we can determine the login phase + authorization_code = self.params.get('code') + error = self.params.get('error') + error_message = self.params.get('error_message') + state = self.params.get('state') + # optional user_state to be passed in oauth2 state + user_state = self.params.get('user_state', '') + + if authorization_code or not self.user_authorization_url: + + if authorization_code: + # ============================================================= + # Phase 2 after redirect with success + # ============================================================= + + self._log( + logging.INFO, + u'Continuing OAuth 2.0 authorization procedure after ' + u'redirect.') + + # validate CSRF token + if self.supports_csrf_protection: + self._log( + logging.INFO, + u'Validating request by comparing request state with ' + u'stored state.') + stored_csrf = self._session_get('csrf') + + state_csrf = self.decode_state(state, 'csrf') + if not stored_csrf: + raise FailureError(u'Unable to retrieve stored state!') + elif stored_csrf != state_csrf: + raise FailureError( + u'The returned state csrf cookie "{0}" doesn\'t ' + u'match with the stored state!'.format( + state_csrf + ), + url=self.user_authorization_url) + self._log(logging.INFO, u'Request is valid.') + else: + self._log(logging.WARN, u'Skipping CSRF validation!') + + elif not self.user_authorization_url: + # ============================================================= + # Phase 1 without user authorization redirect. + # ============================================================= + + self._log( + logging.INFO, + u'Starting OAuth 2.0 authorization procedure without ' + u'user authorization redirect.') + + # exchange authorization code for access token by the provider + self._log( + logging.INFO, + u'Fetching access token from {0}.'.format( + self.access_token_url)) + + self.credentials.token = authorization_code + + request_elements = self.create_request_elements( + request_type=self.ACCESS_TOKEN_REQUEST_TYPE, + credentials=self.credentials, + url=self.access_token_url, + method=self.token_request_method, + redirect_uri=self.url, + params=self.access_token_params, + headers=self.access_token_headers + ) + + response = self._fetch(*request_elements) + self.access_token_response = response + + access_token = response.data.get('access_token', '') + refresh_token = response.data.get('refresh_token', '') + + if response.status != 200 or not access_token: + raise FailureError( + 'Failed to obtain OAuth 2.0 access token from {0}! ' + 'HTTP status: {1}, message: {2}.'.format( + self.access_token_url, + response.status, + response.content + ), + original_message=response.content, + status=response.status, + url=self.access_token_url) + + self._log(logging.INFO, u'Got access token.') + + if refresh_token: + self._log(logging.INFO, u'Got refresh access token.') + + # OAuth 2.0 credentials need access_token, refresh_token, + # token_type and expire_in. + self.credentials.token = access_token + self.credentials.refresh_token = refresh_token + self.credentials.expire_in = response.data.get('expires_in') + self.credentials.token_type = response.data.get('token_type', '') + # sWe don't need these two guys anymore. 
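+            # ("These two" are consumer_key and consumer_secret: they were
+            # only needed to obtain the token, so they are blanked out and
+            # the credentials keep just the token data from here on.)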
+ self.credentials.consumer_key = '' + self.credentials.consumer_secret = '' + + # update credentials + self.credentials = self._x_credentials_parser( + self.credentials, response.data) + + # create user + self._update_or_create_user(response.data, self.credentials) + + # ================================================================= + # We're done! + # ================================================================= + + elif error or error_message: + # ================================================================= + # Phase 2 after redirect with error + # ================================================================= + + error_reason = self.params.get('error_reason') or error + error_description = self.params.get('error_description') \ + or error_message or error + + if error_reason and 'denied' in error_reason: + raise CancellationError(error_description, + url=self.user_authorization_url) + else: + raise FailureError( + error_description, + url=self.user_authorization_url) + + elif ( + not self.params or + len(self.params) == 1 and + 'user_state' in self.params + ): + # ================================================================= + # Phase 1 before redirect + # ================================================================= + + self._log( + logging.INFO, + u'Starting OAuth 2.0 authorization procedure.') + + csrf = '' + if self.supports_csrf_protection: + # generate csfr + csrf = self.csrf_generator(self.settings.secret) + # and store it to session + self._session_set('csrf', csrf) + else: + self._log( + logging.WARN, + u'Provider doesn\'t support CSRF validation!') + + request_elements = self.create_request_elements( + request_type=self.USER_AUTHORIZATION_REQUEST_TYPE, + credentials=self.credentials, + url=self.user_authorization_url, + redirect_uri=self.url, + scope=self._x_scope_parser( + self.scope), + csrf=csrf, + user_state=user_state, + params=self.user_authorization_params + ) + + self._log( + logging.INFO, + u'Redirecting user to {0}.'.format( + request_elements.full_url)) + + self.redirect(request_elements.full_url) + + +class Amazon(OAuth2): + """ + Amazon |oauth2| provider. + + Thanks to `Ghufran Syed `__. + + * Dashboard: https://developer.amazon.com/lwa/sp/overview.html + * Docs: https://developer.amazon.com/public/apis/engage/login-with-amazon/docs/conceptual_overview.html + * API reference: https://developer.amazon.com/public/apis + + .. note:: + + Amazon only accepts **redirect_uri** with **https** schema, + Therefore the *login handler* must also be accessible through + **https**. 
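+
+    A minimal :doc:`config` sketch (the config key and the credential
+    values below are placeholders, not real ones):
+
+    .. code-block:: python
+
+        CONFIG = {
+            'amazon': {
+                'class_': oauth2.Amazon,
+                'consumer_key': '#####',
+                'consumer_secret': '#####',
+                'scope': oauth2.Amazon.user_info_scope,
+            }
+        }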
+ + Supported :class:`.User` properties: + + * email + * id + * name + * postal_code + + Unsupported :class:`.User` properties: + + * birth_date + * city + * country + * first_name + * gender + * last_name + * link + * locale + * nickname + * phone + * picture + * timezone + * username + + """ + + user_authorization_url = 'https://www.amazon.com/ap/oa' + access_token_url = 'https://api.amazon.com/auth/o2/token' + user_info_url = 'https://api.amazon.com/user/profile' + user_info_scope = ['profile', 'postal_code'] + + supported_user_attributes = core.SupportedUserAttributes( + email=True, + id=True, + name=True, + postal_code=True + ) + + def _x_scope_parser(self, scope): + # Amazon has space-separated scopes + return ' '.join(scope) + + @staticmethod + def _x_user_parser(user, data): + user.id = data.get('user_id') + return user + + @classmethod + def _x_credentials_parser(cls, credentials, data): + if data.get('token_type') == 'bearer': + credentials.token_type = cls.BEARER + return credentials + + +class Behance(OAuth2): + """ + Behance |oauth2| provider. + + .. note:: + + Behance doesn't support third party authorization anymore, + which renders this class pretty much useless. + + * Dashboard: http://www.behance.net/dev/apps + * Docs: http://www.behance.net/dev/authentication + * API reference: http://www.behance.net/dev/api/endpoints/ + + """ + + user_authorization_url = 'https://www.behance.net/v2/oauth/authenticate' + access_token_url = 'https://www.behance.net/v2/oauth/token' + user_info_url = '' + + user_info_scope = ['activity_read'] + + def _x_scope_parser(self, scope): + """ + Behance has pipe-separated scopes. + """ + return '|'.join(scope) + + @staticmethod + def _x_user_parser(user, data): + + _user = data.get('user', {}) + + user.id = _user.get('id') + user.first_name = _user.get('first_name') + user.last_name = _user.get('last_name') + user.username = _user.get('username') + user.city = _user.get('city') + user.country = _user.get('country') + user.link = _user.get('url') + user.name = _user.get('display_name') + user.picture = _user.get('images', {}).get('138') + + return user + + +class Bitly(OAuth2): + """ + Bitly |oauth2| provider. + + .. 
warning:: + + |no-csrf| + + * Dashboard: https://bitly.com/a/oauth_apps + * Docs: http://dev.bitly.com/authentication.html + * API reference: http://dev.bitly.com/api.html + + Supported :class:`.User` properties: + + * id + * link + * name + * picture + * username + + Unsupported :class:`.User` properties: + + * birth_date + * city + * country + * email + * first_name + * gender + * last_name + * locale + * nickname + * phone + * postal_code + * timezone + + """ + + supported_user_attributes = core.SupportedUserAttributes( + id=True, + link=True, + name=True, + picture=True, + username=True + ) + + supports_csrf_protection = False + _x_use_authorization_header = False + + user_authorization_url = 'https://bitly.com/oauth/authorize' + access_token_url = 'https://api-ssl.bitly.com/oauth/access_token' + user_info_url = 'https://api-ssl.bitly.com/v3/user/info' + + def __init__(self, *args, **kwargs): + super(Bitly, self).__init__(*args, **kwargs) + + if self.offline: + if 'grant_type' not in self.access_token_params: + self.access_token_params['grant_type'] = 'refresh_token' + + @staticmethod + def _x_user_parser(user, data): + info = data.get('data', {}) + + user.id = info.get('login') + user.name = info.get('full_name') + user.username = info.get('display_name') + user.picture = info.get('profile_image') + user.link = info.get('profile_url') + + return user + + +class Cosm(OAuth2): + """ + Cosm |oauth2| provider. + + .. note:: + + Cosm doesn't provide any *user info URL*. + + * Dashboard: https://cosm.com/users/{your_username}/apps + * Docs: https://cosm.com/docs/ + * API reference: https://cosm.com/docs/v2/ + + """ + + user_authorization_url = 'https://cosm.com/oauth/authenticate' + access_token_url = 'https://cosm.com/oauth/token' + user_info_url = '' + + @staticmethod + def _x_user_parser(user, data): + user.id = user.username = data.get('user') + return user + + +class DeviantART(OAuth2): + """ + DeviantART |oauth2| provider. + + * Dashboard: https://www.deviantart.com/settings/myapps + * Docs: https://www.deviantart.com/developers/authentication + * API reference: http://www.deviantart.com/developers/oauth2 + + .. note:: + + Although it is not documented anywhere, DeviantART requires the + *access token* request to contain a ``User-Agent`` header. + You can apply a default ``User-Agent`` header for all API calls in the + config like this: + + .. 
code-block:: python + :emphasize-lines: 6 + + CONFIG = { + 'deviantart': { + 'class_': oauth2.DeviantART, + 'consumer_key': '#####', + 'consumer_secret': '#####', + 'access_headers': {'User-Agent': 'Some User Agent'}, + } + } + + Supported :class:`.User` properties: + + * name + * picture + * username + + Unsupported :class:`.User` properties: + + * birth_date + * city + * country + * email + * first_name + * gender + * id + * last_name + * link + * locale + * nickname + * phone + * postal_code + * timezone + + """ + + user_authorization_url = 'https://www.deviantart.com/oauth2/authorize' + access_token_url = 'https://www.deviantart.com/oauth2/token' + user_info_url = 'https://www.deviantart.com/api/oauth2/user/whoami' + + user_info_scope = ['basic'] + + supported_user_attributes = core.SupportedUserAttributes( + name=True, + picture=True, + username=True + ) + + def __init__(self, *args, **kwargs): + super(DeviantART, self).__init__(*args, **kwargs) + + if self.offline: + if 'grant_type' not in self.access_token_params: + self.access_token_params['grant_type'] = 'refresh_token' + + @staticmethod + def _x_user_parser(user, data): + user.picture = data.get('usericonurl') + return user + + +class Eventbrite(OAuth2): + """ + Eventbrite |oauth2| provider. + + Thanks to `Paul Brown `__. + + * Dashboard: http://www.eventbrite.com/myaccount/apps/ + * Docs: https://developer.eventbrite.com/docs/auth/ + * API: http://developer.eventbrite.com/docs/ + + Supported :class:`.User` properties: + + * email + * first_name + * id + * last_name + * name + + Unsupported :class:`.User` properties: + + * birth_date + * city + * country + * gender + * link + * locale + * nickname + * phone + * picture + * postal_code + * timezone + * username + + """ + + user_authorization_url = 'https://www.eventbrite.com/oauth/authorize' + access_token_url = 'https://www.eventbrite.com/oauth/token' + user_info_url = 'https://www.eventbriteapi.com/v3/users/me' + + supported_user_attributes = core.SupportedUserAttributes( + email=True, + first_name=True, + id=True, + last_name=True, + name=True, + ) + + @classmethod + def _x_credentials_parser(cls, credentials, data): + if data.get('token_type') == 'bearer': + credentials.token_type = cls.BEARER + return credentials + + @staticmethod + def _x_user_parser(user, data): + for email in data.get('emails', []): + if email.get('primary'): + user.email = email.get('email') + break + + return user + + +class Facebook(OAuth2): + """ + Facebook |oauth2| provider. 
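+
+    A hedged sketch of the usual login round-trip (``authomatic`` is an
+    :class:`.Authomatic` instance whose config registers this class under
+    the key ``'fb'``, and ``adapter`` wraps the current request/response
+    for your framework):
+
+    .. code-block:: python
+
+        result = authomatic.login(adapter, 'fb')
+        if result and result.user:
+            # A second request fills in the remaining user attributes.
+            result.user.update()
+            print(result.user.name, result.user.email)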
+ + * Dashboard: https://developers.facebook.com/apps + * Docs: http://developers.facebook.com/docs/howtos/login/server-side-login/ + * API reference: http://developers.facebook.com/docs/reference/api/ + * API explorer: http://developers.facebook.com/tools/explorer + + Supported :class:`.User` properties: + + * birth_date + * email + * first_name + * id + * last_name + * name + * picture + + Unsupported :class:`.User` properties: + + * nickname + * phone + * postal_code + * username + + """ + user_authorization_url = 'https://www.facebook.com/dialog/oauth' + access_token_url = 'https://graph.facebook.com/oauth/access_token' + user_info_url = 'https://graph.facebook.com/v2.3/me' + user_info_scope = ['email', 'public_profile', 'user_birthday', + 'user_location'] + same_origin = False + + supported_user_attributes = core.SupportedUserAttributes( + birth_date=True, + city=False, + country=False, + email=True, + first_name=True, + gender=False, + id=True, + last_name=True, + link=False, + locale=False, + location=False, + name=True, + picture=True, + timezone=False, + username=False, + ) + + @classmethod + def _x_request_elements_filter(cls, request_type, request_elements, + credentials): + + if request_type == cls.REFRESH_TOKEN_REQUEST_TYPE: + # As always, Facebook has it's original name for "refresh_token"! + url, method, params, headers, body = request_elements + params['fb_exchange_token'] = params.pop('refresh_token') + params['grant_type'] = 'fb_exchange_token' + request_elements = core.RequestElements(url, method, params, + headers, body) + + return request_elements + + def __init__(self, *args, **kwargs): + super(Facebook, self).__init__(*args, **kwargs) + + # Handle special Facebook requirements to be able + # to refresh the access token. + if self.offline: + # Facebook needs an offline_access scope. + if 'offline_access' not in self.scope: + self.scope.append('offline_access') + + if self.popup: + self.user_authorization_url += '?display=popup' + + @staticmethod + def _x_user_parser(user, data): + _birth_date = data.get('birthday') + if _birth_date: + try: + user.birth_date = datetime.datetime.strptime(_birth_date, + '%m/%d/%Y') + except ValueError: + pass + + user.picture = ('http://graph.facebook.com/{0}/picture?type=large' + .format(user.id)) + + user.location = data.get('location', {}).get('name') + if user.location: + split_location = user.location.split(', ') + user.city = split_location[0].strip() + if len(split_location) > 1: + user.country = split_location[1].strip() + + return user + + @staticmethod + def _x_credentials_parser(credentials, data): + """ + We need to override this method to fix Facebooks naming deviation. + """ + + # Facebook returns "expires" instead of "expires_in". + credentials.expire_in = data.get('expires') + + if data.get('token_type') == 'bearer': + # TODO: cls is not available here, hardcode for now. + credentials.token_type = 'Bearer' + + return credentials + + @staticmethod + def _x_refresh_credentials_if(credentials): + # Always refresh. + return True + + def access(self, url, params=None, **kwargs): + if params is None: + params = {} + params['fields'] = 'id,first_name,last_name,picture,email,gender,' + \ + 'timezone,location,birthday,locale' + + return super(Facebook, self).access(url, params, **kwargs) + + +class Foursquare(OAuth2): + """ + Foursquare |oauth2| provider. 
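+
+    As the note below explains, every API request must carry a version
+    parameter. A sketch of overriding it through :meth:`.Authomatic.access`
+    (``authomatic`` and previously stored ``credentials`` are assumed to
+    exist already):
+
+    .. code-block:: python
+
+        response = authomatic.access(
+            credentials,
+            'https://api.foursquare.com/v2/users/self',
+            params={'v': '20140501'},
+        )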
+ + * Dashboard: https://foursquare.com/developers/apps + * Docs: https://developer.foursquare.com/overview/auth.html + * API reference: https://developer.foursquare.com/docs/ + + .. note:: + + Foursquare requires a *version* parameter in each request. + The default value is ``v=20140501``. You can override the version in + the ``params`` parameter of the :meth:`.Authomatic.access` method. + See https://developer.foursquare.com/overview/versioning + + Supported :class:`.User` properties: + + * city + * country + * email + * first_name + * gender + * id + * last_name + * location + * name + * phone + * picture + + Unsupported :class:`.User` properties: + + * birth_date + * link + * locale + * nickname + * postal_code + * timezone + * username + + """ + + user_authorization_url = 'https://foursquare.com/oauth2/authenticate' + access_token_url = 'https://foursquare.com/oauth2/access_token' + user_info_url = 'https://api.foursquare.com/v2/users/self' + + same_origin = False + + supported_user_attributes = core.SupportedUserAttributes( + birth_date=True, + city=True, + country=True, + email=True, + first_name=True, + gender=True, + id=True, + last_name=True, + location=True, + name=True, + phone=True, + picture=True + ) + + @classmethod + def _x_request_elements_filter(cls, request_type, request_elements, + credentials): + + if request_type == cls.PROTECTED_RESOURCE_REQUEST_TYPE: + # Foursquare uses OAuth 1.0 "oauth_token" for what should be + # "access_token" in OAuth 2.0! + url, method, params, headers, body = request_elements + params['oauth_token'] = params.pop('access_token') + + # Foursquare needs the version "v" parameter in every request. + # https://developer.foursquare.com/overview/versioning + if not params.get('v'): + params['v'] = '20140501' + + request_elements = core.RequestElements(url, method, params, + headers, body) + + return request_elements + + @staticmethod + def _x_user_parser(user, data): + + _resp = data.get('response', {}) + _user = _resp.get('user', {}) + + user.id = _user.get('id') + user.first_name = _user.get('firstName') + user.last_name = _user.get('lastName') + user.gender = _user.get('gender') + + _birth_date = _user.get('birthday') + if _birth_date: + user.birth_date = datetime.datetime.fromtimestamp(_birth_date) + + _photo = _user.get('photo', {}) + if isinstance(_photo, dict): + _photo_prefix = _photo.get('prefix', '').strip('/') + _photo_suffix = _photo.get('suffix', '').strip('/') + user.picture = '/'.join([_photo_prefix, _photo_suffix]) + + if isinstance(_photo, str): + user.picture = _photo + + user.location = _user.get('homeCity') + if user.location: + split_location = user.location.split(',') + user.city = split_location[0].strip() + if len(user.location) > 1: + user.country = split_location[1].strip() + + _contact = _user.get('contact', {}) + user.email = _contact.get('email') + user.phone = _contact.get('phone') + + return user + + +class Bitbucket(OAuth2): + + user_authorization_url = 'https://bitbucket.org/site/oauth2/authorize' + access_token_url = 'https://bitbucket.org/site/oauth2/access_token' + user_info_url = 'https://bitbucket.org/api/2.0/user' + user_email_info_url = 'https://bitbucket.org/api/2.0/user/emails' + + same_origin = False + + supported_user_attributes = core.SupportedUserAttributes( + id=True, + first_name=True, + last_name=True, + link=True, + name=True, + picture=True, + username=True, + email=True + ) + + @staticmethod + def _x_user_parser(user, data): + user.username = user.id = data.get('username') + user.name = 
data.get('display_name') + user.first_name = data.get('first_name') + user.last_name = data.get('last_name') + + return user + + @classmethod + def _x_credentials_parser(cls, credentials, data): + if data.get('token_type') == 'bearer': + credentials.token_type = cls.BEARER + return credentials + + def _access_user_info(self): + """ + Email is available in separate method so second request is needed. + """ + response = super(Bitbucket, self)._access_user_info() + + response.data.setdefault("email", None) + + email_response = self.access(self.user_email_info_url) + emails = email_response.data.get('values', []) + if emails: + for item in emails: + if item.get("is_primary", False): + response.data.update(email=item.get("email", None)) + + return response + + +class GitHub(OAuth2): + """ + GitHub |oauth2| provider. + + * Dashboard: https://github.com/settings/developers + * Docs: http://developer.github.com/v3/#authentication + * API reference: http://developer.github.com/v3/ + + .. note:: + + GitHub API + `documentation `_ + says: + + all API requests MUST include a valid ``User-Agent`` header. + + You can apply a default ``User-Agent`` header for all API calls in + the config like this: + + .. code-block:: python + :emphasize-lines: 6 + + CONFIG = { + 'github': { + 'class_': oauth2.GitHub, + 'consumer_key': '#####', + 'consumer_secret': '#####', + 'access_headers': {'User-Agent': 'Awesome-Octocat-App'}, + } + } + + Supported :class:`.User` properties: + + * email + * id + * link + * location + * name + * picture + * username + + Unsupported :class:`.User` properties: + + * birth_date + * city + * country + * first_name + * gender + * last_name + * locale + * nickname + * phone + * postal_code + * timezone + + """ + + user_authorization_url = 'https://github.com/login/oauth/authorize' + access_token_url = 'https://github.com/login/oauth/access_token' + user_info_url = 'https://api.github.com/user' + + same_origin = False + + supported_user_attributes = core.SupportedUserAttributes( + email=True, + id=True, + link=True, + location=True, + name=True, + picture=True, + username=True + ) + + @staticmethod + def _x_user_parser(user, data): + user.username = data.get('login') + user.picture = data.get('avatar_url') + user.link = data.get('html_url') + return user + + @classmethod + def _x_credentials_parser(cls, credentials, data): + if data.get('token_type') == 'bearer': + credentials.token_type = cls.BEARER + return credentials + + +class Google(OAuth2): + """ + Google |oauth2| provider. + + * Dashboard: https://console.developers.google.com/project + * Docs: https://developers.google.com/accounts/docs/OAuth2 + * API reference: https://developers.google.com/gdata/docs/directory + * API explorer: https://developers.google.com/oauthplayground/ + + Supported :class:`.User` properties: + + * email + * first_name + * gender + * id + * last_name + * link + * locale + * name + * picture + + Unsupported :class:`.User` properties: + + * birth_date + * city + * country + * nickname + * phone + * postal_code + * timezone + * username + + .. note:: + + To get the user info, you need to activate the **Google+ API** + in the **APIs & auth >> APIs** section of the`Google Developers Console + `__. 
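+
+    A minimal :doc:`config` sketch with placeholder credentials; setting
+    ``'offline': True`` makes ``__init__`` below add the
+    ``access_type=offline`` and ``approval_prompt=force`` parameters to
+    the authorization request:
+
+    .. code-block:: python
+
+        CONFIG = {
+            'google': {
+                'class_': oauth2.Google,
+                'consumer_key': '#####',
+                'consumer_secret': '#####',
+                'scope': oauth2.Google.user_info_scope,
+                'offline': True,
+            }
+        }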
+ + """ + + user_authorization_url = 'https://accounts.google.com/o/oauth2/auth' + access_token_url = 'https://accounts.google.com/o/oauth2/token' + user_info_url = 'https://www.googleapis.com/oauth2/v3/userinfo?alt=json' + + user_info_scope = ['profile', + 'email'] + + supported_user_attributes = core.SupportedUserAttributes( + id=True, + email=True, + name=True, + first_name=True, + last_name=True, + locale=True, + picture=True + ) + + def __init__(self, *args, **kwargs): + super(Google, self).__init__(*args, **kwargs) + + # Handle special Google requirements to be able to refresh the access + # token. + if self.offline: + if 'access_type' not in self.user_authorization_params: + # Google needs access_type=offline param in the user + # authorization request. + self.user_authorization_params['access_type'] = 'offline' + if 'approval_prompt' not in self.user_authorization_params: + # And also approval_prompt=force. + self.user_authorization_params['approval_prompt'] = 'force' + + @classmethod + def _x_request_elements_filter(cls, request_type, request_elements, + credentials): + """ + Google doesn't accept client ID and secret to be at the same time in + request parameters and in the basic authorization header in the access + token request. + """ + if request_type is cls.ACCESS_TOKEN_REQUEST_TYPE: + params = request_elements[2] + del params['client_id'] + del params['client_secret'] + return request_elements + + @staticmethod + def _x_user_parser(user, data): + emails = data.get('emails', []) + if emails: + user.email = emails[0].get('value') + for email in emails: + if email.get('type') == 'account': + user.email = email.get('value') + break + + user.id = data.get('sub') + user.name = data.get('name') + user.first_name = data.get('given_name', '') + user.last_name = data.get('family_name', '') + user.locale = data.get('locale', '') + user.picture = data.get('picture', '') + + user.email_verified = data.get("email_verified") + user.hosted_domain = data.get("hd") + return user + + def _x_scope_parser(self, scope): + """ + Google has space-separated scopes. + """ + return ' '.join(scope) + + +class LinkedIn(OAuth2): + """ + Linked In |oauth2| provider. + + .. note:: + + Doesn't support access token refreshment. 
+ + * Dashboard: https://www.linkedin.com/secure/developer + * Docs: http://developer.linkedin.com/documents/authentication + * API reference: http://developer.linkedin.com/rest + + Supported :class:`.User` properties: + + * city + * country + * email + * first_name + * id + * last_name + * link + * name + * picture + + Unsupported :class:`.User` properties: + + * birth_date + * gender + * locale + * location + * nickname + * phone + * postal_code + * timezone + * username + + """ + + user_authorization_url = 'https://www.linkedin.com/uas/oauth2/' + \ + 'authorization' + access_token_url = 'https://www.linkedin.com/uas/oauth2/accessToken' + user_info_url = ('https://api.linkedin.com/v1/people/~:' + '(id,first-name,last-name,formatted-name,location,' + 'picture-url,public-profile-url,email-address)' + '?format=json') + + user_info_scope = ['r_emailaddress'] + + token_request_method = 'GET' # To avoid a bug with OAuth2.0 on Linkedin + # http://developer.linkedin.com/forum/unauthorized-invalid-or-expired-token-immediately-after-receiving-oauth2-token + + supported_user_attributes = core.SupportedUserAttributes( + city=True, + country=True, + email=True, + first_name=True, + id=True, + last_name=True, + link=True, + location=False, + name=True, + picture=True + ) + + @classmethod + def _x_request_elements_filter(cls, request_type, request_elements, + credentials): + if request_type == cls.PROTECTED_RESOURCE_REQUEST_TYPE: + # LinkedIn too has it's own terminology! + url, method, params, headers, body = request_elements + params['oauth2_access_token'] = params.pop('access_token') + request_elements = core.RequestElements(url, method, params, + headers, body) + + return request_elements + + @staticmethod + def _x_user_parser(user, data): + + user.first_name = data.get('firstName') + user.last_name = data.get('lastName') + user.email = data.get('emailAddress') + user.name = data.get('formattedName') + user.city = user.city = data.get('location', {}).get('name') + user.country = data.get('location', {}).get('country', {}).get('code') + user.phone = data.get('phoneNumbers', {}).get('values', [{}])[0]\ + .get('phoneNumber') + user.picture = data.get('pictureUrl') + user.link = data.get('publicProfileUrl') + + _birthdate = data.get('dateOfBirth', {}) + if _birthdate: + _day = _birthdate.get('day') + _month = _birthdate.get('month') + _year = _birthdate.get('year') + if _day and _month and _year: + user.birth_date = datetime.datetime(_year, _month, _day) + + return user + + +class PayPal(OAuth2): + """ + PayPal |oauth2| provider. + + * Dashboard: https://developer.paypal.com/webapps/developer/applications + * Docs: + https://developer.paypal.com/webapps/developer/docs/integration/direct/make-your-first-call/ + * API reference: https://developer.paypal.com/webapps/developer/docs/api/ + + .. note:: + + Paypal doesn't redirect the **user** to authorize your app! + It grants you an **access token** based on your **app's** key and + secret instead. 
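+
+    Because ``user_authorization_url`` is empty, the base class ``login()``
+    skips the redirect phase and goes straight to the token request, to
+    which ``_x_request_elements_filter`` below adds
+    ``grant_type=client_credentials``. Roughly equivalent raw request
+    (a sketch only, using the ``requests`` package for illustration):
+
+    .. code-block:: python
+
+        import requests
+
+        response = requests.post(
+            'https://api.sandbox.paypal.com/v1/oauth2/token',
+            auth=('#####', '#####'),  # consumer key / secret placeholders
+            data={'grant_type': 'client_credentials'},
+        )
+        access_token = response.json().get('access_token')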
+ + """ + + _x_use_authorization_header = True + + supported_user_attributes = core.SupportedUserAttributes() + + @classmethod + def _x_request_elements_filter( + cls, request_type, request_elements, credentials): + + if request_type == cls.ACCESS_TOKEN_REQUEST_TYPE: + url, method, params, headers, body = request_elements + params['grant_type'] = 'client_credentials' + request_elements = core.RequestElements( + url, method, params, headers, body) + + return request_elements + + user_authorization_url = '' + access_token_url = 'https://api.sandbox.paypal.com/v1/oauth2/token' + user_info_url = '' + + +class Reddit(OAuth2): + """ + Reddit |oauth2| provider. + + .. note:: + + Currently credentials refreshment returns + ``{"error": "invalid_request"}``. + + * Dashboard: https://ssl.reddit.com/prefs/apps + * Docs: https://github.com/reddit/reddit/wiki/OAuth2 + * API reference: http://www.reddit.com/dev/api + + .. note:: + + According to Reddit API + `docs `_, + you have to include a `User-Agent` header in each API call. + + You can apply a default ``User-Agent`` header for all API calls in the + config like this: + + .. code-block:: python + :emphasize-lines: 6 + + CONFIG = { + 'reddit': { + 'class_': oauth2.Reddit, + 'consumer_key': '#####', + 'consumer_secret': '#####', + 'access_headers': {'User-Agent': "Andy Pipkin's App"}, + } + } + + Supported :class:`.User` properties: + + * id + * username + + Unsupported :class:`.User` properties: + + * birth_date + * country + * city + * email + * first_name + * gender + * last_name + * link + * locale + * location + * name + * nickname + * phone + * picture + * postal_code + * timezone + + """ + + user_authorization_url = 'https://ssl.reddit.com/api/v1/authorize' + access_token_url = 'https://ssl.reddit.com/api/v1/access_token' + user_info_url = 'https://oauth.reddit.com/api/v1/me.json' + + user_info_scope = ['identity'] + + supported_user_attributes = core.SupportedUserAttributes( + id=True, + name=True, + username=True + ) + + def __init__(self, *args, **kwargs): + super(Reddit, self).__init__(*args, **kwargs) + + if self.offline: + if 'duration' not in self.user_authorization_params: + # http://www.reddit.com/r/changelog/comments/11jab9/reddit_change_permanent_oauth_grants_using/ + self.user_authorization_params['duration'] = 'permanent' + + @classmethod + def _x_credentials_parser(cls, credentials, data): + if data.get('token_type') == 'bearer': + credentials.token_type = cls.BEARER + return credentials + + @staticmethod + def _x_user_parser(user, data): + user.username = data.get('name') + return user + + +class Viadeo(OAuth2): + """ + Viadeo |oauth2| provider. + + .. note:: + + As stated in the `Viadeo documentation + `__: + + Viadeo restrains access to its API. + They are now exclusively reserved for its strategic partners. + + * Dashboard: http://dev.viadeo.com/dashboard/ + * Docs: + http://dev.viadeo.com/documentation/authentication/oauth-authentication/ + * API reference: http://dev.viadeo.com/documentation/ + + .. note:: + + Viadeo doesn't support **credentials refreshment**. + As stated in their + `docs + `_: + "The access token has an infinite time to live." 
+ + """ + + user_authorization_url = 'https://secure.viadeo.com/oauth-provider/' + \ + 'authorize2' + access_token_url = 'https://secure.viadeo.com/oauth-provider/access_token2' + user_info_url = 'https://api.viadeo.com/me' + + @classmethod + def _x_credentials_parser(cls, credentials, data): + if data.get('token_type') == 'bearer_token': + credentials.token_type = cls.BEARER + return credentials + + @staticmethod + def _x_refresh_credentials_if(credentials): + # Never refresh. + return False + + @staticmethod + def _x_user_parser(user, data): + user.username = data.get('nickname') + user.picture = data.get('picture_large') + user.picture = data.get('picture_large') + user.locale = data.get('language') + user.email = data.get('') + user.email = data.get('') + user.country = data.get('location', {}).get('country') + user.city = data.get('location', {}).get('city') + user.postal_code = data.get('location', {}).get('zipcode') + user.timezone = data.get('location', {}).get('timezone') + + return user + + +class VK(OAuth2): + """ + VK.com |oauth2| provider. + + * Dashboard: http://vk.com/apps?act=manage + * Docs: http://vk.com/developers.php?oid=-17680044&p=Authorizing_Sites + * API reference: http://vk.com/developers.php?oid=-17680044&p=API_ + Method_Description + + .. note:: + + VK uses a + `bitmask scope + `_! + Use it like this: + + .. code-block:: python + :emphasize-lines: 7 + + CONFIG = { + 'vk': { + 'class_': oauth2.VK, + 'consumer_key': '#####', + 'consumer_secret': '#####', + 'id': authomatic.provider_id(), + 'scope': ['1024'] # Always a single item. + } + } + + Supported :class:`.User` properties: + + * birth_date + * city + * country + * first_name + * gender + * id + * last_name + * location + * name + * picture + * timezone + + Unsupported :class:`.User` properties: + + * email + * link + * locale + * nickname + * phone + * postal_code + * username + + """ + + user_authorization_url = 'http://api.vkontakte.ru/oauth/authorize' + access_token_url = 'https://api.vkontakte.ru/oauth/access_token' + user_info_url = 'https://api.vk.com/method/getProfiles?' + \ + 'fields=uid,first_name,last_name,nickname,sex,bdate,' + \ + 'city,country,timezone,photo_big' + + supported_user_attributes = core.SupportedUserAttributes( + birth_date=True, + city=True, + country=True, + first_name=True, + gender=True, + id=True, + last_name=True, + location=True, + name=True, + picture=True, + timezone=True, + ) + + def __init__(self, *args, **kwargs): + super(VK, self).__init__(*args, **kwargs) + + if self.offline: + if 'offline' not in self.scope: + self.scope.append('offline') + + @staticmethod + def _x_user_parser(user, data): + _resp = data.get('response', [{}])[0] + + _birth_date = _resp.get('bdate') + if _birth_date: + user.birth_date = datetime.datetime.strptime( + _birth_date, '%d.%m.%Y') + user.id = _resp.get('uid') + user.first_name = _resp.get('first_name') + user.gender = _resp.get('sex') + user.last_name = _resp.get('last_name') + user.nickname = _resp.get('nickname') + user.city = _resp.get('city') + user.country = _resp.get('country') + user.timezone = _resp.get('timezone') + user.picture = _resp.get('photo_big') + + return user + + +class WindowsLive(OAuth2): + """ + Windows Live |oauth2| provider. 
+ + * Dashboard: https://account.live.com/developers/applications + * Docs: http://msdn.microsoft.com/en-us/library/hh243647.aspx + * API explorer: http://isdk.dev.live.com/?mkt=en-us + + Supported :class:`.User` properties: + + * email + * first_name + * id + * last_name + * link + * locale + * name + * picture + + Unsupported :class:`.User` properties: + + * birth_date + * city + * country + * gender + * nickname + * location + * phone + * postal_code + * timezone + * username + + """ + + user_authorization_url = 'https://login.live.com/oauth20_authorize.srf' + access_token_url = 'https://login.live.com/oauth20_token.srf' + user_info_url = 'https://apis.live.net/v5.0/me' + + user_info_scope = ['wl.basic', 'wl.emails', 'wl.photos'] + + supported_user_attributes = core.SupportedUserAttributes( + email=True, + first_name=True, + id=True, + last_name=True, + link=True, + locale=True, + name=True, + picture=True + ) + + def __init__(self, *args, **kwargs): + super(WindowsLive, self).__init__(*args, **kwargs) + + if self.offline: + if 'wl.offline_access' not in self.scope: + self.scope.append('wl.offline_access') + + @classmethod + def _x_credentials_parser(cls, credentials, data): + if data.get('token_type') == 'bearer': + credentials.token_type = cls.BEARER + return credentials + + @staticmethod + def _x_user_parser(user, data): + user.email = data.get('emails', {}).get('preferred') + user.picture = 'https://apis.live.net/v5.0/{0}/picture'.format( + data.get('id')) + return user + + +class Yammer(OAuth2): + """ + Yammer |oauth2| provider. + + * Dashboard: https://www.yammer.com/client_applications + * Docs: https://developer.yammer.com/authentication/ + * API reference: https://developer.yammer.com/restapi/ + + Supported :class:`.User` properties: + + * birth_date + * city + * country + * email + * first_name + * id + * last_name + * link + * locale + * location + * name + * phone + * picture + * timezone + * username + + Unsupported :class:`.User` properties: + + * gender + * nickname + * postal_code + + """ + + user_authorization_url = 'https://www.yammer.com/dialog/oauth' + access_token_url = 'https://www.yammer.com/oauth2/access_token.json' + user_info_url = 'https://www.yammer.com/api/v1/users/current.json' + + supported_user_attributes = core.SupportedUserAttributes( + birth_date=True, + city=True, + country=True, + email=True, + first_name=True, + id=True, + last_name=True, + link=True, + locale=True, + location=True, + name=True, + phone=True, + picture=True, + timezone=True, + username=True + ) + + @classmethod + def _x_credentials_parser(cls, credentials, data): + # import pdb; pdb.set_trace() + credentials.token_type = cls.BEARER + _access_token = data.get('access_token', {}) + credentials.token = _access_token.get('token') + _expire_in = _access_token.get('expires_at', 0) + if _expire_in: + credentials.expire_in = _expire_in + return credentials + + @staticmethod + def _x_user_parser(user, data): + + # Yammer provides most of the user info in the access token request, + # but provides more on in user info request. + _user = data.get('user', {}) + if not _user: + # If there is "user key", it is token request. 
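+            # (I.e. when there is no nested "user" object the payload is the
+            # access-token response, which carries the user fields at the
+            # top level, so the whole dict is treated as the user.)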
+ _user = data + + user.username = _user.get('name') + user.name = _user.get('full_name') + user.link = _user.get('web_url') + user.picture = _user.get('mugshot_url') + + user.city, user.country = _user.get('location', ',').split(',') + user.city = user.city.strip() + user.country = user.country.strip() + user.locale = _user.get('web_preferences', {}).get('locale') + + # Contact + _contact = _user.get('contact', {}) + user.phone = _contact.get('phone_numbers', [{}])[0].get('number') + _emails = _contact.get('email_addresses', []) + for email in _emails: + if email.get('type', '') == 'primary': + user.email = email.get('address') + break + + try: + user.birth_date = datetime.datetime.strptime( + _user.get('birth_date'), "%B %d") + except ValueError: + user.birth_date = _user.get('birth_date') + + return user + + +class Yandex(OAuth2): + """ + Yandex |oauth2| provider. + + * Dashboard: https://oauth.yandex.com/client/my + * Docs: + http://api.yandex.com/oauth/doc/dg/reference/obtain-access-token.xml + * API reference: + + Supported :class:`.User` properties: + + * id + * name + * username + + Unsupported :class:`.User` properties: + + * birth_date + * city + * country + * email + * first_name + * gender + * last_name + * link + * locale + * location + * nickname + * phone + * picture + * postal_code + * timezone + + """ + + user_authorization_url = 'https://oauth.yandex.com/authorize' + access_token_url = 'https://oauth.yandex.com/token' + user_info_url = 'https://login.yandex.ru/info' + + supported_user_attributes = core.SupportedUserAttributes( + id=True, + name=True, + username=True + ) + + @classmethod + def _x_credentials_parser(cls, credentials, data): + if data.get('token_type') == 'bearer': + credentials.token_type = cls.BEARER + return credentials + + @staticmethod + def _x_user_parser(user, data): + + # http://api.yandex.ru/login/doc/dg/reference/response.xml + user.name = data.get('real_name') + user.nickname = data.get('display_name') + user.gender = data.get('Sex') + user.email = data.get('Default_email') + user.username = data.get('login') + + try: + user.birth_date = datetime.datetime.strptime( + data.get('birthday'), "%Y-%m-%d") + except ValueError: + user.birth_date = data.get('birthday') + + return user + + +# The provider type ID is generated from this list's indexes! +# Always append new providers at the end so that ids of existing providers +# don't change! +PROVIDER_ID_MAP = [ + Amazon, + Behance, + Bitly, + Bitbucket, + Cosm, + DeviantART, + Eventbrite, + Facebook, + Foursquare, + GitHub, + Google, + LinkedIn, + OAuth2, + PayPal, + Reddit, + Viadeo, + VK, + WindowsLive, + Yammer, + Yandex, +] diff --git a/rhodecode/lib/_vendor/authomatic/providers/openid.py b/rhodecode/lib/_vendor/authomatic/providers/openid.py new file mode 100755 --- /dev/null +++ b/rhodecode/lib/_vendor/authomatic/providers/openid.py @@ -0,0 +1,505 @@ +# -*- coding: utf-8 -*- +""" +|openid| Providers +---------------------------------- + +Providers which implement the |openid|_ protocol based on the +`python-openid`_ library. + +.. warning:: + + This providers are dependent on the |pyopenid|_ package. + +.. 
autosummary:: + + OpenID + Yahoo + Google + +""" + +# We need absolute import to import from openid library which has the same +# name as this module +from __future__ import absolute_import +import datetime +import logging +import time + +from openid import oidutil +from openid.consumer import consumer +from openid.extensions import ax, pape, sreg +from openid.association import Association + +from authomatic import providers +from authomatic.exceptions import FailureError, CancellationError, OpenIDError + + +__all__ = ['OpenID', 'Yahoo', 'Google'] + + +# Suppress openid logging. +oidutil.log = lambda message, level=0: None + + +REALM_HTML = \ + """ + + + + + + {body} + +""" + + +XRDS_XML = \ + """ + + + + + http://specs.openid.net/auth/2.0/return_to + {return_to} + + + +""" + + +class SessionOpenIDStore(object): + """ + A very primitive session-based implementation of the. + + :class:`openid.store.interface.OpenIDStore` interface of the + `python-openid`_ library. + + .. warning:: + + Nonces get verified only by their timeout. Use on your own risk! + + """ + + @staticmethod + def _log(level, message): + return None + + ASSOCIATION_KEY = ('authomatic.providers.openid.SessionOpenIDStore:' + 'association') + + def __init__(self, session, nonce_timeout=None): + """ + :param int nonce_timeout: + + Nonces older than this in seconds will be considered expired. + Default is 600. + """ + self.session = session + self.nonce_timeout = nonce_timeout or 600 + + def storeAssociation(self, server_url, association): + self._log(logging.DEBUG, + 'SessionOpenIDStore: Storing association to session.') + + serialized = association.serialize() + decoded = serialized.decode('latin-1') + + assoc = decoded + # assoc = serialized + + # Always store only one association as a tuple. + self.session[self.ASSOCIATION_KEY] = (server_url, association.handle, + assoc) + + def getAssociation(self, server_url, handle=None): + # Try to get association. + assoc = self.session.get(self.ASSOCIATION_KEY) + if assoc and assoc[0] == server_url: + # If found deserialize and return it. + self._log(logging.DEBUG, u'SessionOpenIDStore: Association found.') + return Association.deserialize(assoc[2].encode('latin-1')) + else: + self._log(logging.DEBUG, + u'SessionOpenIDStore: Association not found.') + + def removeAssociation(self, server_url, handle): + # Just inform the caller that it's gone. + return True + + def useNonce(self, server_url, timestamp, salt): + # Evaluate expired nonces as false. + age = int(time.time()) - int(timestamp) + if age < self.nonce_timeout: + return True + else: + self._log(logging.ERROR, u'SessionOpenIDStore: Expired nonce!') + return False + + +class OpenID(providers.AuthenticationProvider): + """ + |openid|_ provider based on the `python-openid`_ library. 
+ """ + + AX = ['http://axschema.org/contact/email', + 'http://schema.openid.net/contact/email', + 'http://axschema.org/namePerson', + 'http://openid.net/schema/namePerson/first', + 'http://openid.net/schema/namePerson/last', + 'http://openid.net/schema/gender', + 'http://openid.net/schema/language/pref', + 'http://openid.net/schema/contact/web/default', + 'http://openid.net/schema/media/image', + 'http://openid.net/schema/timezone'] + + AX_REQUIRED = ['http://schema.openid.net/contact/email'] + + SREG = ['nickname', + 'email', + 'fullname', + 'dob', + 'gender', + 'postcode', + 'country', + 'language', + 'timezone'] + + PAPE = [ + 'http://schemas.openid.net/pape/policies/2007/06/' + 'multi-factor-physical', + 'http://schemas.openid.net/pape/policies/2007/06/multi-factor', + 'http://schemas.openid.net/pape/policies/2007/06/phishing-resistant' + ] + + def __init__(self, *args, **kwargs): + """ + Accepts additional keyword arguments: + + :param store: + Any object which implements + :class:`openid.store.interface.OpenIDStore` + of the `python-openid`_ library. + + :param bool use_realm: + Whether to use `OpenID realm + `_ + If ``True`` the realm HTML document will be accessible at + ``{current url}?{realm_param}={realm_param}`` + e.g. ``http://example.com/path?realm=realm``. + + :param str realm_body: + Contents of the HTML body tag of the realm. + + :param str realm_param: + Name of the query parameter to be used to serve the realm. + + :param str xrds_param: + The name of the query parameter to be used to serve the + `XRDS document + `_. + + :param list sreg: + List of strings of optional + `SREG + `_ + fields. + Default = :attr:`OpenID.SREG`. + + :param list sreg_required: + List of strings of required + `SREG + `_ + fields. + Default = ``[]``. + + :param list ax: + List of strings of optional + `AX + `_ + schemas. + Default = :attr:`OpenID.AX`. + + :param list ax_required: + List of strings of required + `AX + `_ + schemas. + Default = :attr:`OpenID.AX_REQUIRED`. + + :param list pape: + of requested + `PAPE + `_ + policies. + Default = :attr:`OpenID.PAPE`. + + As well as those inherited from :class:`.AuthenticationProvider` + constructor. + + """ + + super(OpenID, self).__init__(*args, **kwargs) + + # Allow for other openid store implementations. 
+ self.store = self._kwarg( + kwargs, 'store', SessionOpenIDStore( + self.session)) + + # Realm + self.use_realm = self._kwarg(kwargs, 'use_realm', True) + self.realm_body = self._kwarg(kwargs, 'realm_body', '') + self.realm_param = self._kwarg(kwargs, 'realm_param', 'realm') + self.xrds_param = self._kwarg(kwargs, 'xrds_param', 'xrds') + + # SREG + self.sreg = self._kwarg(kwargs, 'sreg', self.SREG) + self.sreg_required = self._kwarg(kwargs, 'sreg_required', []) + + # AX + self.ax = self._kwarg(kwargs, 'ax', self.AX) + self.ax_required = self._kwarg(kwargs, 'ax_required', self.AX_REQUIRED) + # add required schemas to schemas if not already there + for i in self.ax_required: + if i not in self.ax: + self.ax.append(i) + + # PAPE + self.pape = self._kwarg(kwargs, 'pape', self.PAPE) + + @staticmethod + def _x_user_parser(user, data): + + user.first_name = data.get('ax', {}).get( + 'http://openid.net/schema/namePerson/first') + user.last_name = data.get('ax', {}).get( + 'http://openid.net/schema/namePerson/last') + user.id = data.get('guid') + user.link = data.get('ax', {}).get( + 'http://openid.net/schema/contact/web/default') + user.picture = data.get('ax', {}).get( + 'http://openid.net/schema/media/image') + user.nickname = data.get('sreg', {}).get('nickname') + user.country = data.get('sreg', {}).get('country') + user.postal_code = data.get('sreg', {}).get('postcode') + + user.name = data.get('sreg', {}).get('fullname') or \ + data.get('ax', {}).get('http://axschema.org/namePerson') + + user.gender = data.get('sreg', {}).get('gender') or \ + data.get('ax', {}).get('http://openid.net/schema/gender') + + user.locale = data.get('sreg', {}).get('language') or \ + data.get('ax', {}).get('http://openid.net/schema/language/pref') + + user.timezone = data.get('sreg', {}).get('timezone') or \ + data.get('ax', {}).get('http://openid.net/schema/timezone') + + user.email = data.get('sreg', {}).get('email') or \ + data.get('ax', {}).get('http://axschema.org/contact/email') or \ + data.get('ax', {}).get('http://schema.openid.net/contact/email') + + if data.get('sreg', {}).get('dob'): + user.birth_date = datetime.datetime.strptime( + data.get('sreg', {}).get('dob'), + '%Y-%m-%d' + ) + else: + user.birth_date = None + + return user + + @providers.login_decorator + def login(self): + # Instantiate consumer + self.store._log = self._log + oi_consumer = consumer.Consumer(self.session, self.store) + + # handle realm and XRDS if there is only one query parameter + if self.use_realm and len(self.params) == 1: + realm_request = self.params.get(self.realm_param) + xrds_request = self.params.get(self.xrds_param) + else: + realm_request = None + xrds_request = None + + # determine type of request + if realm_request: + # ================================================================= + # Realm HTML + # ================================================================= + + self._log( + logging.INFO, + u'Writing OpenID realm HTML to the response.') + xrds_location = '{u}?{x}={x}'.format(u=self.url, x=self.xrds_param) + self.write( + REALM_HTML.format( + xrds_location=xrds_location, + body=self.realm_body)) + + elif xrds_request: + # ================================================================= + # XRDS XML + # ================================================================= + + self._log( + logging.INFO, + u'Writing XRDS XML document to the response.') + self.set_header('Content-Type', 'application/xrds+xml') + self.write(XRDS_XML.format(return_to=self.url)) + + elif self.params.get('openid.mode'): + # 
================================================================= + # Phase 2 after redirect + # ================================================================= + + self._log( + logging.INFO, + u'Continuing OpenID authentication procedure after redirect.') + + # complete the authentication process + response = oi_consumer.complete(self.params, self.url) + + # on success + if response.status == consumer.SUCCESS: + + data = {} + + # get user ID + data['guid'] = response.getDisplayIdentifier() + + self._log(logging.INFO, u'Authentication successful.') + + # get user data from AX response + ax_response = ax.FetchResponse.fromSuccessResponse(response) + if ax_response and ax_response.data: + self._log(logging.INFO, u'Got AX data.') + ax_data = {} + # convert iterable values to their first item + for k, v in ax_response.data.items(): + if v and isinstance(v, (list, tuple)): + ax_data[k] = v[0] + data['ax'] = ax_data + + # get user data from SREG response + sreg_response = sreg.SRegResponse.fromSuccessResponse(response) + if sreg_response and sreg_response.data: + self._log(logging.INFO, u'Got SREG data.') + data['sreg'] = sreg_response.data + + # get data from PAPE response + pape_response = pape.Response.fromSuccessResponse(response) + if pape_response and pape_response.auth_policies: + self._log(logging.INFO, u'Got PAPE data.') + data['pape'] = pape_response.auth_policies + + # create user + self._update_or_create_user(data) + + # ============================================================= + # We're done! + # ============================================================= + + elif response.status == consumer.CANCEL: + raise CancellationError( + u'User cancelled the verification of ID "{0}"!'.format( + response.getDisplayIdentifier())) + + elif response.status == consumer.FAILURE: + raise FailureError(response.message) + + elif self.identifier: # As set in AuthenticationProvider.__init__ + # ================================================================= + # Phase 1 before redirect + # ================================================================= + + self._log( + logging.INFO, + u'Starting OpenID authentication procedure.') + + # get AuthRequest object + try: + auth_request = oi_consumer.begin(self.identifier) + except consumer.DiscoveryFailure as e: + raise FailureError( + u'Discovery failed for identifier {0}!'.format( + self.identifier + ), + url=self.identifier, + original_message=e.message) + + self._log( + logging.INFO, + u'Service discovery for identifier {0} successful.'.format( + self.identifier)) + + # add SREG extension + # we need to remove required fields from optional fields because + # addExtension then raises an error + self.sreg = [i for i in self.sreg if i not in self.sreg_required] + auth_request.addExtension( + sreg.SRegRequest( + optional=self.sreg, + required=self.sreg_required) + ) + + # add AX extension + ax_request = ax.FetchRequest() + # set AX schemas + for i in self.ax: + required = i in self.ax_required + ax_request.add(ax.AttrInfo(i, required=required)) + auth_request.addExtension(ax_request) + + # add PAPE extension + auth_request.addExtension(pape.Request(self.pape)) + + # prepare realm and return_to URLs + if self.use_realm: + realm = return_to = '{u}?{r}={r}'.format( + u=self.url, r=self.realm_param) + else: + realm = return_to = self.url + + url = auth_request.redirectURL(realm, return_to) + + if auth_request.shouldSendRedirect(): + # can be redirected + url = auth_request.redirectURL(realm, return_to) + self._log( + logging.INFO, + u'Redirecting user 
to {0}.'.format(url)) + self.redirect(url) + else: + # must be sent as POST + # this writes a html post form with auto-submit + self._log( + logging.INFO, + u'Writing an auto-submit HTML form to the response.') + form = auth_request.htmlMarkup( + realm, return_to, False, dict( + id='openid_form')) + self.write(form) + else: + raise OpenIDError('No identifier specified!') + + +class Yahoo(OpenID): + """ + Yahoo :class:`.OpenID` provider with the :attr:`.identifier` predefined to + ``"me.yahoo.com"``. + """ + + identifier = 'me.yahoo.com' + + +class Google(OpenID): + """ + Google :class:`.OpenID` provider with the :attr:`.identifier` predefined to + ``"https://www.google.com/accounts/o8/id"``. + """ + + identifier = 'https://www.google.com/accounts/o8/id' diff --git a/rhodecode/lib/_vendor/authomatic/providers/persona.py b/rhodecode/lib/_vendor/authomatic/providers/persona.py new file mode 100755 --- /dev/null +++ b/rhodecode/lib/_vendor/authomatic/providers/persona.py @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +from authomatic import providers + + +class MozillaPersona(providers.AuthenticationProvider): + pass diff --git a/rhodecode/lib/_vendor/authomatic/six.py b/rhodecode/lib/_vendor/authomatic/six.py new file mode 100755 --- /dev/null +++ b/rhodecode/lib/_vendor/authomatic/six.py @@ -0,0 +1,839 @@ +# -*- coding: utf-8 -*- +"""Utilities for writing code that runs on Python 2 and 3""" + +# Copyright (c) 2010-2015 Benjamin Peterson +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +from __future__ import absolute_import + +import functools +import itertools +import operator +import sys +import types + +__author__ = "Benjamin Peterson " +__version__ = "1.9.0" + + +# Useful for very coarse version differentiation. +PY2 = sys.version_info[0] == 2 +PY3 = sys.version_info[0] == 3 + +if PY3: + string_types = str, + integer_types = int, + class_types = type, + text_type = str + binary_type = bytes + + MAXSIZE = sys.maxsize +else: + string_types = basestring, + integer_types = (int, long) + class_types = (type, types.ClassType) + text_type = unicode + binary_type = str + + if sys.platform.startswith("java"): + # Jython always uses 32 bits. + MAXSIZE = int((1 << 31) - 1) + else: + # It's possible to have sizeof(long) != sizeof(Py_ssize_t). 
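        # The probe below leans on CPython raising OverflowError when len()
        # gets a __len__ result that does not fit into Py_ssize_t: 1 << 31
        # overflows a 32-bit Py_ssize_t but not a 64-bit one, so MAXSIZE can
        # be inferred without ctypes or platform-specific checks.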
+ class X(object): + def __len__(self): + return 1 << 31 + try: + len(X()) + except OverflowError: + # 32-bit + MAXSIZE = int((1 << 31) - 1) + else: + # 64-bit + MAXSIZE = int((1 << 63) - 1) + del X + + +def _add_doc(func, doc): + """Add documentation to a function.""" + func.__doc__ = doc + + +def _import_module(name): + """Import module, returning the module after the last dot.""" + __import__(name) + return sys.modules[name] + + +class _LazyDescr(object): + + def __init__(self, name): + self.name = name + + def __get__(self, obj, tp): + result = self._resolve() + setattr(obj, self.name, result) # Invokes __set__. + try: + # This is a bit ugly, but it avoids running this again by + # removing this descriptor. + delattr(obj.__class__, self.name) + except AttributeError: + pass + return result + + +class MovedModule(_LazyDescr): + + def __init__(self, name, old, new=None): + super(MovedModule, self).__init__(name) + if PY3: + if new is None: + new = name + self.mod = new + else: + self.mod = old + + def _resolve(self): + return _import_module(self.mod) + + def __getattr__(self, attr): + _module = self._resolve() + value = getattr(_module, attr) + setattr(self, attr, value) + return value + + +class _LazyModule(types.ModuleType): + + def __init__(self, name): + super(_LazyModule, self).__init__(name) + self.__doc__ = self.__class__.__doc__ + + def __dir__(self): + attrs = ["__doc__", "__name__"] + attrs += [attr.name for attr in self._moved_attributes] + return attrs + + # Subclasses should override this + _moved_attributes = [] + + +class MovedAttribute(_LazyDescr): + + def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): + super(MovedAttribute, self).__init__(name) + if PY3: + if new_mod is None: + new_mod = name + self.mod = new_mod + if new_attr is None: + if old_attr is None: + new_attr = name + else: + new_attr = old_attr + self.attr = new_attr + else: + self.mod = old_mod + if old_attr is None: + old_attr = name + self.attr = old_attr + + def _resolve(self): + module = _import_module(self.mod) + return getattr(module, self.attr) + + +class _SixMetaPathImporter(object): + """ + A meta path importer to import six.moves and its submodules. + + This class implements a PEP302 finder and loader. It should be compatible + with Python 2.5 and all existing versions of Python3 + """ + def __init__(self, six_module_name): + self.name = six_module_name + self.known_modules = {} + + def _add_module(self, mod, *fullnames): + for fullname in fullnames: + self.known_modules[self.name + "." + fullname] = mod + + def _get_module(self, fullname): + return self.known_modules[self.name + "." + fullname] + + def find_module(self, fullname, path=None): + if fullname in self.known_modules: + return self + return None + + def __get_module(self, fullname): + try: + return self.known_modules[fullname] + except KeyError: + raise ImportError("This loader does not know module " + fullname) + + def load_module(self, fullname): + try: + # in case of a reload + return sys.modules[fullname] + except KeyError: + pass + mod = self.__get_module(fullname) + if isinstance(mod, MovedModule): + mod = mod._resolve() + else: + mod.__loader__ = self + sys.modules[fullname] = mod + return mod + + def is_package(self, fullname): + """ + Return true, if the named module is a package. 
+ + We need this method to get correct spec objects with + Python 3.4 (see PEP451) + """ + return hasattr(self.__get_module(fullname), "__path__") + + def get_code(self, fullname): + """Return None + + Required, if is_package is implemented""" + self.__get_module(fullname) # eventually raises ImportError + return None + get_source = get_code # same as get_code + +_importer = _SixMetaPathImporter(__name__) + + +class _MovedItems(_LazyModule): + """Lazy loading of moved objects""" + __path__ = [] # mark as package + + +_moved_attributes = [ + MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"), + MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"), + MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"), + MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"), + MovedAttribute("intern", "__builtin__", "sys"), + MovedAttribute("map", "itertools", "builtins", "imap", "map"), + MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"), + MovedAttribute("reload_module", "__builtin__", "imp", "reload"), + MovedAttribute("reduce", "__builtin__", "functools"), + MovedAttribute("shlex_quote", "pipes", "shlex", "quote"), + MovedAttribute("StringIO", "StringIO", "io"), + MovedAttribute("UserDict", "UserDict", "collections"), + MovedAttribute("UserList", "UserList", "collections"), + MovedAttribute("UserString", "UserString", "collections"), + MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"), + MovedAttribute("zip", "itertools", "builtins", "izip", "zip"), + MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"), + + MovedModule("builtins", "__builtin__"), + MovedModule("configparser", "ConfigParser"), + MovedModule("copyreg", "copy_reg"), + MovedModule("dbm_gnu", "gdbm", "dbm.gnu"), + MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"), + MovedModule("http_cookiejar", "cookielib", "http.cookiejar"), + MovedModule("http_cookies", "Cookie", "http.cookies"), + MovedModule("html_entities", "htmlentitydefs", "html.entities"), + MovedModule("html_parser", "HTMLParser", "html.parser"), + MovedModule("http_client", "httplib", "http.client"), + MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"), + MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"), + MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"), + MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"), + MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"), + MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"), + MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"), + MovedModule("cPickle", "cPickle", "pickle"), + MovedModule("queue", "Queue"), + MovedModule("reprlib", "repr"), + MovedModule("socketserver", "SocketServer"), + MovedModule("_thread", "thread", "_thread"), + MovedModule("tkinter", "Tkinter"), + MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"), + MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"), + MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"), + MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"), + MovedModule("tkinter_tix", "Tix", "tkinter.tix"), + MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"), + MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"), + MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"), + MovedModule("tkinter_colorchooser", 
"tkColorChooser", + "tkinter.colorchooser"), + MovedModule("tkinter_commondialog", "tkCommonDialog", + "tkinter.commondialog"), + MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"), + MovedModule("tkinter_font", "tkFont", "tkinter.font"), + MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"), + MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", + "tkinter.simpledialog"), + MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"), + MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"), + MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"), + MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"), + MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"), + MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"), + MovedModule("winreg", "_winreg"), +] +for attr in _moved_attributes: + setattr(_MovedItems, attr.name, attr) + if isinstance(attr, MovedModule): + _importer._add_module(attr, "moves." + attr.name) +del attr + +_MovedItems._moved_attributes = _moved_attributes + +moves = _MovedItems(__name__ + ".moves") +_importer._add_module(moves, "moves") + + +class Module_six_moves_urllib_parse(_LazyModule): + """Lazy loading of moved objects in six.moves.urllib_parse""" + + +_urllib_parse_moved_attributes = [ + MovedAttribute("ParseResult", "urlparse", "urllib.parse"), + MovedAttribute("SplitResult", "urlparse", "urllib.parse"), + MovedAttribute("parse_qs", "urlparse", "urllib.parse"), + MovedAttribute("parse_qsl", "urlparse", "urllib.parse"), + MovedAttribute("urldefrag", "urlparse", "urllib.parse"), + MovedAttribute("urljoin", "urlparse", "urllib.parse"), + MovedAttribute("urlparse", "urlparse", "urllib.parse"), + MovedAttribute("urlsplit", "urlparse", "urllib.parse"), + MovedAttribute("urlunparse", "urlparse", "urllib.parse"), + MovedAttribute("urlunsplit", "urlparse", "urllib.parse"), + MovedAttribute("quote", "urllib", "urllib.parse"), + MovedAttribute("quote_plus", "urllib", "urllib.parse"), + MovedAttribute("unquote", "urllib", "urllib.parse"), + MovedAttribute("unquote_plus", "urllib", "urllib.parse"), + MovedAttribute("urlencode", "urllib", "urllib.parse"), + MovedAttribute("splitquery", "urllib", "urllib.parse"), + MovedAttribute("splittag", "urllib", "urllib.parse"), + MovedAttribute("splituser", "urllib", "urllib.parse"), + MovedAttribute("uses_fragment", "urlparse", "urllib.parse"), + MovedAttribute("uses_netloc", "urlparse", "urllib.parse"), + MovedAttribute("uses_params", "urlparse", "urllib.parse"), + MovedAttribute("uses_query", "urlparse", "urllib.parse"), + MovedAttribute("uses_relative", "urlparse", "urllib.parse"), +] +for attr in _urllib_parse_moved_attributes: + setattr(Module_six_moves_urllib_parse, attr.name, attr) +del attr + +Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes + +_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"), + "moves.urllib_parse", "moves.urllib.parse") + + +class Module_six_moves_urllib_error(_LazyModule): + """Lazy loading of moved objects in six.moves.urllib_error""" + + +_urllib_error_moved_attributes = [ + MovedAttribute("URLError", "urllib2", "urllib.error"), + MovedAttribute("HTTPError", "urllib2", "urllib.error"), + MovedAttribute("ContentTooShortError", "urllib", "urllib.error"), +] +for attr in _urllib_error_moved_attributes: + setattr(Module_six_moves_urllib_error, attr.name, attr) +del attr + 
+Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes + +_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"), + "moves.urllib_error", "moves.urllib.error") + + +class Module_six_moves_urllib_request(_LazyModule): + """Lazy loading of moved objects in six.moves.urllib_request""" + + +_urllib_request_moved_attributes = [ + MovedAttribute("urlopen", "urllib2", "urllib.request"), + MovedAttribute("install_opener", "urllib2", "urllib.request"), + MovedAttribute("build_opener", "urllib2", "urllib.request"), + MovedAttribute("pathname2url", "urllib", "urllib.request"), + MovedAttribute("url2pathname", "urllib", "urllib.request"), + MovedAttribute("getproxies", "urllib", "urllib.request"), + MovedAttribute("Request", "urllib2", "urllib.request"), + MovedAttribute("OpenerDirector", "urllib2", "urllib.request"), + MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"), + MovedAttribute("ProxyHandler", "urllib2", "urllib.request"), + MovedAttribute("BaseHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"), + MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"), + MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"), + MovedAttribute("FileHandler", "urllib2", "urllib.request"), + MovedAttribute("FTPHandler", "urllib2", "urllib.request"), + MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"), + MovedAttribute("UnknownHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"), + MovedAttribute("urlretrieve", "urllib", "urllib.request"), + MovedAttribute("urlcleanup", "urllib", "urllib.request"), + MovedAttribute("URLopener", "urllib", "urllib.request"), + MovedAttribute("FancyURLopener", "urllib", "urllib.request"), + MovedAttribute("proxy_bypass", "urllib", "urllib.request"), +] +for attr in _urllib_request_moved_attributes: + setattr(Module_six_moves_urllib_request, attr.name, attr) +del attr + +Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes + +_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"), + "moves.urllib_request", "moves.urllib.request") + + +class Module_six_moves_urllib_response(_LazyModule): + """Lazy loading of moved objects in six.moves.urllib_response""" + + +_urllib_response_moved_attributes = [ + MovedAttribute("addbase", "urllib", "urllib.response"), + MovedAttribute("addclosehook", "urllib", "urllib.response"), + MovedAttribute("addinfo", "urllib", "urllib.response"), + MovedAttribute("addinfourl", "urllib", "urllib.response"), +] +for attr in _urllib_response_moved_attributes: + setattr(Module_six_moves_urllib_response, attr.name, attr) +del attr + +Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes + 
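# Hedged sketch of the request-side moves defined above (URL is a placeholder):
#
#     from six.moves.urllib.request import Request, urlopen
#     resp = urlopen(Request('https://example.com'))  # urllib2 on py2, urllib.request on py3
#     body = resp.read()
#
# The handler and opener classes (HTTPBasicAuthHandler, ProxyHandler, ...)
# likewise keep a single spelling across both Python versions.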
+_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"), + "moves.urllib_response", "moves.urllib.response") + + +class Module_six_moves_urllib_robotparser(_LazyModule): + """Lazy loading of moved objects in six.moves.urllib_robotparser""" + + +_urllib_robotparser_moved_attributes = [ + MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"), +] +for attr in _urllib_robotparser_moved_attributes: + setattr(Module_six_moves_urllib_robotparser, attr.name, attr) +del attr + +Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes + +_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"), + "moves.urllib_robotparser", "moves.urllib.robotparser") + + +class Module_six_moves_urllib(types.ModuleType): + """Create a six.moves.urllib namespace that resembles the Python 3 namespace""" + __path__ = [] # mark as package + parse = _importer._get_module("moves.urllib_parse") + error = _importer._get_module("moves.urllib_error") + request = _importer._get_module("moves.urllib_request") + response = _importer._get_module("moves.urllib_response") + robotparser = _importer._get_module("moves.urllib_robotparser") + + def __dir__(self): + return ['parse', 'error', 'request', 'response', 'robotparser'] + +_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"), + "moves.urllib") + + +def add_move(move): + """Add an item to six.moves.""" + setattr(_MovedItems, move.name, move) + + +def remove_move(name): + """Remove item from six.moves.""" + try: + delattr(_MovedItems, name) + except AttributeError: + try: + del moves.__dict__[name] + except KeyError: + raise AttributeError("no such move, %r" % (name,)) + + +if PY3: + _meth_func = "__func__" + _meth_self = "__self__" + + _func_closure = "__closure__" + _func_code = "__code__" + _func_defaults = "__defaults__" + _func_globals = "__globals__" +else: + _meth_func = "im_func" + _meth_self = "im_self" + + _func_closure = "func_closure" + _func_code = "func_code" + _func_defaults = "func_defaults" + _func_globals = "func_globals" + + +try: + advance_iterator = next +except NameError: + def advance_iterator(it): + return it.next() +next = advance_iterator + + +try: + callable = callable +except NameError: + def callable(obj): + return any("__call__" in klass.__dict__ for klass in type(obj).__mro__) + + +if PY3: + def get_unbound_function(unbound): + return unbound + + create_bound_method = types.MethodType + + Iterator = object +else: + def get_unbound_function(unbound): + return unbound.im_func + + def create_bound_method(func, obj): + return types.MethodType(func, obj, obj.__class__) + + class Iterator(object): + + def next(self): + return type(self).__next__(self) + + callable = callable +_add_doc(get_unbound_function, + """Get the function out of a possibly unbound function""") + + +get_method_function = operator.attrgetter(_meth_func) +get_method_self = operator.attrgetter(_meth_self) +get_function_closure = operator.attrgetter(_func_closure) +get_function_code = operator.attrgetter(_func_code) +get_function_defaults = operator.attrgetter(_func_defaults) +get_function_globals = operator.attrgetter(_func_globals) + + +if PY3: + def iterkeys(d, **kw): + return iter(d.keys(**kw)) + + def itervalues(d, **kw): + return iter(d.values(**kw)) + + def iteritems(d, **kw): + return iter(d.items(**kw)) + + def iterlists(d, **kw): + return iter(d.lists(**kw)) + + viewkeys = operator.methodcaller("keys") + + viewvalues = 
operator.methodcaller("values") + + viewitems = operator.methodcaller("items") +else: + def iterkeys(d, **kw): + return iter(d.iterkeys(**kw)) + + def itervalues(d, **kw): + return iter(d.itervalues(**kw)) + + def iteritems(d, **kw): + return iter(d.iteritems(**kw)) + + def iterlists(d, **kw): + return iter(d.iterlists(**kw)) + + viewkeys = operator.methodcaller("viewkeys") + + viewvalues = operator.methodcaller("viewvalues") + + viewitems = operator.methodcaller("viewitems") + +_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.") +_add_doc(itervalues, "Return an iterator over the values of a dictionary.") +_add_doc(iteritems, + "Return an iterator over the (key, value) pairs of a dictionary.") +_add_doc(iterlists, + "Return an iterator over the (key, [values]) pairs of a dictionary.") + + +if PY3: + def b(s): + return s.encode("latin-1") + def u(s): + return s + unichr = chr + if sys.version_info[1] <= 1: + def int2byte(i): + return bytes((i,)) + else: + # This is about 2x faster than the implementation above on 3.2+ + int2byte = operator.methodcaller("to_bytes", 1, "big") + byte2int = operator.itemgetter(0) + indexbytes = operator.getitem + iterbytes = iter + import io + StringIO = io.StringIO + BytesIO = io.BytesIO + _assertCountEqual = "assertCountEqual" + _assertRaisesRegex = "assertRaisesRegex" + _assertRegex = "assertRegex" +else: + def b(s): + return s + # Workaround for standalone backslash + def u(s): + return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape") + unichr = unichr + int2byte = chr + def byte2int(bs): + return ord(bs[0]) + def indexbytes(buf, i): + return ord(buf[i]) + iterbytes = functools.partial(itertools.imap, ord) + import StringIO + StringIO = BytesIO = StringIO.StringIO + _assertCountEqual = "assertItemsEqual" + _assertRaisesRegex = "assertRaisesRegexp" + _assertRegex = "assertRegexpMatches" +_add_doc(b, """Byte literal""") +_add_doc(u, """Text literal""") + + +def assertCountEqual(self, *args, **kwargs): + return getattr(self, _assertCountEqual)(*args, **kwargs) + + +def assertRaisesRegex(self, *args, **kwargs): + return getattr(self, _assertRaisesRegex)(*args, **kwargs) + + +def assertRegex(self, *args, **kwargs): + return getattr(self, _assertRegex)(*args, **kwargs) + + +if PY3: + exec_ = getattr(moves.builtins, "exec") + + + def reraise(tp, value, tb=None): + if value is None: + value = tp() + if value.__traceback__ is not tb: + raise value.with_traceback(tb) + raise value + +else: + def exec_(_code_, _globs_=None, _locs_=None): + """Execute code in a namespace.""" + if _globs_ is None: + frame = sys._getframe(1) + _globs_ = frame.f_globals + if _locs_ is None: + _locs_ = frame.f_locals + del frame + elif _locs_ is None: + _locs_ = _globs_ + exec("""exec _code_ in _globs_, _locs_""") + + + exec_("""def reraise(tp, value, tb=None): + raise tp, value, tb +""") + + +if sys.version_info[:2] == (3, 2): + exec_("""def raise_from(value, from_value): + if from_value is None: + raise value + raise value from from_value +""") +elif sys.version_info[:2] > (3, 2): + exec_("""def raise_from(value, from_value): + raise value from from_value +""") +else: + def raise_from(value, from_value): + raise value + + +print_ = getattr(moves.builtins, "print", None) +if print_ is None: + def print_(*args, **kwargs): + """The new-style print function for Python 2.4 and 2.5.""" + fp = kwargs.pop("file", sys.stdout) + if fp is None: + return + def write(data): + if not isinstance(data, basestring): + data = str(data) + # If the file has an encoding, encode 
unicode with it. + if (isinstance(fp, file) and + isinstance(data, unicode) and + fp.encoding is not None): + errors = getattr(fp, "errors", None) + if errors is None: + errors = "strict" + data = data.encode(fp.encoding, errors) + fp.write(data) + want_unicode = False + sep = kwargs.pop("sep", None) + if sep is not None: + if isinstance(sep, unicode): + want_unicode = True + elif not isinstance(sep, str): + raise TypeError("sep must be None or a string") + end = kwargs.pop("end", None) + if end is not None: + if isinstance(end, unicode): + want_unicode = True + elif not isinstance(end, str): + raise TypeError("end must be None or a string") + if kwargs: + raise TypeError("invalid keyword arguments to print()") + if not want_unicode: + for arg in args: + if isinstance(arg, unicode): + want_unicode = True + break + if want_unicode: + newline = unicode("\n") + space = unicode(" ") + else: + newline = "\n" + space = " " + if sep is None: + sep = space + if end is None: + end = newline + for i, arg in enumerate(args): + if i: + write(sep) + write(arg) + write(end) +if sys.version_info[:2] < (3, 3): + _print = print_ + def print_(*args, **kwargs): + fp = kwargs.get("file", sys.stdout) + flush = kwargs.pop("flush", False) + _print(*args, **kwargs) + if flush and fp is not None: + fp.flush() + +_add_doc(reraise, """Reraise an exception.""") + +if sys.version_info[0:2] < (3, 4): + def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS, + updated=functools.WRAPPER_UPDATES): + def wrapper(f): + f = functools.wraps(wrapped, assigned, updated)(f) + f.__wrapped__ = wrapped + return f + return wrapper +else: + wraps = functools.wraps + +def with_metaclass(meta, *bases): + """Create a base class with a metaclass.""" + # This requires a bit of explanation: the basic idea is to make a dummy + # metaclass for one level of class instantiation that replaces itself with + # the actual metaclass. + class metaclass(meta): + def __new__(cls, name, this_bases, d): + return meta(name, bases, d) + return type.__new__(metaclass, 'temporary_class', (), {}) + + +def add_metaclass(metaclass): + """Class decorator for creating a class with a metaclass.""" + def wrapper(cls): + orig_vars = cls.__dict__.copy() + slots = orig_vars.get('__slots__') + if slots is not None: + if isinstance(slots, str): + slots = [slots] + for slots_var in slots: + orig_vars.pop(slots_var) + orig_vars.pop('__dict__', None) + orig_vars.pop('__weakref__', None) + return metaclass(cls.__name__, cls.__bases__, orig_vars) + return wrapper + + +def python_2_unicode_compatible(klass): + """ + A decorator that defines __unicode__ and __str__ methods under Python 2. + Under Python 3 it does nothing. + + To support Python 2 and 3 with a single code base, define a __str__ method + returning text and apply this decorator to the class. + """ + if PY2: + if '__str__' not in klass.__dict__: + raise ValueError("@python_2_unicode_compatible cannot be applied " + "to %s because it doesn't define __str__()." % + klass.__name__) + klass.__unicode__ = klass.__str__ + klass.__str__ = lambda self: self.__unicode__().encode('utf-8') + return klass + + +# Complete the moves implementation. +# This code is at the end of this module to speed up module loading. +# Turn this module into a package. 
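# Hedged usage sketch for the class-level helpers defined above, before the
# package plumbing that follows (Meta is a stand-in metaclass):
#
#     class Meta(type):
#         pass
#
#     @add_metaclass(Meta)                      # decorator form
#     class A(object):
#         pass
#
#     class B(with_metaclass(Meta, object)):    # base-class form
#         pass
#
# Both forms sidestep the Python-2-only __metaclass__ attribute and the
# Python-3-only `class C(metaclass=Meta)` syntax.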
+__path__ = [] # required for PEP 302 and PEP 451 +__package__ = __name__ # see PEP 366 @ReservedAssignment +if globals().get("__spec__") is not None: + __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable +# Remove other six meta path importers, since they cause problems. This can +# happen if six is removed from sys.modules and then reloaded. (Setuptools does +# this for some reason.) +if sys.meta_path: + for i, importer in enumerate(sys.meta_path): + # Here's some real nastiness: Another "instance" of the six module might + # be floating around. Therefore, we can't use isinstance() to check for + # the six meta path importer, since the other six instance will have + # inserted an importer with different class. + if (type(importer).__name__ == "_SixMetaPathImporter" and + importer.name == __name__): + del sys.meta_path[i] + break + del i, importer +# Finally, add the importer to the meta path import hook. +sys.meta_path.append(_importer) diff --git a/rhodecode/lib/action_parser.py b/rhodecode/lib/action_parser.py --- a/rhodecode/lib/action_parser.py +++ b/rhodecode/lib/action_parser.py @@ -20,8 +20,8 @@ import logging -from webhelpers.html.builder import literal -from webhelpers.html.tags import link_to +from webhelpers2.html.builder import literal +from webhelpers2.html.tags import link_to from rhodecode.lib.utils2 import AttributeDict from rhodecode.lib.vcs.backends.base import BaseCommit diff --git a/rhodecode/lib/auth.py b/rhodecode/lib/auth.py --- a/rhodecode/lib/auth.py +++ b/rhodecode/lib/auth.py @@ -352,23 +352,23 @@ class PermOriginDict(dict): `.perm_origin_stack` will return the stack of (perm, origin) set per key >>> perms = PermOriginDict() - >>> perms['resource'] = 'read', 'default' + >>> perms['resource'] = 'read', 'default', 1 >>> perms['resource'] 'read' - >>> perms['resource'] = 'write', 'admin' + >>> perms['resource'] = 'write', 'admin', 2 >>> perms['resource'] 'write' >>> perms.perm_origin_stack - {'resource': [('read', 'default'), ('write', 'admin')]} + {'resource': [('read', 'default', 1), ('write', 'admin', 2)]} """ def __init__(self, *args, **kw): dict.__init__(self, *args, **kw) self.perm_origin_stack = collections.OrderedDict() - def __setitem__(self, key, (perm, origin)): + def __setitem__(self, key, (perm, origin, obj_id)): self.perm_origin_stack.setdefault(key, []).append( - (perm, origin)) + (perm, origin, obj_id)) dict.__setitem__(self, key, perm) @@ -463,26 +463,29 @@ class PermissionCalculator(object): # repositories for perm in self.default_repo_perms: r_k = perm.UserRepoToPerm.repository.repo_name + obj_id = perm.UserRepoToPerm.repository.repo_id archived = perm.UserRepoToPerm.repository.archived p = 'repository.admin' - self.permissions_repositories[r_k] = p, PermOrigin.SUPER_ADMIN + self.permissions_repositories[r_k] = p, PermOrigin.SUPER_ADMIN, obj_id # special case for archived repositories, which we block still even for # super admins if archived: p = 'repository.read' - self.permissions_repositories[r_k] = p, PermOrigin.ARCHIVED + self.permissions_repositories[r_k] = p, PermOrigin.ARCHIVED, obj_id # repository groups for perm in self.default_repo_groups_perms: rg_k = perm.UserRepoGroupToPerm.group.group_name + obj_id = perm.UserRepoGroupToPerm.group.group_id p = 'group.admin' - self.permissions_repository_groups[rg_k] = p, PermOrigin.SUPER_ADMIN + self.permissions_repository_groups[rg_k] = p, PermOrigin.SUPER_ADMIN, obj_id # user groups for perm in self.default_user_group_perms: u_k = 
perm.UserUserGroupToPerm.user_group.users_group_name + obj_id = perm.UserUserGroupToPerm.user_group.users_group_id p = 'usergroup.admin' - self.permissions_user_groups[u_k] = p, PermOrigin.SUPER_ADMIN + self.permissions_user_groups[u_k] = p, PermOrigin.SUPER_ADMIN, obj_id # branch permissions # since super-admin also can have custom rule permissions @@ -578,10 +581,11 @@ class PermissionCalculator(object): def _calculate_default_permissions_repositories(self, user_inherit_object_permissions): for perm in self.default_repo_perms: r_k = perm.UserRepoToPerm.repository.repo_name + obj_id = perm.UserRepoToPerm.repository.repo_id archived = perm.UserRepoToPerm.repository.archived p = perm.Permission.permission_name o = PermOrigin.REPO_DEFAULT - self.permissions_repositories[r_k] = p, o + self.permissions_repositories[r_k] = p, o, obj_id # if we decide this user isn't inheriting permissions from # default user we set him to .none so only explicit @@ -589,25 +593,25 @@ class PermissionCalculator(object): if not user_inherit_object_permissions: p = 'repository.none' o = PermOrigin.REPO_DEFAULT_NO_INHERIT - self.permissions_repositories[r_k] = p, o + self.permissions_repositories[r_k] = p, o, obj_id if perm.Repository.private and not ( perm.Repository.user_id == self.user_id): # disable defaults for private repos, p = 'repository.none' o = PermOrigin.REPO_PRIVATE - self.permissions_repositories[r_k] = p, o + self.permissions_repositories[r_k] = p, o, obj_id elif perm.Repository.user_id == self.user_id: # set admin if owner p = 'repository.admin' o = PermOrigin.REPO_OWNER - self.permissions_repositories[r_k] = p, o + self.permissions_repositories[r_k] = p, o, obj_id if self.user_is_admin: p = 'repository.admin' o = PermOrigin.SUPER_ADMIN - self.permissions_repositories[r_k] = p, o + self.permissions_repositories[r_k] = p, o, obj_id # finally in case of archived repositories, we downgrade higher # permissions to read @@ -616,7 +620,7 @@ class PermissionCalculator(object): if current_perm in ['repository.write', 'repository.admin']: p = 'repository.read' o = PermOrigin.ARCHIVED - self.permissions_repositories[r_k] = p, o + self.permissions_repositories[r_k] = p, o, obj_id def _calculate_default_permissions_repository_branches(self, user_inherit_object_permissions): for perm in self.default_branch_repo_perms: @@ -641,52 +645,54 @@ class PermissionCalculator(object): def _calculate_default_permissions_repository_groups(self, user_inherit_object_permissions): for perm in self.default_repo_groups_perms: rg_k = perm.UserRepoGroupToPerm.group.group_name + obj_id = perm.UserRepoGroupToPerm.group.group_id p = perm.Permission.permission_name o = PermOrigin.REPOGROUP_DEFAULT - self.permissions_repository_groups[rg_k] = p, o + self.permissions_repository_groups[rg_k] = p, o, obj_id # if we decide this user isn't inheriting permissions from default # user we set him to .none so only explicit permissions work if not user_inherit_object_permissions: p = 'group.none' o = PermOrigin.REPOGROUP_DEFAULT_NO_INHERIT - self.permissions_repository_groups[rg_k] = p, o + self.permissions_repository_groups[rg_k] = p, o, obj_id if perm.RepoGroup.user_id == self.user_id: # set admin if owner p = 'group.admin' o = PermOrigin.REPOGROUP_OWNER - self.permissions_repository_groups[rg_k] = p, o + self.permissions_repository_groups[rg_k] = p, o, obj_id if self.user_is_admin: p = 'group.admin' o = PermOrigin.SUPER_ADMIN - self.permissions_repository_groups[rg_k] = p, o + self.permissions_repository_groups[rg_k] = p, o, obj_id def 
_calculate_default_permissions_user_groups(self, user_inherit_object_permissions): for perm in self.default_user_group_perms: u_k = perm.UserUserGroupToPerm.user_group.users_group_name + obj_id = perm.UserUserGroupToPerm.user_group.users_group_id p = perm.Permission.permission_name o = PermOrigin.USERGROUP_DEFAULT - self.permissions_user_groups[u_k] = p, o + self.permissions_user_groups[u_k] = p, o, obj_id # if we decide this user isn't inheriting permissions from default # user we set him to .none so only explicit permissions work if not user_inherit_object_permissions: p = 'usergroup.none' o = PermOrigin.USERGROUP_DEFAULT_NO_INHERIT - self.permissions_user_groups[u_k] = p, o + self.permissions_user_groups[u_k] = p, o, obj_id if perm.UserGroup.user_id == self.user_id: # set admin if owner p = 'usergroup.admin' o = PermOrigin.USERGROUP_OWNER - self.permissions_user_groups[u_k] = p, o + self.permissions_user_groups[u_k] = p, o, obj_id if self.user_is_admin: p = 'usergroup.admin' o = PermOrigin.SUPER_ADMIN - self.permissions_user_groups[u_k] = p, o + self.permissions_user_groups[u_k] = p, o, obj_id def _calculate_default_permissions(self): """ @@ -723,7 +729,7 @@ class PermissionCalculator(object): def _calculate_repository_permissions(self): """ - Repository permissions for the current user. + Repository access permissions for the current user. Check if the user is part of user groups for this repository and fill in the permission from it. `_choose_permission` decides of which @@ -738,6 +744,7 @@ class PermissionCalculator(object): multiple_counter = collections.defaultdict(int) for perm in user_repo_perms_from_user_group: r_k = perm.UserGroupRepoToPerm.repository.repo_name + obj_id = perm.UserGroupRepoToPerm.repository.repo_id multiple_counter[r_k] += 1 p = perm.Permission.permission_name o = PermOrigin.REPO_USERGROUP % perm.UserGroupRepoToPerm\ @@ -747,18 +754,18 @@ class PermissionCalculator(object): cur_perm = self.permissions_repositories[r_k] p = self._choose_permission(p, cur_perm) - self.permissions_repositories[r_k] = p, o + self.permissions_repositories[r_k] = p, o, obj_id if perm.Repository.user_id == self.user_id: # set admin if owner p = 'repository.admin' o = PermOrigin.REPO_OWNER - self.permissions_repositories[r_k] = p, o + self.permissions_repositories[r_k] = p, o, obj_id if self.user_is_admin: p = 'repository.admin' o = PermOrigin.SUPER_ADMIN - self.permissions_repositories[r_k] = p, o + self.permissions_repositories[r_k] = p, o, obj_id # user explicit permissions for repositories, overrides any specified # by the group permission @@ -766,6 +773,7 @@ class PermissionCalculator(object): self.user_id, self.scope_repo_id) for perm in user_repo_perms: r_k = perm.UserRepoToPerm.repository.repo_name + obj_id = perm.UserRepoToPerm.repository.repo_id p = perm.Permission.permission_name o = PermOrigin.REPO_USER % perm.UserRepoToPerm.user.username @@ -774,18 +782,18 @@ class PermissionCalculator(object): r_k, 'repository.none') p = self._choose_permission(p, cur_perm) - self.permissions_repositories[r_k] = p, o + self.permissions_repositories[r_k] = p, o, obj_id if perm.Repository.user_id == self.user_id: # set admin if owner p = 'repository.admin' o = PermOrigin.REPO_OWNER - self.permissions_repositories[r_k] = p, o + self.permissions_repositories[r_k] = p, o, obj_id if self.user_is_admin: p = 'repository.admin' o = PermOrigin.SUPER_ADMIN - self.permissions_repositories[r_k] = p, o + self.permissions_repositories[r_k] = p, o, obj_id def 
_calculate_repository_branch_permissions(self): # user group for repositories permissions @@ -847,6 +855,7 @@ class PermissionCalculator(object): multiple_counter = collections.defaultdict(int) for perm in user_repo_group_perms_from_user_group: rg_k = perm.UserGroupRepoGroupToPerm.group.group_name + obj_id = perm.UserGroupRepoGroupToPerm.group.group_id multiple_counter[rg_k] += 1 o = PermOrigin.REPOGROUP_USERGROUP % perm.UserGroupRepoGroupToPerm\ .users_group.users_group_name @@ -855,24 +864,25 @@ class PermissionCalculator(object): if multiple_counter[rg_k] > 1: cur_perm = self.permissions_repository_groups[rg_k] p = self._choose_permission(p, cur_perm) - self.permissions_repository_groups[rg_k] = p, o + self.permissions_repository_groups[rg_k] = p, o, obj_id if perm.RepoGroup.user_id == self.user_id: # set admin if owner, even for member of other user group p = 'group.admin' o = PermOrigin.REPOGROUP_OWNER - self.permissions_repository_groups[rg_k] = p, o + self.permissions_repository_groups[rg_k] = p, o, obj_id if self.user_is_admin: p = 'group.admin' o = PermOrigin.SUPER_ADMIN - self.permissions_repository_groups[rg_k] = p, o + self.permissions_repository_groups[rg_k] = p, o, obj_id # user explicit permissions for repository groups user_repo_groups_perms = Permission.get_default_group_perms( self.user_id, self.scope_repo_group_id) for perm in user_repo_groups_perms: rg_k = perm.UserRepoGroupToPerm.group.group_name + obj_id = perm.UserRepoGroupToPerm.group.group_id o = PermOrigin.REPOGROUP_USER % perm.UserRepoGroupToPerm\ .user.username p = perm.Permission.permission_name @@ -881,18 +891,18 @@ class PermissionCalculator(object): cur_perm = self.permissions_repository_groups.get(rg_k, 'group.none') p = self._choose_permission(p, cur_perm) - self.permissions_repository_groups[rg_k] = p, o + self.permissions_repository_groups[rg_k] = p, o, obj_id if perm.RepoGroup.user_id == self.user_id: # set admin if owner p = 'group.admin' o = PermOrigin.REPOGROUP_OWNER - self.permissions_repository_groups[rg_k] = p, o + self.permissions_repository_groups[rg_k] = p, o, obj_id if self.user_is_admin: p = 'group.admin' o = PermOrigin.SUPER_ADMIN - self.permissions_repository_groups[rg_k] = p, o + self.permissions_repository_groups[rg_k] = p, o, obj_id def _calculate_user_group_permissions(self): """ @@ -905,8 +915,8 @@ class PermissionCalculator(object): multiple_counter = collections.defaultdict(int) for perm in user_group_from_user_group: - ug_k = perm.UserGroupUserGroupToPerm\ - .target_user_group.users_group_name + ug_k = perm.UserGroupUserGroupToPerm.target_user_group.users_group_name + obj_id = perm.UserGroupUserGroupToPerm.target_user_group.users_group_id multiple_counter[ug_k] += 1 o = PermOrigin.USERGROUP_USERGROUP % perm.UserGroupUserGroupToPerm\ .user_group.users_group_name @@ -916,24 +926,25 @@ class PermissionCalculator(object): cur_perm = self.permissions_user_groups[ug_k] p = self._choose_permission(p, cur_perm) - self.permissions_user_groups[ug_k] = p, o + self.permissions_user_groups[ug_k] = p, o, obj_id if perm.UserGroup.user_id == self.user_id: # set admin if owner, even for member of other user group p = 'usergroup.admin' o = PermOrigin.USERGROUP_OWNER - self.permissions_user_groups[ug_k] = p, o + self.permissions_user_groups[ug_k] = p, o, obj_id if self.user_is_admin: p = 'usergroup.admin' o = PermOrigin.SUPER_ADMIN - self.permissions_user_groups[ug_k] = p, o + self.permissions_user_groups[ug_k] = p, o, obj_id # user explicit permission for user groups user_user_groups_perms = 
Permission.get_default_user_group_perms( self.user_id, self.scope_user_group_id) for perm in user_user_groups_perms: ug_k = perm.UserUserGroupToPerm.user_group.users_group_name + obj_id = perm.UserUserGroupToPerm.user_group.users_group_id o = PermOrigin.USERGROUP_USER % perm.UserUserGroupToPerm\ .user.username p = perm.Permission.permission_name @@ -942,18 +953,18 @@ class PermissionCalculator(object): cur_perm = self.permissions_user_groups.get(ug_k, 'usergroup.none') p = self._choose_permission(p, cur_perm) - self.permissions_user_groups[ug_k] = p, o + self.permissions_user_groups[ug_k] = p, o, obj_id if perm.UserGroup.user_id == self.user_id: # set admin if owner p = 'usergroup.admin' o = PermOrigin.USERGROUP_OWNER - self.permissions_user_groups[ug_k] = p, o + self.permissions_user_groups[ug_k] = p, o, obj_id if self.user_is_admin: p = 'usergroup.admin' o = PermOrigin.SUPER_ADMIN - self.permissions_user_groups[ug_k] = p, o + self.permissions_user_groups[ug_k] = p, o, obj_id def _choose_permission(self, new_perm, cur_perm): new_perm_val = Permission.PERM_WEIGHTS[new_perm] @@ -1040,6 +1051,9 @@ class AuthUser(object): anonymous access is enabled and if so, it returns default user as logged in """ GLOBAL_PERMS = [x[0] for x in Permission.PERMS] + repo_read_perms = ['repository.read', 'repository.admin', 'repository.write'] + repo_group_read_perms = ['group.read', 'group.write', 'group.admin'] + user_group_read_perms = ['usergroup.read', 'usergroup.write', 'usergroup.admin'] def __init__(self, user_id=None, api_key=None, username=None, ip_addr=None): @@ -1140,7 +1154,7 @@ class AuthUser(object): # try go get user by api key elif self._api_key and self._api_key != anon_user.api_key: - log.debug('Trying Auth User lookup by API KEY: `%s`', self._api_key) + log.debug('Trying Auth User lookup by API KEY: `...%s`', self._api_key[-4:]) is_user_loaded = user_model.fill_data(self, api_key=self._api_key) # lookup by username @@ -1216,7 +1230,7 @@ class AuthUser(object): @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=cache_on) - def compute_perm_tree(cache_name, + def compute_perm_tree(cache_name, cache_ver, user_id, scope, user_is_admin,user_inherit_default_permissions, explicit, algo, calculate_super_admin): return _cached_perms_data( @@ -1225,7 +1239,7 @@ class AuthUser(object): start = time.time() result = compute_perm_tree( - 'permissions', user_id, scope, user_is_admin, + 'permissions', 'v1', user_id, scope, user_is_admin, user_inherit_default_permissions, explicit, algo, calculate_super_admin) @@ -1233,7 +1247,7 @@ class AuthUser(object): for k in result: result_repr.append((k, len(result[k]))) total = time.time() - start - log.debug('PERMISSION tree for user %s computed in %.3fs: %s', + log.debug('PERMISSION tree for user %s computed in %.4fs: %s', user, total, result_repr) return result @@ -1277,6 +1291,18 @@ class AuthUser(object): x[0] for x in self.permissions['user_groups'].items() if x[1] == 'usergroup.admin'] + def repo_acl_ids_from_stack(self, perms=None, prefix_filter=None, cache=False): + if not perms: + perms = AuthUser.repo_read_perms + allowed_ids = [] + for k, stack_data in self.permissions['repositories'].perm_origin_stack.items(): + perm, origin, obj_id = stack_data[-1] # last item is the current permission + if prefix_filter and not k.startswith(prefix_filter): + continue + if perm in perms: + allowed_ids.append(obj_id) + return allowed_ids + def repo_acl_ids(self, perms=None, name_filter=None, cache=False): """ Returns list of repository ids 
that user have access to based on given @@ -1285,8 +1311,7 @@ class AuthUser(object): """ from rhodecode.model.scm import RepoList if not perms: - perms = [ - 'repository.read', 'repository.write', 'repository.admin'] + perms = AuthUser.repo_read_perms def _cached_repo_acl(user_id, perm_def, _name_filter): qry = Repository.query() @@ -1296,10 +1321,22 @@ class AuthUser(object): Repository.repo_name.ilike(ilike_expression)) return [x.repo_id for x in - RepoList(qry, perm_set=perm_def)] + RepoList(qry, perm_set=perm_def, extra_kwargs={'user': self})] return _cached_repo_acl(self.user_id, perms, name_filter) + def repo_group_acl_ids_from_stack(self, perms=None, prefix_filter=None, cache=False): + if not perms: + perms = AuthUser.repo_group_read_perms + allowed_ids = [] + for k, stack_data in self.permissions['repositories_groups'].perm_origin_stack.items(): + perm, origin, obj_id = stack_data[-1] # last item is the current permission + if prefix_filter and not k.startswith(prefix_filter): + continue + if perm in perms: + allowed_ids.append(obj_id) + return allowed_ids + def repo_group_acl_ids(self, perms=None, name_filter=None, cache=False): """ Returns list of repository group ids that user have access to based on given @@ -1308,8 +1345,7 @@ class AuthUser(object): """ from rhodecode.model.scm import RepoGroupList if not perms: - perms = [ - 'group.read', 'group.write', 'group.admin'] + perms = AuthUser.repo_group_read_perms def _cached_repo_group_acl(user_id, perm_def, _name_filter): qry = RepoGroup.query() @@ -1319,10 +1355,20 @@ class AuthUser(object): RepoGroup.group_name.ilike(ilike_expression)) return [x.group_id for x in - RepoGroupList(qry, perm_set=perm_def)] + RepoGroupList(qry, perm_set=perm_def, extra_kwargs={'user': self})] return _cached_repo_group_acl(self.user_id, perms, name_filter) + def user_group_acl_ids_from_stack(self, perms=None, cache=False): + if not perms: + perms = AuthUser.user_group_read_perms + allowed_ids = [] + for k, stack_data in self.permissions['user_groups'].perm_origin_stack.items(): + perm, origin, obj_id = stack_data[-1] # last item is the current permission + if perm in perms: + allowed_ids.append(obj_id) + return allowed_ids + def user_group_acl_ids(self, perms=None, name_filter=None, cache=False): """ Returns list of user group ids that user have access to based on given @@ -1331,8 +1377,7 @@ class AuthUser(object): """ from rhodecode.model.scm import UserGroupList if not perms: - perms = [ - 'usergroup.read', 'usergroup.write', 'usergroup.admin'] + perms = AuthUser.user_group_read_perms def _cached_user_group_acl(user_id, perm_def, name_filter): qry = UserGroup.query() @@ -1342,7 +1387,7 @@ class AuthUser(object): UserGroup.users_group_name.ilike(ilike_expression)) return [x.users_group_id for x in - UserGroupList(qry, perm_set=perm_def)] + UserGroupList(qry, perm_set=perm_def, extra_kwargs={'user': self})] return _cached_user_group_acl(self.user_id, perms, name_filter) @@ -1366,6 +1411,10 @@ class AuthUser(object): def feed_token(self): return self.get_instance().feed_token + @LazyProperty + def artifact_token(self): + return self.get_instance().artifact_token + @classmethod def check_ip_allowed(cls, user_id, ip_addr, inherit_from_default): allowed_ips = AuthUser.get_allowed_ips( @@ -1536,11 +1585,10 @@ class CSRFRequired(object): http://en.wikipedia.org/wiki/Cross-site_request_forgery for more information). - For use with the ``webhelpers.secure_form`` helper functions. + For use with the ``secure_form`` helper functions. 
""" - def __init__(self, token=csrf_token_key, header='X-CSRF-Token', - except_methods=None): + def __init__(self, token=csrf_token_key, header='X-CSRF-Token', except_methods=None): self.token = token self.header = header self.except_methods = except_methods or [] @@ -1597,6 +1645,11 @@ class LoginRequired(object): """ def __init__(self, auth_token_access=None): self.auth_token_access = auth_token_access + if self.auth_token_access: + valid_type = set(auth_token_access).intersection(set(UserApiKeys.ROLES)) + if not valid_type: + raise ValueError('auth_token_access must be on of {}, got {}'.format( + UserApiKeys.ROLES, auth_token_access)) def __call__(self, func): return get_cython_compat_decorator(self.__wrapper, func) @@ -1616,19 +1669,25 @@ class LoginRequired(object): # check if our IP is allowed ip_access_valid = True if not user.ip_allowed: - h.flash(h.literal(_('IP %s not allowed' % (user.ip_addr,))), + h.flash(h.literal(_('IP {} not allowed'.format(user.ip_addr))), category='warning') ip_access_valid = False - # check if we used an APIKEY and it's a valid one + # we used stored token that is extract from GET or URL param (if any) + _auth_token = request.user_auth_token + + # check if we used an AUTH_TOKEN and it's a valid one # defined white-list of controllers which API access will be enabled - _auth_token = request.GET.get( - 'auth_token', '') or request.GET.get('api_key', '') + whitelist = None + if self.auth_token_access: + # since this location is allowed by @LoginRequired decorator it's our + # only whitelist + whitelist = [loc] auth_token_access_valid = allowed_auth_token_access( - loc, auth_token=_auth_token) + loc, whitelist=whitelist, auth_token=_auth_token) # explicit controller is enabled or API is in our whitelist - if self.auth_token_access or auth_token_access_valid: + if auth_token_access_valid: log.debug('Checking AUTH TOKEN access for %s', cls) db_user = user.get_instance() @@ -1637,6 +1696,8 @@ class LoginRequired(object): roles = self.auth_token_access else: roles = [UserApiKeys.ROLE_HTTP] + log.debug('AUTH TOKEN: checking auth for user %s and roles %s', + db_user, roles) token_match = db_user.authenticate_by_token( _auth_token, roles=roles) else: diff --git a/rhodecode/lib/base.py b/rhodecode/lib/base.py --- a/rhodecode/lib/base.py +++ b/rhodecode/lib/base.py @@ -289,7 +289,7 @@ def attach_context_attributes(context, r config = request.registry.settings rc_config = SettingsModel().get_all_settings(cache=True) - + context.rc_config = rc_config context.rhodecode_version = rhodecode.__version__ context.rhodecode_edition = config.get('rhodecode.edition') # unique secret + version does not leak the version but keep consistency @@ -363,6 +363,10 @@ def attach_context_attributes(context, r context.visual.cut_off_limit_file = safe_int( config.get('cut_off_limit_file')) + context.license = AttributeDict({}) + context.license.hide_license_info = str2bool( + config.get('license.hide_license_info', False)) + # AppEnlight context.appenlight_enabled = str2bool(config.get('appenlight', 'false')) context.appenlight_api_public_key = config.get( @@ -459,9 +463,14 @@ def get_auth_user(request): session = request.session ip_addr = get_ip_addr(environ) + # make sure that we update permissions each time we call controller - _auth_token = (request.GET.get('auth_token', '') or - request.GET.get('api_key', '')) + _auth_token = (request.GET.get('auth_token', '') or request.GET.get('api_key', '')) + if not _auth_token and request.matchdict: + url_auth_token = 
request.matchdict.get('_auth_token') + _auth_token = url_auth_token + if _auth_token: + log.debug('Using URL extracted auth token `...%s`', _auth_token[-4:]) if _auth_token: # when using API_KEY we assume user exists, and @@ -495,7 +504,7 @@ def get_auth_user(request): # user is not authenticated and not empty auth_user.set_authenticated(authenticated) - return auth_user + return auth_user, _auth_token def h_filter(s): @@ -519,6 +528,8 @@ def add_events_routes(config): from rhodecode.apps._base import ADMIN_PREFIX config.add_route(name='home', pattern='/') + config.add_route(name='main_page_repos_data', pattern='/_home_repos') + config.add_route(name='main_page_repo_groups_data', pattern='/_home_repo_groups') config.add_route(name='login', pattern=ADMIN_PREFIX + '/login') config.add_route(name='logout', pattern=ADMIN_PREFIX + '/logout') @@ -530,12 +541,24 @@ def add_events_routes(config): pattern='/{repo_name}/pull-request/{pull_request_id}') config.add_route(name='pull_requests_global', pattern='/pull-request/{pull_request_id}') + config.add_route(name='repo_commit', pattern='/{repo_name}/changeset/{commit_id}') - config.add_route(name='repo_files', pattern='/{repo_name}/files/{commit_id}/{f_path}') + config.add_route(name='hovercard_user', + pattern='/_hovercard/user/{user_id}') + + config.add_route(name='hovercard_user_group', + pattern='/_hovercard/user_group/{user_group_id}') + + config.add_route(name='hovercard_pull_request', + pattern='/_hovercard/pull_request/{pull_request_id}') + + config.add_route(name='hovercard_repo_commit', + pattern='/_hovercard/commit/{repo_name}/{commit_id}') + def bootstrap_config(request): import pyramid.testing diff --git a/rhodecode/lib/celerylib/tasks.py b/rhodecode/lib/celerylib/tasks.py --- a/rhodecode/lib/celerylib/tasks.py +++ b/rhodecode/lib/celerylib/tasks.py @@ -35,7 +35,8 @@ from rhodecode.lib import audit_logger from rhodecode.lib.celerylib import get_logger, async_task, RequestContextTask from rhodecode.lib.hooks_base import log_create_repository from rhodecode.lib.utils2 import safe_int, str2bool -from rhodecode.model.db import Session, IntegrityError, Repository, User, true +from rhodecode.model.db import ( + Session, IntegrityError, true, Repository, RepoGroup, User) @async_task(ignore_result=True, base=RequestContextTask) @@ -125,6 +126,7 @@ def send_email(recipients, subject, body def create_repo(form_data, cur_user): from rhodecode.model.repo import RepoModel from rhodecode.model.user import UserModel + from rhodecode.model.scm import ScmModel from rhodecode.model.settings import SettingsModel log = get_logger(create_repo) @@ -139,7 +141,6 @@ def create_repo(form_data, cur_user): private = form_data['repo_private'] clone_uri = form_data.get('clone_uri') repo_group = safe_int(form_data['repo_group']) - landing_rev = form_data['repo_landing_rev'] copy_fork_permissions = form_data.get('copy_permissions') copy_group_permissions = form_data.get('repo_copy_permissions') fork_of = form_data.get('fork_parent_id') @@ -154,6 +155,9 @@ def create_repo(form_data, cur_user): enable_downloads = form_data.get( 'enable_downloads', defs.get('repo_enable_downloads')) + # set landing rev based on default branches for SCM + landing_ref, _label = ScmModel.backend_landing_ref(repo_type) + try: RepoModel()._create_repo( repo_name=repo_name_full, @@ -163,7 +167,7 @@ def create_repo(form_data, cur_user): private=private, clone_uri=clone_uri, repo_group=repo_group, - landing_rev=landing_rev, + landing_rev=landing_ref, fork_of=fork_of, 
copy_fork_permissions=copy_fork_permissions, copy_group_permissions=copy_group_permissions, @@ -236,7 +240,7 @@ def create_repo_fork(form_data, cur_user private = form_data['private'] clone_uri = form_data.get('clone_uri') repo_group = safe_int(form_data['repo_group']) - landing_rev = form_data['landing_rev'] + landing_ref = form_data['landing_rev'] copy_fork_permissions = form_data.get('copy_permissions') fork_id = safe_int(form_data.get('fork_parent_id')) @@ -250,7 +254,7 @@ def create_repo_fork(form_data, cur_user private=private, clone_uri=clone_uri, repo_group=repo_group, - landing_rev=landing_rev, + landing_rev=landing_ref, fork_of=fork_of, copy_fork_permissions=copy_fork_permissions ) @@ -338,3 +342,24 @@ def beat_check(*args, **kwargs): log = get_logger(beat_check) log.info('Got args: %r and kwargs %r', args, kwargs) return time.time() + + +@async_task(ignore_result=True) +def sync_last_update(*args, **kwargs): + + skip_repos = kwargs.get('skip_repos') + if not skip_repos: + repos = Repository.query() \ + .order_by(Repository.group_id.asc()) + + for repo in repos: + repo.update_commit_cache() + + skip_groups = kwargs.get('skip_groups') + if not skip_groups: + repo_groups = RepoGroup.query() \ + .filter(RepoGroup.group_parent_id == None) + + for root_gr in repo_groups: + for repo_gr in reversed(root_gr.recursive_groups()): + repo_gr.update_commit_cache() diff --git a/rhodecode/lib/db_manage.py b/rhodecode/lib/db_manage.py --- a/rhodecode/lib/db_manage.py +++ b/rhodecode/lib/db_manage.py @@ -117,7 +117,7 @@ class DbManage(object): self.sa.add(ver) log.info('db version set to: %s', __dbversion__) - def run_pre_migration_tasks(self): + def run_post_migration_tasks(self): """ Run various tasks before actually doing migrations """ @@ -172,7 +172,6 @@ class DbManage(object): notify(msg) - self.run_pre_migration_tasks() if curr_version == __dbversion__: log.info('This database is already at the newest version') @@ -194,6 +193,7 @@ class DbManage(object): _step = step + self.run_post_migration_tasks() notify('upgrade to version %s successful' % _step) def fix_repo_paths(self): diff --git a/rhodecode/lib/dbmigrate/schema/db_4_11_0_0.py b/rhodecode/lib/dbmigrate/schema/db_4_11_0_0.py --- a/rhodecode/lib/dbmigrate/schema/db_4_11_0_0.py +++ b/rhodecode/lib/dbmigrate/schema/db_4_11_0_0.py @@ -2334,7 +2334,7 @@ class RepoGroup(Base, BaseModel): @classmethod def _generate_choice(cls, repo_group): - from webhelpers.html import literal as _literal + from webhelpers2.html import literal as _literal _name = lambda k: _literal(cls.CHOICES_SEPARATOR.join(k)) return repo_group.group_id, _name(repo_group.full_path_splitted) @@ -3667,7 +3667,7 @@ class PullRequest(Base, _PullRequestBase vcs_obj = self.target_repo.scm_instance() shadow_repository_path = vcs_obj._get_shadow_repository_path( workspace_id) - return vcs_obj._get_shadow_instance(shadow_repository_path) + return vcs_obj.get_shadow_instance(shadow_repository_path) class PullRequestVersion(Base, _PullRequestBase): diff --git a/rhodecode/lib/dbmigrate/schema/db_4_13_0_0.py b/rhodecode/lib/dbmigrate/schema/db_4_13_0_0.py --- a/rhodecode/lib/dbmigrate/schema/db_4_13_0_0.py +++ b/rhodecode/lib/dbmigrate/schema/db_4_13_0_0.py @@ -2400,7 +2400,7 @@ class RepoGroup(Base, BaseModel): @classmethod def _generate_choice(cls, repo_group): - from webhelpers.html import literal as _literal + from webhelpers2.html import literal as _literal _name = lambda k: _literal(cls.CHOICES_SEPARATOR.join(k)) return repo_group.group_id, _name(repo_group.full_path_splitted) 
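The hunk above swaps `webhelpers.html.literal` for its `webhelpers2` equivalent. A minimal standalone sketch of the replacement call, assuming `webhelpers2` is installed; the separator string below is hypothetical (the real one comes from `RepoGroup.CHOICES_SEPARATOR`):

    # Minimal sketch of the webhelpers2 `literal` usage adopted in _generate_choice above.
    # The separator is hypothetical; RepoGroup.CHOICES_SEPARATOR is defined elsewhere in db.py.
    from webhelpers2.html import literal

    def generate_choice(group_id, full_path_splitted, separator=' / '):
        # literal() marks the joined path as already-escaped markup, so templates render it verbatim
        return group_id, literal(separator.join(full_path_splitted))

    # prints the (group_id, markup-safe label) pair
    print(generate_choice(2, ['parent-group', 'child-group']))
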
@@ -3750,7 +3750,7 @@ class PullRequest(Base, _PullRequestBase vcs_obj = self.target_repo.scm_instance() shadow_repository_path = vcs_obj._get_shadow_repository_path( workspace_id) - return vcs_obj._get_shadow_instance(shadow_repository_path) + return vcs_obj.get_shadow_instance(shadow_repository_path) class PullRequestVersion(Base, _PullRequestBase): diff --git a/rhodecode/lib/dbmigrate/schema/db_4_16_0_0.py b/rhodecode/lib/dbmigrate/schema/db_4_16_0_0.py --- a/rhodecode/lib/dbmigrate/schema/db_4_16_0_0.py +++ b/rhodecode/lib/dbmigrate/schema/db_4_16_0_0.py @@ -2402,7 +2402,7 @@ class Repository(Base, BaseModel): instance = get_instance_cached(*args) log.debug( - 'Repo instance fetched in %.3fs', inv_context_manager.compute_time) + 'Repo instance fetched in %.4fs', inv_context_manager.compute_time) return instance def _get_instance(self, cache=True, config=None): @@ -2474,7 +2474,7 @@ class RepoGroup(Base, BaseModel): @classmethod def _generate_choice(cls, repo_group): - from webhelpers.html import literal as _literal + from webhelpers2.html import literal as _literal _name = lambda k: _literal(cls.CHOICES_SEPARATOR.join(k)) return repo_group.group_id, _name(repo_group.full_path_splitted) @@ -3900,7 +3900,7 @@ class PullRequest(Base, _PullRequestBase shadow_repository_path = vcs_obj._get_shadow_repository_path( self.target_repo.repo_id, workspace_id) if os.path.isdir(shadow_repository_path): - return vcs_obj._get_shadow_instance(shadow_repository_path) + return vcs_obj.get_shadow_instance(shadow_repository_path) class PullRequestVersion(Base, _PullRequestBase): diff --git a/rhodecode/lib/dbmigrate/schema/db_4_16_0_1.py b/rhodecode/lib/dbmigrate/schema/db_4_16_0_1.py --- a/rhodecode/lib/dbmigrate/schema/db_4_16_0_1.py +++ b/rhodecode/lib/dbmigrate/schema/db_4_16_0_1.py @@ -2425,7 +2425,7 @@ class Repository(Base, BaseModel): instance = get_instance_cached(*args) log.debug( - 'Repo instance fetched in %.3fs', inv_context_manager.compute_time) + 'Repo instance fetched in %.4fs', inv_context_manager.compute_time) return instance def _get_instance(self, cache=True, config=None): @@ -2497,7 +2497,7 @@ class RepoGroup(Base, BaseModel): @classmethod def _generate_choice(cls, repo_group): - from webhelpers.html import literal as _literal + from webhelpers2.html import literal as _literal _name = lambda k: _literal(cls.CHOICES_SEPARATOR.join(k)) return repo_group.group_id, _name(repo_group.full_path_splitted) @@ -3974,7 +3974,7 @@ class PullRequest(Base, _PullRequestBase shadow_repository_path = vcs_obj._get_shadow_repository_path( self.target_repo.repo_id, workspace_id) if os.path.isdir(shadow_repository_path): - return vcs_obj._get_shadow_instance(shadow_repository_path) + return vcs_obj.get_shadow_instance(shadow_repository_path) class PullRequestVersion(Base, _PullRequestBase): diff --git a/rhodecode/lib/dbmigrate/schema/db_4_16_0_2.py b/rhodecode/lib/dbmigrate/schema/db_4_16_0_2.py --- a/rhodecode/lib/dbmigrate/schema/db_4_16_0_2.py +++ b/rhodecode/lib/dbmigrate/schema/db_4_16_0_2.py @@ -2425,7 +2425,7 @@ class Repository(Base, BaseModel): instance = get_instance_cached(*args) log.debug( - 'Repo instance fetched in %.3fs', inv_context_manager.compute_time) + 'Repo instance fetched in %.4fs', inv_context_manager.compute_time) return instance def _get_instance(self, cache=True, config=None): @@ -2497,7 +2497,7 @@ class RepoGroup(Base, BaseModel): @classmethod def _generate_choice(cls, repo_group): - from webhelpers.html import literal as _literal + from webhelpers2.html import literal as 
_literal _name = lambda k: _literal(cls.CHOICES_SEPARATOR.join(k)) return repo_group.group_id, _name(repo_group.full_path_splitted) @@ -3975,7 +3975,7 @@ class PullRequest(Base, _PullRequestBase shadow_repository_path = vcs_obj._get_shadow_repository_path( self.target_repo.repo_id, workspace_id) if os.path.isdir(shadow_repository_path): - return vcs_obj._get_shadow_instance(shadow_repository_path) + return vcs_obj.get_shadow_instance(shadow_repository_path) class PullRequestVersion(Base, _PullRequestBase): diff --git a/rhodecode/lib/dbmigrate/schema/db_4_18_0_1.py b/rhodecode/lib/dbmigrate/schema/db_4_18_0_1.py new file mode 100644 --- /dev/null +++ b/rhodecode/lib/dbmigrate/schema/db_4_18_0_1.py @@ -0,0 +1,5378 @@ +# -*- coding: utf-8 -*- + +# Copyright (C) 2010-2019 RhodeCode GmbH +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License, version 3 +# (only), as published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . +# +# This program is dual-licensed. If you wish to learn more about the +# RhodeCode Enterprise Edition, including its added features, Support services, +# and proprietary license terms, please see https://rhodecode.com/licenses/ + +""" +Database Models for RhodeCode Enterprise +""" + +import re +import os +import time +import string +import hashlib +import logging +import datetime +import uuid +import warnings +import ipaddress +import functools +import traceback +import collections + +from sqlalchemy import ( + or_, and_, not_, func, TypeDecorator, event, + Index, Sequence, UniqueConstraint, ForeignKey, CheckConstraint, Column, + Boolean, String, Unicode, UnicodeText, DateTime, Integer, LargeBinary, + Text, Float, PickleType) +from sqlalchemy.sql.expression import true, false, case +from sqlalchemy.sql.functions import coalesce, count # pragma: no cover +from sqlalchemy.orm import ( + relationship, joinedload, class_mapper, validates, aliased) +from sqlalchemy.ext.declarative import declared_attr +from sqlalchemy.ext.hybrid import hybrid_property +from sqlalchemy.exc import IntegrityError # pragma: no cover +from sqlalchemy.dialects.mysql import LONGTEXT +from zope.cachedescriptors.property import Lazy as LazyProperty +from pyramid import compat +from pyramid.threadlocal import get_current_request +from webhelpers2.text import collapse, remove_formatting + +from rhodecode.translation import _ +from rhodecode.lib.vcs import get_vcs_instance +from rhodecode.lib.vcs.backends.base import EmptyCommit, Reference +from rhodecode.lib.utils2 import ( + str2bool, safe_str, get_commit_safe, safe_unicode, sha1_safe, + time_to_datetime, aslist, Optional, safe_int, get_clone_url, AttributeDict, + glob2re, StrictAttributeDict, cleaned_uri, datetime_to_time, OrderedDefaultDict) +from rhodecode.lib.jsonalchemy import MutationObj, MutationList, JsonType, \ + JsonRaw +from rhodecode.lib.ext_json import json +from rhodecode.lib.caching_query import FromCache +from rhodecode.lib.encrypt import AESCipher, validate_and_get_enc_data +from rhodecode.lib.encrypt2 import Encryptor +from rhodecode.lib.exceptions import ( + ArtifactMetadataDuplicate, 
ArtifactMetadataBadValueType) +from rhodecode.model.meta import Base, Session + +URL_SEP = '/' +log = logging.getLogger(__name__) + +# ============================================================================= +# BASE CLASSES +# ============================================================================= + +# this is propagated from .ini file rhodecode.encrypted_values.secret or +# beaker.session.secret if first is not set. +# and initialized at environment.py +ENCRYPTION_KEY = None + +# used to sort permissions by types, '#' used here is not allowed to be in +# usernames, and it's very early in sorted string.printable table. +PERMISSION_TYPE_SORT = { + 'admin': '####', + 'write': '###', + 'read': '##', + 'none': '#', +} + + +def display_user_sort(obj): + """ + Sort function used to sort permissions in .permissions() function of + Repository, RepoGroup, UserGroup. Also it put the default user in front + of all other resources + """ + + if obj.username == User.DEFAULT_USER: + return '#####' + prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '') + return prefix + obj.username + + +def display_user_group_sort(obj): + """ + Sort function used to sort permissions in .permissions() function of + Repository, RepoGroup, UserGroup. Also it put the default user in front + of all other resources + """ + + prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '') + return prefix + obj.users_group_name + + +def _hash_key(k): + return sha1_safe(k) + + +def in_filter_generator(qry, items, limit=500): + """ + Splits IN() into multiple with OR + e.g.:: + cnt = Repository.query().filter( + or_( + *in_filter_generator(Repository.repo_id, range(100000)) + )).count() + """ + if not items: + # empty list will cause empty query which might cause security issues + # this can lead to hidden unpleasant results + items = [-1] + + parts = [] + for chunk in xrange(0, len(items), limit): + parts.append( + qry.in_(items[chunk: chunk + limit]) + ) + + return parts + + +base_table_args = { + 'extend_existing': True, + 'mysql_engine': 'InnoDB', + 'mysql_charset': 'utf8', + 'sqlite_autoincrement': True +} + + +class EncryptedTextValue(TypeDecorator): + """ + Special column for encrypted long text data, use like:: + + value = Column("encrypted_value", EncryptedValue(), nullable=False) + + This column is intelligent so if value is in unencrypted form it return + unencrypted form, but on save it always encrypts + """ + impl = Text + + def process_bind_param(self, value, dialect): + """ + Setter for storing value + """ + import rhodecode + if not value: + return value + + # protect against double encrypting if values is already encrypted + if value.startswith('enc$aes$') \ + or value.startswith('enc$aes_hmac$') \ + or value.startswith('enc2$'): + raise ValueError('value needs to be in unencrypted format, ' + 'ie. 
not starting with enc$ or enc2$') + + algo = rhodecode.CONFIG.get('rhodecode.encrypted_values.algorithm') or 'aes' + if algo == 'aes': + return 'enc$aes_hmac$%s' % AESCipher(ENCRYPTION_KEY, hmac=True).encrypt(value) + elif algo == 'fernet': + return Encryptor(ENCRYPTION_KEY).encrypt(value) + else: + ValueError('Bad encryption algorithm, should be fernet or aes, got: {}'.format(algo)) + + def process_result_value(self, value, dialect): + """ + Getter for retrieving value + """ + + import rhodecode + if not value: + return value + + algo = rhodecode.CONFIG.get('rhodecode.encrypted_values.algorithm') or 'aes' + enc_strict_mode = str2bool(rhodecode.CONFIG.get('rhodecode.encrypted_values.strict') or True) + if algo == 'aes': + decrypted_data = validate_and_get_enc_data(value, ENCRYPTION_KEY, enc_strict_mode) + elif algo == 'fernet': + return Encryptor(ENCRYPTION_KEY).decrypt(value) + else: + ValueError('Bad encryption algorithm, should be fernet or aes, got: {}'.format(algo)) + return decrypted_data + + +class BaseModel(object): + """ + Base Model for all classes + """ + + @classmethod + def _get_keys(cls): + """return column names for this model """ + return class_mapper(cls).c.keys() + + def get_dict(self): + """ + return dict with keys and values corresponding + to this model data """ + + d = {} + for k in self._get_keys(): + d[k] = getattr(self, k) + + # also use __json__() if present to get additional fields + _json_attr = getattr(self, '__json__', None) + if _json_attr: + # update with attributes from __json__ + if callable(_json_attr): + _json_attr = _json_attr() + for k, val in _json_attr.iteritems(): + d[k] = val + return d + + def get_appstruct(self): + """return list with keys and values tuples corresponding + to this model data """ + + lst = [] + for k in self._get_keys(): + lst.append((k, getattr(self, k),)) + return lst + + def populate_obj(self, populate_dict): + """populate model with data from given populate_dict""" + + for k in self._get_keys(): + if k in populate_dict: + setattr(self, k, populate_dict[k]) + + @classmethod + def query(cls): + return Session().query(cls) + + @classmethod + def get(cls, id_): + if id_: + return cls.query().get(id_) + + @classmethod + def get_or_404(cls, id_): + from pyramid.httpexceptions import HTTPNotFound + + try: + id_ = int(id_) + except (TypeError, ValueError): + raise HTTPNotFound() + + res = cls.query().get(id_) + if not res: + raise HTTPNotFound() + return res + + @classmethod + def getAll(cls): + # deprecated and left for backward compatibility + return cls.get_all() + + @classmethod + def get_all(cls): + return cls.query().all() + + @classmethod + def delete(cls, id_): + obj = cls.query().get(id_) + Session().delete(obj) + + @classmethod + def identity_cache(cls, session, attr_name, value): + exist_in_session = [] + for (item_cls, pkey), instance in session.identity_map.items(): + if cls == item_cls and getattr(instance, attr_name) == value: + exist_in_session.append(instance) + if exist_in_session: + if len(exist_in_session) == 1: + return exist_in_session[0] + log.exception( + 'multiple objects with attr %s and ' + 'value %s found with same name: %r', + attr_name, value, exist_in_session) + + def __repr__(self): + if hasattr(self, '__unicode__'): + # python repr needs to return str + try: + return safe_str(self.__unicode__()) + except UnicodeDecodeError: + pass + return '' % (self.__class__.__name__) + + +class RhodeCodeSetting(Base, BaseModel): + __tablename__ = 'rhodecode_settings' + __table_args__ = ( + 
UniqueConstraint('app_settings_name'), + base_table_args + ) + + SETTINGS_TYPES = { + 'str': safe_str, + 'int': safe_int, + 'unicode': safe_unicode, + 'bool': str2bool, + 'list': functools.partial(aslist, sep=',') + } + DEFAULT_UPDATE_URL = 'https://rhodecode.com/api/v1/info/versions' + GLOBAL_CONF_KEY = 'app_settings' + + app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) + app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None) + _app_settings_value = Column("app_settings_value", String(4096), nullable=True, unique=None, default=None) + _app_settings_type = Column("app_settings_type", String(255), nullable=True, unique=None, default=None) + + def __init__(self, key='', val='', type='unicode'): + self.app_settings_name = key + self.app_settings_type = type + self.app_settings_value = val + + @validates('_app_settings_value') + def validate_settings_value(self, key, val): + assert type(val) == unicode + return val + + @hybrid_property + def app_settings_value(self): + v = self._app_settings_value + _type = self.app_settings_type + if _type: + _type = self.app_settings_type.split('.')[0] + # decode the encrypted value + if 'encrypted' in self.app_settings_type: + cipher = EncryptedTextValue() + v = safe_unicode(cipher.process_result_value(v, None)) + + converter = self.SETTINGS_TYPES.get(_type) or \ + self.SETTINGS_TYPES['unicode'] + return converter(v) + + @app_settings_value.setter + def app_settings_value(self, val): + """ + Setter that will always make sure we use unicode in app_settings_value + + :param val: + """ + val = safe_unicode(val) + # encode the encrypted value + if 'encrypted' in self.app_settings_type: + cipher = EncryptedTextValue() + val = safe_unicode(cipher.process_bind_param(val, None)) + self._app_settings_value = val + + @hybrid_property + def app_settings_type(self): + return self._app_settings_type + + @app_settings_type.setter + def app_settings_type(self, val): + if val.split('.')[0] not in self.SETTINGS_TYPES: + raise Exception('type must be one of %s got %s' + % (self.SETTINGS_TYPES.keys(), val)) + self._app_settings_type = val + + @classmethod + def get_by_prefix(cls, prefix): + return RhodeCodeSetting.query()\ + .filter(RhodeCodeSetting.app_settings_name.startswith(prefix))\ + .all() + + def __unicode__(self): + return u"<%s('%s:%s[%s]')>" % ( + self.__class__.__name__, + self.app_settings_name, self.app_settings_value, + self.app_settings_type + ) + + +class RhodeCodeUi(Base, BaseModel): + __tablename__ = 'rhodecode_ui' + __table_args__ = ( + UniqueConstraint('ui_key'), + base_table_args + ) + + HOOK_REPO_SIZE = 'changegroup.repo_size' + # HG + HOOK_PRE_PULL = 'preoutgoing.pre_pull' + HOOK_PULL = 'outgoing.pull_logger' + HOOK_PRE_PUSH = 'prechangegroup.pre_push' + HOOK_PRETX_PUSH = 'pretxnchangegroup.pre_push' + HOOK_PUSH = 'changegroup.push_logger' + HOOK_PUSH_KEY = 'pushkey.key_push' + + HOOKS_BUILTIN = [ + HOOK_PRE_PULL, + HOOK_PULL, + HOOK_PRE_PUSH, + HOOK_PRETX_PUSH, + HOOK_PUSH, + HOOK_PUSH_KEY, + ] + + # TODO: johbo: Unify way how hooks are configured for git and hg, + # git part is currently hardcoded. 
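The SETTINGS_TYPES map above drives how RhodeCodeSetting.app_settings_value turns the stored string back into a Python value. A simplified, self-contained sketch of that coercion; the converters are plain stand-ins for safe_str/safe_int/safe_unicode/str2bool/aslist, and the decryption branch for 'encrypted' types is omitted:

    # Simplified sketch of the SETTINGS_TYPES coercion used by app_settings_value above.
    # Stand-in converters only; the real code uses safe_str/safe_int/safe_unicode/str2bool/aslist
    # and additionally decrypts values whose type contains 'encrypted'.
    import functools

    def str2bool(v):
        return str(v).strip().lower() in ('true', 'yes', 'on', 'y', '1')

    def aslist(v, sep=','):
        return [part.strip() for part in str(v).split(sep) if part.strip()]

    SETTINGS_TYPES = {
        'str': str,
        'int': int,
        'unicode': str,
        'bool': str2bool,
        'list': functools.partial(aslist, sep=','),
    }

    def coerce_setting(raw_value, settings_type):
        # mirror app_settings_value: use the part before '.', fall back to 'unicode'
        base_type = settings_type.split('.')[0]
        converter = SETTINGS_TYPES.get(base_type) or SETTINGS_TYPES['unicode']
        return converter(raw_value)

    print(coerce_setting('25', 'int'))        # 25
    print(coerce_setting('on', 'bool'))       # True
    print(coerce_setting('a, b,c', 'list'))   # ['a', 'b', 'c']
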
+ + # SVN PATTERNS + SVN_BRANCH_ID = 'vcs_svn_branch' + SVN_TAG_ID = 'vcs_svn_tag' + + ui_id = Column( + "ui_id", Integer(), nullable=False, unique=True, default=None, + primary_key=True) + ui_section = Column( + "ui_section", String(255), nullable=True, unique=None, default=None) + ui_key = Column( + "ui_key", String(255), nullable=True, unique=None, default=None) + ui_value = Column( + "ui_value", String(255), nullable=True, unique=None, default=None) + ui_active = Column( + "ui_active", Boolean(), nullable=True, unique=None, default=True) + + def __repr__(self): + return '<%s[%s]%s=>%s]>' % (self.__class__.__name__, self.ui_section, + self.ui_key, self.ui_value) + + +class RepoRhodeCodeSetting(Base, BaseModel): + __tablename__ = 'repo_rhodecode_settings' + __table_args__ = ( + UniqueConstraint( + 'app_settings_name', 'repository_id', + name='uq_repo_rhodecode_setting_name_repo_id'), + base_table_args + ) + + repository_id = Column( + "repository_id", Integer(), ForeignKey('repositories.repo_id'), + nullable=False) + app_settings_id = Column( + "app_settings_id", Integer(), nullable=False, unique=True, + default=None, primary_key=True) + app_settings_name = Column( + "app_settings_name", String(255), nullable=True, unique=None, + default=None) + _app_settings_value = Column( + "app_settings_value", String(4096), nullable=True, unique=None, + default=None) + _app_settings_type = Column( + "app_settings_type", String(255), nullable=True, unique=None, + default=None) + + repository = relationship('Repository') + + def __init__(self, repository_id, key='', val='', type='unicode'): + self.repository_id = repository_id + self.app_settings_name = key + self.app_settings_type = type + self.app_settings_value = val + + @validates('_app_settings_value') + def validate_settings_value(self, key, val): + assert type(val) == unicode + return val + + @hybrid_property + def app_settings_value(self): + v = self._app_settings_value + type_ = self.app_settings_type + SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES + converter = SETTINGS_TYPES.get(type_) or SETTINGS_TYPES['unicode'] + return converter(v) + + @app_settings_value.setter + def app_settings_value(self, val): + """ + Setter that will always make sure we use unicode in app_settings_value + + :param val: + """ + self._app_settings_value = safe_unicode(val) + + @hybrid_property + def app_settings_type(self): + return self._app_settings_type + + @app_settings_type.setter + def app_settings_type(self, val): + SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES + if val not in SETTINGS_TYPES: + raise Exception('type must be one of %s got %s' + % (SETTINGS_TYPES.keys(), val)) + self._app_settings_type = val + + def __unicode__(self): + return u"<%s('%s:%s:%s[%s]')>" % ( + self.__class__.__name__, self.repository.repo_name, + self.app_settings_name, self.app_settings_value, + self.app_settings_type + ) + + +class RepoRhodeCodeUi(Base, BaseModel): + __tablename__ = 'repo_rhodecode_ui' + __table_args__ = ( + UniqueConstraint( + 'repository_id', 'ui_section', 'ui_key', + name='uq_repo_rhodecode_ui_repository_id_section_key'), + base_table_args + ) + + repository_id = Column( + "repository_id", Integer(), ForeignKey('repositories.repo_id'), + nullable=False) + ui_id = Column( + "ui_id", Integer(), nullable=False, unique=True, default=None, + primary_key=True) + ui_section = Column( + "ui_section", String(255), nullable=True, unique=None, default=None) + ui_key = Column( + "ui_key", String(255), nullable=True, unique=None, default=None) + ui_value = Column( + 
"ui_value", String(255), nullable=True, unique=None, default=None) + ui_active = Column( + "ui_active", Boolean(), nullable=True, unique=None, default=True) + + repository = relationship('Repository') + + def __repr__(self): + return '<%s[%s:%s]%s=>%s]>' % ( + self.__class__.__name__, self.repository.repo_name, + self.ui_section, self.ui_key, self.ui_value) + + +class User(Base, BaseModel): + __tablename__ = 'users' + __table_args__ = ( + UniqueConstraint('username'), UniqueConstraint('email'), + Index('u_username_idx', 'username'), + Index('u_email_idx', 'email'), + base_table_args + ) + + DEFAULT_USER = 'default' + DEFAULT_USER_EMAIL = 'anonymous@rhodecode.org' + DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}' + + user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) + username = Column("username", String(255), nullable=True, unique=None, default=None) + password = Column("password", String(255), nullable=True, unique=None, default=None) + active = Column("active", Boolean(), nullable=True, unique=None, default=True) + admin = Column("admin", Boolean(), nullable=True, unique=None, default=False) + name = Column("firstname", String(255), nullable=True, unique=None, default=None) + lastname = Column("lastname", String(255), nullable=True, unique=None, default=None) + _email = Column("email", String(255), nullable=True, unique=None, default=None) + last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None) + last_activity = Column('last_activity', DateTime(timezone=False), nullable=True, unique=None, default=None) + + extern_type = Column("extern_type", String(255), nullable=True, unique=None, default=None) + extern_name = Column("extern_name", String(255), nullable=True, unique=None, default=None) + _api_key = Column("api_key", String(255), nullable=True, unique=None, default=None) + inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True) + created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) + _user_data = Column("user_data", LargeBinary(), nullable=True) # JSON data + + user_log = relationship('UserLog') + user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all, delete-orphan') + + repositories = relationship('Repository') + repository_groups = relationship('RepoGroup') + user_groups = relationship('UserGroup') + + user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all') + followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all') + + repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all, delete-orphan') + repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all, delete-orphan') + user_group_to_perm = relationship('UserUserGroupToPerm', primaryjoin='UserUserGroupToPerm.user_id==User.user_id', cascade='all, delete-orphan') + + group_member = relationship('UserGroupMember', cascade='all') + + notifications = relationship('UserNotification', cascade='all') + # notifications assigned to this user + user_created_notifications = relationship('Notification', cascade='all') + # comments created by this user + user_comments = relationship('ChangesetComment', 
cascade='all') + # user profile extra info + user_emails = relationship('UserEmailMap', cascade='all') + user_ip_map = relationship('UserIpMap', cascade='all') + user_auth_tokens = relationship('UserApiKeys', cascade='all') + user_ssh_keys = relationship('UserSshKeys', cascade='all') + + # gists + user_gists = relationship('Gist', cascade='all') + # user pull requests + user_pull_requests = relationship('PullRequest', cascade='all') + # external identities + extenal_identities = relationship( + 'ExternalIdentity', + primaryjoin="User.user_id==ExternalIdentity.local_user_id", + cascade='all') + # review rules + user_review_rules = relationship('RepoReviewRuleUser', cascade='all') + + def __unicode__(self): + return u"<%s('id:%s:%s')>" % (self.__class__.__name__, + self.user_id, self.username) + + @hybrid_property + def email(self): + return self._email + + @email.setter + def email(self, val): + self._email = val.lower() if val else None + + @hybrid_property + def first_name(self): + from rhodecode.lib import helpers as h + if self.name: + return h.escape(self.name) + return self.name + + @hybrid_property + def last_name(self): + from rhodecode.lib import helpers as h + if self.lastname: + return h.escape(self.lastname) + return self.lastname + + @hybrid_property + def api_key(self): + """ + Fetch if exist an auth-token with role ALL connected to this user + """ + user_auth_token = UserApiKeys.query()\ + .filter(UserApiKeys.user_id == self.user_id)\ + .filter(or_(UserApiKeys.expires == -1, + UserApiKeys.expires >= time.time()))\ + .filter(UserApiKeys.role == UserApiKeys.ROLE_ALL).first() + if user_auth_token: + user_auth_token = user_auth_token.api_key + + return user_auth_token + + @api_key.setter + def api_key(self, val): + # don't allow to set API key this is deprecated for now + self._api_key = None + + @property + def reviewer_pull_requests(self): + return PullRequestReviewers.query() \ + .options(joinedload(PullRequestReviewers.pull_request)) \ + .filter(PullRequestReviewers.user_id == self.user_id) \ + .all() + + @property + def firstname(self): + # alias for future + return self.name + + @property + def emails(self): + other = UserEmailMap.query()\ + .filter(UserEmailMap.user == self) \ + .order_by(UserEmailMap.email_id.asc()) \ + .all() + return [self.email] + [x.email for x in other] + + @property + def auth_tokens(self): + auth_tokens = self.get_auth_tokens() + return [x.api_key for x in auth_tokens] + + def get_auth_tokens(self): + return UserApiKeys.query()\ + .filter(UserApiKeys.user == self)\ + .order_by(UserApiKeys.user_api_key_id.asc())\ + .all() + + @LazyProperty + def feed_token(self): + return self.get_feed_token() + + def get_feed_token(self, cache=True): + feed_tokens = UserApiKeys.query()\ + .filter(UserApiKeys.user == self)\ + .filter(UserApiKeys.role == UserApiKeys.ROLE_FEED) + if cache: + feed_tokens = feed_tokens.options( + FromCache("sql_cache_short", "get_user_feed_token_%s" % self.user_id)) + + feed_tokens = feed_tokens.all() + if feed_tokens: + return feed_tokens[0].api_key + return 'NO_FEED_TOKEN_AVAILABLE' + + @classmethod + def get(cls, user_id, cache=False): + if not user_id: + return + + user = cls.query() + if cache: + user = user.options( + FromCache("sql_cache_short", "get_users_%s" % user_id)) + return user.get(user_id) + + @classmethod + def extra_valid_auth_tokens(cls, user, role=None): + tokens = UserApiKeys.query().filter(UserApiKeys.user == user)\ + .filter(or_(UserApiKeys.expires == -1, + UserApiKeys.expires >= time.time())) + if role: + tokens 
= tokens.filter(or_(UserApiKeys.role == role, + UserApiKeys.role == UserApiKeys.ROLE_ALL)) + return tokens.all() + + def authenticate_by_token(self, auth_token, roles=None, scope_repo_id=None): + from rhodecode.lib import auth + + log.debug('Trying to authenticate user: %s via auth-token, ' + 'and roles: %s', self, roles) + + if not auth_token: + return False + + roles = (roles or []) + [UserApiKeys.ROLE_ALL] + tokens_q = UserApiKeys.query()\ + .filter(UserApiKeys.user_id == self.user_id)\ + .filter(or_(UserApiKeys.expires == -1, + UserApiKeys.expires >= time.time())) + + tokens_q = tokens_q.filter(UserApiKeys.role.in_(roles)) + + crypto_backend = auth.crypto_backend() + enc_token_map = {} + plain_token_map = {} + for token in tokens_q: + if token.api_key.startswith(crypto_backend.ENC_PREF): + enc_token_map[token.api_key] = token + else: + plain_token_map[token.api_key] = token + log.debug( + 'Found %s plain and %s encrypted user tokens to check for authentication', + len(plain_token_map), len(enc_token_map)) + + # plain token match comes first + match = plain_token_map.get(auth_token) + + # check encrypted tokens now + if not match: + for token_hash, token in enc_token_map.items(): + # NOTE(marcink): this is expensive to calculate, but most secure + if crypto_backend.hash_check(auth_token, token_hash): + match = token + break + + if match: + log.debug('Found matching token %s', match) + if match.repo_id: + log.debug('Found scope, checking for scope match of token %s', match) + if match.repo_id == scope_repo_id: + return True + else: + log.debug( + 'AUTH_TOKEN: scope mismatch, token has a set repo scope: %s, ' + 'and calling scope is:%s, skipping further checks', + match.repo, scope_repo_id) + return False + else: + return True + + return False + + @property + def ip_addresses(self): + ret = UserIpMap.query().filter(UserIpMap.user == self).all() + return [x.ip_addr for x in ret] + + @property + def username_and_name(self): + return '%s (%s %s)' % (self.username, self.first_name, self.last_name) + + @property + def username_or_name_or_email(self): + full_name = self.full_name if self.full_name is not ' ' else None + return self.username or full_name or self.email + + @property + def full_name(self): + return '%s %s' % (self.first_name, self.last_name) + + @property + def full_name_or_username(self): + return ('%s %s' % (self.first_name, self.last_name) + if (self.first_name and self.last_name) else self.username) + + @property + def full_contact(self): + return '%s %s <%s>' % (self.first_name, self.last_name, self.email) + + @property + def short_contact(self): + return '%s %s' % (self.first_name, self.last_name) + + @property + def is_admin(self): + return self.admin + + def AuthUser(self, **kwargs): + """ + Returns instance of AuthUser for this user + """ + from rhodecode.lib.auth import AuthUser + return AuthUser(user_id=self.user_id, username=self.username, **kwargs) + + @hybrid_property + def user_data(self): + if not self._user_data: + return {} + + try: + return json.loads(self._user_data) + except TypeError: + return {} + + @user_data.setter + def user_data(self, val): + if not isinstance(val, dict): + raise Exception('user_data must be dict, got %s' % type(val)) + try: + self._user_data = json.dumps(val) + except Exception: + log.error(traceback.format_exc()) + + @classmethod + def get_by_username(cls, username, case_insensitive=False, + cache=False, identity_cache=False): + session = Session() + + if case_insensitive: + q = cls.query().filter( + func.lower(cls.username) == 
func.lower(username)) + else: + q = cls.query().filter(cls.username == username) + + if cache: + if identity_cache: + val = cls.identity_cache(session, 'username', username) + if val: + return val + else: + cache_key = "get_user_by_name_%s" % _hash_key(username) + q = q.options( + FromCache("sql_cache_short", cache_key)) + + return q.scalar() + + @classmethod + def get_by_auth_token(cls, auth_token, cache=False): + q = UserApiKeys.query()\ + .filter(UserApiKeys.api_key == auth_token)\ + .filter(or_(UserApiKeys.expires == -1, + UserApiKeys.expires >= time.time())) + if cache: + q = q.options( + FromCache("sql_cache_short", "get_auth_token_%s" % auth_token)) + + match = q.first() + if match: + return match.user + + @classmethod + def get_by_email(cls, email, case_insensitive=False, cache=False): + + if case_insensitive: + q = cls.query().filter(func.lower(cls.email) == func.lower(email)) + + else: + q = cls.query().filter(cls.email == email) + + email_key = _hash_key(email) + if cache: + q = q.options( + FromCache("sql_cache_short", "get_email_key_%s" % email_key)) + + ret = q.scalar() + if ret is None: + q = UserEmailMap.query() + # try fetching in alternate email map + if case_insensitive: + q = q.filter(func.lower(UserEmailMap.email) == func.lower(email)) + else: + q = q.filter(UserEmailMap.email == email) + q = q.options(joinedload(UserEmailMap.user)) + if cache: + q = q.options( + FromCache("sql_cache_short", "get_email_map_key_%s" % email_key)) + ret = getattr(q.scalar(), 'user', None) + + return ret + + @classmethod + def get_from_cs_author(cls, author): + """ + Tries to get User objects out of commit author string + + :param author: + """ + from rhodecode.lib.helpers import email, author_name + # Valid email in the attribute passed, see if they're in the system + _email = email(author) + if _email: + user = cls.get_by_email(_email, case_insensitive=True) + if user: + return user + # Maybe we can match by username? + _author = author_name(author) + user = cls.get_by_username(_author, case_insensitive=True) + if user: + return user + + def update_userdata(self, **kwargs): + usr = self + old = usr.user_data + old.update(**kwargs) + usr.user_data = old + Session().add(usr) + log.debug('updated userdata with %s', kwargs) + + def update_lastlogin(self): + """Update user lastlogin""" + self.last_login = datetime.datetime.now() + Session().add(self) + log.debug('updated user %s lastlogin', self.username) + + def update_password(self, new_password): + from rhodecode.lib.auth import get_crypt_password + + self.password = get_crypt_password(new_password) + Session().add(self) + + @classmethod + def get_first_super_admin(cls): + user = User.query()\ + .filter(User.admin == true()) \ + .order_by(User.user_id.asc()) \ + .first() + + if user is None: + raise Exception('FATAL: Missing administrative account!') + return user + + @classmethod + def get_all_super_admins(cls, only_active=False): + """ + Returns all admin accounts sorted by username + """ + qry = User.query().filter(User.admin == true()).order_by(User.username.asc()) + if only_active: + qry = qry.filter(User.active == true()) + return qry.all() + + @classmethod + def get_default_user(cls, cache=False, refresh=False): + user = User.get_by_username(User.DEFAULT_USER, cache=cache) + if user is None: + raise Exception('FATAL: Missing default account!') + if refresh: + # The default user might be based on outdated state which + # has been loaded from the cache. 
+ # A call to refresh() ensures that the + # latest state from the database is used. + Session().refresh(user) + return user + + def _get_default_perms(self, user, suffix=''): + from rhodecode.model.permission import PermissionModel + return PermissionModel().get_default_perms(user.user_perms, suffix) + + def get_default_perms(self, suffix=''): + return self._get_default_perms(self, suffix) + + def get_api_data(self, include_secrets=False, details='full'): + """ + Common function for generating user related data for API + + :param include_secrets: By default secrets in the API data will be replaced + by a placeholder value to prevent exposing this data by accident. In case + this data shall be exposed, set this flag to ``True``. + + :param details: details can be 'basic|full' basic gives only a subset of + the available user information that includes user_id, name and emails. + """ + user = self + user_data = self.user_data + data = { + 'user_id': user.user_id, + 'username': user.username, + 'firstname': user.name, + 'lastname': user.lastname, + 'email': user.email, + 'emails': user.emails, + } + if details == 'basic': + return data + + auth_token_length = 40 + auth_token_replacement = '*' * auth_token_length + + extras = { + 'auth_tokens': [auth_token_replacement], + 'active': user.active, + 'admin': user.admin, + 'extern_type': user.extern_type, + 'extern_name': user.extern_name, + 'last_login': user.last_login, + 'last_activity': user.last_activity, + 'ip_addresses': user.ip_addresses, + 'language': user_data.get('language') + } + data.update(extras) + + if include_secrets: + data['auth_tokens'] = user.auth_tokens + return data + + def __json__(self): + data = { + 'full_name': self.full_name, + 'full_name_or_username': self.full_name_or_username, + 'short_contact': self.short_contact, + 'full_contact': self.full_contact, + } + data.update(self.get_api_data()) + return data + + +class UserApiKeys(Base, BaseModel): + __tablename__ = 'user_api_keys' + __table_args__ = ( + Index('uak_api_key_idx', 'api_key'), + Index('uak_api_key_expires_idx', 'api_key', 'expires'), + base_table_args + ) + __mapper_args__ = {} + + # ApiKey role + ROLE_ALL = 'token_role_all' + ROLE_HTTP = 'token_role_http' + ROLE_VCS = 'token_role_vcs' + ROLE_API = 'token_role_api' + ROLE_FEED = 'token_role_feed' + ROLE_PASSWORD_RESET = 'token_password_reset' + + ROLES = [ROLE_ALL, ROLE_HTTP, ROLE_VCS, ROLE_API, ROLE_FEED] + + user_api_key_id = Column("user_api_key_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) + user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None) + api_key = Column("api_key", String(255), nullable=False, unique=True) + description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql')) + expires = Column('expires', Float(53), nullable=False) + role = Column('role', String(255), nullable=True) + created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) + + # scope columns + repo_id = Column( + 'repo_id', Integer(), ForeignKey('repositories.repo_id'), + nullable=True, unique=None, default=None) + repo = relationship('Repository', lazy='joined') + + repo_group_id = Column( + 'repo_group_id', Integer(), ForeignKey('groups.group_id'), + nullable=True, unique=None, default=None) + repo_group = relationship('RepoGroup', lazy='joined') + + user = relationship('User', lazy='joined') + + def __unicode__(self): + return u"<%s('%s')>" % 
(self.__class__.__name__, self.role) + + def __json__(self): + data = { + 'auth_token': self.api_key, + 'role': self.role, + 'scope': self.scope_humanized, + 'expired': self.expired + } + return data + + def get_api_data(self, include_secrets=False): + data = self.__json__() + if include_secrets: + return data + else: + data['auth_token'] = self.token_obfuscated + return data + + @hybrid_property + def description_safe(self): + from rhodecode.lib import helpers as h + return h.escape(self.description) + + @property + def expired(self): + if self.expires == -1: + return False + return time.time() > self.expires + + @classmethod + def _get_role_name(cls, role): + return { + cls.ROLE_ALL: _('all'), + cls.ROLE_HTTP: _('http/web interface'), + cls.ROLE_VCS: _('vcs (git/hg/svn protocol)'), + cls.ROLE_API: _('api calls'), + cls.ROLE_FEED: _('feed access'), + }.get(role, role) + + @property + def role_humanized(self): + return self._get_role_name(self.role) + + def _get_scope(self): + if self.repo: + return 'Repository: {}'.format(self.repo.repo_name) + if self.repo_group: + return 'RepositoryGroup: {} (recursive)'.format(self.repo_group.group_name) + return 'Global' + + @property + def scope_humanized(self): + return self._get_scope() + + @property + def token_obfuscated(self): + if self.api_key: + return self.api_key[:4] + "****" + + +class UserEmailMap(Base, BaseModel): + __tablename__ = 'user_email_map' + __table_args__ = ( + Index('uem_email_idx', 'email'), + UniqueConstraint('email'), + base_table_args + ) + __mapper_args__ = {} + + email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) + user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None) + _email = Column("email", String(255), nullable=True, unique=False, default=None) + user = relationship('User', lazy='joined') + + @validates('_email') + def validate_email(self, key, email): + # check if this email is not main one + main_email = Session().query(User).filter(User.email == email).scalar() + if main_email is not None: + raise AttributeError('email %s is present is user table' % email) + return email + + @hybrid_property + def email(self): + return self._email + + @email.setter + def email(self, val): + self._email = val.lower() if val else None + + +class UserIpMap(Base, BaseModel): + __tablename__ = 'user_ip_map' + __table_args__ = ( + UniqueConstraint('user_id', 'ip_addr'), + base_table_args + ) + __mapper_args__ = {} + + ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) + user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None) + ip_addr = Column("ip_addr", String(255), nullable=True, unique=False, default=None) + active = Column("active", Boolean(), nullable=True, unique=None, default=True) + description = Column("description", String(10000), nullable=True, unique=None, default=None) + user = relationship('User', lazy='joined') + + @hybrid_property + def description_safe(self): + from rhodecode.lib import helpers as h + return h.escape(self.description) + + @classmethod + def _get_ip_range(cls, ip_addr): + net = ipaddress.ip_network(safe_unicode(ip_addr), strict=False) + return [str(net.network_address), str(net.broadcast_address)] + + def __json__(self): + return { + 'ip_addr': self.ip_addr, + 'ip_range': self._get_ip_range(self.ip_addr), + } + + def __unicode__(self): + return u"<%s('user_id:%s=>%s')>" % (self.__class__.__name__, + 
self.user_id, self.ip_addr) + + +class UserSshKeys(Base, BaseModel): + __tablename__ = 'user_ssh_keys' + __table_args__ = ( + Index('usk_ssh_key_fingerprint_idx', 'ssh_key_fingerprint'), + + UniqueConstraint('ssh_key_fingerprint'), + + base_table_args + ) + __mapper_args__ = {} + + ssh_key_id = Column('ssh_key_id', Integer(), nullable=False, unique=True, default=None, primary_key=True) + ssh_key_data = Column('ssh_key_data', String(10240), nullable=False, unique=None, default=None) + ssh_key_fingerprint = Column('ssh_key_fingerprint', String(255), nullable=False, unique=None, default=None) + + description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql')) + + created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) + accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True, default=None) + user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None) + + user = relationship('User', lazy='joined') + + def __json__(self): + data = { + 'ssh_fingerprint': self.ssh_key_fingerprint, + 'description': self.description, + 'created_on': self.created_on + } + return data + + def get_api_data(self): + data = self.__json__() + return data + + +class UserLog(Base, BaseModel): + __tablename__ = 'user_logs' + __table_args__ = ( + base_table_args, + ) + + VERSION_1 = 'v1' + VERSION_2 = 'v2' + VERSIONS = [VERSION_1, VERSION_2] + + user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) + user_id = Column("user_id", Integer(), ForeignKey('users.user_id',ondelete='SET NULL'), nullable=True, unique=None, default=None) + username = Column("username", String(255), nullable=True, unique=None, default=None) + repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id', ondelete='SET NULL'), nullable=True, unique=None, default=None) + repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None) + user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None) + action = Column("action", Text().with_variant(Text(1200000), 'mysql'), nullable=True, unique=None, default=None) + action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None) + + version = Column("version", String(255), nullable=True, default=VERSION_1) + user_data = Column('user_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT())))) + action_data = Column('action_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT())))) + + def __unicode__(self): + return u"<%s('id:%s:%s')>" % ( + self.__class__.__name__, self.repository_name, self.action) + + def __json__(self): + return { + 'user_id': self.user_id, + 'username': self.username, + 'repository_id': self.repository_id, + 'repository_name': self.repository_name, + 'user_ip': self.user_ip, + 'action_date': self.action_date, + 'action': self.action, + } + + @hybrid_property + def entry_id(self): + return self.user_log_id + + @property + def action_as_day(self): + return datetime.date(*self.action_date.timetuple()[:3]) + + user = relationship('User') + repository = relationship('Repository', cascade='') + + +class UserGroup(Base, BaseModel): + __tablename__ = 'users_groups' + __table_args__ = ( + base_table_args, + ) + + users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) + 
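UserIpMap._get_ip_range above expands a stored address or CIDR into its network/broadcast bounds; a quick stdlib-only illustration, with example addresses only:

    # Stdlib-only illustration of the range expansion done by UserIpMap._get_ip_range above.
    import ipaddress

    def get_ip_range(ip_addr):
        net = ipaddress.ip_network(u'{}'.format(ip_addr), strict=False)
        return [str(net.network_address), str(net.broadcast_address)]

    print(get_ip_range('192.168.1.17/24'))  # ['192.168.1.0', '192.168.1.255']
    print(get_ip_range('10.0.0.5'))         # ['10.0.0.5', '10.0.0.5'] (single host)
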
users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None) + user_group_description = Column("user_group_description", String(10000), nullable=True, unique=None, default=None) + users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None) + inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True) + user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None) + created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) + _group_data = Column("group_data", LargeBinary(), nullable=True) # JSON data + + members = relationship('UserGroupMember', cascade="all, delete-orphan", lazy="joined") + users_group_to_perm = relationship('UserGroupToPerm', cascade='all') + users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all') + users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all') + user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all') + user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm ', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all') + + user_group_review_rules = relationship('RepoReviewRuleUserGroup', cascade='all') + user = relationship('User', primaryjoin="User.user_id==UserGroup.user_id") + + @classmethod + def _load_group_data(cls, column): + if not column: + return {} + + try: + return json.loads(column) or {} + except TypeError: + return {} + + @hybrid_property + def description_safe(self): + from rhodecode.lib import helpers as h + return h.escape(self.user_group_description) + + @hybrid_property + def group_data(self): + return self._load_group_data(self._group_data) + + @group_data.expression + def group_data(self, **kwargs): + return self._group_data + + @group_data.setter + def group_data(self, val): + try: + self._group_data = json.dumps(val) + except Exception: + log.error(traceback.format_exc()) + + @classmethod + def _load_sync(cls, group_data): + if group_data: + return group_data.get('extern_type') + + @property + def sync(self): + return self._load_sync(self.group_data) + + def __unicode__(self): + return u"<%s('id:%s:%s')>" % (self.__class__.__name__, + self.users_group_id, + self.users_group_name) + + @classmethod + def get_by_group_name(cls, group_name, cache=False, + case_insensitive=False): + if case_insensitive: + q = cls.query().filter(func.lower(cls.users_group_name) == + func.lower(group_name)) + + else: + q = cls.query().filter(cls.users_group_name == group_name) + if cache: + q = q.options( + FromCache("sql_cache_short", "get_group_%s" % _hash_key(group_name))) + return q.scalar() + + @classmethod + def get(cls, user_group_id, cache=False): + if not user_group_id: + return + + user_group = cls.query() + if cache: + user_group = user_group.options( + FromCache("sql_cache_short", "get_users_group_%s" % user_group_id)) + return user_group.get(user_group_id) + + def permissions(self, with_admins=True, with_owner=True, + expand_from_user_groups=False): + """ + Permissions for user groups + """ + _admin_perm = 'usergroup.admin' + + owner_row = [] + if with_owner: + usr = AttributeDict(self.user.get_dict()) + usr.owner_row = True + usr.permission = _admin_perm + owner_row.append(usr) + + super_admin_ids = [] + super_admin_rows = [] + if with_admins: + for usr in 
User.get_all_super_admins(): + super_admin_ids.append(usr.user_id) + # if this admin is also owner, don't double the record + if usr.user_id == owner_row[0].user_id: + owner_row[0].admin_row = True + else: + usr = AttributeDict(usr.get_dict()) + usr.admin_row = True + usr.permission = _admin_perm + super_admin_rows.append(usr) + + q = UserUserGroupToPerm.query().filter(UserUserGroupToPerm.user_group == self) + q = q.options(joinedload(UserUserGroupToPerm.user_group), + joinedload(UserUserGroupToPerm.user), + joinedload(UserUserGroupToPerm.permission),) + + # get owners and admins and permissions. We do a trick of re-writing + # objects from sqlalchemy to named-tuples due to sqlalchemy session + # has a global reference and changing one object propagates to all + # others. This means if admin is also an owner admin_row that change + # would propagate to both objects + perm_rows = [] + for _usr in q.all(): + usr = AttributeDict(_usr.user.get_dict()) + # if this user is also owner/admin, mark as duplicate record + if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids: + usr.duplicate_perm = True + usr.permission = _usr.permission.permission_name + perm_rows.append(usr) + + # filter the perm rows by 'default' first and then sort them by + # admin,write,read,none permissions sorted again alphabetically in + # each group + perm_rows = sorted(perm_rows, key=display_user_sort) + + user_groups_rows = [] + if expand_from_user_groups: + for ug in self.permission_user_groups(with_members=True): + for user_data in ug.members: + user_groups_rows.append(user_data) + + return super_admin_rows + owner_row + perm_rows + user_groups_rows + + def permission_user_groups(self, with_members=False): + q = UserGroupUserGroupToPerm.query()\ + .filter(UserGroupUserGroupToPerm.target_user_group == self) + q = q.options(joinedload(UserGroupUserGroupToPerm.user_group), + joinedload(UserGroupUserGroupToPerm.target_user_group), + joinedload(UserGroupUserGroupToPerm.permission),) + + perm_rows = [] + for _user_group in q.all(): + entry = AttributeDict(_user_group.user_group.get_dict()) + entry.permission = _user_group.permission.permission_name + if with_members: + entry.members = [x.user.get_dict() + for x in _user_group.user_group.members] + perm_rows.append(entry) + + perm_rows = sorted(perm_rows, key=display_user_group_sort) + return perm_rows + + def _get_default_perms(self, user_group, suffix=''): + from rhodecode.model.permission import PermissionModel + return PermissionModel().get_default_perms(user_group.users_group_to_perm, suffix) + + def get_default_perms(self, suffix=''): + return self._get_default_perms(self, suffix) + + def get_api_data(self, with_group_members=True, include_secrets=False): + """ + :param include_secrets: See :meth:`User.get_api_data`, this parameter is + basically forwarded. 
+ + """ + user_group = self + data = { + 'users_group_id': user_group.users_group_id, + 'group_name': user_group.users_group_name, + 'group_description': user_group.user_group_description, + 'active': user_group.users_group_active, + 'owner': user_group.user.username, + 'sync': user_group.sync, + 'owner_email': user_group.user.email, + } + + if with_group_members: + users = [] + for user in user_group.members: + user = user.user + users.append(user.get_api_data(include_secrets=include_secrets)) + data['users'] = users + + return data + + +class UserGroupMember(Base, BaseModel): + __tablename__ = 'users_groups_members' + __table_args__ = ( + base_table_args, + ) + + users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) + users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None) + user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) + + user = relationship('User', lazy='joined') + users_group = relationship('UserGroup') + + def __init__(self, gr_id='', u_id=''): + self.users_group_id = gr_id + self.user_id = u_id + + +class RepositoryField(Base, BaseModel): + __tablename__ = 'repositories_fields' + __table_args__ = ( + UniqueConstraint('repository_id', 'field_key'), # no-multi field + base_table_args, + ) + + PREFIX = 'ex_' # prefix used in form to not conflict with already existing fields + + repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) + repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None) + field_key = Column("field_key", String(250)) + field_label = Column("field_label", String(1024), nullable=False) + field_value = Column("field_value", String(10000), nullable=False) + field_desc = Column("field_desc", String(1024), nullable=False) + field_type = Column("field_type", String(255), nullable=False, unique=None) + created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) + + repository = relationship('Repository') + + @property + def field_key_prefixed(self): + return 'ex_%s' % self.field_key + + @classmethod + def un_prefix_key(cls, key): + if key.startswith(cls.PREFIX): + return key[len(cls.PREFIX):] + return key + + @classmethod + def get_by_key_name(cls, key, repo): + row = cls.query()\ + .filter(cls.repository == repo)\ + .filter(cls.field_key == key).scalar() + return row + + +class Repository(Base, BaseModel): + __tablename__ = 'repositories' + __table_args__ = ( + Index('r_repo_name_idx', 'repo_name', mysql_length=255), + base_table_args, + ) + DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}' + DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}' + DEFAULT_CLONE_URI_SSH = 'ssh://{sys_user}@{hostname}/{repo}' + + STATE_CREATED = 'repo_state_created' + STATE_PENDING = 'repo_state_pending' + STATE_ERROR = 'repo_state_error' + + LOCK_AUTOMATIC = 'lock_auto' + LOCK_API = 'lock_api' + LOCK_WEB = 'lock_web' + LOCK_PULL = 'lock_pull' + + NAME_SEP = URL_SEP + + repo_id = Column( + "repo_id", Integer(), nullable=False, unique=True, default=None, + primary_key=True) + _repo_name = Column( + "repo_name", Text(), nullable=False, default=None) + _repo_name_hash = Column( + "repo_name_hash", String(255), nullable=False, unique=True) + repo_state = Column("repo_state", String(255), 
nullable=True) + + clone_uri = Column( + "clone_uri", EncryptedTextValue(), nullable=True, unique=False, + default=None) + push_uri = Column( + "push_uri", EncryptedTextValue(), nullable=True, unique=False, + default=None) + repo_type = Column( + "repo_type", String(255), nullable=False, unique=False, default=None) + user_id = Column( + "user_id", Integer(), ForeignKey('users.user_id'), nullable=False, + unique=False, default=None) + private = Column( + "private", Boolean(), nullable=True, unique=None, default=None) + archived = Column( + "archived", Boolean(), nullable=True, unique=None, default=None) + enable_statistics = Column( + "statistics", Boolean(), nullable=True, unique=None, default=True) + enable_downloads = Column( + "downloads", Boolean(), nullable=True, unique=None, default=True) + description = Column( + "description", String(10000), nullable=True, unique=None, default=None) + created_on = Column( + 'created_on', DateTime(timezone=False), nullable=True, unique=None, + default=datetime.datetime.now) + updated_on = Column( + 'updated_on', DateTime(timezone=False), nullable=True, unique=None, + default=datetime.datetime.now) + _landing_revision = Column( + "landing_revision", String(255), nullable=False, unique=False, + default=None) + enable_locking = Column( + "enable_locking", Boolean(), nullable=False, unique=None, + default=False) + _locked = Column( + "locked", String(255), nullable=True, unique=False, default=None) + _changeset_cache = Column( + "changeset_cache", LargeBinary(), nullable=True) # JSON data + + fork_id = Column( + "fork_id", Integer(), ForeignKey('repositories.repo_id'), + nullable=True, unique=False, default=None) + group_id = Column( + "group_id", Integer(), ForeignKey('groups.group_id'), nullable=True, + unique=False, default=None) + + user = relationship('User', lazy='joined') + fork = relationship('Repository', remote_side=repo_id, lazy='joined') + group = relationship('RepoGroup', lazy='joined') + repo_to_perm = relationship( + 'UserRepoToPerm', cascade='all', + order_by='UserRepoToPerm.repo_to_perm_id') + users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all') + stats = relationship('Statistics', cascade='all', uselist=False) + + followers = relationship( + 'UserFollowing', + primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id', + cascade='all') + extra_fields = relationship( + 'RepositoryField', cascade="all, delete-orphan") + logs = relationship('UserLog') + comments = relationship( + 'ChangesetComment', cascade="all, delete-orphan") + pull_requests_source = relationship( + 'PullRequest', + primaryjoin='PullRequest.source_repo_id==Repository.repo_id', + cascade="all, delete-orphan") + pull_requests_target = relationship( + 'PullRequest', + primaryjoin='PullRequest.target_repo_id==Repository.repo_id', + cascade="all, delete-orphan") + ui = relationship('RepoRhodeCodeUi', cascade="all") + settings = relationship('RepoRhodeCodeSetting', cascade="all") + integrations = relationship('Integration', cascade="all, delete-orphan") + + scoped_tokens = relationship('UserApiKeys', cascade="all") + + artifacts = relationship('FileStore', cascade="all") + + def __unicode__(self): + return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id, + safe_unicode(self.repo_name)) + + @hybrid_property + def description_safe(self): + from rhodecode.lib import helpers as h + return h.escape(self.description) + + @hybrid_property + def landing_rev(self): + # always should return [rev_type, rev] + if self._landing_revision: + _rev_info = 
self._landing_revision.split(':') + if len(_rev_info) < 2: + _rev_info.insert(0, 'rev') + return [_rev_info[0], _rev_info[1]] + return [None, None] + + @landing_rev.setter + def landing_rev(self, val): + if ':' not in val: + raise ValueError('value must be delimited with `:` and consist ' + 'of :, got %s instead' % val) + self._landing_revision = val + + @hybrid_property + def locked(self): + if self._locked: + user_id, timelocked, reason = self._locked.split(':') + lock_values = int(user_id), timelocked, reason + else: + lock_values = [None, None, None] + return lock_values + + @locked.setter + def locked(self, val): + if val and isinstance(val, (list, tuple)): + self._locked = ':'.join(map(str, val)) + else: + self._locked = None + + @hybrid_property + def changeset_cache(self): + from rhodecode.lib.vcs.backends.base import EmptyCommit + dummy = EmptyCommit().__json__() + if not self._changeset_cache: + dummy['source_repo_id'] = self.repo_id + return json.loads(json.dumps(dummy)) + + try: + return json.loads(self._changeset_cache) + except TypeError: + return dummy + except Exception: + log.error(traceback.format_exc()) + return dummy + + @changeset_cache.setter + def changeset_cache(self, val): + try: + self._changeset_cache = json.dumps(val) + except Exception: + log.error(traceback.format_exc()) + + @hybrid_property + def repo_name(self): + return self._repo_name + + @repo_name.setter + def repo_name(self, value): + self._repo_name = value + self._repo_name_hash = hashlib.sha1(safe_str(value)).hexdigest() + + @classmethod + def normalize_repo_name(cls, repo_name): + """ + Normalizes os specific repo_name to the format internally stored inside + database using URL_SEP + + :param cls: + :param repo_name: + """ + return cls.NAME_SEP.join(repo_name.split(os.sep)) + + @classmethod + def get_by_repo_name(cls, repo_name, cache=False, identity_cache=False): + session = Session() + q = session.query(cls).filter(cls.repo_name == repo_name) + + if cache: + if identity_cache: + val = cls.identity_cache(session, 'repo_name', repo_name) + if val: + return val + else: + cache_key = "get_repo_by_name_%s" % _hash_key(repo_name) + q = q.options( + FromCache("sql_cache_short", cache_key)) + + return q.scalar() + + @classmethod + def get_by_id_or_repo_name(cls, repoid): + if isinstance(repoid, (int, long)): + try: + repo = cls.get(repoid) + except ValueError: + repo = None + else: + repo = cls.get_by_repo_name(repoid) + return repo + + @classmethod + def get_by_full_path(cls, repo_full_path): + repo_name = repo_full_path.split(cls.base_path(), 1)[-1] + repo_name = cls.normalize_repo_name(repo_name) + return cls.get_by_repo_name(repo_name.strip(URL_SEP)) + + @classmethod + def get_repo_forks(cls, repo_id): + return cls.query().filter(Repository.fork_id == repo_id) + + @classmethod + def base_path(cls): + """ + Returns base path when all repos are stored + + :param cls: + """ + q = Session().query(RhodeCodeUi)\ + .filter(RhodeCodeUi.ui_key == cls.NAME_SEP) + q = q.options(FromCache("sql_cache_short", "repository_repo_path")) + return q.one().ui_value + + @classmethod + def get_all_repos(cls, user_id=Optional(None), group_id=Optional(None), + case_insensitive=True, archived=False): + q = Repository.query() + + if not archived: + q = q.filter(Repository.archived.isnot(true())) + + if not isinstance(user_id, Optional): + q = q.filter(Repository.user_id == user_id) + + if not isinstance(group_id, Optional): + q = q.filter(Repository.group_id == group_id) + + if case_insensitive: + q = 
q.order_by(func.lower(Repository.repo_name))
+        else:
+            q = q.order_by(Repository.repo_name)
+
+        return q.all()
+
+    @property
+    def repo_uid(self):
+        return '_{}'.format(self.repo_id)
+
+    @property
+    def forks(self):
+        """
+        Return forks of this repo
+        """
+        return Repository.get_repo_forks(self.repo_id)
+
+    @property
+    def parent(self):
+        """
+        Returns fork parent
+        """
+        return self.fork
+
+    @property
+    def just_name(self):
+        return self.repo_name.split(self.NAME_SEP)[-1]
+
+    @property
+    def groups_with_parents(self):
+        groups = []
+        if self.group is None:
+            return groups
+
+        cur_gr = self.group
+        groups.insert(0, cur_gr)
+        while 1:
+            gr = getattr(cur_gr, 'parent_group', None)
+            cur_gr = cur_gr.parent_group
+            if gr is None:
+                break
+            groups.insert(0, gr)
+
+        return groups
+
+    @property
+    def groups_and_repo(self):
+        return self.groups_with_parents, self
+
+    @LazyProperty
+    def repo_path(self):
+        """
+        Returns the full base path for this repository, i.e. where it
+        actually exists on the filesystem
+        """
+        q = Session().query(RhodeCodeUi).filter(
+            RhodeCodeUi.ui_key == self.NAME_SEP)
+        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
+        return q.one().ui_value
+
+    @property
+    def repo_full_path(self):
+        p = [self.repo_path]
+        # we need to split the name by / since this is how we store the
+        # names in the database, but that eventually needs to be converted
+        # into a valid system path
+        p += self.repo_name.split(self.NAME_SEP)
+        return os.path.join(*map(safe_unicode, p))
+
+    @property
+    def cache_keys(self):
+        """
+        Returns associated cache keys for that repo
+        """
+        invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
+            repo_id=self.repo_id)
+        return CacheKey.query()\
+            .filter(CacheKey.cache_args == invalidation_namespace)\
+            .order_by(CacheKey.cache_key)\
+            .all()
+
+    @property
+    def cached_diffs_relative_dir(self):
+        """
+        Return the cached diffs path relative to the repository store,
+        safe to display to users, who shouldn't know the absolute store
+        path
+        """
+        return os.path.join(
+            os.path.dirname(self.repo_name),
+            self.cached_diffs_dir.split(os.path.sep)[-1])
+
+    @property
+    def cached_diffs_dir(self):
+        path = self.repo_full_path
+        return os.path.join(
+            os.path.dirname(path),
+            '.__shadow_diff_cache_repo_{}'.format(self.repo_id))
+
+    def cached_diffs(self):
+        diff_cache_dir = self.cached_diffs_dir
+        if os.path.isdir(diff_cache_dir):
+            return os.listdir(diff_cache_dir)
+        return []
+
+    def shadow_repos(self):
+        shadow_repos_pattern = '.__shadow_repo_{}'.format(self.repo_id)
+        return [
+            x for x in os.listdir(os.path.dirname(self.repo_full_path))
+            if x.startswith(shadow_repos_pattern)]
+
+    def get_new_name(self, repo_name):
+        """
+        returns new full repository name based on assigned group and new
+        repo name
+
+        :param repo_name:
+        """
+        path_prefix = self.group.full_path_splitted if self.group else []
+        return self.NAME_SEP.join(path_prefix + [repo_name])
+
+    @property
+    def _config(self):
+        """
+        Returns db based config object.
+ """ + from rhodecode.lib.utils import make_db_config + return make_db_config(clear_session=False, repo=self) + + def permissions(self, with_admins=True, with_owner=True, + expand_from_user_groups=False): + """ + Permissions for repositories + """ + _admin_perm = 'repository.admin' + + owner_row = [] + if with_owner: + usr = AttributeDict(self.user.get_dict()) + usr.owner_row = True + usr.permission = _admin_perm + usr.permission_id = None + owner_row.append(usr) + + super_admin_ids = [] + super_admin_rows = [] + if with_admins: + for usr in User.get_all_super_admins(): + super_admin_ids.append(usr.user_id) + # if this admin is also owner, don't double the record + if usr.user_id == owner_row[0].user_id: + owner_row[0].admin_row = True + else: + usr = AttributeDict(usr.get_dict()) + usr.admin_row = True + usr.permission = _admin_perm + usr.permission_id = None + super_admin_rows.append(usr) + + q = UserRepoToPerm.query().filter(UserRepoToPerm.repository == self) + q = q.options(joinedload(UserRepoToPerm.repository), + joinedload(UserRepoToPerm.user), + joinedload(UserRepoToPerm.permission),) + + # get owners and admins and permissions. We do a trick of re-writing + # objects from sqlalchemy to named-tuples due to sqlalchemy session + # has a global reference and changing one object propagates to all + # others. This means if admin is also an owner admin_row that change + # would propagate to both objects + perm_rows = [] + for _usr in q.all(): + usr = AttributeDict(_usr.user.get_dict()) + # if this user is also owner/admin, mark as duplicate record + if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids: + usr.duplicate_perm = True + # also check if this permission is maybe used by branch_permissions + if _usr.branch_perm_entry: + usr.branch_rules = [x.branch_rule_id for x in _usr.branch_perm_entry] + + usr.permission = _usr.permission.permission_name + usr.permission_id = _usr.repo_to_perm_id + perm_rows.append(usr) + + # filter the perm rows by 'default' first and then sort them by + # admin,write,read,none permissions sorted again alphabetically in + # each group + perm_rows = sorted(perm_rows, key=display_user_sort) + + user_groups_rows = [] + if expand_from_user_groups: + for ug in self.permission_user_groups(with_members=True): + for user_data in ug.members: + user_groups_rows.append(user_data) + + return super_admin_rows + owner_row + perm_rows + user_groups_rows + + def permission_user_groups(self, with_members=True): + q = UserGroupRepoToPerm.query()\ + .filter(UserGroupRepoToPerm.repository == self) + q = q.options(joinedload(UserGroupRepoToPerm.repository), + joinedload(UserGroupRepoToPerm.users_group), + joinedload(UserGroupRepoToPerm.permission),) + + perm_rows = [] + for _user_group in q.all(): + entry = AttributeDict(_user_group.users_group.get_dict()) + entry.permission = _user_group.permission.permission_name + if with_members: + entry.members = [x.user.get_dict() + for x in _user_group.users_group.members] + perm_rows.append(entry) + + perm_rows = sorted(perm_rows, key=display_user_group_sort) + return perm_rows + + def get_api_data(self, include_secrets=False): + """ + Common function for generating repo api data + + :param include_secrets: See :meth:`User.get_api_data`. + + """ + # TODO: mikhail: Here there is an anti-pattern, we probably need to + # move this methods on models level. 
+ from rhodecode.model.settings import SettingsModel + from rhodecode.model.repo import RepoModel + + repo = self + _user_id, _time, _reason = self.locked + + data = { + 'repo_id': repo.repo_id, + 'repo_name': repo.repo_name, + 'repo_type': repo.repo_type, + 'clone_uri': repo.clone_uri or '', + 'push_uri': repo.push_uri or '', + 'url': RepoModel().get_url(self), + 'private': repo.private, + 'created_on': repo.created_on, + 'description': repo.description_safe, + 'landing_rev': repo.landing_rev, + 'owner': repo.user.username, + 'fork_of': repo.fork.repo_name if repo.fork else None, + 'fork_of_id': repo.fork.repo_id if repo.fork else None, + 'enable_statistics': repo.enable_statistics, + 'enable_locking': repo.enable_locking, + 'enable_downloads': repo.enable_downloads, + 'last_changeset': repo.changeset_cache, + 'locked_by': User.get(_user_id).get_api_data( + include_secrets=include_secrets) if _user_id else None, + 'locked_date': time_to_datetime(_time) if _time else None, + 'lock_reason': _reason if _reason else None, + } + + # TODO: mikhail: should be per-repo settings here + rc_config = SettingsModel().get_all_settings() + repository_fields = str2bool( + rc_config.get('rhodecode_repository_fields')) + if repository_fields: + for f in self.extra_fields: + data[f.field_key_prefixed] = f.field_value + + return data + + @classmethod + def lock(cls, repo, user_id, lock_time=None, lock_reason=None): + if not lock_time: + lock_time = time.time() + if not lock_reason: + lock_reason = cls.LOCK_AUTOMATIC + repo.locked = [user_id, lock_time, lock_reason] + Session().add(repo) + Session().commit() + + @classmethod + def unlock(cls, repo): + repo.locked = None + Session().add(repo) + Session().commit() + + @classmethod + def getlock(cls, repo): + return repo.locked + + def is_user_lock(self, user_id): + if self.lock[0]: + lock_user_id = safe_int(self.lock[0]) + user_id = safe_int(user_id) + # both are ints, and they are equal + return all([lock_user_id, user_id]) and lock_user_id == user_id + + return False + + def get_locking_state(self, action, user_id, only_when_enabled=True): + """ + Checks locking on this repository, if locking is enabled and lock is + present returns a tuple of make_lock, locked, locked_by. + make_lock can have 3 states None (do nothing) True, make lock + False release lock, This value is later propagated to hooks, which + do the locking. Think about this as signals passed to hooks what to do. + + """ + # TODO: johbo: This is part of the business logic and should be moved + # into the RepositoryModel. + + if action not in ('push', 'pull'): + raise ValueError("Invalid action value: %s" % repr(action)) + + # defines if locked error should be thrown to user + currently_locked = False + # defines if new lock should be made, tri-state + make_lock = None + repo = self + user = User.get(user_id) + + lock_info = repo.locked + + if repo and (repo.enable_locking or not only_when_enabled): + if action == 'push': + # check if it's already locked !, if it is compare users + locked_by_user_id = lock_info[0] + if user.user_id == locked_by_user_id: + log.debug( + 'Got `push` action from user %s, now unlocking', user) + # unlock if we have push from user who locked + make_lock = False + else: + # we're not the same user who locked, ban with + # code defined in settings (default is 423 HTTP Locked) ! 
+ log.debug('Repo %s is currently locked by %s', repo, user) + currently_locked = True + elif action == 'pull': + # [0] user [1] date + if lock_info[0] and lock_info[1]: + log.debug('Repo %s is currently locked by %s', repo, user) + currently_locked = True + else: + log.debug('Setting lock on repo %s by %s', repo, user) + make_lock = True + + else: + log.debug('Repository %s do not have locking enabled', repo) + + log.debug('FINAL locking values make_lock:%s,locked:%s,locked_by:%s', + make_lock, currently_locked, lock_info) + + from rhodecode.lib.auth import HasRepoPermissionAny + perm_check = HasRepoPermissionAny('repository.write', 'repository.admin') + if make_lock and not perm_check(repo_name=repo.repo_name, user=user): + # if we don't have at least write permission we cannot make a lock + log.debug('lock state reset back to FALSE due to lack ' + 'of at least read permission') + make_lock = False + + return make_lock, currently_locked, lock_info + + @property + def last_commit_cache_update_diff(self): + return time.time() - (safe_int(self.changeset_cache.get('updated_on')) or 0) + + @property + def last_commit_change(self): + from rhodecode.lib.vcs.utils.helpers import parse_datetime + empty_date = datetime.datetime.fromtimestamp(0) + date_latest = self.changeset_cache.get('date', empty_date) + try: + return parse_datetime(date_latest) + except Exception: + return empty_date + + @property + def last_db_change(self): + return self.updated_on + + @property + def clone_uri_hidden(self): + clone_uri = self.clone_uri + if clone_uri: + import urlobject + url_obj = urlobject.URLObject(cleaned_uri(clone_uri)) + if url_obj.password: + clone_uri = url_obj.with_password('*****') + return clone_uri + + @property + def push_uri_hidden(self): + push_uri = self.push_uri + if push_uri: + import urlobject + url_obj = urlobject.URLObject(cleaned_uri(push_uri)) + if url_obj.password: + push_uri = url_obj.with_password('*****') + return push_uri + + def clone_url(self, **override): + from rhodecode.model.settings import SettingsModel + + uri_tmpl = None + if 'with_id' in override: + uri_tmpl = self.DEFAULT_CLONE_URI_ID + del override['with_id'] + + if 'uri_tmpl' in override: + uri_tmpl = override['uri_tmpl'] + del override['uri_tmpl'] + + ssh = False + if 'ssh' in override: + ssh = True + del override['ssh'] + + # we didn't override our tmpl from **overrides + request = get_current_request() + if not uri_tmpl: + if hasattr(request, 'call_context') and hasattr(request.call_context, 'rc_config'): + rc_config = request.call_context.rc_config + else: + rc_config = SettingsModel().get_all_settings(cache=True) + if ssh: + uri_tmpl = rc_config.get( + 'rhodecode_clone_uri_ssh_tmpl') or self.DEFAULT_CLONE_URI_SSH + else: + uri_tmpl = rc_config.get( + 'rhodecode_clone_uri_tmpl') or self.DEFAULT_CLONE_URI + + return get_clone_url(request=request, + uri_tmpl=uri_tmpl, + repo_name=self.repo_name, + repo_id=self.repo_id, **override) + + def set_state(self, state): + self.repo_state = state + Session().add(self) + #========================================================================== + # SCM PROPERTIES + #========================================================================== + + def get_commit(self, commit_id=None, commit_idx=None, pre_load=None): + return get_commit_safe( + self.scm_instance(), commit_id, commit_idx, pre_load=pre_load) + + def get_changeset(self, rev=None, pre_load=None): + warnings.warn("Use get_commit", DeprecationWarning) + commit_id = None + commit_idx = None + if isinstance(rev, 
compat.string_types): + commit_id = rev + else: + commit_idx = rev + return self.get_commit(commit_id=commit_id, commit_idx=commit_idx, + pre_load=pre_load) + + def get_landing_commit(self): + """ + Returns landing commit, or if that doesn't exist returns the tip + """ + _rev_type, _rev = self.landing_rev + commit = self.get_commit(_rev) + if isinstance(commit, EmptyCommit): + return self.get_commit() + return commit + + def update_commit_cache(self, cs_cache=None, config=None): + """ + Update cache of last commit for repository, keys should be:: + + source_repo_id + short_id + raw_id + revision + parents + message + date + author + updated_on + + """ + from rhodecode.lib.vcs.backends.base import BaseChangeset + if cs_cache is None: + # use no-cache version here + scm_repo = self.scm_instance(cache=False, config=config) + + empty = scm_repo is None or scm_repo.is_empty() + if not empty: + cs_cache = scm_repo.get_commit( + pre_load=["author", "date", "message", "parents", "branch"]) + else: + cs_cache = EmptyCommit() + + if isinstance(cs_cache, BaseChangeset): + cs_cache = cs_cache.__json__() + + def is_outdated(new_cs_cache): + if (new_cs_cache['raw_id'] != self.changeset_cache['raw_id'] or + new_cs_cache['revision'] != self.changeset_cache['revision']): + return True + return False + + # check if we have maybe already latest cached revision + if is_outdated(cs_cache) or not self.changeset_cache: + _default = datetime.datetime.utcnow() + last_change = cs_cache.get('date') or _default + # we check if last update is newer than the new value + # if yes, we use the current timestamp instead. Imagine you get + # old commit pushed 1y ago, we'd set last update 1y to ago. + last_change_timestamp = datetime_to_time(last_change) + current_timestamp = datetime_to_time(last_change) + if last_change_timestamp > current_timestamp: + cs_cache['date'] = _default + + cs_cache['updated_on'] = time.time() + self.changeset_cache = cs_cache + Session().add(self) + Session().commit() + + log.debug('updated repo %s with new commit cache %s', + self.repo_name, cs_cache) + else: + cs_cache = self.changeset_cache + cs_cache['updated_on'] = time.time() + self.changeset_cache = cs_cache + Session().add(self) + Session().commit() + + log.debug('Skipping update_commit_cache for repo:`%s` ' + 'commit already with latest changes', self.repo_name) + + @property + def tip(self): + return self.get_commit('tip') + + @property + def author(self): + return self.tip.author + + @property + def last_change(self): + return self.scm_instance().last_change + + def get_comments(self, revisions=None): + """ + Returns comments for this repository grouped by revisions + + :param revisions: filter query by revisions only + """ + cmts = ChangesetComment.query()\ + .filter(ChangesetComment.repo == self) + if revisions: + cmts = cmts.filter(ChangesetComment.revision.in_(revisions)) + grouped = collections.defaultdict(list) + for cmt in cmts.all(): + grouped[cmt.revision].append(cmt) + return grouped + + def statuses(self, revisions=None): + """ + Returns statuses for this repository + + :param revisions: list of revisions to get statuses for + """ + statuses = ChangesetStatus.query()\ + .filter(ChangesetStatus.repo == self)\ + .filter(ChangesetStatus.version == 0) + + if revisions: + # Try doing the filtering in chunks to avoid hitting limits + size = 500 + status_results = [] + for chunk in xrange(0, len(revisions), size): + status_results += statuses.filter( + ChangesetStatus.revision.in_( + revisions[chunk: chunk+size]) + ).all() + else: 
+            status_results = statuses.all()
+
+        grouped = {}
+
+        # maybe we have an open new pull request without a status?
+        stat = ChangesetStatus.STATUS_UNDER_REVIEW
+        status_lbl = ChangesetStatus.get_status_lbl(stat)
+        for pr in PullRequest.query().filter(PullRequest.source_repo == self).all():
+            for rev in pr.revisions:
+                pr_id = pr.pull_request_id
+                pr_repo = pr.target_repo.repo_name
+                grouped[rev] = [stat, status_lbl, pr_id, pr_repo]
+
+        for stat in status_results:
+            pr_id = pr_repo = None
+            if stat.pull_request:
+                pr_id = stat.pull_request.pull_request_id
+                pr_repo = stat.pull_request.target_repo.repo_name
+            grouped[stat.revision] = [str(stat.status), stat.status_lbl,
+                                      pr_id, pr_repo]
+        return grouped
+
+    # ==========================================================================
+    # SCM CACHE INSTANCE
+    # ==========================================================================
+
+    def scm_instance(self, **kwargs):
+        import rhodecode
+
+        # Passing a config will not hit the cache; currently only used
+        # for repo2dbmapper
+        config = kwargs.pop('config', None)
+        cache = kwargs.pop('cache', None)
+        vcs_full_cache = kwargs.pop('vcs_full_cache', None)
+        if vcs_full_cache is not None:
+            # allows overriding the global config
+            full_cache = vcs_full_cache
+        else:
+            full_cache = str2bool(rhodecode.CONFIG.get('vcs_full_cache'))
+        # if cache is NOT defined, use the global default; otherwise we have
+        # full control over the cache behaviour
+        if cache is None and full_cache and not config:
+            log.debug('Initializing pure cached instance for %s', self.repo_path)
+            return self._get_instance_cached()
+
+        # cache here is sent to the "vcs server"
+        return self._get_instance(cache=bool(cache), config=config)
+
+    def _get_instance_cached(self):
+        from rhodecode.lib import rc_cache
+
+        cache_namespace_uid = 'cache_repo_instance.{}'.format(self.repo_id)
+        invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
+            repo_id=self.repo_id)
+        region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid)
+
+        @region.conditional_cache_on_arguments(namespace=cache_namespace_uid)
+        def get_instance_cached(repo_id, context_id, _cache_state_uid):
+            return self._get_instance(repo_state_uid=_cache_state_uid)
+
+        # we must use a thread-scoped cache here, because each gevent thread
+        # needs its own non-shared connection and cache;
+        # we also alter `args` so the cache key is individual for every green thread.
+ inv_context_manager = rc_cache.InvalidationContext( + uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace, + thread_scoped=True) + with inv_context_manager as invalidation_context: + cache_state_uid = invalidation_context.cache_data['cache_state_uid'] + args = (self.repo_id, inv_context_manager.cache_key, cache_state_uid) + + # re-compute and store cache if we get invalidate signal + if invalidation_context.should_invalidate(): + instance = get_instance_cached.refresh(*args) + else: + instance = get_instance_cached(*args) + + log.debug('Repo instance fetched in %.4fs', inv_context_manager.compute_time) + return instance + + def _get_instance(self, cache=True, config=None, repo_state_uid=None): + log.debug('Initializing %s instance `%s` with cache flag set to: %s', + self.repo_type, self.repo_path, cache) + config = config or self._config + custom_wire = { + 'cache': cache, # controls the vcs.remote cache + 'repo_state_uid': repo_state_uid + } + repo = get_vcs_instance( + repo_path=safe_str(self.repo_full_path), + config=config, + with_wire=custom_wire, + create=False, + _vcs_alias=self.repo_type) + if repo is not None: + repo.count() # cache rebuild + return repo + + def get_shadow_repository_path(self, workspace_id): + from rhodecode.lib.vcs.backends.base import BaseRepository + shadow_repo_path = BaseRepository._get_shadow_repository_path( + self.repo_full_path, self.repo_id, workspace_id) + return shadow_repo_path + + def __json__(self): + return {'landing_rev': self.landing_rev} + + def get_dict(self): + + # Since we transformed `repo_name` to a hybrid property, we need to + # keep compatibility with the code which uses `repo_name` field. + + result = super(Repository, self).get_dict() + result['repo_name'] = result.pop('_repo_name', None) + return result + + +class RepoGroup(Base, BaseModel): + __tablename__ = 'groups' + __table_args__ = ( + UniqueConstraint('group_name', 'group_parent_id'), + base_table_args, + ) + __mapper_args__ = {'order_by': 'group_name'} + + CHOICES_SEPARATOR = '/' # used to generate select2 choices for nested groups + + group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) + _group_name = Column("group_name", String(255), nullable=False, unique=True, default=None) + group_name_hash = Column("repo_group_name_hash", String(1024), nullable=False, unique=False) + group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None) + group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None) + enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False) + user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None) + created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) + updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now) + personal = Column('personal', Boolean(), nullable=True, unique=None, default=None) + _changeset_cache = Column( + "changeset_cache", LargeBinary(), nullable=True) # JSON data + + repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id') + users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all') + parent_group = relationship('RepoGroup', remote_side=group_id) + user = relationship('User') + 
integrations = relationship('Integration', cascade="all, delete-orphan") + + def __init__(self, group_name='', parent_group=None): + self.group_name = group_name + self.parent_group = parent_group + + def __unicode__(self): + return u"<%s('id:%s:%s')>" % ( + self.__class__.__name__, self.group_id, self.group_name) + + @hybrid_property + def group_name(self): + return self._group_name + + @group_name.setter + def group_name(self, value): + self._group_name = value + self.group_name_hash = self.hash_repo_group_name(value) + + @hybrid_property + def changeset_cache(self): + from rhodecode.lib.vcs.backends.base import EmptyCommit + dummy = EmptyCommit().__json__() + if not self._changeset_cache: + dummy['source_repo_id'] = '' + return json.loads(json.dumps(dummy)) + + try: + return json.loads(self._changeset_cache) + except TypeError: + return dummy + except Exception: + log.error(traceback.format_exc()) + return dummy + + @changeset_cache.setter + def changeset_cache(self, val): + try: + self._changeset_cache = json.dumps(val) + except Exception: + log.error(traceback.format_exc()) + + @validates('group_parent_id') + def validate_group_parent_id(self, key, val): + """ + Check cycle references for a parent group to self + """ + if self.group_id and val: + assert val != self.group_id + + return val + + @hybrid_property + def description_safe(self): + from rhodecode.lib import helpers as h + return h.escape(self.group_description) + + @classmethod + def hash_repo_group_name(cls, repo_group_name): + val = remove_formatting(repo_group_name) + val = safe_str(val).lower() + chars = [] + for c in val: + if c not in string.ascii_letters: + c = str(ord(c)) + chars.append(c) + + return ''.join(chars) + + @classmethod + def _generate_choice(cls, repo_group): + from webhelpers2.html import literal as _literal + _name = lambda k: _literal(cls.CHOICES_SEPARATOR.join(k)) + return repo_group.group_id, _name(repo_group.full_path_splitted) + + @classmethod + def groups_choices(cls, groups=None, show_empty_group=True): + if not groups: + groups = cls.query().all() + + repo_groups = [] + if show_empty_group: + repo_groups = [(-1, u'-- %s --' % _('No parent'))] + + repo_groups.extend([cls._generate_choice(x) for x in groups]) + + repo_groups = sorted( + repo_groups, key=lambda t: t[1].split(cls.CHOICES_SEPARATOR)[0]) + return repo_groups + + @classmethod + def url_sep(cls): + return URL_SEP + + @classmethod + def get_by_group_name(cls, group_name, cache=False, case_insensitive=False): + if case_insensitive: + gr = cls.query().filter(func.lower(cls.group_name) + == func.lower(group_name)) + else: + gr = cls.query().filter(cls.group_name == group_name) + if cache: + name_key = _hash_key(group_name) + gr = gr.options( + FromCache("sql_cache_short", "get_group_%s" % name_key)) + return gr.scalar() + + @classmethod + def get_user_personal_repo_group(cls, user_id): + user = User.get(user_id) + if user.username == User.DEFAULT_USER: + return None + + return cls.query()\ + .filter(cls.personal == true()) \ + .filter(cls.user == user) \ + .order_by(cls.group_id.asc()) \ + .first() + + @classmethod + def get_all_repo_groups(cls, user_id=Optional(None), group_id=Optional(None), + case_insensitive=True): + q = RepoGroup.query() + + if not isinstance(user_id, Optional): + q = q.filter(RepoGroup.user_id == user_id) + + if not isinstance(group_id, Optional): + q = q.filter(RepoGroup.group_parent_id == group_id) + + if case_insensitive: + q = q.order_by(func.lower(RepoGroup.group_name)) + else: + q = 
q.order_by(RepoGroup.group_name) + return q.all() + + @property + def parents(self, parents_recursion_limit = 10): + groups = [] + if self.parent_group is None: + return groups + cur_gr = self.parent_group + groups.insert(0, cur_gr) + cnt = 0 + while 1: + cnt += 1 + gr = getattr(cur_gr, 'parent_group', None) + cur_gr = cur_gr.parent_group + if gr is None: + break + if cnt == parents_recursion_limit: + # this will prevent accidental infinit loops + log.error('more than %s parents found for group %s, stopping ' + 'recursive parent fetching', parents_recursion_limit, self) + break + + groups.insert(0, gr) + return groups + + @property + def last_commit_cache_update_diff(self): + return time.time() - (safe_int(self.changeset_cache.get('updated_on')) or 0) + + @property + def last_commit_change(self): + from rhodecode.lib.vcs.utils.helpers import parse_datetime + empty_date = datetime.datetime.fromtimestamp(0) + date_latest = self.changeset_cache.get('date', empty_date) + try: + return parse_datetime(date_latest) + except Exception: + return empty_date + + @property + def last_db_change(self): + return self.updated_on + + @property + def children(self): + return RepoGroup.query().filter(RepoGroup.parent_group == self) + + @property + def name(self): + return self.group_name.split(RepoGroup.url_sep())[-1] + + @property + def full_path(self): + return self.group_name + + @property + def full_path_splitted(self): + return self.group_name.split(RepoGroup.url_sep()) + + @property + def repositories(self): + return Repository.query()\ + .filter(Repository.group == self)\ + .order_by(Repository.repo_name) + + @property + def repositories_recursive_count(self): + cnt = self.repositories.count() + + def children_count(group): + cnt = 0 + for child in group.children: + cnt += child.repositories.count() + cnt += children_count(child) + return cnt + + return cnt + children_count(self) + + def _recursive_objects(self, include_repos=True, include_groups=True): + all_ = [] + + def _get_members(root_gr): + if include_repos: + for r in root_gr.repositories: + all_.append(r) + childs = root_gr.children.all() + if childs: + for gr in childs: + if include_groups: + all_.append(gr) + _get_members(gr) + + root_group = [] + if include_groups: + root_group = [self] + + _get_members(self) + return root_group + all_ + + def recursive_groups_and_repos(self): + """ + Recursive return all groups, with repositories in those groups + """ + return self._recursive_objects() + + def recursive_groups(self): + """ + Returns all children groups for this group including children of children + """ + return self._recursive_objects(include_repos=False) + + def recursive_repos(self): + """ + Returns all children repositories for this group + """ + return self._recursive_objects(include_groups=False) + + def get_new_name(self, group_name): + """ + returns new full group name based on parent and new name + + :param group_name: + """ + path_prefix = (self.parent_group.full_path_splitted if + self.parent_group else []) + return RepoGroup.url_sep().join(path_prefix + [group_name]) + + def update_commit_cache(self, config=None): + """ + Update cache of last changeset for newest repository inside this group, keys should be:: + + source_repo_id + short_id + raw_id + revision + parents + message + date + author + + """ + from rhodecode.lib.vcs.utils.helpers import parse_datetime + + def repo_groups_and_repos(): + all_entries = OrderedDefaultDict(list) + + def _get_members(root_gr, pos=0): + + for repo in root_gr.repositories: + 
all_entries[root_gr].append(repo) + + # fill in all parent positions + for parent_group in root_gr.parents: + all_entries[parent_group].extend(all_entries[root_gr]) + + children_groups = root_gr.children.all() + if children_groups: + for cnt, gr in enumerate(children_groups, 1): + _get_members(gr, pos=pos+cnt) + + _get_members(root_gr=self) + return all_entries + + empty_date = datetime.datetime.fromtimestamp(0) + for repo_group, repos in repo_groups_and_repos().items(): + + latest_repo_cs_cache = {} + for repo in repos: + repo_cs_cache = repo.changeset_cache + date_latest = latest_repo_cs_cache.get('date', empty_date) + date_current = repo_cs_cache.get('date', empty_date) + current_timestamp = datetime_to_time(parse_datetime(date_latest)) + if current_timestamp < datetime_to_time(parse_datetime(date_current)): + latest_repo_cs_cache = repo_cs_cache + latest_repo_cs_cache['source_repo_id'] = repo.repo_id + + latest_repo_cs_cache['updated_on'] = time.time() + repo_group.changeset_cache = latest_repo_cs_cache + Session().add(repo_group) + Session().commit() + + log.debug('updated repo group %s with new commit cache %s', + repo_group.group_name, latest_repo_cs_cache) + + def permissions(self, with_admins=True, with_owner=True, + expand_from_user_groups=False): + """ + Permissions for repository groups + """ + _admin_perm = 'group.admin' + + owner_row = [] + if with_owner: + usr = AttributeDict(self.user.get_dict()) + usr.owner_row = True + usr.permission = _admin_perm + owner_row.append(usr) + + super_admin_ids = [] + super_admin_rows = [] + if with_admins: + for usr in User.get_all_super_admins(): + super_admin_ids.append(usr.user_id) + # if this admin is also owner, don't double the record + if usr.user_id == owner_row[0].user_id: + owner_row[0].admin_row = True + else: + usr = AttributeDict(usr.get_dict()) + usr.admin_row = True + usr.permission = _admin_perm + super_admin_rows.append(usr) + + q = UserRepoGroupToPerm.query().filter(UserRepoGroupToPerm.group == self) + q = q.options(joinedload(UserRepoGroupToPerm.group), + joinedload(UserRepoGroupToPerm.user), + joinedload(UserRepoGroupToPerm.permission),) + + # get owners and admins and permissions. We do a trick of re-writing + # objects from sqlalchemy to named-tuples due to sqlalchemy session + # has a global reference and changing one object propagates to all + # others. 
This means if admin is also an owner admin_row that change + # would propagate to both objects + perm_rows = [] + for _usr in q.all(): + usr = AttributeDict(_usr.user.get_dict()) + # if this user is also owner/admin, mark as duplicate record + if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids: + usr.duplicate_perm = True + usr.permission = _usr.permission.permission_name + perm_rows.append(usr) + + # filter the perm rows by 'default' first and then sort them by + # admin,write,read,none permissions sorted again alphabetically in + # each group + perm_rows = sorted(perm_rows, key=display_user_sort) + + user_groups_rows = [] + if expand_from_user_groups: + for ug in self.permission_user_groups(with_members=True): + for user_data in ug.members: + user_groups_rows.append(user_data) + + return super_admin_rows + owner_row + perm_rows + user_groups_rows + + def permission_user_groups(self, with_members=False): + q = UserGroupRepoGroupToPerm.query()\ + .filter(UserGroupRepoGroupToPerm.group == self) + q = q.options(joinedload(UserGroupRepoGroupToPerm.group), + joinedload(UserGroupRepoGroupToPerm.users_group), + joinedload(UserGroupRepoGroupToPerm.permission),) + + perm_rows = [] + for _user_group in q.all(): + entry = AttributeDict(_user_group.users_group.get_dict()) + entry.permission = _user_group.permission.permission_name + if with_members: + entry.members = [x.user.get_dict() + for x in _user_group.users_group.members] + perm_rows.append(entry) + + perm_rows = sorted(perm_rows, key=display_user_group_sort) + return perm_rows + + def get_api_data(self): + """ + Common function for generating api data + + """ + group = self + data = { + 'group_id': group.group_id, + 'group_name': group.group_name, + 'group_description': group.description_safe, + 'parent_group': group.parent_group.group_name if group.parent_group else None, + 'repositories': [x.repo_name for x in group.repositories], + 'owner': group.user.username, + } + return data + + def get_dict(self): + # Since we transformed `group_name` to a hybrid property, we need to + # keep compatibility with the code which uses `group_name` field. 
+ result = super(RepoGroup, self).get_dict() + result['group_name'] = result.pop('_group_name', None) + return result + + +class Permission(Base, BaseModel): + __tablename__ = 'permissions' + __table_args__ = ( + Index('p_perm_name_idx', 'permission_name'), + base_table_args, + ) + + PERMS = [ + ('hg.admin', _('RhodeCode Super Administrator')), + + ('repository.none', _('Repository no access')), + ('repository.read', _('Repository read access')), + ('repository.write', _('Repository write access')), + ('repository.admin', _('Repository admin access')), + + ('group.none', _('Repository group no access')), + ('group.read', _('Repository group read access')), + ('group.write', _('Repository group write access')), + ('group.admin', _('Repository group admin access')), + + ('usergroup.none', _('User group no access')), + ('usergroup.read', _('User group read access')), + ('usergroup.write', _('User group write access')), + ('usergroup.admin', _('User group admin access')), + + ('branch.none', _('Branch no permissions')), + ('branch.merge', _('Branch access by web merge')), + ('branch.push', _('Branch access by push')), + ('branch.push_force', _('Branch access by push with force')), + + ('hg.repogroup.create.false', _('Repository Group creation disabled')), + ('hg.repogroup.create.true', _('Repository Group creation enabled')), + + ('hg.usergroup.create.false', _('User Group creation disabled')), + ('hg.usergroup.create.true', _('User Group creation enabled')), + + ('hg.create.none', _('Repository creation disabled')), + ('hg.create.repository', _('Repository creation enabled')), + ('hg.create.write_on_repogroup.true', _('Repository creation enabled with write permission to a repository group')), + ('hg.create.write_on_repogroup.false', _('Repository creation disabled with write permission to a repository group')), + + ('hg.fork.none', _('Repository forking disabled')), + ('hg.fork.repository', _('Repository forking enabled')), + + ('hg.register.none', _('Registration disabled')), + ('hg.register.manual_activate', _('User Registration with manual account activation')), + ('hg.register.auto_activate', _('User Registration with automatic account activation')), + + ('hg.password_reset.enabled', _('Password reset enabled')), + ('hg.password_reset.hidden', _('Password reset hidden')), + ('hg.password_reset.disabled', _('Password reset disabled')), + + ('hg.extern_activate.manual', _('Manual activation of external account')), + ('hg.extern_activate.auto', _('Automatic activation of external account')), + + ('hg.inherit_default_perms.false', _('Inherit object permissions from default user disabled')), + ('hg.inherit_default_perms.true', _('Inherit object permissions from default user enabled')), + ] + + # definition of system default permissions for DEFAULT user, created on + # system setup + DEFAULT_USER_PERMISSIONS = [ + # object perms + 'repository.read', + 'group.read', + 'usergroup.read', + # branch, for backward compat we need same value as before so forced pushed + 'branch.push_force', + # global + 'hg.create.repository', + 'hg.repogroup.create.false', + 'hg.usergroup.create.false', + 'hg.create.write_on_repogroup.true', + 'hg.fork.repository', + 'hg.register.manual_activate', + 'hg.password_reset.enabled', + 'hg.extern_activate.auto', + 'hg.inherit_default_perms.true', + ] + + # defines which permissions are more important higher the more important + # Weight defines which permissions are more important. + # The higher number the more important. 
+ PERM_WEIGHTS = { + 'repository.none': 0, + 'repository.read': 1, + 'repository.write': 3, + 'repository.admin': 4, + + 'group.none': 0, + 'group.read': 1, + 'group.write': 3, + 'group.admin': 4, + + 'usergroup.none': 0, + 'usergroup.read': 1, + 'usergroup.write': 3, + 'usergroup.admin': 4, + + 'branch.none': 0, + 'branch.merge': 1, + 'branch.push': 3, + 'branch.push_force': 4, + + 'hg.repogroup.create.false': 0, + 'hg.repogroup.create.true': 1, + + 'hg.usergroup.create.false': 0, + 'hg.usergroup.create.true': 1, + + 'hg.fork.none': 0, + 'hg.fork.repository': 1, + 'hg.create.none': 0, + 'hg.create.repository': 1 + } + + permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) + permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None) + permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None) + + def __unicode__(self): + return u"<%s('%s:%s')>" % ( + self.__class__.__name__, self.permission_id, self.permission_name + ) + + @classmethod + def get_by_key(cls, key): + return cls.query().filter(cls.permission_name == key).scalar() + + @classmethod + def get_default_repo_perms(cls, user_id, repo_id=None): + q = Session().query(UserRepoToPerm, Repository, Permission)\ + .join((Permission, UserRepoToPerm.permission_id == Permission.permission_id))\ + .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\ + .filter(UserRepoToPerm.user_id == user_id) + if repo_id: + q = q.filter(UserRepoToPerm.repository_id == repo_id) + return q.all() + + @classmethod + def get_default_repo_branch_perms(cls, user_id, repo_id=None): + q = Session().query(UserToRepoBranchPermission, UserRepoToPerm, Permission) \ + .join( + Permission, + UserToRepoBranchPermission.permission_id == Permission.permission_id) \ + .join( + UserRepoToPerm, + UserToRepoBranchPermission.rule_to_perm_id == UserRepoToPerm.repo_to_perm_id) \ + .filter(UserRepoToPerm.user_id == user_id) + + if repo_id: + q = q.filter(UserToRepoBranchPermission.repository_id == repo_id) + return q.order_by(UserToRepoBranchPermission.rule_order).all() + + @classmethod + def get_default_repo_perms_from_user_group(cls, user_id, repo_id=None): + q = Session().query(UserGroupRepoToPerm, Repository, Permission)\ + .join( + Permission, + UserGroupRepoToPerm.permission_id == Permission.permission_id)\ + .join( + Repository, + UserGroupRepoToPerm.repository_id == Repository.repo_id)\ + .join( + UserGroup, + UserGroupRepoToPerm.users_group_id == + UserGroup.users_group_id)\ + .join( + UserGroupMember, + UserGroupRepoToPerm.users_group_id == + UserGroupMember.users_group_id)\ + .filter( + UserGroupMember.user_id == user_id, + UserGroup.users_group_active == true()) + if repo_id: + q = q.filter(UserGroupRepoToPerm.repository_id == repo_id) + return q.all() + + @classmethod + def get_default_repo_branch_perms_from_user_group(cls, user_id, repo_id=None): + q = Session().query(UserGroupToRepoBranchPermission, UserGroupRepoToPerm, Permission) \ + .join( + Permission, + UserGroupToRepoBranchPermission.permission_id == Permission.permission_id) \ + .join( + UserGroupRepoToPerm, + UserGroupToRepoBranchPermission.rule_to_perm_id == UserGroupRepoToPerm.users_group_to_perm_id) \ + .join( + UserGroup, + UserGroupRepoToPerm.users_group_id == UserGroup.users_group_id) \ + .join( + UserGroupMember, + UserGroupRepoToPerm.users_group_id == UserGroupMember.users_group_id) \ + .filter( + UserGroupMember.user_id == user_id, + 
UserGroup.users_group_active == true()) + + if repo_id: + q = q.filter(UserGroupToRepoBranchPermission.repository_id == repo_id) + return q.order_by(UserGroupToRepoBranchPermission.rule_order).all() + + @classmethod + def get_default_group_perms(cls, user_id, repo_group_id=None): + q = Session().query(UserRepoGroupToPerm, RepoGroup, Permission)\ + .join( + Permission, + UserRepoGroupToPerm.permission_id == Permission.permission_id)\ + .join( + RepoGroup, + UserRepoGroupToPerm.group_id == RepoGroup.group_id)\ + .filter(UserRepoGroupToPerm.user_id == user_id) + if repo_group_id: + q = q.filter(UserRepoGroupToPerm.group_id == repo_group_id) + return q.all() + + @classmethod + def get_default_group_perms_from_user_group( + cls, user_id, repo_group_id=None): + q = Session().query(UserGroupRepoGroupToPerm, RepoGroup, Permission)\ + .join( + Permission, + UserGroupRepoGroupToPerm.permission_id == + Permission.permission_id)\ + .join( + RepoGroup, + UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id)\ + .join( + UserGroup, + UserGroupRepoGroupToPerm.users_group_id == + UserGroup.users_group_id)\ + .join( + UserGroupMember, + UserGroupRepoGroupToPerm.users_group_id == + UserGroupMember.users_group_id)\ + .filter( + UserGroupMember.user_id == user_id, + UserGroup.users_group_active == true()) + if repo_group_id: + q = q.filter(UserGroupRepoGroupToPerm.group_id == repo_group_id) + return q.all() + + @classmethod + def get_default_user_group_perms(cls, user_id, user_group_id=None): + q = Session().query(UserUserGroupToPerm, UserGroup, Permission)\ + .join((Permission, UserUserGroupToPerm.permission_id == Permission.permission_id))\ + .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id))\ + .filter(UserUserGroupToPerm.user_id == user_id) + if user_group_id: + q = q.filter(UserUserGroupToPerm.user_group_id == user_group_id) + return q.all() + + @classmethod + def get_default_user_group_perms_from_user_group( + cls, user_id, user_group_id=None): + TargetUserGroup = aliased(UserGroup, name='target_user_group') + q = Session().query(UserGroupUserGroupToPerm, UserGroup, Permission)\ + .join( + Permission, + UserGroupUserGroupToPerm.permission_id == + Permission.permission_id)\ + .join( + TargetUserGroup, + UserGroupUserGroupToPerm.target_user_group_id == + TargetUserGroup.users_group_id)\ + .join( + UserGroup, + UserGroupUserGroupToPerm.user_group_id == + UserGroup.users_group_id)\ + .join( + UserGroupMember, + UserGroupUserGroupToPerm.user_group_id == + UserGroupMember.users_group_id)\ + .filter( + UserGroupMember.user_id == user_id, + UserGroup.users_group_active == true()) + if user_group_id: + q = q.filter( + UserGroupUserGroupToPerm.user_group_id == user_group_id) + + return q.all() + + +class UserRepoToPerm(Base, BaseModel): + __tablename__ = 'repo_to_perm' + __table_args__ = ( + UniqueConstraint('user_id', 'repository_id', 'permission_id'), + base_table_args + ) + + repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) + user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) + permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) + repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None) + + user = relationship('User') + repository = relationship('Repository') + permission = relationship('Permission') 
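
# [Editor's note, illustrative sketch -- not part of the changeset] The
# Permission.get_default_* helpers above return the raw SQLAlchemy result rows,
# i.e. lists of 3-tuples such as (UserRepoToPerm, Repository, Permission).
# A minimal usage sketch, assuming a configured RhodeCode session and an
# existing user id (both assumptions):
#
#     for row in Permission.get_default_repo_perms(user_id=2):
#         user_repo_to_perm, repo, perm = row
#         print(repo.repo_name, perm.permission_name)
#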
+ + branch_perm_entry = relationship('UserToRepoBranchPermission', cascade="all, delete-orphan", lazy='joined') + + @classmethod + def create(cls, user, repository, permission): + n = cls() + n.user = user + n.repository = repository + n.permission = permission + Session().add(n) + return n + + def __unicode__(self): + return u'<%s => %s >' % (self.user, self.repository) + + +class UserUserGroupToPerm(Base, BaseModel): + __tablename__ = 'user_user_group_to_perm' + __table_args__ = ( + UniqueConstraint('user_id', 'user_group_id', 'permission_id'), + base_table_args + ) + + user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) + user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) + permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) + user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None) + + user = relationship('User') + user_group = relationship('UserGroup') + permission = relationship('Permission') + + @classmethod + def create(cls, user, user_group, permission): + n = cls() + n.user = user + n.user_group = user_group + n.permission = permission + Session().add(n) + return n + + def __unicode__(self): + return u'<%s => %s >' % (self.user, self.user_group) + + +class UserToPerm(Base, BaseModel): + __tablename__ = 'user_to_perm' + __table_args__ = ( + UniqueConstraint('user_id', 'permission_id'), + base_table_args + ) + + user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) + user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) + permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) + + user = relationship('User') + permission = relationship('Permission', lazy='joined') + + def __unicode__(self): + return u'<%s => %s >' % (self.user, self.permission) + + +class UserGroupRepoToPerm(Base, BaseModel): + __tablename__ = 'users_group_repo_to_perm' + __table_args__ = ( + UniqueConstraint('repository_id', 'users_group_id', 'permission_id'), + base_table_args + ) + + users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) + users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None) + permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) + repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None) + + users_group = relationship('UserGroup') + permission = relationship('Permission') + repository = relationship('Repository') + user_group_branch_perms = relationship('UserGroupToRepoBranchPermission', cascade='all') + + @classmethod + def create(cls, users_group, repository, permission): + n = cls() + n.users_group = users_group + n.repository = repository + n.permission = permission + Session().add(n) + return n + + def __unicode__(self): + return u' %s >' % (self.users_group, self.repository) + + +class UserGroupUserGroupToPerm(Base, BaseModel): + __tablename__ = 'user_group_user_group_to_perm' + 
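
# [Editor's note, illustrative sketch -- not part of the changeset] The *ToPerm
# classes above share the same create() pattern: the new grant is only add()-ed
# to the session, so the caller decides when to commit. A rough sketch, assuming
# `user` and `repo` were already loaded from the database (illustrative names):
#
#     perm = Permission.get_by_key('repository.write')
#     UserRepoToPerm.create(user, repo, perm)
#     Session().commit()
#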
__table_args__ = ( + UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'), + CheckConstraint('target_user_group_id != user_group_id'), + base_table_args + ) + + user_group_user_group_to_perm_id = Column("user_group_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) + target_user_group_id = Column("target_user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None) + permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) + user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None) + + target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id') + user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id') + permission = relationship('Permission') + + @classmethod + def create(cls, target_user_group, user_group, permission): + n = cls() + n.target_user_group = target_user_group + n.user_group = user_group + n.permission = permission + Session().add(n) + return n + + def __unicode__(self): + return u' %s >' % (self.target_user_group, self.user_group) + + +class UserGroupToPerm(Base, BaseModel): + __tablename__ = 'users_group_to_perm' + __table_args__ = ( + UniqueConstraint('users_group_id', 'permission_id',), + base_table_args + ) + + users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) + users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None) + permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) + + users_group = relationship('UserGroup') + permission = relationship('Permission') + + +class UserRepoGroupToPerm(Base, BaseModel): + __tablename__ = 'user_repo_group_to_perm' + __table_args__ = ( + UniqueConstraint('user_id', 'group_id', 'permission_id'), + base_table_args + ) + + group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) + user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) + group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None) + permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) + + user = relationship('User') + group = relationship('RepoGroup') + permission = relationship('Permission') + + @classmethod + def create(cls, user, repository_group, permission): + n = cls() + n.user = user + n.group = repository_group + n.permission = permission + Session().add(n) + return n + + +class UserGroupRepoGroupToPerm(Base, BaseModel): + __tablename__ = 'users_group_repo_group_to_perm' + __table_args__ = ( + UniqueConstraint('users_group_id', 'group_id'), + base_table_args + ) + + users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) + users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None) + group_id = 
Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None) + permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) + + users_group = relationship('UserGroup') + permission = relationship('Permission') + group = relationship('RepoGroup') + + @classmethod + def create(cls, user_group, repository_group, permission): + n = cls() + n.users_group = user_group + n.group = repository_group + n.permission = permission + Session().add(n) + return n + + def __unicode__(self): + return u' %s >' % (self.users_group, self.group) + + +class Statistics(Base, BaseModel): + __tablename__ = 'statistics' + __table_args__ = ( + base_table_args + ) + + stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) + repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None) + stat_on_revision = Column("stat_on_revision", Integer(), nullable=False) + commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)#JSON data + commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)#JSON data + languages = Column("languages", LargeBinary(1000000), nullable=False)#JSON data + + repository = relationship('Repository', single_parent=True) + + +class UserFollowing(Base, BaseModel): + __tablename__ = 'user_followings' + __table_args__ = ( + UniqueConstraint('user_id', 'follows_repository_id'), + UniqueConstraint('user_id', 'follows_user_id'), + base_table_args + ) + + user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) + user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) + follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None) + follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None) + follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now) + + user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id') + + follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id') + follows_repository = relationship('Repository', order_by='Repository.repo_name') + + @classmethod + def get_repo_followers(cls, repo_id): + return cls.query().filter(cls.follows_repo_id == repo_id) + + +class CacheKey(Base, BaseModel): + __tablename__ = 'cache_invalidation' + __table_args__ = ( + UniqueConstraint('cache_key'), + Index('key_idx', 'cache_key'), + base_table_args, + ) + + CACHE_TYPE_FEED = 'FEED' + + # namespaces used to register process/thread aware caches + REPO_INVALIDATION_NAMESPACE = 'repo_cache:{repo_id}' + SETTINGS_INVALIDATION_NAMESPACE = 'system_settings' + + cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) + cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None) + cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None) + cache_state_uid = Column("cache_state_uid", String(255), nullable=True, unique=None, default=None) + cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False) + + def __init__(self, 
cache_key, cache_args='', cache_state_uid=None): + self.cache_key = cache_key + self.cache_args = cache_args + self.cache_active = False + # first key should be same for all entries, since all workers should share it + self.cache_state_uid = cache_state_uid or self.generate_new_state_uid() + + def __unicode__(self): + return u"<%s('%s:%s[%s]')>" % ( + self.__class__.__name__, + self.cache_id, self.cache_key, self.cache_active) + + def _cache_key_partition(self): + prefix, repo_name, suffix = self.cache_key.partition(self.cache_args) + return prefix, repo_name, suffix + + def get_prefix(self): + """ + Try to extract prefix from existing cache key. The key could consist + of prefix, repo_name, suffix + """ + # this returns prefix, repo_name, suffix + return self._cache_key_partition()[0] + + def get_suffix(self): + """ + get suffix that might have been used in _get_cache_key to + generate self.cache_key. Only used for informational purposes + in repo_edit.mako. + """ + # prefix, repo_name, suffix + return self._cache_key_partition()[2] + + @classmethod + def generate_new_state_uid(cls, based_on=None): + if based_on: + return str(uuid.uuid5(uuid.NAMESPACE_URL, safe_str(based_on))) + else: + return str(uuid.uuid4()) + + @classmethod + def delete_all_cache(cls): + """ + Delete all cache keys from database. + Should only be run when all instances are down and all entries + thus stale. + """ + cls.query().delete() + Session().commit() + + @classmethod + def set_invalidate(cls, cache_uid, delete=False): + """ + Mark all caches of a repo as invalid in the database. + """ + + try: + qry = Session().query(cls).filter(cls.cache_args == cache_uid) + if delete: + qry.delete() + log.debug('cache objects deleted for cache args %s', + safe_str(cache_uid)) + else: + qry.update({"cache_active": False, + "cache_state_uid": cls.generate_new_state_uid()}) + log.debug('cache objects marked as invalid for cache args %s', + safe_str(cache_uid)) + + Session().commit() + except Exception: + log.exception( + 'Cache key invalidation failed for cache args %s', + safe_str(cache_uid)) + Session().rollback() + + @classmethod + def get_active_cache(cls, cache_key): + inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar() + if inv_obj: + return inv_obj + return None + + @classmethod + def get_namespace_map(cls, namespace): + return { + x.cache_key: x + for x in cls.query().filter(cls.cache_args == namespace)} + + +class ChangesetComment(Base, BaseModel): + __tablename__ = 'changeset_comments' + __table_args__ = ( + Index('cc_revision_idx', 'revision'), + base_table_args, + ) + + COMMENT_OUTDATED = u'comment_outdated' + COMMENT_TYPE_NOTE = u'note' + COMMENT_TYPE_TODO = u'todo' + COMMENT_TYPES = [COMMENT_TYPE_NOTE, COMMENT_TYPE_TODO] + + comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True) + repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False) + revision = Column('revision', String(40), nullable=True) + pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True) + pull_request_version_id = Column("pull_request_version_id", Integer(), ForeignKey('pull_request_versions.pull_request_version_id'), nullable=True) + line_no = Column('line_no', Unicode(10), nullable=True) + hl_lines = Column('hl_lines', Unicode(512), nullable=True) + f_path = Column('f_path', Unicode(1000), nullable=True) + user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False) + text = Column('text', 
UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False) + created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) + modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) + renderer = Column('renderer', Unicode(64), nullable=True) + display_state = Column('display_state', Unicode(128), nullable=True) + + comment_type = Column('comment_type', Unicode(128), nullable=True, default=COMMENT_TYPE_NOTE) + resolved_comment_id = Column('resolved_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=True) + + resolved_comment = relationship('ChangesetComment', remote_side=comment_id, back_populates='resolved_by') + resolved_by = relationship('ChangesetComment', back_populates='resolved_comment') + + author = relationship('User', lazy='joined') + repo = relationship('Repository') + status_change = relationship('ChangesetStatus', cascade="all, delete-orphan", lazy='joined') + pull_request = relationship('PullRequest', lazy='joined') + pull_request_version = relationship('PullRequestVersion') + + @classmethod + def get_users(cls, revision=None, pull_request_id=None): + """ + Returns user associated with this ChangesetComment. ie those + who actually commented + + :param cls: + :param revision: + """ + q = Session().query(User)\ + .join(ChangesetComment.author) + if revision: + q = q.filter(cls.revision == revision) + elif pull_request_id: + q = q.filter(cls.pull_request_id == pull_request_id) + return q.all() + + @classmethod + def get_index_from_version(cls, pr_version, versions): + num_versions = [x.pull_request_version_id for x in versions] + try: + return num_versions.index(pr_version) +1 + except (IndexError, ValueError): + return + + @property + def outdated(self): + return self.display_state == self.COMMENT_OUTDATED + + def outdated_at_version(self, version): + """ + Checks if comment is outdated for given pull request version + """ + return self.outdated and self.pull_request_version_id != version + + def older_than_version(self, version): + """ + Checks if comment is made from previous version than given + """ + if version is None: + return self.pull_request_version_id is not None + + return self.pull_request_version_id < version + + @property + def resolved(self): + return self.resolved_by[0] if self.resolved_by else None + + @property + def is_todo(self): + return self.comment_type == self.COMMENT_TYPE_TODO + + @property + def is_inline(self): + return self.line_no and self.f_path + + def get_index_version(self, versions): + return self.get_index_from_version( + self.pull_request_version_id, versions) + + def __repr__(self): + if self.comment_id: + return '' % self.comment_id + else: + return '' % id(self) + + def get_api_data(self): + comment = self + data = { + 'comment_id': comment.comment_id, + 'comment_type': comment.comment_type, + 'comment_text': comment.text, + 'comment_status': comment.status_change, + 'comment_f_path': comment.f_path, + 'comment_lineno': comment.line_no, + 'comment_author': comment.author, + 'comment_created_on': comment.created_on, + 'comment_resolved_by': self.resolved + } + return data + + def __json__(self): + data = dict() + data.update(self.get_api_data()) + return data + + +class ChangesetStatus(Base, BaseModel): + __tablename__ = 'changeset_statuses' + __table_args__ = ( + Index('cs_revision_idx', 'revision'), + Index('cs_version_idx', 'version'), + UniqueConstraint('repo_id', 'revision', 'version'), + 
base_table_args + ) + + STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed' + STATUS_APPROVED = 'approved' + STATUS_REJECTED = 'rejected' + STATUS_UNDER_REVIEW = 'under_review' + + STATUSES = [ + (STATUS_NOT_REVIEWED, _("Not Reviewed")), # (no icon) and default + (STATUS_APPROVED, _("Approved")), + (STATUS_REJECTED, _("Rejected")), + (STATUS_UNDER_REVIEW, _("Under Review")), + ] + + changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True) + repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False) + user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None) + revision = Column('revision', String(40), nullable=False) + status = Column('status', String(128), nullable=False, default=DEFAULT) + changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id')) + modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now) + version = Column('version', Integer(), nullable=False, default=0) + pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True) + + author = relationship('User', lazy='joined') + repo = relationship('Repository') + comment = relationship('ChangesetComment', lazy='joined') + pull_request = relationship('PullRequest', lazy='joined') + + def __unicode__(self): + return u"<%s('%s[v%s]:%s')>" % ( + self.__class__.__name__, + self.status, self.version, self.author + ) + + @classmethod + def get_status_lbl(cls, value): + return dict(cls.STATUSES).get(value) + + @property + def status_lbl(self): + return ChangesetStatus.get_status_lbl(self.status) + + def get_api_data(self): + status = self + data = { + 'status_id': status.changeset_status_id, + 'status': status.status, + } + return data + + def __json__(self): + data = dict() + data.update(self.get_api_data()) + return data + + +class _SetState(object): + """ + Context processor allowing changing state for sensitive operation such as + pull request update or merge + """ + + def __init__(self, pull_request, pr_state, back_state=None): + self._pr = pull_request + self._org_state = back_state or pull_request.pull_request_state + self._pr_state = pr_state + self._current_state = None + + def __enter__(self): + log.debug('StateLock: entering set state context, setting state to: `%s`', + self._pr_state) + self.set_pr_state(self._pr_state) + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + if exc_val is not None: + log.error(traceback.format_exc(exc_tb)) + return None + + self.set_pr_state(self._org_state) + log.debug('StateLock: exiting set state context, setting state to: `%s`', + self._org_state) + @property + def state(self): + return self._current_state + + def set_pr_state(self, pr_state): + try: + self._pr.pull_request_state = pr_state + Session().add(self._pr) + Session().commit() + self._current_state = pr_state + except Exception: + log.exception('Failed to set PullRequest %s state to %s', self._pr, pr_state) + raise + +class _PullRequestBase(BaseModel): + """ + Common attributes of pull request and version entries. 
+ """ + + # .status values + STATUS_NEW = u'new' + STATUS_OPEN = u'open' + STATUS_CLOSED = u'closed' + + # available states + STATE_CREATING = u'creating' + STATE_UPDATING = u'updating' + STATE_MERGING = u'merging' + STATE_CREATED = u'created' + + title = Column('title', Unicode(255), nullable=True) + description = Column( + 'description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), + nullable=True) + description_renderer = Column('description_renderer', Unicode(64), nullable=True) + + # new/open/closed status of pull request (not approve/reject/etc) + status = Column('status', Unicode(255), nullable=False, default=STATUS_NEW) + created_on = Column( + 'created_on', DateTime(timezone=False), nullable=False, + default=datetime.datetime.now) + updated_on = Column( + 'updated_on', DateTime(timezone=False), nullable=False, + default=datetime.datetime.now) + + pull_request_state = Column("pull_request_state", String(255), nullable=True) + + @declared_attr + def user_id(cls): + return Column( + "user_id", Integer(), ForeignKey('users.user_id'), nullable=False, + unique=None) + + # 500 revisions max + _revisions = Column( + 'revisions', UnicodeText().with_variant(UnicodeText(20500), 'mysql')) + + @declared_attr + def source_repo_id(cls): + # TODO: dan: rename column to source_repo_id + return Column( + 'org_repo_id', Integer(), ForeignKey('repositories.repo_id'), + nullable=False) + + _source_ref = Column('org_ref', Unicode(255), nullable=False) + + @hybrid_property + def source_ref(self): + return self._source_ref + + @source_ref.setter + def source_ref(self, val): + parts = (val or '').split(':') + if len(parts) != 3: + raise ValueError( + 'Invalid reference format given: {}, expected X:Y:Z'.format(val)) + self._source_ref = safe_unicode(val) + + _target_ref = Column('other_ref', Unicode(255), nullable=False) + + @hybrid_property + def target_ref(self): + return self._target_ref + + @target_ref.setter + def target_ref(self, val): + parts = (val or '').split(':') + if len(parts) != 3: + raise ValueError( + 'Invalid reference format given: {}, expected X:Y:Z'.format(val)) + self._target_ref = safe_unicode(val) + + @declared_attr + def target_repo_id(cls): + # TODO: dan: rename column to target_repo_id + return Column( + 'other_repo_id', Integer(), ForeignKey('repositories.repo_id'), + nullable=False) + + _shadow_merge_ref = Column('shadow_merge_ref', Unicode(255), nullable=True) + + # TODO: dan: rename column to last_merge_source_rev + _last_merge_source_rev = Column( + 'last_merge_org_rev', String(40), nullable=True) + # TODO: dan: rename column to last_merge_target_rev + _last_merge_target_rev = Column( + 'last_merge_other_rev', String(40), nullable=True) + _last_merge_status = Column('merge_status', Integer(), nullable=True) + merge_rev = Column('merge_rev', String(40), nullable=True) + + reviewer_data = Column( + 'reviewer_data_json', MutationObj.as_mutable( + JsonType(dialect_map=dict(mysql=UnicodeText(16384))))) + + @property + def reviewer_data_json(self): + return json.dumps(self.reviewer_data) + + @hybrid_property + def description_safe(self): + from rhodecode.lib import helpers as h + return h.escape(self.description) + + @hybrid_property + def revisions(self): + return self._revisions.split(':') if self._revisions else [] + + @revisions.setter + def revisions(self, val): + self._revisions = u':'.join(val) + + @hybrid_property + def last_merge_status(self): + return safe_int(self._last_merge_status) + + @last_merge_status.setter + def last_merge_status(self, val): + 
self._last_merge_status = val + + @declared_attr + def author(cls): + return relationship('User', lazy='joined') + + @declared_attr + def source_repo(cls): + return relationship( + 'Repository', + primaryjoin='%s.source_repo_id==Repository.repo_id' % cls.__name__) + + @property + def source_ref_parts(self): + return self.unicode_to_reference(self.source_ref) + + @declared_attr + def target_repo(cls): + return relationship( + 'Repository', + primaryjoin='%s.target_repo_id==Repository.repo_id' % cls.__name__) + + @property + def target_ref_parts(self): + return self.unicode_to_reference(self.target_ref) + + @property + def shadow_merge_ref(self): + return self.unicode_to_reference(self._shadow_merge_ref) + + @shadow_merge_ref.setter + def shadow_merge_ref(self, ref): + self._shadow_merge_ref = self.reference_to_unicode(ref) + + @staticmethod + def unicode_to_reference(raw): + """ + Convert a unicode (or string) to a reference object. + If unicode evaluates to False it returns None. + """ + if raw: + refs = raw.split(':') + return Reference(*refs) + else: + return None + + @staticmethod + def reference_to_unicode(ref): + """ + Convert a reference object to unicode. + If reference is None it returns None. + """ + if ref: + return u':'.join(ref) + else: + return None + + def get_api_data(self, with_merge_state=True): + from rhodecode.model.pull_request import PullRequestModel + + pull_request = self + if with_merge_state: + merge_status = PullRequestModel().merge_status(pull_request) + merge_state = { + 'status': merge_status[0], + 'message': safe_unicode(merge_status[1]), + } + else: + merge_state = {'status': 'not_available', + 'message': 'not_available'} + + merge_data = { + 'clone_url': PullRequestModel().get_shadow_clone_url(pull_request), + 'reference': ( + pull_request.shadow_merge_ref._asdict() + if pull_request.shadow_merge_ref else None), + } + + data = { + 'pull_request_id': pull_request.pull_request_id, + 'url': PullRequestModel().get_url(pull_request), + 'title': pull_request.title, + 'description': pull_request.description, + 'status': pull_request.status, + 'state': pull_request.pull_request_state, + 'created_on': pull_request.created_on, + 'updated_on': pull_request.updated_on, + 'commit_ids': pull_request.revisions, + 'review_status': pull_request.calculated_review_status(), + 'mergeable': merge_state, + 'source': { + 'clone_url': pull_request.source_repo.clone_url(), + 'repository': pull_request.source_repo.repo_name, + 'reference': { + 'name': pull_request.source_ref_parts.name, + 'type': pull_request.source_ref_parts.type, + 'commit_id': pull_request.source_ref_parts.commit_id, + }, + }, + 'target': { + 'clone_url': pull_request.target_repo.clone_url(), + 'repository': pull_request.target_repo.repo_name, + 'reference': { + 'name': pull_request.target_ref_parts.name, + 'type': pull_request.target_ref_parts.type, + 'commit_id': pull_request.target_ref_parts.commit_id, + }, + }, + 'merge': merge_data, + 'author': pull_request.author.get_api_data(include_secrets=False, + details='basic'), + 'reviewers': [ + { + 'user': reviewer.get_api_data(include_secrets=False, + details='basic'), + 'reasons': reasons, + 'review_status': st[0][1].status if st else 'not_reviewed', + } + for obj, reviewer, reasons, mandatory, st in + pull_request.reviewers_statuses() + ] + } + + return data + + def set_state(self, pull_request_state, final_state=None): + """ + # goes from initial state to updating to initial state. 
+ # initial state can be changed by specifying back_state= + with pull_request_obj.set_state(PullRequest.STATE_UPDATING): + pull_request.merge() + + :param pull_request_state: + :param final_state: + + """ + + return _SetState(self, pull_request_state, back_state=final_state) + + +class PullRequest(Base, _PullRequestBase): + __tablename__ = 'pull_requests' + __table_args__ = ( + base_table_args, + ) + + pull_request_id = Column( + 'pull_request_id', Integer(), nullable=False, primary_key=True) + + def __repr__(self): + if self.pull_request_id: + return '' % self.pull_request_id + else: + return '' % id(self) + + reviewers = relationship('PullRequestReviewers', + cascade="all, delete-orphan") + statuses = relationship('ChangesetStatus', + cascade="all, delete-orphan") + comments = relationship('ChangesetComment', + cascade="all, delete-orphan") + versions = relationship('PullRequestVersion', + cascade="all, delete-orphan", + lazy='dynamic') + + @classmethod + def get_pr_display_object(cls, pull_request_obj, org_pull_request_obj, + internal_methods=None): + + class PullRequestDisplay(object): + """ + Special object wrapper for showing PullRequest data via Versions + It mimics PR object as close as possible. This is read only object + just for display + """ + + def __init__(self, attrs, internal=None): + self.attrs = attrs + # internal have priority over the given ones via attrs + self.internal = internal or ['versions'] + + def __getattr__(self, item): + if item in self.internal: + return getattr(self, item) + try: + return self.attrs[item] + except KeyError: + raise AttributeError( + '%s object has no attribute %s' % (self, item)) + + def __repr__(self): + return '' % self.attrs.get('pull_request_id') + + def versions(self): + return pull_request_obj.versions.order_by( + PullRequestVersion.pull_request_version_id).all() + + def is_closed(self): + return pull_request_obj.is_closed() + + @property + def pull_request_version_id(self): + return getattr(pull_request_obj, 'pull_request_version_id', None) + + attrs = StrictAttributeDict(pull_request_obj.get_api_data(with_merge_state=False)) + + attrs.author = StrictAttributeDict( + pull_request_obj.author.get_api_data()) + if pull_request_obj.target_repo: + attrs.target_repo = StrictAttributeDict( + pull_request_obj.target_repo.get_api_data()) + attrs.target_repo.clone_url = pull_request_obj.target_repo.clone_url + + if pull_request_obj.source_repo: + attrs.source_repo = StrictAttributeDict( + pull_request_obj.source_repo.get_api_data()) + attrs.source_repo.clone_url = pull_request_obj.source_repo.clone_url + + attrs.source_ref_parts = pull_request_obj.source_ref_parts + attrs.target_ref_parts = pull_request_obj.target_ref_parts + attrs.revisions = pull_request_obj.revisions + + attrs.shadow_merge_ref = org_pull_request_obj.shadow_merge_ref + attrs.reviewer_data = org_pull_request_obj.reviewer_data + attrs.reviewer_data_json = org_pull_request_obj.reviewer_data_json + + return PullRequestDisplay(attrs, internal=internal_methods) + + def is_closed(self): + return self.status == self.STATUS_CLOSED + + def __json__(self): + return { + 'revisions': self.revisions, + } + + def calculated_review_status(self): + from rhodecode.model.changeset_status import ChangesetStatusModel + return ChangesetStatusModel().calculated_review_status(self) + + def reviewers_statuses(self): + from rhodecode.model.changeset_status import ChangesetStatusModel + return ChangesetStatusModel().reviewers_statuses(self) + + @property + def workspace_id(self): + from 
rhodecode.model.pull_request import PullRequestModel + return PullRequestModel()._workspace_id(self) + + def get_shadow_repo(self): + workspace_id = self.workspace_id + shadow_repository_path = self.target_repo.get_shadow_repository_path(workspace_id) + if os.path.isdir(shadow_repository_path): + vcs_obj = self.target_repo.scm_instance() + return vcs_obj.get_shadow_instance(shadow_repository_path) + + +class PullRequestVersion(Base, _PullRequestBase): + __tablename__ = 'pull_request_versions' + __table_args__ = ( + base_table_args, + ) + + pull_request_version_id = Column( + 'pull_request_version_id', Integer(), nullable=False, primary_key=True) + pull_request_id = Column( + 'pull_request_id', Integer(), + ForeignKey('pull_requests.pull_request_id'), nullable=False) + pull_request = relationship('PullRequest') + + def __repr__(self): + if self.pull_request_version_id: + return '' % self.pull_request_version_id + else: + return '' % id(self) + + @property + def reviewers(self): + return self.pull_request.reviewers + + @property + def versions(self): + return self.pull_request.versions + + def is_closed(self): + # calculate from original + return self.pull_request.status == self.STATUS_CLOSED + + def calculated_review_status(self): + return self.pull_request.calculated_review_status() + + def reviewers_statuses(self): + return self.pull_request.reviewers_statuses() + + +class PullRequestReviewers(Base, BaseModel): + __tablename__ = 'pull_request_reviewers' + __table_args__ = ( + base_table_args, + ) + + @hybrid_property + def reasons(self): + if not self._reasons: + return [] + return self._reasons + + @reasons.setter + def reasons(self, val): + val = val or [] + if any(not isinstance(x, compat.string_types) for x in val): + raise Exception('invalid reasons type, must be list of strings') + self._reasons = val + + pull_requests_reviewers_id = Column( + 'pull_requests_reviewers_id', Integer(), nullable=False, + primary_key=True) + pull_request_id = Column( + "pull_request_id", Integer(), + ForeignKey('pull_requests.pull_request_id'), nullable=False) + user_id = Column( + "user_id", Integer(), ForeignKey('users.user_id'), nullable=True) + _reasons = Column( + 'reason', MutationList.as_mutable( + JsonType('list', dialect_map=dict(mysql=UnicodeText(16384))))) + + mandatory = Column("mandatory", Boolean(), nullable=False, default=False) + user = relationship('User') + pull_request = relationship('PullRequest') + + rule_data = Column( + 'rule_data_json', + JsonType(dialect_map=dict(mysql=UnicodeText(16384)))) + + def rule_user_group_data(self): + """ + Returns the voting user group rule data for this reviewer + """ + + if self.rule_data and 'vote_rule' in self.rule_data: + user_group_data = {} + if 'rule_user_group_entry_id' in self.rule_data: + # means a group with voting rules ! 
+ user_group_data['id'] = self.rule_data['rule_user_group_entry_id'] + user_group_data['name'] = self.rule_data['rule_name'] + user_group_data['vote_rule'] = self.rule_data['vote_rule'] + + return user_group_data + + def __unicode__(self): + return u"<%s('id:%s')>" % (self.__class__.__name__, + self.pull_requests_reviewers_id) + + +class Notification(Base, BaseModel): + __tablename__ = 'notifications' + __table_args__ = ( + Index('notification_type_idx', 'type'), + base_table_args, + ) + + TYPE_CHANGESET_COMMENT = u'cs_comment' + TYPE_MESSAGE = u'message' + TYPE_MENTION = u'mention' + TYPE_REGISTRATION = u'registration' + TYPE_PULL_REQUEST = u'pull_request' + TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment' + + notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True) + subject = Column('subject', Unicode(512), nullable=True) + body = Column('body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True) + created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True) + created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) + type_ = Column('type', Unicode(255)) + + created_by_user = relationship('User') + notifications_to_users = relationship('UserNotification', lazy='joined', + cascade="all, delete-orphan") + + @property + def recipients(self): + return [x.user for x in UserNotification.query()\ + .filter(UserNotification.notification == self)\ + .order_by(UserNotification.user_id.asc()).all()] + + @classmethod + def create(cls, created_by, subject, body, recipients, type_=None): + if type_ is None: + type_ = Notification.TYPE_MESSAGE + + notification = cls() + notification.created_by_user = created_by + notification.subject = subject + notification.body = body + notification.type_ = type_ + notification.created_on = datetime.datetime.now() + + # For each recipient link the created notification to his account + for u in recipients: + assoc = UserNotification() + assoc.user_id = u.user_id + assoc.notification = notification + + # if created_by is inside recipients mark his notification + # as read + if u.user_id == created_by.user_id: + assoc.read = True + Session().add(assoc) + + Session().add(notification) + + return notification + + +class UserNotification(Base, BaseModel): + __tablename__ = 'user_to_notification' + __table_args__ = ( + UniqueConstraint('user_id', 'notification_id'), + base_table_args + ) + + user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True) + notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True) + read = Column('read', Boolean, default=False) + sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None) + + user = relationship('User', lazy="joined") + notification = relationship('Notification', lazy="joined", + order_by=lambda: Notification.created_on.desc(),) + + def mark_as_read(self): + self.read = True + Session().add(self) + + +class Gist(Base, BaseModel): + __tablename__ = 'gists' + __table_args__ = ( + Index('g_gist_access_id_idx', 'gist_access_id'), + Index('g_created_on_idx', 'created_on'), + base_table_args + ) + + GIST_PUBLIC = u'public' + GIST_PRIVATE = u'private' + DEFAULT_FILENAME = u'gistfile1.txt' + + ACL_LEVEL_PUBLIC = u'acl_public' + ACL_LEVEL_PRIVATE = u'acl_private' + + gist_id = Column('gist_id', Integer(), primary_key=True) + gist_access_id = Column('gist_access_id', Unicode(250)) + gist_description = 
Column('gist_description', UnicodeText().with_variant(UnicodeText(1024), 'mysql')) + gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True) + gist_expires = Column('gist_expires', Float(53), nullable=False) + gist_type = Column('gist_type', Unicode(128), nullable=False) + created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) + modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) + acl_level = Column('acl_level', Unicode(128), nullable=True) + + owner = relationship('User') + + def __repr__(self): + return '' % (self.gist_type, self.gist_access_id) + + @hybrid_property + def description_safe(self): + from rhodecode.lib import helpers as h + return h.escape(self.gist_description) + + @classmethod + def get_or_404(cls, id_): + from pyramid.httpexceptions import HTTPNotFound + + res = cls.query().filter(cls.gist_access_id == id_).scalar() + if not res: + raise HTTPNotFound() + return res + + @classmethod + def get_by_access_id(cls, gist_access_id): + return cls.query().filter(cls.gist_access_id == gist_access_id).scalar() + + def gist_url(self): + from rhodecode.model.gist import GistModel + return GistModel().get_url(self) + + @classmethod + def base_path(cls): + """ + Returns base path when all gists are stored + + :param cls: + """ + from rhodecode.model.gist import GIST_STORE_LOC + q = Session().query(RhodeCodeUi)\ + .filter(RhodeCodeUi.ui_key == URL_SEP) + q = q.options(FromCache("sql_cache_short", "repository_repo_path")) + return os.path.join(q.one().ui_value, GIST_STORE_LOC) + + def get_api_data(self): + """ + Common function for generating gist related data for API + """ + gist = self + data = { + 'gist_id': gist.gist_id, + 'type': gist.gist_type, + 'access_id': gist.gist_access_id, + 'description': gist.gist_description, + 'url': gist.gist_url(), + 'expires': gist.gist_expires, + 'created_on': gist.created_on, + 'modified_at': gist.modified_at, + 'content': None, + 'acl_level': gist.acl_level, + } + return data + + def __json__(self): + data = dict( + ) + data.update(self.get_api_data()) + return data + # SCM functions + + def scm_instance(self, **kwargs): + """ + Get an instance of VCS Repository + + :param kwargs: + """ + from rhodecode.model.gist import GistModel + full_repo_path = os.path.join(self.base_path(), self.gist_access_id) + return get_vcs_instance( + repo_path=safe_str(full_repo_path), create=False, + _vcs_alias=GistModel.vcs_backend) + + +class ExternalIdentity(Base, BaseModel): + __tablename__ = 'external_identities' + __table_args__ = ( + Index('local_user_id_idx', 'local_user_id'), + Index('external_id_idx', 'external_id'), + base_table_args + ) + + external_id = Column('external_id', Unicode(255), default=u'', primary_key=True) + external_username = Column('external_username', Unicode(1024), default=u'') + local_user_id = Column('local_user_id', Integer(), ForeignKey('users.user_id'), primary_key=True) + provider_name = Column('provider_name', Unicode(255), default=u'', primary_key=True) + access_token = Column('access_token', String(1024), default=u'') + alt_token = Column('alt_token', String(1024), default=u'') + token_secret = Column('token_secret', String(1024), default=u'') + + @classmethod + def by_external_id_and_provider(cls, external_id, provider_name, local_user_id=None): + """ + Returns ExternalIdentity instance based on search params + + :param external_id: + :param provider_name: + :return: ExternalIdentity + """ + 
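        # [Editor's note, illustrative sketch -- not part of the changeset]
        # typical lookup, with made-up values for the id and provider name:
        #
        #     ExternalIdentity.by_external_id_and_provider(
        #         external_id=u'12345', provider_name=u'github')
        #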
query = cls.query() + query = query.filter(cls.external_id == external_id) + query = query.filter(cls.provider_name == provider_name) + if local_user_id: + query = query.filter(cls.local_user_id == local_user_id) + return query.first() + + @classmethod + def user_by_external_id_and_provider(cls, external_id, provider_name): + """ + Returns User instance based on search params + + :param external_id: + :param provider_name: + :return: User + """ + query = User.query() + query = query.filter(cls.external_id == external_id) + query = query.filter(cls.provider_name == provider_name) + query = query.filter(User.user_id == cls.local_user_id) + return query.first() + + @classmethod + def by_local_user_id(cls, local_user_id): + """ + Returns all tokens for user + + :param local_user_id: + :return: ExternalIdentity + """ + query = cls.query() + query = query.filter(cls.local_user_id == local_user_id) + return query + + @classmethod + def load_provider_plugin(cls, plugin_id): + from rhodecode.authentication.base import loadplugin + _plugin_id = 'egg:rhodecode-enterprise-ee#{}'.format(plugin_id) + auth_plugin = loadplugin(_plugin_id) + return auth_plugin + + +class Integration(Base, BaseModel): + __tablename__ = 'integrations' + __table_args__ = ( + base_table_args + ) + + integration_id = Column('integration_id', Integer(), primary_key=True) + integration_type = Column('integration_type', String(255)) + enabled = Column('enabled', Boolean(), nullable=False) + name = Column('name', String(255), nullable=False) + child_repos_only = Column('child_repos_only', Boolean(), nullable=False, + default=False) + + settings = Column( + 'settings_json', MutationObj.as_mutable( + JsonType(dialect_map=dict(mysql=UnicodeText(16384))))) + repo_id = Column( + 'repo_id', Integer(), ForeignKey('repositories.repo_id'), + nullable=True, unique=None, default=None) + repo = relationship('Repository', lazy='joined') + + repo_group_id = Column( + 'repo_group_id', Integer(), ForeignKey('groups.group_id'), + nullable=True, unique=None, default=None) + repo_group = relationship('RepoGroup', lazy='joined') + + @property + def scope(self): + if self.repo: + return repr(self.repo) + if self.repo_group: + if self.child_repos_only: + return repr(self.repo_group) + ' (child repos only)' + else: + return repr(self.repo_group) + ' (recursive)' + if self.child_repos_only: + return 'root_repos' + return 'global' + + def __repr__(self): + return '' % (self.integration_type, self.scope) + + +class RepoReviewRuleUser(Base, BaseModel): + __tablename__ = 'repo_review_rules_users' + __table_args__ = ( + base_table_args + ) + + repo_review_rule_user_id = Column('repo_review_rule_user_id', Integer(), primary_key=True) + repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id')) + user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False) + mandatory = Column("mandatory", Boolean(), nullable=False, default=False) + user = relationship('User') + + def rule_data(self): + return { + 'mandatory': self.mandatory + } + + +class RepoReviewRuleUserGroup(Base, BaseModel): + __tablename__ = 'repo_review_rules_users_groups' + __table_args__ = ( + base_table_args + ) + + VOTE_RULE_ALL = -1 + + repo_review_rule_users_group_id = Column('repo_review_rule_users_group_id', Integer(), primary_key=True) + repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id')) + users_group_id = Column("users_group_id", 
Integer(),ForeignKey('users_groups.users_group_id'), nullable=False) + mandatory = Column("mandatory", Boolean(), nullable=False, default=False) + vote_rule = Column("vote_rule", Integer(), nullable=True, default=VOTE_RULE_ALL) + users_group = relationship('UserGroup') + + def rule_data(self): + return { + 'mandatory': self.mandatory, + 'vote_rule': self.vote_rule + } + + @property + def vote_rule_label(self): + if not self.vote_rule or self.vote_rule == self.VOTE_RULE_ALL: + return 'all must vote' + else: + return 'min. vote {}'.format(self.vote_rule) + + +class RepoReviewRule(Base, BaseModel): + __tablename__ = 'repo_review_rules' + __table_args__ = ( + base_table_args + ) + + repo_review_rule_id = Column( + 'repo_review_rule_id', Integer(), primary_key=True) + repo_id = Column( + "repo_id", Integer(), ForeignKey('repositories.repo_id')) + repo = relationship('Repository', backref='review_rules') + + review_rule_name = Column('review_rule_name', String(255)) + _branch_pattern = Column("branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob + _target_branch_pattern = Column("target_branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob + _file_pattern = Column("file_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob + + use_authors_for_review = Column("use_authors_for_review", Boolean(), nullable=False, default=False) + forbid_author_to_review = Column("forbid_author_to_review", Boolean(), nullable=False, default=False) + forbid_commit_author_to_review = Column("forbid_commit_author_to_review", Boolean(), nullable=False, default=False) + forbid_adding_reviewers = Column("forbid_adding_reviewers", Boolean(), nullable=False, default=False) + + rule_users = relationship('RepoReviewRuleUser') + rule_user_groups = relationship('RepoReviewRuleUserGroup') + + def _validate_pattern(self, value): + re.compile('^' + glob2re(value) + '$') + + @hybrid_property + def source_branch_pattern(self): + return self._branch_pattern or '*' + + @source_branch_pattern.setter + def source_branch_pattern(self, value): + self._validate_pattern(value) + self._branch_pattern = value or '*' + + @hybrid_property + def target_branch_pattern(self): + return self._target_branch_pattern or '*' + + @target_branch_pattern.setter + def target_branch_pattern(self, value): + self._validate_pattern(value) + self._target_branch_pattern = value or '*' + + @hybrid_property + def file_pattern(self): + return self._file_pattern or '*' + + @file_pattern.setter + def file_pattern(self, value): + self._validate_pattern(value) + self._file_pattern = value or '*' + + def matches(self, source_branch, target_branch, files_changed): + """ + Check if this review rule matches a branch/files in a pull request + + :param source_branch: source branch name for the commit + :param target_branch: target branch name for the commit + :param files_changed: list of file paths changed in the pull request + """ + + source_branch = source_branch or '' + target_branch = target_branch or '' + files_changed = files_changed or [] + + branch_matches = True + if source_branch or target_branch: + if self.source_branch_pattern == '*': + source_branch_match = True + else: + if self.source_branch_pattern.startswith('re:'): + source_pattern = self.source_branch_pattern[3:] + else: + source_pattern = '^' + glob2re(self.source_branch_pattern) + '$' + source_branch_regex = re.compile(source_pattern) + source_branch_match = 
bool(source_branch_regex.search(source_branch)) + if self.target_branch_pattern == '*': + target_branch_match = True + else: + if self.target_branch_pattern.startswith('re:'): + target_pattern = self.target_branch_pattern[3:] + else: + target_pattern = '^' + glob2re(self.target_branch_pattern) + '$' + target_branch_regex = re.compile(target_pattern) + target_branch_match = bool(target_branch_regex.search(target_branch)) + + branch_matches = source_branch_match and target_branch_match + + files_matches = True + if self.file_pattern != '*': + files_matches = False + if self.file_pattern.startswith('re:'): + file_pattern = self.file_pattern[3:] + else: + file_pattern = glob2re(self.file_pattern) + file_regex = re.compile(file_pattern) + for filename in files_changed: + if file_regex.search(filename): + files_matches = True + break + + return branch_matches and files_matches + + @property + def review_users(self): + """ Returns the users which this rule applies to """ + + users = collections.OrderedDict() + + for rule_user in self.rule_users: + if rule_user.user.active: + if rule_user.user not in users: + users[rule_user.user.username] = { + 'user': rule_user.user, + 'source': 'user', + 'source_data': {}, + 'data': rule_user.rule_data() + } + + for rule_user_group in self.rule_user_groups: + source_data = { + 'user_group_id': rule_user_group.users_group.users_group_id, + 'name': rule_user_group.users_group.users_group_name, + 'members': len(rule_user_group.users_group.members) + } + for member in rule_user_group.users_group.members: + if member.user.active: + key = member.user.username + if key in users: + # skip this member as we have him already + # this prevents from override the "first" matched + # users with duplicates in multiple groups + continue + + users[key] = { + 'user': member.user, + 'source': 'user_group', + 'source_data': source_data, + 'data': rule_user_group.rule_data() + } + + return users + + def user_group_vote_rule(self, user_id): + + rules = [] + if not self.rule_user_groups: + return rules + + for user_group in self.rule_user_groups: + user_group_members = [x.user_id for x in user_group.users_group.members] + if user_id in user_group_members: + rules.append(user_group) + return rules + + def __repr__(self): + return '' % ( + self.repo_review_rule_id, self.repo) + + +class ScheduleEntry(Base, BaseModel): + __tablename__ = 'schedule_entries' + __table_args__ = ( + UniqueConstraint('schedule_name', name='s_schedule_name_idx'), + UniqueConstraint('task_uid', name='s_task_uid_idx'), + base_table_args, + ) + + schedule_types = ['crontab', 'timedelta', 'integer'] + schedule_entry_id = Column('schedule_entry_id', Integer(), primary_key=True) + + schedule_name = Column("schedule_name", String(255), nullable=False, unique=None, default=None) + schedule_description = Column("schedule_description", String(10000), nullable=True, unique=None, default=None) + schedule_enabled = Column("schedule_enabled", Boolean(), nullable=False, unique=None, default=True) + + _schedule_type = Column("schedule_type", String(255), nullable=False, unique=None, default=None) + schedule_definition = Column('schedule_definition_json', MutationObj.as_mutable(JsonType(default=lambda: "", dialect_map=dict(mysql=LONGTEXT())))) + + schedule_last_run = Column('schedule_last_run', DateTime(timezone=False), nullable=True, unique=None, default=None) + schedule_total_run_count = Column('schedule_total_run_count', Integer(), nullable=True, unique=None, default=0) + + # task + task_uid = Column("task_uid", 
String(255), nullable=False, unique=None, default=None) + task_dot_notation = Column("task_dot_notation", String(4096), nullable=False, unique=None, default=None) + task_args = Column('task_args_json', MutationObj.as_mutable(JsonType(default=list, dialect_map=dict(mysql=LONGTEXT())))) + task_kwargs = Column('task_kwargs_json', MutationObj.as_mutable(JsonType(default=dict, dialect_map=dict(mysql=LONGTEXT())))) + + created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) + updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=None) + + @hybrid_property + def schedule_type(self): + return self._schedule_type + + @schedule_type.setter + def schedule_type(self, val): + if val not in self.schedule_types: + raise ValueError('Value must be on of `{}` and got `{}`'.format( + val, self.schedule_type)) + + self._schedule_type = val + + @classmethod + def get_uid(cls, obj): + args = obj.task_args + kwargs = obj.task_kwargs + if isinstance(args, JsonRaw): + try: + args = json.loads(args) + except ValueError: + args = tuple() + + if isinstance(kwargs, JsonRaw): + try: + kwargs = json.loads(kwargs) + except ValueError: + kwargs = dict() + + dot_notation = obj.task_dot_notation + val = '.'.join(map(safe_str, [ + sorted(dot_notation), args, sorted(kwargs.items())])) + return hashlib.sha1(val).hexdigest() + + @classmethod + def get_by_schedule_name(cls, schedule_name): + return cls.query().filter(cls.schedule_name == schedule_name).scalar() + + @classmethod + def get_by_schedule_id(cls, schedule_id): + return cls.query().filter(cls.schedule_entry_id == schedule_id).scalar() + + @property + def task(self): + return self.task_dot_notation + + @property + def schedule(self): + from rhodecode.lib.celerylib.utils import raw_2_schedule + schedule = raw_2_schedule(self.schedule_definition, self.schedule_type) + return schedule + + @property + def args(self): + try: + return list(self.task_args or []) + except ValueError: + return list() + + @property + def kwargs(self): + try: + return dict(self.task_kwargs or {}) + except ValueError: + return dict() + + def _as_raw(self, val): + if hasattr(val, 'de_coerce'): + val = val.de_coerce() + if val: + val = json.dumps(val) + + return val + + @property + def schedule_definition_raw(self): + return self._as_raw(self.schedule_definition) + + @property + def args_raw(self): + return self._as_raw(self.task_args) + + @property + def kwargs_raw(self): + return self._as_raw(self.task_kwargs) + + def __repr__(self): + return ''.format( + self.schedule_entry_id, self.schedule_name) + + +@event.listens_for(ScheduleEntry, 'before_update') +def update_task_uid(mapper, connection, target): + target.task_uid = ScheduleEntry.get_uid(target) + + +@event.listens_for(ScheduleEntry, 'before_insert') +def set_task_uid(mapper, connection, target): + target.task_uid = ScheduleEntry.get_uid(target) + + +class _BaseBranchPerms(BaseModel): + @classmethod + def compute_hash(cls, value): + return sha1_safe(value) + + @hybrid_property + def branch_pattern(self): + return self._branch_pattern or '*' + + @hybrid_property + def branch_hash(self): + return self._branch_hash + + def _validate_glob(self, value): + re.compile('^' + glob2re(value) + '$') + + @branch_pattern.setter + def branch_pattern(self, value): + self._validate_glob(value) + self._branch_pattern = value or '*' + # set the Hash when setting the branch pattern + self._branch_hash = self.compute_hash(self._branch_pattern) + + def matches(self, 
branch): + """ + Check if this the branch matches entry + + :param branch: branch name for the commit + """ + + branch = branch or '' + + branch_matches = True + if branch: + branch_regex = re.compile('^' + glob2re(self.branch_pattern) + '$') + branch_matches = bool(branch_regex.search(branch)) + + return branch_matches + + +class UserToRepoBranchPermission(Base, _BaseBranchPerms): + __tablename__ = 'user_to_repo_branch_permissions' + __table_args__ = ( + base_table_args + ) + + branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True) + + repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None) + repo = relationship('Repository', backref='user_branch_perms') + + permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) + permission = relationship('Permission') + + rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('repo_to_perm.repo_to_perm_id'), nullable=False, unique=None, default=None) + user_repo_to_perm = relationship('UserRepoToPerm') + + rule_order = Column('rule_order', Integer(), nullable=False) + _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default=u'*') # glob + _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql')) + + def __unicode__(self): + return u' %r)>' % ( + self.user_repo_to_perm, self.branch_pattern) + + +class UserGroupToRepoBranchPermission(Base, _BaseBranchPerms): + __tablename__ = 'user_group_to_repo_branch_permissions' + __table_args__ = ( + base_table_args + ) + + branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True) + + repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None) + repo = relationship('Repository', backref='user_group_branch_perms') + + permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) + permission = relationship('Permission') + + rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('users_group_repo_to_perm.users_group_to_perm_id'), nullable=False, unique=None, default=None) + user_group_repo_to_perm = relationship('UserGroupRepoToPerm') + + rule_order = Column('rule_order', Integer(), nullable=False) + _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default=u'*') # glob + _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql')) + + def __unicode__(self): + return u' %r)>' % ( + self.user_group_repo_to_perm, self.branch_pattern) + + +class UserBookmark(Base, BaseModel): + __tablename__ = 'user_bookmarks' + __table_args__ = ( + UniqueConstraint('user_id', 'bookmark_repo_id'), + UniqueConstraint('user_id', 'bookmark_repo_group_id'), + UniqueConstraint('user_id', 'bookmark_position'), + base_table_args + ) + + user_bookmark_id = Column("user_bookmark_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) + user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) + position = Column("bookmark_position", Integer(), nullable=False) + title = Column("bookmark_title", String(255), nullable=True, unique=None, default=None) + redirect_url = Column("bookmark_redirect_url", String(10240), nullable=True, unique=None, default=None) + created_on = 
Column("created_on", DateTime(timezone=False), nullable=False, default=datetime.datetime.now) + + bookmark_repo_id = Column("bookmark_repo_id", Integer(), ForeignKey("repositories.repo_id"), nullable=True, unique=None, default=None) + bookmark_repo_group_id = Column("bookmark_repo_group_id", Integer(), ForeignKey("groups.group_id"), nullable=True, unique=None, default=None) + + user = relationship("User") + + repository = relationship("Repository") + repository_group = relationship("RepoGroup") + + @classmethod + def get_by_position_for_user(cls, position, user_id): + return cls.query() \ + .filter(UserBookmark.user_id == user_id) \ + .filter(UserBookmark.position == position).scalar() + + @classmethod + def get_bookmarks_for_user(cls, user_id): + return cls.query() \ + .filter(UserBookmark.user_id == user_id) \ + .options(joinedload(UserBookmark.repository)) \ + .options(joinedload(UserBookmark.repository_group)) \ + .order_by(UserBookmark.position.asc()) \ + .all() + + def __unicode__(self): + return u'' % (self.position, self.redirect_url) + + +class FileStore(Base, BaseModel): + __tablename__ = 'file_store' + __table_args__ = ( + base_table_args + ) + + file_store_id = Column('file_store_id', Integer(), primary_key=True) + file_uid = Column('file_uid', String(1024), nullable=False) + file_display_name = Column('file_display_name', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), nullable=True) + file_description = Column('file_description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=True) + file_org_name = Column('file_org_name', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=False) + + # sha256 hash + file_hash = Column('file_hash', String(512), nullable=False) + file_size = Column('file_size', Integer(), nullable=False) + + created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) + accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True) + accessed_count = Column('accessed_count', Integer(), default=0) + + enabled = Column('enabled', Boolean(), nullable=False, default=True) + + # if repo/repo_group reference is set, check for permissions + check_acl = Column('check_acl', Boolean(), nullable=False, default=True) + + # hidden defines an attachment that should be hidden from showing in artifact listing + hidden = Column('hidden', Boolean(), nullable=False, default=False) + + user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False) + upload_user = relationship('User', lazy='joined', primaryjoin='User.user_id==FileStore.user_id') + + file_metadata = relationship('FileStoreMetadata', lazy='joined') + + # scope limited to user, which requester have access to + scope_user_id = Column( + 'scope_user_id', Integer(), ForeignKey('users.user_id'), + nullable=True, unique=None, default=None) + user = relationship('User', lazy='joined', primaryjoin='User.user_id==FileStore.scope_user_id') + + # scope limited to user group, which requester have access to + scope_user_group_id = Column( + 'scope_user_group_id', Integer(), ForeignKey('users_groups.users_group_id'), + nullable=True, unique=None, default=None) + user_group = relationship('UserGroup', lazy='joined') + + # scope limited to repo, which requester have access to + scope_repo_id = Column( + 'scope_repo_id', Integer(), ForeignKey('repositories.repo_id'), + nullable=True, unique=None, default=None) + repo = relationship('Repository', lazy='joined') + + # scope limited to repo group, which requester 
have access to + scope_repo_group_id = Column( + 'scope_repo_group_id', Integer(), ForeignKey('groups.group_id'), + nullable=True, unique=None, default=None) + repo_group = relationship('RepoGroup', lazy='joined') + + @classmethod + def get_by_store_uid(cls, file_store_uid): + return FileStore.query().filter(FileStore.file_uid == file_store_uid).scalar() + + @classmethod + def create(cls, file_uid, filename, file_hash, file_size, file_display_name='', + file_description='', enabled=True, hidden=False, check_acl=True, + user_id=None, scope_user_id=None, scope_repo_id=None, scope_repo_group_id=None): + + store_entry = FileStore() + store_entry.file_uid = file_uid + store_entry.file_display_name = file_display_name + store_entry.file_org_name = filename + store_entry.file_size = file_size + store_entry.file_hash = file_hash + store_entry.file_description = file_description + + store_entry.check_acl = check_acl + store_entry.enabled = enabled + store_entry.hidden = hidden + + store_entry.user_id = user_id + store_entry.scope_user_id = scope_user_id + store_entry.scope_repo_id = scope_repo_id + store_entry.scope_repo_group_id = scope_repo_group_id + + return store_entry + + @classmethod + def store_metadata(cls, file_store_id, args, commit=True): + file_store = FileStore.get(file_store_id) + if file_store is None: + return + + for section, key, value, value_type in args: + has_key = FileStoreMetadata().query() \ + .filter(FileStoreMetadata.file_store_id == file_store.file_store_id) \ + .filter(FileStoreMetadata.file_store_meta_section == section) \ + .filter(FileStoreMetadata.file_store_meta_key == key) \ + .scalar() + if has_key: + msg = 'key `{}` already defined under section `{}` for this file.'\ + .format(key, section) + raise ArtifactMetadataDuplicate(msg, err_section=section, err_key=key) + + # NOTE(marcink): raises ArtifactMetadataBadValueType + FileStoreMetadata.valid_value_type(value_type) + + meta_entry = FileStoreMetadata() + meta_entry.file_store = file_store + meta_entry.file_store_meta_section = section + meta_entry.file_store_meta_key = key + meta_entry.file_store_meta_value_type = value_type + meta_entry.file_store_meta_value = value + + Session().add(meta_entry) + + try: + if commit: + Session().commit() + except IntegrityError: + Session().rollback() + raise ArtifactMetadataDuplicate('Duplicate section/key found for this file.') + + @classmethod + def bump_access_counter(cls, file_uid, commit=True): + FileStore().query()\ + .filter(FileStore.file_uid == file_uid)\ + .update({FileStore.accessed_count: (FileStore.accessed_count + 1), + FileStore.accessed_on: datetime.datetime.now()}) + if commit: + Session().commit() + + def __repr__(self): + return ''.format(self.file_store_id) + + +class FileStoreMetadata(Base, BaseModel): + __tablename__ = 'file_store_metadata' + __table_args__ = ( + UniqueConstraint('file_store_id', 'file_store_meta_section_hash', 'file_store_meta_key_hash'), + Index('file_store_meta_section_idx', 'file_store_meta_section', mysql_length=255), + Index('file_store_meta_key_idx', 'file_store_meta_key', mysql_length=255), + base_table_args + ) + SETTINGS_TYPES = { + 'str': safe_str, + 'int': safe_int, + 'unicode': safe_unicode, + 'bool': str2bool, + 'list': functools.partial(aslist, sep=',') + } + + file_store_meta_id = Column( + "file_store_meta_id", Integer(), nullable=False, unique=True, default=None, + primary_key=True) + _file_store_meta_section = Column( + "file_store_meta_section", UnicodeText().with_variant(UnicodeText(1024), 'mysql'), + nullable=True, 
unique=None, default=None) + _file_store_meta_section_hash = Column( + "file_store_meta_section_hash", String(255), + nullable=True, unique=None, default=None) + _file_store_meta_key = Column( + "file_store_meta_key", UnicodeText().with_variant(UnicodeText(1024), 'mysql'), + nullable=True, unique=None, default=None) + _file_store_meta_key_hash = Column( + "file_store_meta_key_hash", String(255), nullable=True, unique=None, default=None) + _file_store_meta_value = Column( + "file_store_meta_value", UnicodeText().with_variant(UnicodeText(20480), 'mysql'), + nullable=True, unique=None, default=None) + _file_store_meta_value_type = Column( + "file_store_meta_value_type", String(255), nullable=True, unique=None, + default='unicode') + + file_store_id = Column( + 'file_store_id', Integer(), ForeignKey('file_store.file_store_id'), + nullable=True, unique=None, default=None) + + file_store = relationship('FileStore', lazy='joined') + + @classmethod + def valid_value_type(cls, value): + if value.split('.')[0] not in cls.SETTINGS_TYPES: + raise ArtifactMetadataBadValueType( + 'value_type must be one of %s got %s' % (cls.SETTINGS_TYPES.keys(), value)) + + @hybrid_property + def file_store_meta_section(self): + return self._file_store_meta_section + + @file_store_meta_section.setter + def file_store_meta_section(self, value): + self._file_store_meta_section = value + self._file_store_meta_section_hash = _hash_key(value) + + @hybrid_property + def file_store_meta_key(self): + return self._file_store_meta_key + + @file_store_meta_key.setter + def file_store_meta_key(self, value): + self._file_store_meta_key = value + self._file_store_meta_key_hash = _hash_key(value) + + @hybrid_property + def file_store_meta_value(self): + val = self._file_store_meta_value + + if self._file_store_meta_value_type: + # e.g unicode.encrypted == unicode + _type = self._file_store_meta_value_type.split('.')[0] + # decode the encrypted value if it's encrypted field type + if '.encrypted' in self._file_store_meta_value_type: + cipher = EncryptedTextValue() + val = safe_unicode(cipher.process_result_value(val, None)) + # do final type conversion + converter = self.SETTINGS_TYPES.get(_type) or self.SETTINGS_TYPES['unicode'] + val = converter(val) + + return val + + @file_store_meta_value.setter + def file_store_meta_value(self, val): + val = safe_unicode(val) + # encode the encrypted value + if '.encrypted' in self.file_store_meta_value_type: + cipher = EncryptedTextValue() + val = safe_unicode(cipher.process_bind_param(val, None)) + self._file_store_meta_value = val + + @hybrid_property + def file_store_meta_value_type(self): + return self._file_store_meta_value_type + + @file_store_meta_value_type.setter + def file_store_meta_value_type(self, val): + # e.g unicode.encrypted + self.valid_value_type(val) + self._file_store_meta_value_type = val + + def __json__(self): + data = { + 'artifact': self.file_store.file_uid, + 'section': self.file_store_meta_section, + 'key': self.file_store_meta_key, + 'value': self.file_store_meta_value, + } + + return data + + def __repr__(self): + return '<%s[%s]%s=>%s]>' % (self.__class__.__name__, self.file_store_meta_section, + self.file_store_meta_key, self.file_store_meta_value) + + +class DbMigrateVersion(Base, BaseModel): + __tablename__ = 'db_migrate_version' + __table_args__ = ( + base_table_args, + ) + + repository_id = Column('repository_id', String(250), primary_key=True) + repository_path = Column('repository_path', Text) + version = Column('version', Integer) + + @classmethod + def 
set_version(cls, version): + """ + Helper for forcing a different version, usually for debugging purposes via ishell. + """ + ver = DbMigrateVersion.query().first() + ver.version = version + Session().commit() + + +class DbSession(Base, BaseModel): + __tablename__ = 'db_session' + __table_args__ = ( + base_table_args, + ) + + def __repr__(self): + return ''.format(self.id) + + id = Column('id', Integer()) + namespace = Column('namespace', String(255), primary_key=True) + accessed = Column('accessed', DateTime, nullable=False) + created = Column('created', DateTime, nullable=False) + data = Column('data', PickleType, nullable=False) diff --git a/rhodecode/lib/dbmigrate/schema/db_4_3_0_0.py b/rhodecode/lib/dbmigrate/schema/db_4_3_0_0.py --- a/rhodecode/lib/dbmigrate/schema/db_4_3_0_0.py +++ b/rhodecode/lib/dbmigrate/schema/db_4_3_0_0.py @@ -2037,7 +2037,7 @@ class RepoGroup(Base, BaseModel): @classmethod def _generate_choice(cls, repo_group): - from webhelpers.html import literal as _literal + from webhelpers2.html import literal as _literal _name = lambda k: _literal(cls.CHOICES_SEPARATOR.join(k)) return repo_group.group_id, _name(repo_group.full_path_splitted) diff --git a/rhodecode/lib/dbmigrate/schema/db_4_4_0_0.py b/rhodecode/lib/dbmigrate/schema/db_4_4_0_0.py --- a/rhodecode/lib/dbmigrate/schema/db_4_4_0_0.py +++ b/rhodecode/lib/dbmigrate/schema/db_4_4_0_0.py @@ -2029,7 +2029,7 @@ class RepoGroup(Base, BaseModel): @classmethod def _generate_choice(cls, repo_group): - from webhelpers.html import literal as _literal + from webhelpers2.html import literal as _literal _name = lambda k: _literal(cls.CHOICES_SEPARATOR.join(k)) return repo_group.group_id, _name(repo_group.full_path_splitted) diff --git a/rhodecode/lib/dbmigrate/schema/db_4_4_0_1.py b/rhodecode/lib/dbmigrate/schema/db_4_4_0_1.py --- a/rhodecode/lib/dbmigrate/schema/db_4_4_0_1.py +++ b/rhodecode/lib/dbmigrate/schema/db_4_4_0_1.py @@ -2028,7 +2028,7 @@ class RepoGroup(Base, BaseModel): @classmethod def _generate_choice(cls, repo_group): - from webhelpers.html import literal as _literal + from webhelpers2.html import literal as _literal _name = lambda k: _literal(cls.CHOICES_SEPARATOR.join(k)) return repo_group.group_id, _name(repo_group.full_path_splitted) diff --git a/rhodecode/lib/dbmigrate/schema/db_4_4_0_2.py b/rhodecode/lib/dbmigrate/schema/db_4_4_0_2.py --- a/rhodecode/lib/dbmigrate/schema/db_4_4_0_2.py +++ b/rhodecode/lib/dbmigrate/schema/db_4_4_0_2.py @@ -2032,7 +2032,7 @@ class RepoGroup(Base, BaseModel): @classmethod def _generate_choice(cls, repo_group): - from webhelpers.html import literal as _literal + from webhelpers2.html import literal as _literal _name = lambda k: _literal(cls.CHOICES_SEPARATOR.join(k)) return repo_group.group_id, _name(repo_group.full_path_splitted) diff --git a/rhodecode/lib/dbmigrate/schema/db_4_5_0_0.py b/rhodecode/lib/dbmigrate/schema/db_4_5_0_0.py --- a/rhodecode/lib/dbmigrate/schema/db_4_5_0_0.py +++ b/rhodecode/lib/dbmigrate/schema/db_4_5_0_0.py @@ -2032,7 +2032,7 @@ class RepoGroup(Base, BaseModel): @classmethod def _generate_choice(cls, repo_group): - from webhelpers.html import literal as _literal + from webhelpers2.html import literal as _literal _name = lambda k: _literal(cls.CHOICES_SEPARATOR.join(k)) return repo_group.group_id, _name(repo_group.full_path_splitted) diff --git a/rhodecode/lib/dbmigrate/schema/db_4_7_0_0.py b/rhodecode/lib/dbmigrate/schema/db_4_7_0_0.py --- a/rhodecode/lib/dbmigrate/schema/db_4_7_0_0.py +++ b/rhodecode/lib/dbmigrate/schema/db_4_7_0_0.py @@ -2076,7 
+2076,7 @@ class RepoGroup(Base, BaseModel): @classmethod def _generate_choice(cls, repo_group): - from webhelpers.html import literal as _literal + from webhelpers2.html import literal as _literal _name = lambda k: _literal(cls.CHOICES_SEPARATOR.join(k)) return repo_group.group_id, _name(repo_group.full_path_splitted) @@ -3343,7 +3343,7 @@ class PullRequest(Base, _PullRequestBase vcs_obj = self.target_repo.scm_instance() shadow_repository_path = vcs_obj._get_shadow_repository_path( workspace_id) - return vcs_obj._get_shadow_instance(shadow_repository_path) + return vcs_obj.get_shadow_instance(shadow_repository_path) class PullRequestVersion(Base, _PullRequestBase): diff --git a/rhodecode/lib/dbmigrate/schema/db_4_7_0_1.py b/rhodecode/lib/dbmigrate/schema/db_4_7_0_1.py --- a/rhodecode/lib/dbmigrate/schema/db_4_7_0_1.py +++ b/rhodecode/lib/dbmigrate/schema/db_4_7_0_1.py @@ -2077,7 +2077,7 @@ class RepoGroup(Base, BaseModel): @classmethod def _generate_choice(cls, repo_group): - from webhelpers.html import literal as _literal + from webhelpers2.html import literal as _literal _name = lambda k: _literal(cls.CHOICES_SEPARATOR.join(k)) return repo_group.group_id, _name(repo_group.full_path_splitted) @@ -3344,7 +3344,7 @@ class PullRequest(Base, _PullRequestBase vcs_obj = self.target_repo.scm_instance() shadow_repository_path = vcs_obj._get_shadow_repository_path( workspace_id) - return vcs_obj._get_shadow_instance(shadow_repository_path) + return vcs_obj.get_shadow_instance(shadow_repository_path) class PullRequestVersion(Base, _PullRequestBase): diff --git a/rhodecode/lib/dbmigrate/schema/db_4_9_0_0.py b/rhodecode/lib/dbmigrate/schema/db_4_9_0_0.py --- a/rhodecode/lib/dbmigrate/schema/db_4_9_0_0.py +++ b/rhodecode/lib/dbmigrate/schema/db_4_9_0_0.py @@ -2270,7 +2270,7 @@ class RepoGroup(Base, BaseModel): @classmethod def _generate_choice(cls, repo_group): - from webhelpers.html import literal as _literal + from webhelpers2.html import literal as _literal _name = lambda k: _literal(cls.CHOICES_SEPARATOR.join(k)) return repo_group.group_id, _name(repo_group.full_path_splitted) @@ -3602,7 +3602,7 @@ class PullRequest(Base, _PullRequestBase vcs_obj = self.target_repo.scm_instance() shadow_repository_path = vcs_obj._get_shadow_repository_path( workspace_id) - return vcs_obj._get_shadow_instance(shadow_repository_path) + return vcs_obj.get_shadow_instance(shadow_repository_path) class PullRequestVersion(Base, _PullRequestBase): diff --git a/rhodecode/lib/dbmigrate/versions/099_version_4_18_0.py b/rhodecode/lib/dbmigrate/versions/099_version_4_18_0.py new file mode 100644 --- /dev/null +++ b/rhodecode/lib/dbmigrate/versions/099_version_4_18_0.py @@ -0,0 +1,37 @@ +# -*- coding: utf-8 -*- + +import logging + +from alembic.migration import MigrationContext +from alembic.operations import Operations +from sqlalchemy import Column, String + +from rhodecode.lib.dbmigrate.versions import _reset_base +from rhodecode.model import init_model_encryption + + +log = logging.getLogger(__name__) + + +def upgrade(migrate_engine): + """ + Upgrade operations go here. 
+ Don't create your own engine; bind migrate_engine to your metadata + """ + _reset_base(migrate_engine) + from rhodecode.lib.dbmigrate.schema import db_4_16_0_2 + + init_model_encryption(db_4_16_0_2) + + context = MigrationContext.configure(migrate_engine.connect()) + op = Operations(context) + + cache_key = db_4_16_0_2.CacheKey.__table__ + + with op.batch_alter_table(cache_key.name) as batch_op: + batch_op.add_column( + Column("cache_state_uid", String(255), nullable=True, unique=None, default=None)) + + +def downgrade(migrate_engine): + pass diff --git a/rhodecode/lib/dbmigrate/versions/100_version_4_18_0.py b/rhodecode/lib/dbmigrate/versions/100_version_4_18_0.py new file mode 100644 --- /dev/null +++ b/rhodecode/lib/dbmigrate/versions/100_version_4_18_0.py @@ -0,0 +1,37 @@ +# -*- coding: utf-8 -*- + +import logging + +from alembic.migration import MigrationContext +from alembic.operations import Operations +from sqlalchemy import Column, Boolean + +from rhodecode.lib.dbmigrate.versions import _reset_base +from rhodecode.model import init_model_encryption + + +log = logging.getLogger(__name__) + + +def upgrade(migrate_engine): + """ + Upgrade operations go here. + Don't create your own engine; bind migrate_engine to your metadata + """ + _reset_base(migrate_engine) + from rhodecode.lib.dbmigrate.schema import db_4_16_0_2 + + init_model_encryption(db_4_16_0_2) + + context = MigrationContext.configure(migrate_engine.connect()) + op = Operations(context) + + cache_key = db_4_16_0_2.FileStore.__table__ + + with op.batch_alter_table(cache_key.name) as batch_op: + batch_op.add_column( + Column('hidden', Boolean(), nullable=True, default=False)) + + +def downgrade(migrate_engine): + pass diff --git a/rhodecode/lib/dbmigrate/versions/101_version_4_18_0.py b/rhodecode/lib/dbmigrate/versions/101_version_4_18_0.py new file mode 100644 --- /dev/null +++ b/rhodecode/lib/dbmigrate/versions/101_version_4_18_0.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- + +import logging + +from sqlalchemy import * + +from rhodecode.model import meta +from rhodecode.lib.dbmigrate.versions import _reset_base, notify + +log = logging.getLogger(__name__) + + +def upgrade(migrate_engine): + """ + Upgrade operations go here. + Don't create your own engine; bind migrate_engine to your metadata + """ + _reset_base(migrate_engine) + from rhodecode.lib.dbmigrate.schema import db_4_18_0_1 as db + + db.FileStoreMetadata.__table__.create() + + fixups(db, meta.Session) + + +def downgrade(migrate_engine): + meta = MetaData() + meta.bind = migrate_engine + + +def fixups(models, _SESSION): + pass diff --git a/rhodecode/lib/dbmigrate/versions/102_version_4_18_0.py b/rhodecode/lib/dbmigrate/versions/102_version_4_18_0.py new file mode 100644 --- /dev/null +++ b/rhodecode/lib/dbmigrate/versions/102_version_4_18_0.py @@ -0,0 +1,42 @@ +# -*- coding: utf-8 -*- + +import logging +from sqlalchemy import * + +from alembic.migration import MigrationContext +from alembic.operations import Operations +from sqlalchemy import BigInteger + +from rhodecode.lib.dbmigrate.versions import _reset_base +from rhodecode.model import init_model_encryption + + +log = logging.getLogger(__name__) + + +def upgrade(migrate_engine): + """ + Upgrade operations go here. 
+ Don't create your own engine; bind migrate_engine to your metadata + """ + _reset_base(migrate_engine) + from rhodecode.lib.dbmigrate.schema import db_4_18_0_1 + + init_model_encryption(db_4_18_0_1) + + context = MigrationContext.configure(migrate_engine.connect()) + op = Operations(context) + + file_store = db_4_18_0_1.FileStore.__table__ + + with op.batch_alter_table(file_store.name) as batch_op: + batch_op.alter_column("file_size", type_=BigInteger()) + + +def downgrade(migrate_engine): + meta = MetaData() + meta.bind = migrate_engine + + +def fixups(models, _SESSION): + pass diff --git a/rhodecode/lib/dbmigrate/versions/103_version_4_18_0.py b/rhodecode/lib/dbmigrate/versions/103_version_4_18_0.py new file mode 100644 --- /dev/null +++ b/rhodecode/lib/dbmigrate/versions/103_version_4_18_0.py @@ -0,0 +1,42 @@ +# -*- coding: utf-8 -*- + +import logging +from sqlalchemy import * + +from alembic.migration import MigrationContext +from alembic.operations import Operations +from sqlalchemy import BigInteger + +from rhodecode.lib.dbmigrate.versions import _reset_base +from rhodecode.model import init_model_encryption + + +log = logging.getLogger(__name__) + + +def upgrade(migrate_engine): + """ + Upgrade operations go here. + Don't create your own engine; bind migrate_engine to your metadata + """ + _reset_base(migrate_engine) + from rhodecode.lib.dbmigrate.schema import db_4_18_0_1 + + init_model_encryption(db_4_18_0_1) + + context = MigrationContext.configure(migrate_engine.connect()) + op = Operations(context) + + user = db_4_18_0_1.User.__table__ + + with op.batch_alter_table(user.name) as batch_op: + batch_op.add_column(Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))) + + +def downgrade(migrate_engine): + meta = MetaData() + meta.bind = migrate_engine + + +def fixups(models, _SESSION): + pass diff --git a/rhodecode/lib/diffs.py b/rhodecode/lib/diffs.py --- a/rhodecode/lib/diffs.py +++ b/rhodecode/lib/diffs.py @@ -26,6 +26,8 @@ Set of diffing helpers, previously part import os import re import bz2 +import gzip +import time import collections import difflib @@ -1156,7 +1158,17 @@ def _cleanup_cache_file(cached_diff_file log.exception('Failed to cleanup path %s', cached_diff_file) +def _get_compression_mode(cached_diff_file): + mode = 'bz2' + if 'mode:plain' in cached_diff_file: + mode = 'plain' + elif 'mode:gzip' in cached_diff_file: + mode = 'gzip' + return mode + + def cache_diff(cached_diff_file, diff, commits): + compression_mode = _get_compression_mode(cached_diff_file) struct = { 'version': CURRENT_DIFF_VERSION, @@ -1164,16 +1176,26 @@ def cache_diff(cached_diff_file, diff, c 'commits': commits } + start = time.time() try: - with bz2.BZ2File(cached_diff_file, 'wb') as f: - pickle.dump(struct, f) - log.debug('Saved diff cache under %s', cached_diff_file) + if compression_mode == 'plain': + with open(cached_diff_file, 'wb') as f: + pickle.dump(struct, f) + elif compression_mode == 'gzip': + with gzip.GzipFile(cached_diff_file, 'wb') as f: + pickle.dump(struct, f) + else: + with bz2.BZ2File(cached_diff_file, 'wb') as f: + pickle.dump(struct, f) except Exception: log.warn('Failed to save cache', exc_info=True) _cleanup_cache_file(cached_diff_file) + log.debug('Saved diff cache under %s in %.4fs', cached_diff_file, time.time() - start) + def load_cached_diff(cached_diff_file): + compression_mode = _get_compression_mode(cached_diff_file) default_struct = { 'version': CURRENT_DIFF_VERSION, @@ -1183,13 +1205,22 @@ def load_cached_diff(cached_diff_file): 
has_cache = os.path.isfile(cached_diff_file) if not has_cache: + log.debug('Reading diff cache file failed %s', cached_diff_file) return default_struct data = None + + start = time.time() try: - with bz2.BZ2File(cached_diff_file, 'rb') as f: - data = pickle.load(f) - log.debug('Loaded diff cache from %s', cached_diff_file) + if compression_mode == 'plain': + with open(cached_diff_file, 'rb') as f: + data = pickle.load(f) + elif compression_mode == 'gzip': + with gzip.GzipFile(cached_diff_file, 'rb') as f: + data = pickle.load(f) + else: + with bz2.BZ2File(cached_diff_file, 'rb') as f: + data = pickle.load(f) except Exception: log.warn('Failed to read diff cache file', exc_info=True) @@ -1206,6 +1237,8 @@ def load_cached_diff(cached_diff_file): _cleanup_cache_file(cached_diff_file) return default_struct + log.debug('Loaded diff cache from %s in %.4fs', cached_diff_file, time.time() - start) + return data @@ -1228,6 +1261,7 @@ def diff_cache_exist(cache_storage, *arg """ Based on all generated arguments check and return a cache path """ + args = list(args) + ['mode:gzip'] cache_key = generate_diff_cache_key(*args) cache_file_path = os.path.join(cache_storage, cache_key) # prevent path traversal attacks using some param that have e.g '../../' diff --git a/rhodecode/lib/exc_tracking.py b/rhodecode/lib/exc_tracking.py --- a/rhodecode/lib/exc_tracking.py +++ b/rhodecode/lib/exc_tracking.py @@ -25,6 +25,7 @@ import msgpack import logging import traceback import tempfile +import glob log = logging.getLogger(__name__) @@ -50,13 +51,20 @@ def exc_serialize(exc_id, tb, exc_type): def exc_unserialize(tb): return msgpack.unpackb(tb) +_exc_store = None + def get_exc_store(): """ Get and create exception store if it's not existing """ + global _exc_store import rhodecode as app + if _exc_store is not None: + # quick global cache + return _exc_store + exc_store_dir = app.CONFIG.get('exception_tracker.store_path', '') or tempfile.gettempdir() _exc_store_path = os.path.join(exc_store_dir, exc_store_dir_name) @@ -64,6 +72,8 @@ def get_exc_store(): if not os.path.isdir(_exc_store_path): os.makedirs(_exc_store_path) log.debug('Initializing exceptions store at %s', _exc_store_path) + _exc_store = _exc_store_path + return _exc_store_path @@ -105,6 +115,7 @@ def store_exception(exc_id, exc_info, pr exc_type_name, exc_traceback = _prepare_exception(exc_info) _store_exception(exc_id=exc_id, exc_type_name=exc_type_name, exc_traceback=exc_traceback, prefix=prefix) + return exc_id, exc_type_name except Exception: log.exception('Failed to store exception `%s` information', exc_id) # there's no way this can fail, it will crash server badly if it does. 
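The diffs.py hunks above change the diff cache so the pickle codec is chosen from a marker embedded in the cache file name: diff_cache_exist() now appends 'mode:gzip' to the cache-key arguments, and _get_compression_mode() falls back to bz2 so cache files written before this change still load. A minimal standalone sketch of that scheme, under illustrative names (_codec_for, save_cached and load_cached are not RhodeCode functions):

import bz2
import gzip
import pickle

def _codec_for(path):
    # the mode marker is part of the file name itself, so older bz2-era
    # cache files (which carry no marker) keep loading through the bz2 branch
    if 'mode:plain' in path:
        return open
    if 'mode:gzip' in path:
        return gzip.GzipFile
    return bz2.BZ2File

def save_cached(path, obj):
    with _codec_for(path)(path, 'wb') as f:
        pickle.dump(obj, f)

def load_cached(path):
    with _codec_for(path)(path, 'rb') as f:
        return pickle.load(f)

Gzip generally trades a little compression ratio for faster save and load, which is presumably why the new code also logs the elapsed time around both cache_diff() and load_cached_diff().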
@@ -119,16 +130,12 @@ def _find_exc_file(exc_id, prefix=global # search without a prefix exc_id = '{}'.format(exc_id) - # we need to search the store for such start pattern as above - for fname in os.listdir(exc_store_path): - if fname.startswith(exc_id): - exc_id = os.path.join(exc_store_path, fname) - break - continue - else: - exc_id = None + found_exc_id = None + matches = glob.glob(os.path.join(exc_store_path, exc_id) + '*') + if matches: + found_exc_id = matches[0] - return exc_id + return found_exc_id def _read_exception(exc_id, prefix): diff --git a/rhodecode/lib/exceptions.py b/rhodecode/lib/exceptions.py --- a/rhodecode/lib/exceptions.py +++ b/rhodecode/lib/exceptions.py @@ -58,6 +58,10 @@ class UserOwnsUserGroupsException(Except pass +class UserOwnsArtifactsException(Exception): + pass + + class UserGroupAssignedException(Exception): pass @@ -157,3 +161,15 @@ class VCSServerUnavailable(HTTPBadGatewa if message: self.explanation += ': ' + message super(VCSServerUnavailable, self).__init__() + + +class ArtifactMetadataDuplicate(ValueError): + + def __init__(self, *args, **kwargs): + self.err_section = kwargs.pop('err_section', None) + self.err_key = kwargs.pop('err_key', None) + super(ArtifactMetadataDuplicate, self).__init__(*args, **kwargs) + + +class ArtifactMetadataBadValueType(ValueError): + pass diff --git a/rhodecode/lib/feedgenerator/__init__.py b/rhodecode/lib/feedgenerator/__init__.py new file mode 100644 --- /dev/null +++ b/rhodecode/lib/feedgenerator/__init__.py @@ -0,0 +1,21 @@ +# -*- coding: utf-8 -*- + +# Copyright (C) 2016-2019 RhodeCode GmbH +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License, version 3 +# (only), as published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . +# +# This program is dual-licensed. If you wish to learn more about the +# RhodeCode Enterprise Edition, including its added features, Support services, +# and proprietary license terms, please see https://rhodecode.com/licenses/ + +from feedgenerator import Rss201rev2Feed, Atom1Feed \ No newline at end of file diff --git a/rhodecode/lib/feedgenerator/datetime_safe.py b/rhodecode/lib/feedgenerator/datetime_safe.py new file mode 100644 --- /dev/null +++ b/rhodecode/lib/feedgenerator/datetime_safe.py @@ -0,0 +1,117 @@ +# Copyright (c) Django Software Foundation and individual contributors. +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without modification, +# are permitted provided that the following conditions are met: +# +# 1. Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# +# 3. Neither the name of Django nor the names of its contributors may be used +# to endorse or promote products derived from this software without +# specific prior written permission. 
+# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR +# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +# Python's datetime strftime doesn't handle dates before 1900. +# These classes override date and datetime to support the formatting of a date +# through its full "proleptic Gregorian" date range. +# +# Based on code submitted to comp.lang.python by Andrew Dalke +# +# >>> datetime_safe.date(1850, 8, 2).strftime("%Y/%m/%d was a %A") +# '1850/08/02 was a Friday' + +from datetime import date as real_date, datetime as real_datetime +import re +import time + +class date(real_date): + def strftime(self, fmt): + return strftime(self, fmt) + +class datetime(real_datetime): + def strftime(self, fmt): + return strftime(self, fmt) + + def combine(self, date, time): + return datetime(date.year, date.month, date.day, time.hour, time.minute, time.microsecond, time.tzinfo) + + def date(self): + return date(self.year, self.month, self.day) + +def new_date(d): + "Generate a safe date from a datetime.date object." + return date(d.year, d.month, d.day) + +def new_datetime(d): + """ + Generate a safe datetime from a datetime.date or datetime.datetime object. + """ + kw = [d.year, d.month, d.day] + if isinstance(d, real_datetime): + kw.extend([d.hour, d.minute, d.second, d.microsecond, d.tzinfo]) + return datetime(*kw) + +# This library does not support strftime's "%s" or "%y" format strings. +# Allowed if there's an even number of "%"s because they are escaped. 
+_illegal_formatting = re.compile(r"((^|[^%])(%%)*%[sy])") + +def _findall(text, substr): + # Also finds overlaps + sites = [] + i = 0 + while 1: + j = text.find(substr, i) + if j == -1: + break + sites.append(j) + i=j+1 + return sites + +def strftime(dt, fmt): + if dt.year >= 1900: + return super(type(dt), dt).strftime(fmt) + illegal_formatting = _illegal_formatting.search(fmt) + if illegal_formatting: + raise TypeError("strftime of dates before 1900 does not handle" + illegal_formatting.group(0)) + + year = dt.year + # For every non-leap year century, advance by + # 6 years to get into the 28-year repeat cycle + delta = 2000 - year + off = 6 * (delta // 100 + delta // 400) + year = year + off + + # Move to around the year 2000 + year = year + ((2000 - year) // 28) * 28 + timetuple = dt.timetuple() + s1 = time.strftime(fmt, (year,) + timetuple[1:]) + sites1 = _findall(s1, str(year)) + + s2 = time.strftime(fmt, (year+28,) + timetuple[1:]) + sites2 = _findall(s2, str(year+28)) + + sites = [] + for site in sites1: + if site in sites2: + sites.append(site) + + s = s1 + syear = "%04d" % (dt.year,) + for site in sites: + s = s[:site] + syear + s[site+4:] + return s diff --git a/rhodecode/lib/feedgenerator/feedgenerator.py b/rhodecode/lib/feedgenerator/feedgenerator.py new file mode 100644 --- /dev/null +++ b/rhodecode/lib/feedgenerator/feedgenerator.py @@ -0,0 +1,444 @@ +# Copyright (c) Django Software Foundation and individual contributors. +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without modification, +# are permitted provided that the following conditions are met: +# +# 1. Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# +# 3. Neither the name of Django nor the names of its contributors may be used +# to endorse or promote products derived from this software without +# specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR +# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
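The vendored datetime_safe module added above exists because Python 2's strftime() refuses dates before 1900; its strftime() maps such a year into the 28-year Gregorian repeat cycle near 2000, formats there, and splices the real year back into the output. A short usage sketch (the expected output of the first call comes from the module's own doctest comment):

from rhodecode.lib.feedgenerator import datetime_safe

# a pre-1900 date that plain datetime.date.strftime() would reject on Python 2
d = datetime_safe.date(1850, 8, 2)
print(d.strftime("%Y/%m/%d was a %A"))  # '1850/08/02 was a Friday'

# new_datetime() wraps an existing date/datetime, as the feed generators do
# before calling rfc2822_date() / rfc3339_date()
safe = datetime_safe.new_datetime(datetime_safe.datetime(1899, 12, 31, 23, 59))
print(safe.strftime('%Y-%m-%dT%H:%M:%S'))  # '1899-12-31T23:59:00'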
+ +""" +For definitions of the different versions of RSS, see: +http://web.archive.org/web/20110718035220/http://diveintomark.org/archives/2004/02/04/incompatible-rss +""" +from __future__ import unicode_literals + +import datetime +from StringIO import StringIO +from six.moves.urllib import parse as urlparse + +from rhodecode.lib.feedgenerator import datetime_safe +from rhodecode.lib.feedgenerator.utils import SimplerXMLGenerator, iri_to_uri, force_text + + +#### The following code comes from ``django.utils.feedgenerator`` #### + + +def rfc2822_date(date): + # We can't use strftime() because it produces locale-dependent results, so + # we have to map english month and day names manually + months = ('Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec',) + days = ('Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun') + # Support datetime objects older than 1900 + date = datetime_safe.new_datetime(date) + # We do this ourselves to be timezone aware, email.Utils is not tz aware. + dow = days[date.weekday()] + month = months[date.month - 1] + time_str = date.strftime('%s, %%d %s %%Y %%H:%%M:%%S ' % (dow, month)) + + time_str = time_str.decode('utf-8') + offset = date.utcoffset() + # Historically, this function assumes that naive datetimes are in UTC. + if offset is None: + return time_str + '-0000' + else: + timezone = (offset.days * 24 * 60) + (offset.seconds // 60) + hour, minute = divmod(timezone, 60) + return time_str + '%+03d%02d' % (hour, minute) + + +def rfc3339_date(date): + # Support datetime objects older than 1900 + date = datetime_safe.new_datetime(date) + time_str = date.strftime('%Y-%m-%dT%H:%M:%S') + + time_str = time_str.decode('utf-8') + offset = date.utcoffset() + # Historically, this function assumes that naive datetimes are in UTC. + if offset is None: + return time_str + 'Z' + else: + timezone = (offset.days * 24 * 60) + (offset.seconds // 60) + hour, minute = divmod(timezone, 60) + return time_str + '%+03d:%02d' % (hour, minute) + + +def get_tag_uri(url, date): + """ + Creates a TagURI. + + See http://web.archive.org/web/20110514113830/http://diveintomark.org/archives/2004/05/28/howto-atom-id + """ + bits = urlparse(url) + d = '' + if date is not None: + d = ',%s' % datetime_safe.new_datetime(date).strftime('%Y-%m-%d') + return 'tag:%s%s:%s/%s' % (bits.hostname, d, bits.path, bits.fragment) + + +class SyndicationFeed(object): + """Base class for all syndication feeds. 
Subclasses should provide write()""" + + def __init__(self, title, link, description, language=None, author_email=None, + author_name=None, author_link=None, subtitle=None, categories=None, + feed_url=None, feed_copyright=None, feed_guid=None, ttl=None, **kwargs): + def to_unicode(s): + return force_text(s, strings_only=True) + if categories: + categories = [force_text(c) for c in categories] + if ttl is not None: + # Force ints to unicode + ttl = force_text(ttl) + self.feed = { + 'title': to_unicode(title), + 'link': iri_to_uri(link), + 'description': to_unicode(description), + 'language': to_unicode(language), + 'author_email': to_unicode(author_email), + 'author_name': to_unicode(author_name), + 'author_link': iri_to_uri(author_link), + 'subtitle': to_unicode(subtitle), + 'categories': categories or (), + 'feed_url': iri_to_uri(feed_url), + 'feed_copyright': to_unicode(feed_copyright), + 'id': feed_guid or link, + 'ttl': ttl, + } + self.feed.update(kwargs) + self.items = [] + + def add_item(self, title, link, description, author_email=None, + author_name=None, author_link=None, pubdate=None, comments=None, + unique_id=None, unique_id_is_permalink=None, enclosure=None, + categories=(), item_copyright=None, ttl=None, updateddate=None, + enclosures=None, **kwargs): + """ + Adds an item to the feed. All args are expected to be Python Unicode + objects except pubdate and updateddate, which are datetime.datetime + objects, and enclosures, which is an iterable of instances of the + Enclosure class. + """ + def to_unicode(s): + return force_text(s, strings_only=True) + if categories: + categories = [to_unicode(c) for c in categories] + if ttl is not None: + # Force ints to unicode + ttl = force_text(ttl) + if enclosure is None: + enclosures = [] if enclosures is None else enclosures + + item = { + 'title': to_unicode(title), + 'link': iri_to_uri(link), + 'description': to_unicode(description), + 'author_email': to_unicode(author_email), + 'author_name': to_unicode(author_name), + 'author_link': iri_to_uri(author_link), + 'pubdate': pubdate, + 'updateddate': updateddate, + 'comments': to_unicode(comments), + 'unique_id': to_unicode(unique_id), + 'unique_id_is_permalink': unique_id_is_permalink, + 'enclosures': enclosures, + 'categories': categories or (), + 'item_copyright': to_unicode(item_copyright), + 'ttl': ttl, + } + item.update(kwargs) + self.items.append(item) + + def num_items(self): + return len(self.items) + + def root_attributes(self): + """ + Return extra attributes to place on the root (i.e. feed/channel) element. + Called from write(). + """ + return {} + + def add_root_elements(self, handler): + """ + Add elements in the root (i.e. feed/channel) element. Called + from write(). + """ + pass + + def item_attributes(self, item): + """ + Return extra attributes to place on each item (i.e. item/entry) element. + """ + return {} + + def add_item_elements(self, handler, item): + """ + Add elements on each item (i.e. item/entry) element. + """ + pass + + def write(self, outfile, encoding): + """ + Outputs the feed in the given encoding to outfile, which is a file-like + object. Subclasses should override this. + """ + raise NotImplementedError('subclasses of SyndicationFeed must provide a write() method') + + def writeString(self, encoding): + """ + Returns the feed in the given encoding as a string. + """ + s = StringIO() + self.write(s, encoding) + return s.getvalue() + + def latest_post_date(self): + """ + Returns the latest item's pubdate or updateddate. 
If no items + have either of these attributes this returns the current UTC date/time. + """ + latest_date = None + date_keys = ('updateddate', 'pubdate') + + for item in self.items: + for date_key in date_keys: + item_date = item.get(date_key) + if item_date: + if latest_date is None or item_date > latest_date: + latest_date = item_date + + # datetime.now(tz=utc) is slower, as documented in django.utils.timezone.now + return latest_date or datetime.datetime.utcnow().replace(tzinfo=utc) + + +class Enclosure(object): + "Represents an RSS enclosure" + def __init__(self, url, length, mime_type): + "All args are expected to be Python Unicode objects" + self.length, self.mime_type = length, mime_type + self.url = iri_to_uri(url) + + +class RssFeed(SyndicationFeed): + content_type = 'application/rss+xml; charset=utf-8' + + def write(self, outfile, encoding): + handler = SimplerXMLGenerator(outfile, encoding) + handler.startDocument() + handler.startElement("rss", self.rss_attributes()) + handler.startElement("channel", self.root_attributes()) + self.add_root_elements(handler) + self.write_items(handler) + self.endChannelElement(handler) + handler.endElement("rss") + + def rss_attributes(self): + return {"version": self._version, + "xmlns:atom": "http://www.w3.org/2005/Atom"} + + def write_items(self, handler): + for item in self.items: + handler.startElement('item', self.item_attributes(item)) + self.add_item_elements(handler, item) + handler.endElement("item") + + def add_root_elements(self, handler): + handler.addQuickElement("title", self.feed['title']) + handler.addQuickElement("link", self.feed['link']) + handler.addQuickElement("description", self.feed['description']) + if self.feed['feed_url'] is not None: + handler.addQuickElement("atom:link", None, {"rel": "self", "href": self.feed['feed_url']}) + if self.feed['language'] is not None: + handler.addQuickElement("language", self.feed['language']) + for cat in self.feed['categories']: + handler.addQuickElement("category", cat) + if self.feed['feed_copyright'] is not None: + handler.addQuickElement("copyright", self.feed['feed_copyright']) + handler.addQuickElement("lastBuildDate", rfc2822_date(self.latest_post_date())) + if self.feed['ttl'] is not None: + handler.addQuickElement("ttl", self.feed['ttl']) + + def endChannelElement(self, handler): + handler.endElement("channel") + + +class RssUserland091Feed(RssFeed): + _version = "0.91" + + def add_item_elements(self, handler, item): + handler.addQuickElement("title", item['title']) + handler.addQuickElement("link", item['link']) + if item['description'] is not None: + handler.addQuickElement("description", item['description']) + + +class Rss201rev2Feed(RssFeed): + # Spec: http://blogs.law.harvard.edu/tech/rss + _version = "2.0" + + def add_item_elements(self, handler, item): + handler.addQuickElement("title", item['title']) + handler.addQuickElement("link", item['link']) + if item['description'] is not None: + handler.addQuickElement("description", item['description']) + + # Author information. 
+ if item["author_name"] and item["author_email"]: + handler.addQuickElement("author", "%s (%s)" % (item['author_email'], item['author_name'])) + elif item["author_email"]: + handler.addQuickElement("author", item["author_email"]) + elif item["author_name"]: + handler.addQuickElement( + "dc:creator", item["author_name"], {"xmlns:dc": "http://purl.org/dc/elements/1.1/"} + ) + + if item['pubdate'] is not None: + handler.addQuickElement("pubDate", rfc2822_date(item['pubdate'])) + if item['comments'] is not None: + handler.addQuickElement("comments", item['comments']) + if item['unique_id'] is not None: + guid_attrs = {} + if isinstance(item.get('unique_id_is_permalink'), bool): + guid_attrs['isPermaLink'] = str(item['unique_id_is_permalink']).lower() + handler.addQuickElement("guid", item['unique_id'], guid_attrs) + if item['ttl'] is not None: + handler.addQuickElement("ttl", item['ttl']) + + # Enclosure. + if item['enclosures']: + enclosures = list(item['enclosures']) + if len(enclosures) > 1: + raise ValueError( + "RSS feed items may only have one enclosure, see " + "http://www.rssboard.org/rss-profile#element-channel-item-enclosure" + ) + enclosure = enclosures[0] + handler.addQuickElement('enclosure', '', { + 'url': enclosure.url, + 'length': enclosure.length, + 'type': enclosure.mime_type, + }) + + # Categories. + for cat in item['categories']: + handler.addQuickElement("category", cat) + + +class Atom1Feed(SyndicationFeed): + # Spec: https://tools.ietf.org/html/rfc4287 + content_type = 'application/atom+xml; charset=utf-8' + ns = "http://www.w3.org/2005/Atom" + + def write(self, outfile, encoding): + handler = SimplerXMLGenerator(outfile, encoding) + handler.startDocument() + handler.startElement('feed', self.root_attributes()) + self.add_root_elements(handler) + self.write_items(handler) + handler.endElement("feed") + + def root_attributes(self): + if self.feed['language'] is not None: + return {"xmlns": self.ns, "xml:lang": self.feed['language']} + else: + return {"xmlns": self.ns} + + def add_root_elements(self, handler): + handler.addQuickElement("title", self.feed['title']) + handler.addQuickElement("link", "", {"rel": "alternate", "href": self.feed['link']}) + if self.feed['feed_url'] is not None: + handler.addQuickElement("link", "", {"rel": "self", "href": self.feed['feed_url']}) + handler.addQuickElement("id", self.feed['id']) + handler.addQuickElement("updated", rfc3339_date(self.latest_post_date())) + if self.feed['author_name'] is not None: + handler.startElement("author", {}) + handler.addQuickElement("name", self.feed['author_name']) + if self.feed['author_email'] is not None: + handler.addQuickElement("email", self.feed['author_email']) + if self.feed['author_link'] is not None: + handler.addQuickElement("uri", self.feed['author_link']) + handler.endElement("author") + if self.feed['subtitle'] is not None: + handler.addQuickElement("subtitle", self.feed['subtitle']) + for cat in self.feed['categories']: + handler.addQuickElement("category", "", {"term": cat}) + if self.feed['feed_copyright'] is not None: + handler.addQuickElement("rights", self.feed['feed_copyright']) + + def write_items(self, handler): + for item in self.items: + handler.startElement("entry", self.item_attributes(item)) + self.add_item_elements(handler, item) + handler.endElement("entry") + + def add_item_elements(self, handler, item): + handler.addQuickElement("title", item['title']) + handler.addQuickElement("link", "", {"href": item['link'], "rel": "alternate"}) + + if item['pubdate'] is not None: + 
handler.addQuickElement('published', rfc3339_date(item['pubdate'])) + + if item['updateddate'] is not None: + handler.addQuickElement('updated', rfc3339_date(item['updateddate'])) + + # Author information. + if item['author_name'] is not None: + handler.startElement("author", {}) + handler.addQuickElement("name", item['author_name']) + if item['author_email'] is not None: + handler.addQuickElement("email", item['author_email']) + if item['author_link'] is not None: + handler.addQuickElement("uri", item['author_link']) + handler.endElement("author") + + # Unique ID. + if item['unique_id'] is not None: + unique_id = item['unique_id'] + else: + unique_id = get_tag_uri(item['link'], item['pubdate']) + handler.addQuickElement("id", unique_id) + + # Summary. + if item['description'] is not None: + handler.addQuickElement("summary", item['description'], {"type": "html"}) + + # Enclosures. + for enclosure in item['enclosures']: + handler.addQuickElement('link', '', { + 'rel': 'enclosure', + 'href': enclosure.url, + 'length': enclosure.length, + 'type': enclosure.mime_type, + }) + + # Categories. + for cat in item['categories']: + handler.addQuickElement("category", "", {"term": cat}) + + # Rights. + if item['item_copyright'] is not None: + handler.addQuickElement("rights", item['item_copyright']) + + +# This isolates the decision of what the system default is, so calling code can +# do "feedgenerator.DefaultFeed" instead of "feedgenerator.Rss201rev2Feed". +DefaultFeed = Rss201rev2Feed \ No newline at end of file diff --git a/rhodecode/lib/feedgenerator/utils.py b/rhodecode/lib/feedgenerator/utils.py new file mode 100644 --- /dev/null +++ b/rhodecode/lib/feedgenerator/utils.py @@ -0,0 +1,57 @@ +""" +Utilities for XML generation/parsing. +""" + +import six + +from xml.sax.saxutils import XMLGenerator, quoteattr +from urllib import quote +from rhodecode.lib.utils import safe_str, safe_unicode + + +class SimplerXMLGenerator(XMLGenerator): + def addQuickElement(self, name, contents=None, attrs=None): + "Convenience method for adding an element with no children" + if attrs is None: + attrs = {} + self.startElement(name, attrs) + if contents is not None: + self.characters(contents) + self.endElement(name) + + def startElement(self, name, attrs): + self._write('<' + name) + # sort attributes for consistent output + for (name, value) in sorted(attrs.items()): + self._write(' %s=%s' % (name, quoteattr(value))) + self._write(six.u('>')) + + +def iri_to_uri(iri): + """ + Convert an Internationalized Resource Identifier (IRI) portion to a URI + portion that is suitable for inclusion in a URL. + This is the algorithm from section 3.1 of RFC 3987. However, since we are + assuming input is either UTF-8 or unicode already, we can simplify things a + little from the full method. + Returns an ASCII string containing the encoded result. + """ + # The list of safe characters here is constructed from the "reserved" and + # "unreserved" characters specified in sections 2.2 and 2.3 of RFC 3986: + # reserved = gen-delims / sub-delims + # gen-delims = ":" / "/" / "?" / "#" / "[" / "]" / "@" + # sub-delims = "!" / "$" / "&" / "'" / "(" / ")" + # / "*" / "+" / "," / ";" / "=" + # unreserved = ALPHA / DIGIT / "-" / "." / "_" / "~" + # Of the unreserved characters, urllib.quote already considers all but + # the ~ safe. + # The % character is also added to the list of safe characters here, as the + # end of section 3.1 of RFC 3987 specifically mentions that % must not be + # converted. 
+ if iri is None: + return iri + return quote(safe_str(iri), safe=b"/#%[]=:;$&()+,!?*@'~") + + +def force_text(text, strings_only=False): + return safe_unicode(text) diff --git a/rhodecode/lib/helpers.py b/rhodecode/lib/helpers.py --- a/rhodecode/lib/helpers.py +++ b/rhodecode/lib/helpers.py @@ -53,30 +53,35 @@ from pygments.lexers import ( from pyramid.threadlocal import get_current_request -from webhelpers.html import literal, HTML, escape -from webhelpers.html.tools import * -from webhelpers.html.builder import make_tag -from webhelpers.html.tags import auto_discovery_link, checkbox, css_classes, \ - end_form, file, form as wh_form, hidden, image, javascript_link, link_to, \ - link_to_if, link_to_unless, ol, required_legend, select, stylesheet_link, \ - submit, text, password, textarea, title, ul, xml_declaration, radio -from webhelpers.html.tools import auto_link, button_to, highlight, \ - js_obfuscate, mail_to, strip_links, strip_tags, tag_re -from webhelpers.text import chop_at, collapse, convert_accented_entities, \ - convert_misc_entities, lchop, plural, rchop, remove_formatting, \ - replace_whitespace, urlify, truncate, wrap_paragraphs -from webhelpers.date import time_ago_in_words -from webhelpers.paginate import Page as _Page -from webhelpers.html.tags import _set_input_attrs, _set_id_attr, \ - convert_boolean_attrs, NotGiven, _make_safe_id_component +from webhelpers2.html import literal, HTML, escape +from webhelpers2.html._autolink import _auto_link_urls +from webhelpers2.html.tools import ( + button_to, highlight, js_obfuscate, strip_links, strip_tags) + +from webhelpers2.text import ( + chop_at, collapse, convert_accented_entities, + convert_misc_entities, lchop, plural, rchop, remove_formatting, + replace_whitespace, urlify, truncate, wrap_paragraphs) +from webhelpers2.date import time_ago_in_words + +from webhelpers2.html.tags import ( + _input, NotGiven, _make_safe_id_component as safeid, + form as insecure_form, + auto_discovery_link, checkbox, end_form, file, + hidden, image, javascript_link, link_to, link_to_if, link_to_unless, ol, + select as raw_select, stylesheet_link, submit, text, password, textarea, + ul, radio, Options) + from webhelpers2.number import format_byte_size from rhodecode.lib.action_parser import action_parser +from rhodecode.lib.pagination import Page, RepoPage, SqlPage from rhodecode.lib.ext_json import json from rhodecode.lib.utils import repo_name_slug, get_custom_lexer -from rhodecode.lib.utils2 import str2bool, safe_unicode, safe_str, \ - get_commit_safe, datetime_to_time, time_to_datetime, time_to_utcdatetime, \ - AttributeDict, safe_int, md5, md5_safe +from rhodecode.lib.utils2 import ( + str2bool, safe_unicode, safe_str, + get_commit_safe, datetime_to_time, time_to_datetime, time_to_utcdatetime, + AttributeDict, safe_int, md5, md5_safe, get_host_info) from rhodecode.lib.markup_renderer import MarkupRenderer, relative_links from rhodecode.lib.vcs.exceptions import CommitDoesNotExistError from rhodecode.lib.vcs.backends.base import BaseChangeset, EmptyCommit @@ -150,24 +155,53 @@ def chop_at_smart(s, sub, inclusive=Fals return chopped -def shorter(text, size=20): +def shorter(text, size=20, prefix=False): postfix = '...' 
if len(text) > size: - return text[:size - len(postfix)] + postfix + if prefix: + # shorten in front + return postfix + text[-(size - len(postfix)):] + else: + return text[:size - len(postfix)] + postfix return text -def _reset(name, value=None, id=NotGiven, type="reset", **attrs): +def reset(name, value=None, id=NotGiven, type="reset", **attrs): """ Reset button """ - _set_input_attrs(attrs, type, name, value) - _set_id_attr(attrs, id, name) - convert_boolean_attrs(attrs, ["disabled"]) - return HTML.input(**attrs) + return _input(type, name, value, id, attrs) + + +def select(name, selected_values, options, id=NotGiven, **attrs): -reset = _reset -safeid = _make_safe_id_component + if isinstance(options, (list, tuple)): + options_iter = options + # Handle old value,label lists ... where value also can be value,label lists + options = Options() + for opt in options_iter: + if isinstance(opt, tuple) and len(opt) == 2: + value, label = opt + elif isinstance(opt, basestring): + value = label = opt + else: + raise ValueError('invalid select option type %r' % type(opt)) + + if isinstance(value, (list, tuple)): + option_group = options.add_optgroup(label) + for opt2 in value: + if isinstance(opt2, tuple) and len(opt2) == 2: + group_value, group_label = opt2 + elif isinstance(opt2, basestring): + group_value = group_label = opt2 + else: + raise ValueError('invalid select option type %r' % type(opt2)) + + option_group.add_option(group_label, group_value) + else: + options.add_option(label, value) + + return raw_select(name, selected_values, options, id=id, **attrs) def branding(name, length=40): @@ -660,7 +694,7 @@ class Flash(object): }) return json.dumps(payloads) - def __call__(self, message, category=None, ignore_duplicate=False, + def __call__(self, message, category=None, ignore_duplicate=True, session=None, request=None): if not session: @@ -692,7 +726,7 @@ import tzlocal local_timezone = tzlocal.get_localzone() -def age_component(datetime_iso, value=None, time_is_local=False): +def age_component(datetime_iso, value=None, time_is_local=False, tooltip=True): title = value or format_date(datetime_iso) tzinfo = '+00:00' @@ -706,9 +740,11 @@ def age_component(datetime_iso, value=No tzinfo = '{}:{}'.format(offset[:-2], offset[-2:]) return literal( - ''.format( - datetime_iso, title, tzinfo)) + ''.format( + cls='tooltip' if tooltip else '', + tt_title=('{title}{tzinfo}'.format(title=title, tzinfo=tzinfo)) if tooltip else '', + title=title, dt=datetime_iso, tzinfo=tzinfo + )) def _shorten_commit_id(commit_id, commit_len=None): @@ -787,7 +823,7 @@ def is_svn_without_proxy(repository): def discover_user(author): """ - Tries to discover RhodeCode User based on the autho string. Author string + Tries to discover RhodeCode User based on the author string. 
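For reference, the shorter() helper changed earlier in this hunk now accepts a prefix flag that moves the ellipsis to the front, which is useful when the tail of a long value (for example a path) is the interesting part. The values below follow directly from the implementation shown above:

    from rhodecode.lib.helpers import shorter

    shorter('0123456789ABCDEF', size=10)               # -> '0123456...'
    shorter('0123456789ABCDEF', size=10, prefix=True)  # -> '...9ABCDEF'
    shorter('short', size=10)                          # -> 'short'  (fits, returned unchanged)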
Author string is typically `FirstName LastName ` """ @@ -895,10 +931,9 @@ def person_by_id(id_, show_attr="usernam return id_ -def gravatar_with_user(request, author, show_disabled=False): - _render = request.get_partial_renderer( - 'rhodecode:templates/base/base.mako') - return _render('gravatar_with_user', author, show_disabled=show_disabled) +def gravatar_with_user(request, author, show_disabled=False, tooltip=False): + _render = request.get_partial_renderer('rhodecode:templates/base/base.mako') + return _render('gravatar_with_user', author, show_disabled=show_disabled, tooltip=tooltip) tags_paterns = OrderedDict(( @@ -973,19 +1008,20 @@ def bool2icon(value, show_at_false=True) """ if value: # does bool conversion - return HTML.tag('i', class_="icon-true") + return HTML.tag('i', class_="icon-true", title='True') else: # not true as bool if show_at_false: - return HTML.tag('i', class_="icon-false") + return HTML.tag('i', class_="icon-false", title='False') return HTML.tag('i') #============================================================================== # PERMS #============================================================================== -from rhodecode.lib.auth import HasPermissionAny, HasPermissionAll, \ -HasRepoPermissionAny, HasRepoPermissionAll, HasRepoGroupPermissionAll, \ -HasRepoGroupPermissionAny, HasRepoPermissionAnyApi, get_csrf_token, \ -csrf_token_key +from rhodecode.lib.auth import ( + HasPermissionAny, HasPermissionAll, + HasRepoPermissionAny, HasRepoPermissionAll, HasRepoGroupPermissionAll, + HasRepoGroupPermissionAny, HasRepoPermissionAnyApi, get_csrf_token, + csrf_token_key, AuthUser) #============================================================================== @@ -1276,233 +1312,6 @@ def gravatar_url(email_address, size=30, return initials_gravatar(email_address, '', '', size=size) -class Page(_Page): - """ - Custom pager to match rendering style with paginator - """ - - def _get_pos(self, cur_page, max_page, items): - edge = (items / 2) + 1 - if (cur_page <= edge): - radius = max(items / 2, items - cur_page) - elif (max_page - cur_page) < edge: - radius = (items - 1) - (max_page - cur_page) - else: - radius = items / 2 - - left = max(1, (cur_page - (radius))) - right = min(max_page, cur_page + (radius)) - return left, cur_page, right - - def _range(self, regexp_match): - """ - Return range of linked pages (e.g. '1 2 [3] 4 5 6 7 8'). - - Arguments: - - regexp_match - A "re" (regular expressions) match object containing the - radius of linked pages around the current page in - regexp_match.group(1) as a string - - This function is supposed to be called as a callable in - re.sub. - - """ - radius = int(regexp_match.group(1)) - - # Compute the first and last page number within the radius - # e.g. '1 .. 5 6 [7] 8 9 .. 12' - # -> leftmost_page = 5 - # -> rightmost_page = 9 - leftmost_page, _cur, rightmost_page = self._get_pos(self.page, - self.last_page, - (radius * 2) + 1) - nav_items = [] - - # Create a link to the first page (unless we are on the first page - # or there would be no need to insert '..' spacers) - if self.page != self.first_page and self.first_page < leftmost_page: - nav_items.append(self._pagerlink(self.first_page, self.first_page)) - - # Insert dots if there are pages between the first page - # and the currently displayed page range - if leftmost_page - self.first_page > 1: - # Wrap in a SPAN tag if nolink_attr is set - text = '..' 
- if self.dotdot_attr: - text = HTML.span(c=text, **self.dotdot_attr) - nav_items.append(text) - - for thispage in xrange(leftmost_page, rightmost_page + 1): - # Hilight the current page number and do not use a link - if thispage == self.page: - text = '%s' % (thispage,) - # Wrap in a SPAN tag if nolink_attr is set - if self.curpage_attr: - text = HTML.span(c=text, **self.curpage_attr) - nav_items.append(text) - # Otherwise create just a link to that page - else: - text = '%s' % (thispage,) - nav_items.append(self._pagerlink(thispage, text)) - - # Insert dots if there are pages between the displayed - # page numbers and the end of the page range - if self.last_page - rightmost_page > 1: - text = '..' - # Wrap in a SPAN tag if nolink_attr is set - if self.dotdot_attr: - text = HTML.span(c=text, **self.dotdot_attr) - nav_items.append(text) - - # Create a link to the very last page (unless we are on the last - # page or there would be no need to insert '..' spacers) - if self.page != self.last_page and rightmost_page < self.last_page: - nav_items.append(self._pagerlink(self.last_page, self.last_page)) - - ## prerender links - #_page_link = url.current() - #nav_items.append(literal('' % (_page_link, str(int(self.page)+1)))) - #nav_items.append(literal('' % (_page_link, str(int(self.page)+1)))) - return self.separator.join(nav_items) - - def pager(self, format='~2~', page_param='page', partial_param='partial', - show_if_single_page=False, separator=' ', onclick=None, - symbol_first='<<', symbol_last='>>', - symbol_previous='<', symbol_next='>', - link_attr={'class': 'pager_link', 'rel': 'prerender'}, - curpage_attr={'class': 'pager_curpage'}, - dotdot_attr={'class': 'pager_dotdot'}, **kwargs): - - self.curpage_attr = curpage_attr - self.separator = separator - self.pager_kwargs = kwargs - self.page_param = page_param - self.partial_param = partial_param - self.onclick = onclick - self.link_attr = link_attr - self.dotdot_attr = dotdot_attr - - # Don't show navigator if there is no more than one page - if self.page_count == 0 or (self.page_count == 1 and not show_if_single_page): - return '' - - from string import Template - # Replace ~...~ in token format by range of pages - result = re.sub(r'~(\d+)~', self._range, format) - - # Interpolate '%' variables - result = Template(result).safe_substitute({ - 'first_page': self.first_page, - 'last_page': self.last_page, - 'page': self.page, - 'page_count': self.page_count, - 'items_per_page': self.items_per_page, - 'first_item': self.first_item, - 'last_item': self.last_item, - 'item_count': self.item_count, - 'link_first': self.page > self.first_page and \ - self._pagerlink(self.first_page, symbol_first) or '', - 'link_last': self.page < self.last_page and \ - self._pagerlink(self.last_page, symbol_last) or '', - 'link_previous': self.previous_page and \ - self._pagerlink(self.previous_page, symbol_previous) \ - or HTML.span(symbol_previous, class_="pg-previous disabled"), - 'link_next': self.next_page and \ - self._pagerlink(self.next_page, symbol_next) \ - or HTML.span(symbol_next, class_="pg-next disabled") - }) - - return literal(result) - - -#============================================================================== -# REPO PAGER, PAGER FOR REPOSITORY -#============================================================================== -class RepoPage(Page): - - def __init__(self, collection, page=1, items_per_page=20, - item_count=None, url=None, **kwargs): - - """Create a "RepoPage" instance. 
special pager for paging - repository - """ - self._url_generator = url - - # Safe the kwargs class-wide so they can be used in the pager() method - self.kwargs = kwargs - - # Save a reference to the collection - self.original_collection = collection - - self.collection = collection - - # The self.page is the number of the current page. - # The first page has the number 1! - try: - self.page = int(page) # make it int() if we get it as a string - except (ValueError, TypeError): - self.page = 1 - - self.items_per_page = items_per_page - - # Unless the user tells us how many items the collections has - # we calculate that ourselves. - if item_count is not None: - self.item_count = item_count - else: - self.item_count = len(self.collection) - - # Compute the number of the first and last available page - if self.item_count > 0: - self.first_page = 1 - self.page_count = int(math.ceil(float(self.item_count) / - self.items_per_page)) - self.last_page = self.first_page + self.page_count - 1 - - # Make sure that the requested page number is the range of - # valid pages - if self.page > self.last_page: - self.page = self.last_page - elif self.page < self.first_page: - self.page = self.first_page - - # Note: the number of items on this page can be less than - # items_per_page if the last page is not full - self.first_item = max(0, (self.item_count) - (self.page * - items_per_page)) - self.last_item = ((self.item_count - 1) - items_per_page * - (self.page - 1)) - - self.items = list(self.collection[self.first_item:self.last_item + 1]) - - # Links to previous and next page - if self.page > self.first_page: - self.previous_page = self.page - 1 - else: - self.previous_page = None - - if self.page < self.last_page: - self.next_page = self.page + 1 - else: - self.next_page = None - - # No items available - else: - self.first_page = None - self.page_count = 0 - self.last_page = None - self.first_item = None - self.last_item = None - self.previous_page = None - self.next_page = None - self.items = [] - - # This is a subclass of the 'list' type. Initialise the list now. 
- list.__init__(self, reversed(self.items)) - - def breadcrumb_repo_link(repo): """ Makes a breadcrumbs path link to repo @@ -1560,11 +1369,9 @@ def format_byte_size_binary(file_size): return formatted_size -def urlify_text(text_, safe=True): +def urlify_text(text_, safe=True, **href_attrs): """ - Extrac urls from text and make html links out of them - - :param text_: + Extract urls from text and make html links out of them """ url_pat = re.compile(r'''(http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@#.&+]''' @@ -1572,22 +1379,27 @@ def urlify_text(text_, safe=True): def url_func(match_obj): url_full = match_obj.groups()[0] - return '%(url)s' % ({'url': url_full}) - _newtext = url_pat.sub(url_func, text_) + a_options = dict(href_attrs) + a_options['href'] = url_full + a_text = url_full + return HTML.tag("a", a_text, **a_options) + + _new_text = url_pat.sub(url_func, text_) + if safe: - return literal(_newtext) - return _newtext + return literal(_new_text) + return _new_text -def urlify_commits(text_, repository): +def urlify_commits(text_, repo_name): """ Extract commit ids from text and make link from them :param text_: - :param repository: repo name to build the URL with + :param repo_name: repo name to build the URL with """ - URL_PAT = re.compile(r'(^|\s)([0-9a-fA-F]{12,40})($|\s)') + url_pat = re.compile(r'(^|\s)([0-9a-fA-F]{12,40})($|\s)') def url_func(match_obj): commit_id = match_obj.groups()[1] @@ -1595,20 +1407,24 @@ def urlify_commits(text_, repository): suf = match_obj.groups()[2] tmpl = ( - '%(pref)s' + '%(pref)s' '%(commit_id)s%(suf)s' ) return tmpl % { 'pref': pref, 'cls': 'revision-link', - 'url': route_url('repo_commit', repo_name=repository, commit_id=commit_id), + 'url': route_url( + 'repo_commit', repo_name=repo_name, commit_id=commit_id), 'commit_id': commit_id, - 'suf': suf + 'suf': suf, + 'hovercard_alt': 'Commit: {}'.format(commit_id), + 'hovercard_url': route_url( + 'hovercard_repo_commit', repo_name=repo_name, commit_id=commit_id) } - newtext = URL_PAT.sub(url_func, text_) + new_text = url_pat.sub(url_func, text_) - return newtext + return new_text def _process_url_func(match_obj, repo_name, uid, entry, @@ -1621,13 +1437,18 @@ def _process_url_func(match_obj, repo_na if link_format == 'html': tmpl = ( - '%(pref)s' + '%(pref)s' '%(issue-prefix)s%(id-repr)s' '') - elif link_format == 'rst': + elif link_format == 'html+hovercard': + tmpl = ( + '%(pref)s' + '%(issue-prefix)s%(id-repr)s' + '') + elif link_format in ['rst', 'rst+hovercard']: tmpl = '`%(issue-prefix)s%(id-repr)s <%(url)s>`_' - elif link_format == 'markdown': - tmpl = '[%(issue-prefix)s%(id-repr)s](%(url)s)' + elif link_format in ['markdown', 'markdown+hovercard']: + tmpl = '[%(pref)s%(issue-prefix)s%(id-repr)s](%(url)s)' else: raise ValueError('Bad link_format:{}'.format(link_format)) @@ -1639,11 +1460,24 @@ def _process_url_func(match_obj, repo_na 'id': issue_id, 'repo': repo_name, 'repo_name': repo_name_cleaned, - 'group_name': parent_group_name + 'group_name': parent_group_name, + # set dummy keys so we always have them + 'hostname': '', + 'netloc': '', + 'scheme': '' } + + request = get_current_request() + if request: + # exposes, hostname, netloc, scheme + host_data = get_host_info(request) + named_vars.update(host_data) + # named regex variables named_vars.update(match_obj.groupdict()) _url = string.Template(entry['url']).safe_substitute(**named_vars) + desc = string.Template(entry['desc']).safe_substitute(**named_vars) + hovercard_url = string.Template(entry.get('hovercard_url', 
'')).safe_substitute(**named_vars) def quote_cleaner(input_str): """Remove quotes as it's HTML""" @@ -1656,7 +1490,10 @@ def _process_url_func(match_obj, repo_na 'id-repr': issue_id, 'issue-prefix': entry['pref'], 'serv': entry['url'], + 'title': desc, + 'hovercard_url': hovercard_url } + if return_raw_data: return { 'id': issue_id, @@ -1679,15 +1516,17 @@ def get_active_pattern_entries(repo_name def process_patterns(text_string, repo_name, link_format='html', active_entries=None): - allowed_formats = ['html', 'rst', 'markdown'] + allowed_formats = ['html', 'rst', 'markdown', + 'html+hovercard', 'rst+hovercard', 'markdown+hovercard'] if link_format not in allowed_formats: raise ValueError('Link format can be only one of:{} got {}'.format( allowed_formats, link_format)) active_entries = active_entries or get_active_pattern_entries(repo_name) issues_data = [] - newtext = text_string + new_text = text_string + log.debug('Got %s entries to process', len(active_entries)) for uid, entry in active_entries.items(): log.debug('found issue tracker entry with uid %s', uid) @@ -1701,9 +1540,7 @@ def process_patterns(text_string, repo_n try: pattern = re.compile(r'%s' % entry['pat']) except re.error: - log.exception( - 'issue tracker pattern: `%s` failed to compile', - entry['pat']) + log.exception('issue tracker pattern: `%s` failed to compile', entry['pat']) continue data_func = partial( @@ -1717,38 +1554,50 @@ def process_patterns(text_string, repo_n _process_url_func, repo_name=repo_name, entry=entry, uid=uid, link_format=link_format) - newtext = pattern.sub(url_func, newtext) + new_text = pattern.sub(url_func, new_text) log.debug('processed prefix:uid `%s`', uid) - return newtext, issues_data + # finally use global replace, eg !123 -> pr-link, those will not catch + # if already similar pattern exists + server_url = '${scheme}://${netloc}' + pr_entry = { + 'pref': '!', + 'url': server_url + '/_admin/pull-requests/${id}', + 'desc': 'Pull Request !${id}', + 'hovercard_url': server_url + '/_hovercard/pull_request/${id}' + } + pr_url_func = partial( + _process_url_func, repo_name=repo_name, entry=pr_entry, uid=None, + link_format=link_format+'+hovercard') + new_text = re.compile(r'(?:(?:^!)|(?: !))(\d+)').sub(pr_url_func, new_text) + log.debug('processed !pr pattern') + + return new_text, issues_data def urlify_commit_message(commit_text, repository=None, active_pattern_entries=None): """ Parses given text message and makes proper links. issues are linked to given issue-server, and rest is a commit link + """ + def escaper(_text): + return _text.replace('<', '<').replace('>', '>') - :param commit_text: - :param repository: - """ - def escaper(string): - return string.replace('<', '<').replace('>', '>') - - newtext = escaper(commit_text) + new_text = escaper(commit_text) # extract http/https links and make them real urls - newtext = urlify_text(newtext, safe=False) + new_text = urlify_text(new_text, safe=False) # urlify commits - extract commit ids and make link out of them, if we have # the scope of repository present. 
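The global pull-request fallback added above uses a small regex so that bare `!123` references get linked even when no configured issue-tracker pattern catches them. A short, self-contained check of what that pattern does and does not match (the pattern string is copied from process_patterns() in the diff; the replacement callback is omitted here):

    import re

    # '!' at the start of the text, or preceded by a space, followed by the pull request id
    pr_pat = re.compile(r'(?:(?:^!)|(?: !))(\d+)')

    pr_pat.findall('!123 merged into default')   # -> ['123']
    pr_pat.findall('see !45 and !46')            # -> ['45', '46']
    pr_pat.findall('foo!45')                     # -> []  (no space before '!', not linked)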
if repository: - newtext = urlify_commits(newtext, repository) + new_text = urlify_commits(new_text, repository) # process issue tracker patterns - newtext, issues = process_patterns(newtext, repository or '', - active_entries=active_pattern_entries) + new_text, issues = process_patterns(new_text, repository or '', + active_entries=active_pattern_entries) - return literal(newtext) + return literal(new_text) def render_binary(repo_name, file_obj): @@ -1898,7 +1747,7 @@ def form(url, method='post', needs_csrf_ 'CSRF token. If the endpoint does not require such token you can ' + 'explicitly set the parameter needs_csrf_token to false.') - return wh_form(url, method=method, **attrs) + return insecure_form(url, method=method, **attrs) def secure_form(form_url, method="POST", multipart=False, **attrs): @@ -1920,7 +1769,6 @@ def secure_form(form_url, method="POST", over POST. """ - from webhelpers.pylonslib.secure_form import insecure_form if 'request' in attrs: session = attrs['request'].session @@ -1929,12 +1777,12 @@ def secure_form(form_url, method="POST", raise ValueError( 'Calling this form requires request= to be passed as argument') - form = insecure_form(form_url, method, multipart, **attrs) + _form = insecure_form(form_url, method, multipart, **attrs) token = literal( - ''.format( - csrf_token_key, csrf_token_key, get_csrf_token(session))) + ''.format( + csrf_token_key, get_csrf_token(session))) - return literal("%s\n%s" % (form, token)) + return literal("%s\n%s" % (_form, token)) def dropdownmenu(name, selected, options, enable_filter=False, **attrs): @@ -1987,7 +1835,7 @@ def get_last_path_part(file_node): def route_url(*args, **kwargs): """ - Wrapper around pyramids `route_url` (fully qualified url) function. + Wrapper around pyramids `route_url` (fully qualified url) function. """ req = get_current_request() return req.route_url(*args, **kwargs) @@ -2014,24 +1862,31 @@ def current_route_path(request, **kw): return request.current_route_path(_query=new_args) -def api_call_example(method, args): - """ - Generates an API call example via CURL - """ +def curl_api_example(method, args): args_json = json.dumps(OrderedDict([ ('id', 1), ('auth_token', 'SECRET'), ('method', method), ('args', args) ])) + + return "curl {api_url} -X POST -H 'content-type:text/plain' --data-binary '{args_json}'".format( + api_url=route_url('apiv2'), + args_json=args_json + ) + + +def api_call_example(method, args): + """ + Generates an API call example via CURL + """ + curl_call = curl_api_example(method, args) + return literal( - "curl {api_url} -X POST -H 'content-type:text/plain' --data-binary '{data}'" + curl_call + "
SECRET can be found in auth-tokens page, " "and needs to be of `api calls` role." - .format( - api_url=route_url('apiv2'), - token_url=route_url('my_account_auth_tokens'), - data=args_json)) + .format(token_url=route_url('my_account_auth_tokens'))) def notification_description(notification, request): @@ -2076,3 +1931,15 @@ def get_repo_view_type(request): } return route_to_view_type.get(route_name) + + +def is_active(menu_entry, selected): + """ + Returns active class for selecting menus in templates +
  • + """ + if not isinstance(menu_entry, list): + menu_entry = [menu_entry] + + if selected in menu_entry: + return "active" diff --git a/rhodecode/lib/hooks_daemon.py b/rhodecode/lib/hooks_daemon.py --- a/rhodecode/lib/hooks_daemon.py +++ b/rhodecode/lib/hooks_daemon.py @@ -106,11 +106,11 @@ class DummyHooksCallbackDaemon(object): self.hooks_module = Hooks.__module__ def __enter__(self): - log.debug('Running dummy hooks callback daemon') + log.debug('Running `%s` callback daemon', self.__class__.__name__) return self def __exit__(self, exc_type, exc_val, exc_tb): - log.debug('Exiting dummy hooks callback daemon') + log.debug('Exiting `%s` callback daemon', self.__class__.__name__) class ThreadedHookCallbackDaemon(object): @@ -123,11 +123,12 @@ class ThreadedHookCallbackDaemon(object) self._prepare(txn_id=txn_id, host=None, port=port) def __enter__(self): + log.debug('Running `%s` callback daemon', self.__class__.__name__) self._run() return self def __exit__(self, exc_type, exc_val, exc_tb): - log.debug('Callback daemon exiting now...') + log.debug('Exiting `%s` callback daemon', self.__class__.__name__) self._stop() def _prepare(self, txn_id=None, host=None, port=None): diff --git a/rhodecode/lib/index/__init__.py b/rhodecode/lib/index/__init__.py --- a/rhodecode/lib/index/__init__.py +++ b/rhodecode/lib/index/__init__.py @@ -46,6 +46,8 @@ class BaseSearcher(object): query_lang_doc = '' es_version = None name = None + DIRECTION_ASC = 'asc' + DIRECTION_DESC = 'desc' def __init__(self): pass @@ -87,6 +89,46 @@ class BaseSearcher(object): """ return val + def sort_def(self, search_type, direction, sort_field): + """ + Defines sorting for search. This function should decide if for given + search_type, sorting can be done with sort_field. + + It also should translate common sort fields into backend specific. e.g elasticsearch + """ + raise NotImplementedError() + + @staticmethod + def get_sort(search_type, search_val): + """ + Method used to parse the GET search sort value to a field and direction. 
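The get_sort() helper whose docstring begins here (its body follows a bit further down in the diff) only splits the GET parameter into a direction and a field name, with a legacy fallback for newfirst/oldfirst on commit searches. Expected results, derived from that implementation:

    from rhodecode.lib.index import BaseSearcher

    BaseSearcher.get_sort('commit', 'asc:lines')   # -> ('asc', 'lines')
    BaseSearcher.get_sort('commit', 'desc:date')   # -> ('desc', 'date')
    BaseSearcher.get_sort('commit', 'newfirst')    # -> ('desc', 'date')   legacy value
    BaseSearcher.get_sort('content', 'newfirst')   # -> ('asc', None)      legacy values apply to commit search only
    BaseSearcher.get_sort('commit', None)          # -> ('asc', None)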
+ e.g asc:lines == asc, lines + + There's also a legacy support for newfirst/oldfirst which defines commit + sorting only + """ + + direction = BaseSearcher.DIRECTION_ASC + sort_field = None + + if not search_val: + return direction, sort_field + + if search_val.startswith('asc:'): + sort_field = search_val[4:] + direction = BaseSearcher.DIRECTION_ASC + elif search_val.startswith('desc:'): + sort_field = search_val[5:] + direction = BaseSearcher.DIRECTION_DESC + elif search_val == 'newfirst' and search_type == 'commit': + sort_field = 'date' + direction = BaseSearcher.DIRECTION_DESC + elif search_val == 'oldfirst' and search_type == 'commit': + sort_field = 'date' + direction = BaseSearcher.DIRECTION_ASC + + return direction, sort_field + def search_config(config, prefix='search.'): _config = {} diff --git a/rhodecode/lib/index/whoosh.py b/rhodecode/lib/index/whoosh.py --- a/rhodecode/lib/index/whoosh.py +++ b/rhodecode/lib/index/whoosh.py @@ -99,6 +99,29 @@ class WhooshSearcher(BaseSearcher): query = u'(%s) OR %s' % (query, hashes_or_query) return query + def sort_def(self, search_type, direction, sort_field): + + if search_type == 'commit': + field_defs = { + 'message': 'message', + 'date': 'date', + 'author_email': 'author', + } + elif search_type == 'path': + field_defs = { + 'file': 'path', + 'size': 'size', + 'lines': 'lines', + } + elif search_type == 'content': + # NOTE(dan): content doesn't support any sorting + field_defs = {} + else: + return '' + + if sort_field in field_defs: + return field_defs[sort_field] + def search(self, query, document_type, search_user, repo_name=None, repo_group_name=None, requested_page=1, page_limit=10, sort=None, raise_on_exc=True): @@ -124,18 +147,20 @@ class WhooshSearcher(BaseSearcher): query = qp.parse(safe_unicode(query)) log.debug('query: %s (%s)', query, repr(query)) - reverse, sortedby = False, None - if search_type == 'message': - if sort == 'oldfirst': - sortedby = 'date' - reverse = False - elif sort == 'newfirst': - sortedby = 'date' - reverse = True + reverse, sorted_by = False, None + direction, sort_field = self.get_sort(search_type, sort) + if sort_field: + sort_definition = self.sort_def(search_type, direction, sort_field) + if sort_definition: + sorted_by = sort_definition + if direction == Searcher.DIRECTION_DESC: + reverse = True + if direction == Searcher.DIRECTION_ASC: + reverse = False whoosh_results = self.searcher.search( query, filter=allowed_repos_filter, limit=None, - sortedby=sortedby, reverse=reverse) + sortedby=sorted_by, reverse=reverse) # fixes for 32k limit that whoosh uses for highlight whoosh_results.fragmenter.charlimit = None diff --git a/rhodecode/lib/jsonalchemy.py b/rhodecode/lib/jsonalchemy.py --- a/rhodecode/lib/jsonalchemy.py +++ b/rhodecode/lib/jsonalchemy.py @@ -25,6 +25,7 @@ from sqlalchemy import UnicodeText from sqlalchemy.ext.mutable import Mutable from rhodecode.lib.ext_json import json +from rhodecode.lib.utils2 import safe_unicode class JsonRaw(unicode): @@ -56,10 +57,12 @@ class JSONEncodedObj(sqlalchemy.types.Ty impl = UnicodeText safe = True + enforce_unicode = True def __init__(self, *args, **kwargs): self.default = kwargs.pop('default', None) self.safe = kwargs.pop('safe_json', self.safe) + self.enforce_unicode = kwargs.pop('enforce_unicode', self.enforce_unicode) self.dialect_map = kwargs.pop('dialect_map', {}) super(JSONEncodedObj, self).__init__(*args, **kwargs) @@ -73,6 +76,8 @@ class JSONEncodedObj(sqlalchemy.types.Ty value = value elif value is not None: value = json.dumps(value) + if 
self.enforce_unicode: + value = safe_unicode(value) return value def process_result_value(self, value, dialect): diff --git a/rhodecode/lib/markup_renderer.py b/rhodecode/lib/markup_renderer.py --- a/rhodecode/lib/markup_renderer.py +++ b/rhodecode/lib/markup_renderer.py @@ -53,6 +53,8 @@ class CustomHTMLTranslator(writers.html4 Custom HTML Translator used for sandboxing potential JS injections in ref links """ + def visit_literal_block(self, node): + self.body.append(self.starttag(node, 'pre', CLASS='codehilite literal-block')) def visit_reference(self, node): if 'refuri' in node.attributes: @@ -423,7 +425,10 @@ class MarkupRenderer(object): cls.RESTRUCTUREDTEXT_DISALLOWED_DIRECTIVES]) docutils_settings.update({ - 'input_encoding': 'unicode', 'report_level': 4}) + 'input_encoding': 'unicode', + 'report_level': 4, + 'syntax_highlight': 'short', + }) for k, v in docutils_settings.iteritems(): directives.register_directive(k, v) diff --git a/rhodecode/lib/middleware/request_wrapper.py b/rhodecode/lib/middleware/request_wrapper.py --- a/rhodecode/lib/middleware/request_wrapper.py +++ b/rhodecode/lib/middleware/request_wrapper.py @@ -18,11 +18,10 @@ # RhodeCode Enterprise Edition, including its added features, Support services, # and proprietary license terms, please see https://rhodecode.com/licenses/ - import time import logging - +import rhodecode from rhodecode.lib.base import get_ip_addr, get_access_path, get_user_agent from rhodecode.lib.utils2 import safe_str @@ -39,16 +38,19 @@ class RequestWrapperTween(object): def __call__(self, request): start = time.time() + log.debug('Starting request time measurement') try: response = self.handler(request) finally: end = time.time() total = end - start + count = request.request_count() + _ver_ = rhodecode.__version__ log.info( - 'IP: %s %s Request to %s time: %.3fs [%s]', - get_ip_addr(request.environ), request.environ.get('REQUEST_METHOD'), + 'Req[%4s] IP: %s %s Request to %s time: %.4fs [%s], RhodeCode %s', + count, get_ip_addr(request.environ), request.environ.get('REQUEST_METHOD'), safe_str(get_access_path(request.environ)), total, - get_user_agent(request. environ) + get_user_agent(request. environ), _ver_ ) return response diff --git a/rhodecode/lib/middleware/simplevcs.py b/rhodecode/lib/middleware/simplevcs.py --- a/rhodecode/lib/middleware/simplevcs.py +++ b/rhodecode/lib/middleware/simplevcs.py @@ -201,14 +201,11 @@ class SimpleVCS(object): # Only proceed if we got a pull request and if acl repo name from # URL equals the target repo name of the pull request. - if pull_request and \ - (acl_repo_name == pull_request.target_repo.repo_name): - repo_id = pull_request.target_repo.repo_id + if pull_request and (acl_repo_name == pull_request.target_repo.repo_name): + # Get file system path to shadow repository. workspace_id = PullRequestModel()._workspace_id(pull_request) - target_vcs = pull_request.target_repo.scm_instance() - vcs_repo_name = target_vcs._get_shadow_repository_path( - repo_id, workspace_id) + vcs_repo_name = pull_request.target_repo.get_shadow_repository_path(workspace_id) # Store names for later usage. 
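The RequestWrapperTween change earlier in this hunk extends the per-request timing log with a request counter and the RhodeCode version. The underlying pattern is simply a Pyramid tween wrapping the downstream handler; a stripped-down, function-style sketch of the timing part (names here are illustrative, this is not the class from the diff):

    import time
    import logging

    log = logging.getLogger(__name__)

    def timing_tween_factory(handler, registry):
        # Pyramid calls the factory once at startup; the inner callable wraps every request.
        def timing_tween(request):
            start = time.time()
            try:
                return handler(request)
            finally:
                total = time.time() - start
                log.info('Request to %s took %.4fs', request.path, total)
        return timing_tween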
self.vcs_repo_name = vcs_repo_name @@ -225,10 +222,10 @@ class SimpleVCS(object): def scm_app(self): custom_implementation = self.config['vcs.scm_app_implementation'] if custom_implementation == 'http': - log.info('Using HTTP implementation of scm app.') + log.debug('Using HTTP implementation of scm app.') scm_app_impl = scm_app_http else: - log.info('Using custom implementation of scm_app: "{}"'.format( + log.debug('Using custom implementation of scm_app: "{}"'.format( custom_implementation)) scm_app_impl = importlib.import_module(custom_implementation) return scm_app_impl @@ -354,7 +351,7 @@ class SimpleVCS(object): 'vcs_permissions', plugin_id, action, user.user_id, repo_name, ip_addr) auth_time = time.time() - start - log.debug('Permissions for plugin `%s` completed in %.3fs, ' + log.debug('Permissions for plugin `%s` completed in %.4fs, ' 'expiration time of fetched cache %.1fs.', plugin_id, auth_time, cache_ttl) @@ -657,6 +654,9 @@ class SimpleVCS(object): raise NotImplementedError() def _should_use_callback_daemon(self, extras, environ, action): + if extras.get('is_shadow_repo'): + # we don't want to execute hooks, and callback daemon for shadow repos + return False return True def _prepare_callback_daemon(self, extras, environ, action, txn_id=None): diff --git a/rhodecode/lib/pagination.py b/rhodecode/lib/pagination.py new file mode 100644 --- /dev/null +++ b/rhodecode/lib/pagination.py @@ -0,0 +1,1060 @@ +# -*- coding: utf-8 -*- + +# Copyright (c) 2007-2012 Christoph Haas +# NOTE: MIT license based code, backported and edited by RhodeCode GmbH + +""" +paginate: helps split up large collections into individual pages +================================================================ + +What is pagination? +--------------------- + +This module helps split large lists of items into pages. The user is shown one page at a time and +can navigate to other pages. Imagine you are offering a company phonebook and let the user search +the entries. The entire search result may contains 23 entries but you want to display no more than +10 entries at once. The first page contains entries 1-10, the second 11-20 and the third 21-23. +Each "Page" instance represents the items of one of these three pages. + +See the documentation of the "Page" class for more information. + +How do I use it? +------------------ + +A page of items is represented by the *Page* object. A *Page* gets initialized with these arguments: + +- The collection of items to pick a range from. Usually just a list. +- The page number you want to display. Default is 1: the first page. + +Now we can make up a collection and create a Page instance of it:: + + # Create a sample collection of 1000 items + >> my_collection = range(1000) + + # Create a Page object for the 3rd page (20 items per page is the default) + >> my_page = Page(my_collection, page=3) + + # The page object can be printed as a string to get its details + >> str(my_page) + Page: + Collection type: + Current page: 3 + First item: 41 + Last item: 60 + First page: 1 + Last page: 50 + Previous page: 2 + Next page: 4 + Items per page: 20 + Number of items: 1000 + Number of pages: 50 + + # Print a list of items on the current page + >> my_page.items + [40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59] + + # The *Page* object can be used as an iterator: + >> for my_item in my_page: print(my_item) + 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 + + # The .pager() method returns an HTML fragment with links to surrounding pages. 
+ >> my_page.pager(url="http://example.org/foo/page=$page") + + 1 + 2 + 3 + 4 + 5 + .. + 50' + + # Without the HTML it would just look like: + # 1 2 [3] 4 5 .. 50 + + # The pager can be customized: + >> my_page.pager('$link_previous ~3~ $link_next (Page $page of $page_count)', + url="http://example.org/foo/page=$page") + + < + 1 + 2 + 3 + 4 + 5 + 6 + .. + 50 + > + (Page 3 of 50) + + # Without the HTML it would just look like: + # 1 2 [3] 4 5 6 .. 50 > (Page 3 of 50) + + # The url argument to the pager method can be omitted when an url_maker is + # given during instantiation: + >> my_page = Page(my_collection, page=3, + url_maker=lambda p: "http://example.org/%s" % p) + >> page.pager() + +There are some interesting parameters that customize the Page's behavior. See the documentation on +``Page`` and ``Page.pager()``. + + +Notes +------- + +Page numbers and item numbers start at 1. This concept has been used because users expect that the +first page has number 1 and the first item on a page also has number 1. So if you want to use the +page's items by their index number please note that you have to subtract 1. +""" + +import re +import sys +from string import Template +from webhelpers2.html import literal + +# are we running at least python 3.x ? +PY3 = sys.version_info[0] >= 3 + +if PY3: + unicode = str + + +def make_html_tag(tag, text=None, **params): + """Create an HTML tag string. + + tag + The HTML tag to use (e.g. 'a', 'span' or 'div') + + text + The text to enclose between opening and closing tag. If no text is specified then only + the opening tag is returned. + + Example:: + make_html_tag('a', text="Hello", href="/another/page") + -> Hello + + To use reserved Python keywords like "class" as a parameter prepend it with + an underscore. Instead of "class='green'" use "_class='green'". + + Warning: Quotes and apostrophes are not escaped.""" + params_string = "" + + # Parameters are passed. Turn the dict into a string like "a=1 b=2 c=3" string. + for key, value in sorted(params.items()): + # Strip off a leading underscore from the attribute's key to allow attributes like '_class' + # to be used as a CSS class specification instead of the reserved Python keyword 'class'. + key = key.lstrip("_") + + params_string += u' {0}="{1}"'.format(key, value) + + # Create the tag string + tag_string = u"<{0}{1}>".format(tag, params_string) + + # Add text and closing tag if required. + if text: + tag_string += u"{0}".format(text, tag) + + return tag_string + + +# Since the items on a page are mainly a list we subclass the "list" type +class _Page(list): + """A list/iterator representing the items on one page of a larger collection. + + An instance of the "Page" class is created from a _collection_ which is any + list-like object that allows random access to its elements. + + The instance works as an iterator running from the first item to the last item on the given + page. The Page.pager() method creates a link list allowing the user to go to other pages. + + A "Page" does not only carry the items on a certain page. It gives you additional information + about the page in these "Page" object attributes: + + item_count + Number of items in the collection + + **WARNING:** Unless you pass in an item_count, a count will be + performed on the collection every time a Page instance is created. 
+ + page + Number of the current page + + items_per_page + Maximal number of items displayed on a page + + first_page + Number of the first page - usually 1 :) + + last_page + Number of the last page + + previous_page + Number of the previous page. If this is the first page it returns None. + + next_page + Number of the next page. If this is the last page it returns None. + + page_count + Number of pages + + items + Sequence/iterator of items on the current page + + first_item + Index of first item on the current page - starts with 1 + + last_item + Index of last item on the current page + """ + + def __init__( + self, + collection, + page=1, + items_per_page=20, + item_count=None, + wrapper_class=None, + url_maker=None, + bar_size=10, + **kwargs + ): + """Create a "Page" instance. + + Parameters: + + collection + Sequence representing the collection of items to page through. + + page + The requested page number - starts with 1. Default: 1. + + items_per_page + The maximal number of items to be displayed per page. + Default: 20. + + item_count (optional) + The total number of items in the collection - if known. + If this parameter is not given then the paginator will count + the number of elements in the collection every time a "Page" + is created. Giving this parameter will speed up things. In a busy + real-life application you may want to cache the number of items. + + url_maker (optional) + Callback to generate the URL of other pages, given its numbers. + Must accept one int parameter and return a URI string. + + bar_size + maximum size of rendered pages numbers within radius + + """ + if collection is not None: + if wrapper_class is None: + # Default case. The collection is already a list-type object. + self.collection = collection + else: + # Special case. A custom wrapper class is used to access elements of the collection. + self.collection = wrapper_class(collection) + else: + self.collection = [] + + self.collection_type = type(collection) + + if url_maker is not None: + self.url_maker = url_maker + else: + self.url_maker = self._default_url_maker + self.bar_size = bar_size + # Assign kwargs to self + self.kwargs = kwargs + + # The self.page is the number of the current page. + # The first page has the number 1! + try: + self.page = int(page) # make it int() if we get it as a string + except (ValueError, TypeError): + self.page = 1 + # normally page should be always at least 1 but the original maintainer + # decided that for empty collection and empty page it can be...0? (based on tests) + # preserving behavior for BW compat + if self.page < 1: + self.page = 1 + + self.items_per_page = items_per_page + + # We subclassed "list" so we need to call its init() method + # and fill the new list with the items to be displayed on the page. + # We use list() so that the items on the current page are retrieved + # only once. In an SQL context that could otherwise lead to running the + # same SQL query every time items would be accessed. + # We do this here, prior to calling len() on the collection so that a + # wrapper class can execute a query with the knowledge of what the + # slice will be (for efficiency) and, in the same query, ask for the + # total number of items and only execute one query. 
+ try: + first = (self.page - 1) * items_per_page + last = first + items_per_page + self.items = list(self.collection[first:last]) + except TypeError: + raise TypeError( + "Your collection of type {} cannot be handled " + "by paginate.".format(type(self.collection)) + ) + + # Unless the user tells us how many items the collections has + # we calculate that ourselves. + if item_count is not None: + self.item_count = item_count + else: + self.item_count = len(self.collection) + + # Compute the number of the first and last available page + if self.item_count > 0: + self.first_page = 1 + self.page_count = ((self.item_count - 1) // self.items_per_page) + 1 + self.last_page = self.first_page + self.page_count - 1 + + # Make sure that the requested page number is the range of valid pages + if self.page > self.last_page: + self.page = self.last_page + elif self.page < self.first_page: + self.page = self.first_page + + # Note: the number of items on this page can be less than + # items_per_page if the last page is not full + self.first_item = (self.page - 1) * items_per_page + 1 + self.last_item = min(self.first_item + items_per_page - 1, self.item_count) + + # Links to previous and next page + if self.page > self.first_page: + self.previous_page = self.page - 1 + else: + self.previous_page = None + + if self.page < self.last_page: + self.next_page = self.page + 1 + else: + self.next_page = None + + # No items available + else: + self.first_page = None + self.page_count = 0 + self.last_page = None + self.first_item = None + self.last_item = None + self.previous_page = None + self.next_page = None + self.items = [] + + # This is a subclass of the 'list' type. Initialise the list now. + list.__init__(self, self.items) + + def __str__(self): + return ( + "Page:\n" + "Collection type: {0.collection_type}\n" + "Current page: {0.page}\n" + "First item: {0.first_item}\n" + "Last item: {0.last_item}\n" + "First page: {0.first_page}\n" + "Last page: {0.last_page}\n" + "Previous page: {0.previous_page}\n" + "Next page: {0.next_page}\n" + "Items per page: {0.items_per_page}\n" + "Total number of items: {0.item_count}\n" + "Number of pages: {0.page_count}\n" + ).format(self) + + def __repr__(self): + return "".format(self.page, self.page_count) + + def pager( + self, + tmpl_format="~2~", + url=None, + show_if_single_page=False, + separator=" ", + symbol_first="<<", + symbol_last=">>", + symbol_previous="<", + symbol_next=">", + link_attr=None, + curpage_attr=None, + dotdot_attr=None, + link_tag=None, + ): + """ + Return string with links to other pages (e.g. '1 .. 5 6 7 [8] 9 10 11 .. 50'). + + tmpl_format: + Format string that defines how the pager is rendered. The string + can contain the following $-tokens that are substituted by the + string.Template module: + + - $first_page: number of first reachable page + - $last_page: number of last reachable page + - $page: number of currently selected page + - $page_count: number of reachable pages + - $items_per_page: maximal number of items per page + - $first_item: index of first item on the current page + - $last_item: index of last item on the current page + - $item_count: total number of items + - $link_first: link to first page (unless this is first page) + - $link_last: link to last page (unless this is last page) + - $link_previous: link to previous page (unless this is first page) + - $link_next: link to next page (unless this is last page) + + To render a range of pages the token '~3~' can be used. 
The + number sets the radius of pages around the current page. + Example for a range with radius 3: + + '1 .. 5 6 7 [8] 9 10 11 .. 50' + + Default: '~2~' + + url + The URL that page links will point to. Make sure it contains the string + $page which will be replaced by the actual page number. + Must be given unless a url_maker is specified to __init__, in which + case this parameter is ignored. + + symbol_first + String to be displayed as the text for the $link_first link above. + + Default: '<<' (<<) + + symbol_last + String to be displayed as the text for the $link_last link above. + + Default: '>>' (>>) + + symbol_previous + String to be displayed as the text for the $link_previous link above. + + Default: '<' (<) + + symbol_next + String to be displayed as the text for the $link_next link above. + + Default: '>' (>) + + separator: + String that is used to separate page links/numbers in the above range of pages. + + Default: ' ' + + show_if_single_page: + if True the navigator will be shown even if there is only one page. + + Default: False + + link_attr (optional) + A dictionary of attributes that get added to A-HREF links pointing to other pages. Can + be used to define a CSS style or class to customize the look of links. + + Example: { 'style':'border: 1px solid green' } + Example: { 'class':'pager_link' } + + curpage_attr (optional) + A dictionary of attributes that get added to the current page number in the pager (which + is obviously not a link). If this dictionary is not empty then the elements will be + wrapped in a SPAN tag with the given attributes. + + Example: { 'style':'border: 3px solid blue' } + Example: { 'class':'pager_curpage' } + + dotdot_attr (optional) + A dictionary of attributes that get added to the '..' string in the pager (which is + obviously not a link). If this dictionary is not empty then the elements will be wrapped + in a SPAN tag with the given attributes. + + Example: { 'style':'color: #808080' } + Example: { 'class':'pager_dotdot' } + + link_tag (optional) + A callable that accepts single argument `page` (page link information) + and generates string with html that represents the link for specific page. + Page objects are supplied from `link_map()` so the keys are the same. 
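As described above, link_tag lets callers control how each page link is rendered; it receives the same dictionaries that link_map() produces (keys such as value, href, attrs, type, number). A small illustrative callable, modelled on default_link_tag defined later in this file (the extra CSS class is an assumption for the example):

    def my_link_tag(item):
        # Same contract as default_link_tag below, with one extra CSS class added.
        attrs = dict(item['attrs'])
        attrs['class'] = (attrs.get('class', '') + ' my-pager-link').strip()
        if not item['href'] or item['type'] in ('span', 'current_page'):
            # the current page and '..' spacers are not rendered as links
            return make_html_tag('span', text=item['value'], **attrs)
        return make_html_tag('a', text=item['value'], href=item['href'], **attrs)

    # usage: page.pager(link_tag=my_link_tag)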
+ + + """ + link_attr = link_attr or {} + curpage_attr = curpage_attr or {} + dotdot_attr = dotdot_attr or {} + self.curpage_attr = curpage_attr + self.separator = separator + self.link_attr = link_attr + self.dotdot_attr = dotdot_attr + self.url = url + self.link_tag = link_tag or self.default_link_tag + + # Don't show navigator if there is no more than one page + if self.page_count == 0 or (self.page_count == 1 and not show_if_single_page): + return "" + + regex_res = re.search(r"~(\d+)~", tmpl_format) + if regex_res: + radius = regex_res.group(1) + else: + radius = 2 + + self.radius = int(radius) + link_map = self.link_map( + tmpl_format=tmpl_format, + url=url, + show_if_single_page=show_if_single_page, + separator=separator, + symbol_first=symbol_first, + symbol_last=symbol_last, + symbol_previous=symbol_previous, + symbol_next=symbol_next, + link_attr=link_attr, + curpage_attr=curpage_attr, + dotdot_attr=dotdot_attr, + link_tag=link_tag, + ) + links_markup = self._range(link_map, self.radius) + + # Replace ~...~ in token tmpl_format by range of pages + result = re.sub(r"~(\d+)~", links_markup, tmpl_format) + + link_first = ( + self.page > self.first_page and self.link_tag(link_map["first_page"]) or "" + ) + link_last = ( + self.page < self.last_page and self.link_tag(link_map["last_page"]) or "" + ) + link_previous = ( + self.previous_page and self.link_tag(link_map["previous_page"]) or "" + ) + link_next = self.next_page and self.link_tag(link_map["next_page"]) or "" + # Interpolate '$' variables + result = Template(result).safe_substitute( + { + "first_page": self.first_page, + "last_page": self.last_page, + "page": self.page, + "page_count": self.page_count, + "items_per_page": self.items_per_page, + "first_item": self.first_item, + "last_item": self.last_item, + "item_count": self.item_count, + "link_first": link_first, + "link_last": link_last, + "link_previous": link_previous, + "link_next": link_next, + } + ) + + return result + + def _get_edges(self, cur_page, max_page, items): + cur_page = int(cur_page) + edge = (items / 2) + 1 + if cur_page <= edge: + radius = max(items / 2, items - cur_page) + elif (max_page - cur_page) < edge: + radius = (items - 1) - (max_page - cur_page) + else: + radius = (items / 2) - 1 + + left = max(1, (cur_page - radius)) + right = min(max_page, cur_page + radius) + return left, right + + def link_map( + self, + tmpl_format="~2~", + url=None, + show_if_single_page=False, + separator=" ", + symbol_first="<<", + symbol_last=">>", + symbol_previous="<", + symbol_next=">", + link_attr=None, + curpage_attr=None, + dotdot_attr=None, + link_tag=None + ): + """ Return map with links to other pages if default pager() function is not suitable solution. + tmpl_format: + Format string that defines how the pager would be normally rendered rendered. Uses same arguments as pager() + method, but returns a simple dictionary in form of: + {'current_page': {'attrs': {}, + 'href': 'http://example.org/foo/page=1', + 'value': 1}, + 'first_page': {'attrs': {}, + 'href': 'http://example.org/foo/page=1', + 'type': 'first_page', + 'value': 1}, + 'last_page': {'attrs': {}, + 'href': 'http://example.org/foo/page=8', + 'type': 'last_page', + 'value': 8}, + 'next_page': {'attrs': {}, 'href': 'HREF', 'type': 'next_page', 'value': 2}, + 'previous_page': None, + 'range_pages': [{'attrs': {}, + 'href': 'http://example.org/foo/page=1', + 'type': 'current_page', + 'value': 1}, + .... 
+ {'attrs': {}, 'href': '', 'type': 'span', 'value': '..'}]} + + + The string can contain the following $-tokens that are substituted by the + string.Template module: + + - $first_page: number of first reachable page + - $last_page: number of last reachable page + - $page: number of currently selected page + - $page_count: number of reachable pages + - $items_per_page: maximal number of items per page + - $first_item: index of first item on the current page + - $last_item: index of last item on the current page + - $item_count: total number of items + - $link_first: link to first page (unless this is first page) + - $link_last: link to last page (unless this is last page) + - $link_previous: link to previous page (unless this is first page) + - $link_next: link to next page (unless this is last page) + + To render a range of pages the token '~3~' can be used. The + number sets the radius of pages around the current page. + Example for a range with radius 3: + + '1 .. 5 6 7 [8] 9 10 11 .. 50' + + Default: '~2~' + + url + The URL that page links will point to. Make sure it contains the string + $page which will be replaced by the actual page number. + Must be given unless a url_maker is specified to __init__, in which + case this parameter is ignored. + + symbol_first + String to be displayed as the text for the $link_first link above. + + Default: '<<' (<<) + + symbol_last + String to be displayed as the text for the $link_last link above. + + Default: '>>' (>>) + + symbol_previous + String to be displayed as the text for the $link_previous link above. + + Default: '<' (<) + + symbol_next + String to be displayed as the text for the $link_next link above. + + Default: '>' (>) + + separator: + String that is used to separate page links/numbers in the above range of pages. + + Default: ' ' + + show_if_single_page: + if True the navigator will be shown even if there is only one page. + + Default: False + + link_attr (optional) + A dictionary of attributes that get added to A-HREF links pointing to other pages. Can + be used to define a CSS style or class to customize the look of links. + + Example: { 'style':'border: 1px solid green' } + Example: { 'class':'pager_link' } + + curpage_attr (optional) + A dictionary of attributes that get added to the current page number in the pager (which + is obviously not a link). If this dictionary is not empty then the elements will be + wrapped in a SPAN tag with the given attributes. + + Example: { 'style':'border: 3px solid blue' } + Example: { 'class':'pager_curpage' } + + dotdot_attr (optional) + A dictionary of attributes that get added to the '..' string in the pager (which is + obviously not a link). If this dictionary is not empty then the elements will be wrapped + in a SPAN tag with the given attributes. + + Example: { 'style':'color: #808080' } + Example: { 'class':'pager_dotdot' } + """ + link_attr = link_attr or {} + curpage_attr = curpage_attr or {} + dotdot_attr = dotdot_attr or {} + self.curpage_attr = curpage_attr + self.separator = separator + self.link_attr = link_attr + self.dotdot_attr = dotdot_attr + self.url = url + + regex_res = re.search(r"~(\d+)~", tmpl_format) + if regex_res: + radius = regex_res.group(1) + else: + radius = 2 + + self.radius = int(radius) + + # Compute the first and last page number within the radius + # e.g. '1 .. 5 6 [7] 8 9 .. 
12' + # -> leftmost_page = 5 + # -> rightmost_page = 9 + leftmost_page, rightmost_page = self._get_edges( + self.page, self.last_page, (self.radius * 2) + 1) + + nav_items = { + "first_page": None, + "last_page": None, + "previous_page": None, + "next_page": None, + "current_page": None, + "radius": self.radius, + "range_pages": [], + } + + if leftmost_page is None or rightmost_page is None: + return nav_items + + nav_items["first_page"] = { + "type": "first_page", + "value": unicode(symbol_first), + "attrs": self.link_attr, + "number": self.first_page, + "href": self.url_maker(self.first_page), + } + + # Insert dots if there are pages between the first page + # and the currently displayed page range + if leftmost_page - self.first_page > 1: + # Wrap in a SPAN tag if dotdot_attr is set + nav_items["range_pages"].append( + { + "type": "span", + "value": "..", + "attrs": self.dotdot_attr, + "href": "", + "number": None, + } + ) + + for this_page in range(leftmost_page, rightmost_page + 1): + # Highlight the current page number and do not use a link + if this_page == self.page: + # Wrap in a SPAN tag if curpage_attr is set + nav_items["range_pages"].append( + { + "type": "current_page", + "value": unicode(this_page), + "number": this_page, + "attrs": self.curpage_attr, + "href": self.url_maker(this_page), + } + ) + nav_items["current_page"] = { + "value": this_page, + "attrs": self.curpage_attr, + "type": "current_page", + "href": self.url_maker(this_page), + } + # Otherwise create just a link to that page + else: + nav_items["range_pages"].append( + { + "type": "page", + "value": unicode(this_page), + "number": this_page, + "attrs": self.link_attr, + "href": self.url_maker(this_page), + } + ) + + # Insert dots if there are pages between the displayed + # page numbers and the end of the page range + if self.last_page - rightmost_page > 1: + # Wrap in a SPAN tag if dotdot_attr is set + nav_items["range_pages"].append( + { + "type": "span", + "value": "..", + "attrs": self.dotdot_attr, + "href": "", + "number": None, + } + ) + + # Create a link to the very last page (unless we are on the last + # page or there would be no need to insert '..' spacers) + nav_items["last_page"] = { + "type": "last_page", + "value": unicode(symbol_last), + "attrs": self.link_attr, + "href": self.url_maker(self.last_page), + "number": self.last_page, + } + + nav_items["previous_page"] = { + "type": "previous_page", + "value": unicode(symbol_previous), + "attrs": self.link_attr, + "number": self.previous_page or self.first_page, + "href": self.url_maker(self.previous_page or self.first_page), + } + + nav_items["next_page"] = { + "type": "next_page", + "value": unicode(symbol_next), + "attrs": self.link_attr, + "number": self.next_page or self.last_page, + "href": self.url_maker(self.next_page or self.last_page), + } + + return nav_items + + def _range(self, link_map, radius): + """ + Return range of linked pages to substitute placeholder in pattern + """ + # Compute the first and last page number within the radius + # e.g. '1 .. 5 6 [7] 8 9 .. 12' + # -> leftmost_page = 5 + # -> rightmost_page = 9 + leftmost_page, rightmost_page = self._get_edges( + self.page, self.last_page, (radius * 2) + 1) + + nav_items = [] + # Create a link to the first page (unless we are on the first page + # or there would be no need to insert '..' 
spacers) + if self.first_page and self.page != self.first_page and self.first_page < leftmost_page: + page = link_map["first_page"].copy() + page["value"] = unicode(page["number"]) + nav_items.append(self.link_tag(page)) + + for item in link_map["range_pages"]: + nav_items.append(self.link_tag(item)) + + # Create a link to the very last page (unless we are on the last + # page or there would be no need to insert '..' spacers) + if self.last_page and self.page != self.last_page and rightmost_page < self.last_page: + page = link_map["last_page"].copy() + page["value"] = unicode(page["number"]) + nav_items.append(self.link_tag(page)) + + return self.separator.join(nav_items) + + def _default_url_maker(self, page_number): + if self.url is None: + raise Exception( + "You need to specify a 'url' parameter containing a '$page' placeholder." + ) + + if "$page" not in self.url: + raise Exception("The 'url' parameter must contain a '$page' placeholder.") + + return self.url.replace("$page", unicode(page_number)) + + @staticmethod + def default_link_tag(item): + """ + Create an A-HREF tag that points to another page. + """ + text = item["value"] + target_url = item["href"] + + if not item["href"] or item["type"] in ("span", "current_page"): + if item["attrs"]: + text = make_html_tag("span", **item["attrs"]) + text + "" + return text + + return make_html_tag("a", text=text, href=target_url, **item["attrs"]) + +# Below is RhodeCode custom code + +# Copyright (C) 2010-2019 RhodeCode GmbH +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License, version 3 +# (only), as published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . +# +# This program is dual-licensed. If you wish to learn more about the +# RhodeCode Enterprise Edition, including its added features, Support services, +# and proprietary license terms, please see https://rhodecode.com/licenses/ + + +PAGE_FORMAT = '$link_previous ~3~ $link_next' + + +class SqlalchemyOrmWrapper(object): + """Wrapper class to access elements of a collection.""" + + def __init__(self, pager, collection): + self.pager = pager + self.collection = collection + + def __getitem__(self, range): + # Return a range of objects of an sqlalchemy.orm.query.Query object + return self.collection[range] + + def __len__(self): + # support empty types, without actually making a query. 
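The wrapper defined here lets the pager work against an SQLAlchemy query without materialising the whole result set: len() is answered through query.count() and the page slice through query[first:last]. A duck-typed sketch of that contract as SqlPage uses it (FakeQuery and the URL pattern are illustrative assumptions, not part of the patch):

    from rhodecode.lib.pagination import SqlPage

    class FakeQuery(object):
        """Duck-typed stand-in for an SQLAlchemy query (illustrative only)."""
        def __init__(self, rows):
            self._rows = rows

        def __getitem__(self, item):
            # a real query would translate the slice into LIMIT/OFFSET
            return self._rows[item]

        def count(self):
            # a real query would issue SELECT COUNT(*)
            return len(self._rows)

    page = SqlPage(FakeQuery(range(95)), page=2, items_per_page=20,
                   url_maker=lambda p: '/changelog?page=%s' % p)
    # page.items       -> rows 20..39 (second page)
    # page.item_count  -> 95
    # page.page_count  -> 5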
+ if self.collection is None or self.collection == []: + return 0 + + # Count the number of objects in an sqlalchemy.orm.query.Query object + return self.collection.count() + + +class CustomPager(_Page): + + @staticmethod + def disabled_link_tag(item): + """ + Create an A-HREF tag that is disabled + """ + text = item['value'] + attrs = item['attrs'].copy() + attrs['class'] = 'disabled ' + attrs['class'] + + return make_html_tag('a', text=text, **attrs) + + def render(self): + # Don't show navigator if there is no more than one page + if self.page_count == 0: + return "" + + self.link_tag = self.default_link_tag + + link_map = self.link_map( + tmpl_format=PAGE_FORMAT, url=None, + show_if_single_page=False, separator=' ', + symbol_first='<<', symbol_last='>>', + symbol_previous='<', symbol_next='>', + link_attr={'class': 'pager_link'}, + curpage_attr={'class': 'pager_curpage'}, + dotdot_attr={'class': 'pager_dotdot'}) + + links_markup = self._range(link_map, self.radius) + + link_first = ( + self.page > self.first_page and self.link_tag(link_map['first_page']) or '' + ) + link_last = ( + self.page < self.last_page and self.link_tag(link_map['last_page']) or '' + ) + + link_previous = ( + self.previous_page and self.link_tag(link_map['previous_page']) + or self.disabled_link_tag(link_map['previous_page']) + ) + link_next = ( + self.next_page and self.link_tag(link_map['next_page']) + or self.disabled_link_tag(link_map['next_page']) + ) + + # Interpolate '$' variables + # Replace ~...~ in token tmpl_format by range of pages + result = re.sub(r"~(\d+)~", links_markup, PAGE_FORMAT) + result = Template(result).safe_substitute( + { + "links": links_markup, + "first_page": self.first_page, + "last_page": self.last_page, + "page": self.page, + "page_count": self.page_count, + "items_per_page": self.items_per_page, + "first_item": self.first_item, + "last_item": self.last_item, + "item_count": self.item_count, + "link_first": link_first, + "link_last": link_last, + "link_previous": link_previous, + "link_next": link_next, + } + ) + + return literal(result) + + +class Page(CustomPager): + """ + Custom pager to match rendering style with paginator + """ + + def __init__(self, collection, page=1, items_per_page=20, item_count=None, + url_maker=None, **kwargs): + """ + Special type of pager. We intercept collection to wrap it in our custom + logic instead of using wrapper_class + """ + + super(Page, self).__init__(collection=collection, page=page, + items_per_page=items_per_page, item_count=item_count, + wrapper_class=None, url_maker=url_maker, **kwargs) + + +class SqlPage(CustomPager): + """ + Custom pager to match rendering style with paginator + """ + + def __init__(self, collection, page=1, items_per_page=20, item_count=None, + url_maker=None, **kwargs): + """ + Special type of pager. 
We intercept collection to wrap it in our custom + logic instead of using wrapper_class + """ + collection = SqlalchemyOrmWrapper(self, collection) + + super(SqlPage, self).__init__(collection=collection, page=page, + items_per_page=items_per_page, item_count=item_count, + wrapper_class=None, url_maker=url_maker, **kwargs) + + +class RepoCommitsWrapper(object): + """Wrapper class to access elements of a collection.""" + + def __init__(self, pager, collection): + self.pager = pager + self.collection = collection + + def __getitem__(self, range): + cur_page = self.pager.page + items_per_page = self.pager.items_per_page + first_item = max(0, (len(self.collection) - (cur_page * items_per_page))) + last_item = ((len(self.collection) - 1) - items_per_page * (cur_page - 1)) + return reversed(list(self.collection[first_item:last_item + 1])) + + def __len__(self): + return len(self.collection) + + +class RepoPage(CustomPager): + """ + Create a "RepoPage" instance. special pager for paging repository + """ + + def __init__(self, collection, page=1, items_per_page=20, item_count=None, + url_maker=None, **kwargs): + """ + Special type of pager. We intercept collection to wrap it in our custom + logic instead of using wrapper_class + """ + collection = RepoCommitsWrapper(self, collection) + super(RepoPage, self).__init__(collection=collection, page=page, + items_per_page=items_per_page, item_count=item_count, + wrapper_class=None, url_maker=url_maker, **kwargs) diff --git a/rhodecode/lib/rc_cache/__init__.py b/rhodecode/lib/rc_cache/__init__.py --- a/rhodecode/lib/rc_cache/__init__.py +++ b/rhodecode/lib/rc_cache/__init__.py @@ -33,16 +33,23 @@ register_backend( "dogpile.cache.rc.redis", "rhodecode.lib.rc_cache.backends", "RedisPickleBackend") +register_backend( + "dogpile.cache.rc.redis_msgpack", "rhodecode.lib.rc_cache.backends", + "RedisMsgPackBackend") + log = logging.getLogger(__name__) from . import region_meta from .utils import ( - get_default_cache_settings, key_generator, get_or_create_region, + get_default_cache_settings, backend_key_generator, get_or_create_region, clear_cache_namespace, make_region, InvalidationContext, FreshRegionCache, ActiveRegionCache) +FILE_TREE_CACHE_VER = 'v2' + + def configure_dogpile_cache(settings): cache_dir = settings.get('cache_dir') if cache_dir: @@ -61,13 +68,12 @@ def configure_dogpile_cache(settings): for region_name in avail_regions: new_region = make_region( name=region_name, - function_key_generator=key_generator + function_key_generator=None ) new_region.configure_from_config(settings, 'rc_cache.{}.'.format(region_name)) - - log.debug('dogpile: registering a new region %s[%s]', - region_name, new_region.__dict__) + new_region.function_key_generator = backend_key_generator(new_region.actual_backend) + log.debug('dogpile: registering a new region %s[%s]', region_name, new_region.__dict__) region_meta.dogpile_cache_regions[region_name] = new_region diff --git a/rhodecode/lib/rc_cache/backends.py b/rhodecode/lib/rc_cache/backends.py --- a/rhodecode/lib/rc_cache/backends.py +++ b/rhodecode/lib/rc_cache/backends.py @@ -17,12 +17,16 @@ # This program is dual-licensed. 
If you wish to learn more about the # RhodeCode Enterprise Edition, including its added features, Support services, # and proprietary license terms, please see https://rhodecode.com/licenses/ + import time import errno import logging +import msgpack import gevent +import redis +from dogpile.cache.api import CachedValue from dogpile.cache.backends import memory as memory_backend from dogpile.cache.backends import file as file_backend from dogpile.cache.backends import redis as redis_backend @@ -38,6 +42,7 @@ log = logging.getLogger(__name__) class LRUMemoryBackend(memory_backend.MemoryBackend): + key_prefix = 'lru_mem_backend' pickle_values = False def __init__(self, arguments): @@ -62,7 +67,8 @@ class LRUMemoryBackend(memory_backend.Me self.delete(key) -class Serializer(object): +class PickleSerializer(object): + def _dumps(self, value, safe=False): try: return compat.pickle.dumps(value) @@ -82,6 +88,32 @@ class Serializer(object): raise +class MsgPackSerializer(object): + + def _dumps(self, value, safe=False): + try: + return msgpack.packb(value) + except Exception: + if safe: + return NO_VALUE + else: + raise + + def _loads(self, value, safe=True): + """ + pickle maintained the `CachedValue` wrapper of the tuple + msgpack does not, so it must be added back in. + """ + try: + value = msgpack.unpackb(value, use_list=False) + return CachedValue(*value) + except Exception: + if safe: + return NO_VALUE + else: + raise + + import fcntl flock_org = fcntl.flock @@ -122,13 +154,19 @@ class CustomLockFactory(FileLock): return fcntl -class FileNamespaceBackend(Serializer, file_backend.DBMBackend): +class FileNamespaceBackend(PickleSerializer, file_backend.DBMBackend): + key_prefix = 'file_backend' def __init__(self, arguments): arguments['lock_factory'] = CustomLockFactory super(FileNamespaceBackend, self).__init__(arguments) + def __repr__(self): + return '{} `{}`'.format(self.__class__, self.filename) + def list_keys(self, prefix=''): + prefix = '{}:{}'.format(self.key_prefix, prefix) + def cond(v): if not prefix: return True @@ -144,7 +182,7 @@ class FileNamespaceBackend(Serializer, f def get_store(self): return self.filename - def get(self, key): + def _dbm_get(self, key): with self._dbm_file(False) as dbm: if hasattr(dbm, 'get'): value = dbm.get(key, NO_VALUE) @@ -158,6 +196,13 @@ class FileNamespaceBackend(Serializer, f value = self._loads(value) return value + def get(self, key): + try: + return self._dbm_get(key) + except Exception: + log.error('Failed to fetch DBM key %s from DB: %s', key, self.get_store()) + raise + def set(self, key, value): with self._dbm_file(True) as dbm: dbm[key] = self._dumps(value) @@ -168,10 +213,26 @@ class FileNamespaceBackend(Serializer, f dbm[key] = self._dumps(value) -class RedisPickleBackend(Serializer, redis_backend.RedisBackend): +class BaseRedisBackend(redis_backend.RedisBackend): + + def _create_client(self): + args = {} + + if self.url is not None: + args.update(url=self.url) + + else: + args.update( + host=self.host, password=self.password, + port=self.port, db=self.db + ) + + connection_pool = redis.ConnectionPool(**args) + + return redis.StrictRedis(connection_pool=connection_pool) + def list_keys(self, prefix=''): - if prefix: - prefix = prefix + '*' + prefix = '{}:{}*'.format(self.key_prefix, prefix) return self.client.keys(prefix) def get_store(self): @@ -183,6 +244,15 @@ class RedisPickleBackend(Serializer, red return NO_VALUE return self._loads(value) + def get_multi(self, keys): + if not keys: + return [] + values = self.client.mget(keys) + 
loads = self._loads + return [ + loads(v) if v is not None else NO_VALUE + for v in values] + def set(self, key, value): if self.redis_expiration_time: self.client.setex(key, self.redis_expiration_time, @@ -191,8 +261,9 @@ class RedisPickleBackend(Serializer, red self.client.set(key, self._dumps(value)) def set_multi(self, mapping): + dumps = self._dumps mapping = dict( - (k, self._dumps(v)) + (k, dumps(v)) for k, v in mapping.items() ) @@ -212,3 +283,13 @@ class RedisPickleBackend(Serializer, red return self.client.lock(lock_key, self.lock_timeout, self.lock_sleep) else: return None + + +class RedisPickleBackend(PickleSerializer, BaseRedisBackend): + key_prefix = 'redis_pickle_backend' + pass + + +class RedisMsgPackBackend(MsgPackSerializer, BaseRedisBackend): + key_prefix = 'redis_msgpack_backend' + pass diff --git a/rhodecode/lib/rc_cache/cache_key_meta.py b/rhodecode/lib/rc_cache/cache_key_meta.py new file mode 100644 --- /dev/null +++ b/rhodecode/lib/rc_cache/cache_key_meta.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- + +# Copyright (C) 2015-2019 RhodeCode GmbH +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License, version 3 +# (only), as published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . +# +# This program is dual-licensed. If you wish to learn more about the +# RhodeCode Enterprise Edition, including its added features, Support services, +# and proprietary license terms, please see https://rhodecode.com/licenses/ + +import os +import atexit +import logging + +log = logging.getLogger(__name__) + +cache_keys_by_pid = [] + + +@atexit.register +def free_cache_keys(): + ssh_cmd = os.environ.get('RC_CMD_SSH_WRAPPER') + if ssh_cmd: + return + + from rhodecode.model.db import Session, CacheKey + log.info('Clearing %s cache keys', len(cache_keys_by_pid)) + + if cache_keys_by_pid: + try: + for cache_key in cache_keys_by_pid: + CacheKey.query().filter(CacheKey.cache_key == cache_key).delete() + Session().commit() + except Exception: + log.warn('Failed to clear keys, exiting gracefully') diff --git a/rhodecode/lib/rc_cache/utils.py b/rhodecode/lib/rc_cache/utils.py --- a/rhodecode/lib/rc_cache/utils.py +++ b/rhodecode/lib/rc_cache/utils.py @@ -31,11 +31,19 @@ from rhodecode.lib.utils import safe_str from rhodecode.lib.utils2 import safe_unicode, str2bool from rhodecode.model.db import Session, CacheKey, IntegrityError -from . import region_meta +from rhodecode.lib.rc_cache import cache_key_meta +from rhodecode.lib.rc_cache import region_meta log = logging.getLogger(__name__) +def isCython(func): + """ + Private helper that checks if a function is a cython function. 
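
Circling back to the MsgPackSerializer introduced above: pickle restores dogpile's CachedValue wrapper automatically, while msgpack only round-trips the underlying tuple, which is why `_loads` re-wraps it. A small sketch; the metadata dict shown is illustrative, its exact contents are a dogpile.cache internal:

    import msgpack
    from dogpile.cache.api import CachedValue

    original = CachedValue('payload', {'ct': 1578486529.0, 'v': 1})  # illustrative metadata
    packed = msgpack.packb(original)                                 # what gets stored in Redis
    restored = msgpack.unpackb(packed, use_list=False)               # plain tuple, wrapper lost
    assert not isinstance(restored, CachedValue)
    value = CachedValue(*restored)                                   # what _loads adds back in
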
+ """ + return func.__class__.__name__ == 'cython_function_or_method' + + class RhodeCodeCacheRegion(CacheRegion): def conditional_cache_on_arguments( @@ -55,28 +63,90 @@ class RhodeCodeCacheRegion(CacheRegion): if function_key_generator is None: function_key_generator = self.function_key_generator - def decorator(fn): + # workaround for py2 and cython problems, this block should be removed + # once we've migrated to py3 + if 'cython' == 'cython': + def decorator(fn): + if to_str is compat.string_type: + # backwards compatible + key_generator = function_key_generator(namespace, fn) + else: + key_generator = function_key_generator(namespace, fn, to_str=to_str) + + @functools.wraps(fn) + def decorate(*arg, **kw): + key = key_generator(*arg, **kw) + + @functools.wraps(fn) + def creator(): + return fn(*arg, **kw) + + if not condition: + return creator() + + timeout = expiration_time() if expiration_time_is_callable \ + else expiration_time + + return self.get_or_create(key, creator, timeout, should_cache_fn) + + def invalidate(*arg, **kw): + key = key_generator(*arg, **kw) + self.delete(key) + + def set_(value, *arg, **kw): + key = key_generator(*arg, **kw) + self.set(key, value) + + def get(*arg, **kw): + key = key_generator(*arg, **kw) + return self.get(key) + + def refresh(*arg, **kw): + key = key_generator(*arg, **kw) + value = fn(*arg, **kw) + self.set(key, value) + return value + + decorate.set = set_ + decorate.invalidate = invalidate + decorate.refresh = refresh + decorate.get = get + decorate.original = fn + decorate.key_generator = key_generator + decorate.__wrapped__ = fn + + return decorate + return decorator + + def get_or_create_for_user_func(key_generator, user_func, *arg, **kw): + + if not condition: + log.debug('Calling un-cached func:%s', user_func.func_name) + return user_func(*arg, **kw) + + key = key_generator(*arg, **kw) + + timeout = expiration_time() if expiration_time_is_callable \ + else expiration_time + + log.debug('Calling cached fn:%s', user_func.func_name) + return self.get_or_create(key, user_func, timeout, should_cache_fn, (arg, kw)) + + def cache_decorator(user_func): if to_str is compat.string_type: # backwards compatible - key_generator = function_key_generator(namespace, fn) + key_generator = function_key_generator(namespace, user_func) else: - key_generator = function_key_generator(namespace, fn, to_str=to_str) - - @functools.wraps(fn) - def decorate(*arg, **kw): - key = key_generator(*arg, **kw) + key_generator = function_key_generator(namespace, user_func, to_str=to_str) - @functools.wraps(fn) - def creator(): - return fn(*arg, **kw) - - if not condition: - return creator() - - timeout = expiration_time() if expiration_time_is_callable \ - else expiration_time - - return self.get_or_create(key, creator, timeout, should_cache_fn) + def refresh(*arg, **kw): + """ + Like invalidate, but regenerates the value instead + """ + key = key_generator(*arg, **kw) + value = user_func(*arg, **kw) + self.set(key, value) + return value def invalidate(*arg, **kw): key = key_generator(*arg, **kw) @@ -90,23 +160,18 @@ class RhodeCodeCacheRegion(CacheRegion): key = key_generator(*arg, **kw) return self.get(key) - def refresh(*arg, **kw): - key = key_generator(*arg, **kw) - value = fn(*arg, **kw) - self.set(key, value) - return value + user_func.set = set_ + user_func.invalidate = invalidate + user_func.get = get + user_func.refresh = refresh + user_func.key_generator = key_generator + user_func.original = user_func - decorate.set = set_ - decorate.invalidate = invalidate - 
decorate.refresh = refresh - decorate.get = get - decorate.original = fn - decorate.key_generator = key_generator - decorate.__wrapped__ = fn + # Use `decorate` to preserve the signature of :param:`user_func`. + return decorator.decorate(user_func, functools.partial( + get_or_create_for_user_func, key_generator)) - return decorate - - return decorator + return cache_decorator def make_region(*arg, **kw): @@ -134,13 +199,23 @@ def compute_key_from_params(*args): return sha1("_".join(map(safe_str, args))) -def key_generator(namespace, fn): +def backend_key_generator(backend): + """ + Special wrapper that also sends over the backend to the key generator + """ + def wrapper(namespace, fn): + return key_generator(backend, namespace, fn) + return wrapper + + +def key_generator(backend, namespace, fn): fname = fn.__name__ def generate_key(*args): - namespace_pref = namespace or 'default' + backend_prefix = getattr(backend, 'key_prefix', None) or 'backend_prefix' + namespace_pref = namespace or 'default_namespace' arg_key = compute_key_from_params(*args) - final_key = "{}:{}_{}".format(namespace_pref, fname, arg_key) + final_key = "{}:{}:{}_{}".format(backend_prefix, namespace_pref, fname, arg_key) return final_key @@ -167,7 +242,8 @@ def get_or_create_region(region_name, re if not os.path.isdir(cache_dir): os.makedirs(cache_dir) new_region = make_region( - name=region_uid_name, function_key_generator=key_generator + name=region_uid_name, + function_key_generator=backend_key_generator(region_obj.actual_backend) ) namespace_filename = os.path.join( cache_dir, "{}.cache.dbm".format(region_namespace)) @@ -179,7 +255,7 @@ def get_or_create_region(region_name, re ) # create and save in region caches - log.debug('configuring new region: %s',region_uid_name) + log.debug('configuring new region: %s', region_uid_name) region_obj = region_meta.dogpile_cache_regions[region_namespace] = new_region return region_obj @@ -195,16 +271,18 @@ def clear_cache_namespace(cache_region, class ActiveRegionCache(object): - def __init__(self, context): + def __init__(self, context, cache_data): self.context = context + self.cache_data = cache_data def should_invalidate(self): return False class FreshRegionCache(object): - def __init__(self, context): + def __init__(self, context, cache_data): self.context = context + self.cache_data = cache_data def should_invalidate(self): return True @@ -238,7 +316,7 @@ class InvalidationContext(object): result = heavy_compute(*args) compute_time = inv_context_manager.compute_time - log.debug('result computed in %.3fs', compute_time) + log.debug('result computed in %.4fs', compute_time) # To send global invalidation signal, simply run CacheKey.set_invalidate(invalidation_namespace) @@ -267,16 +345,28 @@ class InvalidationContext(object): self.thread_id = threading.current_thread().ident self.cache_key = compute_key_from_params(uid) - self.cache_key = 'proc:{}_thread:{}_{}'.format( + self.cache_key = 'proc:{}|thread:{}|params:{}'.format( self.proc_id, self.thread_id, self.cache_key) self.compute_time = 0 - def get_or_create_cache_obj(self, uid, invalidation_namespace=''): - cache_obj = CacheKey.get_active_cache(self.cache_key) + def get_or_create_cache_obj(self, cache_type, invalidation_namespace=''): + invalidation_namespace = invalidation_namespace or self.invalidation_namespace + # fetch all cache keys for this namespace and convert them to a map to find if we + # have specific cache_key object registered. 
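
To make the effect of the reworked `key_generator` above concrete: cache keys are now prefixed with the backend's `key_prefix` in addition to the namespace, and the argument part is still a sha1 over the stringified arguments. A hedged sketch with made-up prefix and namespace values (the real code routes this through RhodeCode's own `safe_str`/`sha1` helpers):

    import hashlib

    def example_final_key(backend_prefix, namespace, fname, *args):
        # same shape as key_generator: '<backend>:<namespace>:<fname>_<sha1(args)>'
        arg_key = hashlib.sha1('_'.join(str(a) for a in args).encode('utf8')).hexdigest()
        return '{}:{}:{}_{}'.format(backend_prefix or 'backend_prefix',
                                    namespace or 'default_namespace', fname, arg_key)

    # example_final_key('redis_msgpack_backend', 'some_namespace', 'cached_fn', 1, 'x')
    # -> 'redis_msgpack_backend:some_namespace:cached_fn_<40-char sha1>'
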
We do this because we want to have + # all consistent cache_state_uid for newly registered objects + cache_obj_map = CacheKey.get_namespace_map(invalidation_namespace) + cache_obj = cache_obj_map.get(self.cache_key) log.debug('Fetched cache obj %s using %s cache key.', cache_obj, self.cache_key) - invalidation_namespace = invalidation_namespace or self.invalidation_namespace if not cache_obj: - cache_obj = CacheKey(self.cache_key, cache_args=invalidation_namespace) + new_cache_args = invalidation_namespace + first_cache_obj = next(cache_obj_map.itervalues()) if cache_obj_map else None + cache_state_uid = None + if first_cache_obj: + cache_state_uid = first_cache_obj.cache_state_uid + cache_obj = CacheKey(self.cache_key, cache_args=new_cache_args, + cache_state_uid=cache_state_uid) + cache_key_meta.cache_keys_by_pid.append(self.cache_key) + return cache_obj def __enter__(self): @@ -284,21 +374,23 @@ class InvalidationContext(object): Test if current object is valid, and return CacheRegion function that does invalidation and calculation """ + log.debug('Entering cache invalidation check context: %s', self.invalidation_namespace) # register or get a new key based on uid - self.cache_obj = self.get_or_create_cache_obj(uid=self.uid) + self.cache_obj = self.get_or_create_cache_obj(cache_type=self.uid) + cache_data = self.cache_obj.get_dict() self._start_time = time.time() if self.cache_obj.cache_active: # means our cache obj is existing and marked as it's # cache is not outdated, we return ActiveRegionCache self.skip_cache_active_change = True - return ActiveRegionCache(context=self) + return ActiveRegionCache(context=self, cache_data=cache_data) - # the key is either not existing or set to False, we return + # the key is either not existing or set to False, we return # the real invalidator which re-computes value. We additionally set # the flag to actually update the Database objects self.skip_cache_active_change = False - return FreshRegionCache(context=self) + return FreshRegionCache(context=self, cache_data=cache_data) def __exit__(self, exc_type, exc_val, exc_tb): # save compute time diff --git a/rhodecode/lib/rc_commands/add_artifact.py b/rhodecode/lib/rc_commands/add_artifact.py new file mode 100644 --- /dev/null +++ b/rhodecode/lib/rc_commands/add_artifact.py @@ -0,0 +1,107 @@ +# -*- coding: utf-8 -*- + +# Copyright (C) 2016-2019 RhodeCode GmbH +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License, version 3 +# (only), as published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . +# +# This program is dual-licensed. 
If you wish to learn more about the +# RhodeCode Enterprise Edition, including its added features, Support services, +# and proprietary license terms, please see https://rhodecode.com/licenses/ + +import sys +import logging + +import click + +from rhodecode.lib.pyramid_utils import bootstrap +from rhodecode.model.db import Session, User, Repository +from rhodecode.model.user import UserModel +from rhodecode.apps.file_store import utils as store_utils + +log = logging.getLogger(__name__) + + +@click.command() +@click.argument('ini_path', type=click.Path(exists=True)) +@click.option( + '--filename', + required=True, + help='Filename for artifact.') +@click.option( + '--file-path', + required=True, + type=click.Path(exists=True, dir_okay=False, readable=True), + help='Path to a file to be added as artifact') +@click.option( + '--repo-id', + required=True, + type=int, + help='ID of repository to add this artifact to.') +@click.option( + '--user-id', + default=None, + type=int, + help='User ID for creator of artifact. ' + 'Default would be first super admin.') +@click.option( + '--description', + default=None, + type=str, + help='Add description to this artifact') +def main(ini_path, filename, file_path, repo_id, user_id, description): + return command(ini_path, filename, file_path, repo_id, user_id, description) + + +def command(ini_path, filename, file_path, repo_id, user_id, description): + with bootstrap(ini_path, env={'RC_CMD_SETUP_RC': '1'}) as env: + try: + from rc_ee.api.views.store_api import _store_file + except ImportError: + click.secho('ERROR: Unable to import store_api. ' + 'store_api is only available in EE edition of RhodeCode', + fg='red') + sys.exit(-1) + + request = env['request'] + + repo = Repository.get(repo_id) + if not repo: + click.secho('ERROR: Unable to find repository with id `{}`'.format(repo_id), + fg='red') + sys.exit(-1) + + # if we don't give user, or it's "DEFAULT" user we pick super-admin + if user_id is not None or user_id == 1: + db_user = User.get(user_id) + else: + db_user = User.get_first_super_admin() + + if not db_user: + click.secho('ERROR: Unable to find user with id/username `{}`'.format(user_id), + fg='red') + sys.exit(-1) + + auth_user = db_user.AuthUser(ip_addr='127.0.0.1') + + storage = store_utils.get_file_storage(request.registry.settings) + + with open(file_path, 'rb') as f: + click.secho('Adding new artifact from path: `{}`'.format(file_path), + fg='green') + + file_data = _store_file( + storage, auth_user, filename, content=None, check_acl=True, + file_obj=f, description=description, + scope_repo_id=repo.repo_id) + click.secho('File Data: {}'.format(file_data), + fg='green') diff --git a/rhodecode/lib/repo_maintenance.py b/rhodecode/lib/repo_maintenance.py --- a/rhodecode/lib/repo_maintenance.py +++ b/rhodecode/lib/repo_maintenance.py @@ -139,6 +139,15 @@ class HGVerify(MaintenanceTask): return res +class HGUpdateCaches(MaintenanceTask): + human_name = 'HG update caches' + + def run(self): + instance = self.db_repo.scm_instance() + res = instance.hg_update_cache() + return res + + class SVNVerify(MaintenanceTask): human_name = 'SVN Verify repo' @@ -153,7 +162,7 @@ class RepoMaintenance(object): Performs maintenance of repository based on it's type """ tasks = { - 'hg': [HGVerify], + 'hg': [HGVerify, HGUpdateCaches], 'git': [GitFSCK, GitGC, GitRepack], 'svn': [SVNVerify], } diff --git a/rhodecode/lib/request_counter.py b/rhodecode/lib/request_counter.py new file mode 100644 --- /dev/null +++ b/rhodecode/lib/request_counter.py @@ -0,0 +1,27 @@ 
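
Before moving on to the request counter: the `add_artifact` command defined above can also be driven from Python through its `command()` helper. A usage sketch with hypothetical paths and IDs; it still requires a configured RhodeCode instance and the EE `store_api` import shown above:

    from rhodecode.lib.rc_commands.add_artifact import command

    command(
        ini_path='production.ini',                # hypothetical .ini path
        filename='build-artifacts.tar.gz',        # name the artifact is stored under
        file_path='/tmp/build-artifacts.tar.gz',  # hypothetical local file
        repo_id=1,
        user_id=None,                             # None -> first super-admin is used
        description='nightly build output',
    )
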
+# -*- coding: utf-8 -*- + +# Copyright (C) 2017-2019 RhodeCode GmbH +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License, version 3 +# (only), as published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . +# +# This program is dual-licensed. If you wish to learn more about the +# RhodeCode Enterprise Edition, including its added features, Support services, +# and proprietary license terms, please see https://rhodecode.com/licenses/ + +counter = 0 + + +def get_request_counter(request): + global counter + counter += 1 + return counter diff --git a/rhodecode/lib/system_info.py b/rhodecode/lib/system_info.py --- a/rhodecode/lib/system_info.py +++ b/rhodecode/lib/system_info.py @@ -90,6 +90,7 @@ def get_cert_path(ini_path): return default + class SysInfoRes(object): def __init__(self, value, state=None, human_value=None): self.value = value @@ -187,7 +188,7 @@ def ulimit_info(): ('open files', get_resource(resource.RLIMIT_NOFILE)), ]) - text = ', '.join('{}:{}'.format(k,v) for k,v in data.items()) + text = ', '.join('{}:{}'.format(k, v) for k, v in data.items()) value = { 'limits': data, @@ -593,6 +594,23 @@ def vcs_server(): return SysInfoRes(value=value, state=state, human_value=human_value) +def vcs_server_config(): + from rhodecode.lib.vcs.backends import get_vcsserver_service_data + state = STATE_OK_DEFAULT + + value = {} + try: + data = get_vcsserver_service_data() + value = data['app_config'] + except Exception as e: + state = {'message': str(e), 'type': STATE_ERR} + + human_value = value.copy() + human_value['text'] = 'VCS Server config' + + return SysInfoRes(value=value, state=state, human_value=human_value) + + def rhodecode_app_info(): import rhodecode edition = rhodecode.CONFIG.get('rhodecode.edition') @@ -770,6 +788,8 @@ def get_system_info(environ): 'vcs_backends': SysInfo(vcs_backends)(), 'vcs_server': SysInfo(vcs_server)(), + 'vcs_server_config': SysInfo(vcs_server_config)(), + 'git': SysInfo(git_info)(), 'hg': SysInfo(hg_info)(), 'svn': SysInfo(svn_info)(), diff --git a/rhodecode/lib/user_log_filter.py b/rhodecode/lib/user_log_filter.py --- a/rhodecode/lib/user_log_filter.py +++ b/rhodecode/lib/user_log_filter.py @@ -22,7 +22,7 @@ import logging from whoosh.qparser.default import QueryParser, query from whoosh.qparser.dateparse import DateParserPlugin -from whoosh.fields import (TEXT, Schema, DATETIME) +from whoosh.fields import (TEXT, Schema, DATETIME, KEYWORD) from sqlalchemy.sql.expression import or_, and_, not_, func from rhodecode.model.db import UserLog @@ -30,11 +30,12 @@ from rhodecode.lib.utils2 import remove_ # JOURNAL SCHEMA used only to generate queries in journal. 
We use whoosh # querylang to build sql queries and filter journals -JOURNAL_SCHEMA = Schema( - username=TEXT(), +AUDIT_LOG_SCHEMA = Schema( + username=KEYWORD(), + repository=KEYWORD(), + date=DATETIME(), action=TEXT(), - repository=TEXT(), ip=TEXT(), ) @@ -52,7 +53,7 @@ def user_log_filter(user_log, search_ter log.debug('Initial search term: %r', search_term) qry = None if search_term: - qp = QueryParser('repository', schema=JOURNAL_SCHEMA) + qp = QueryParser('repository', schema=AUDIT_LOG_SCHEMA) qp.add_plugin(DateParserPlugin()) qry = qp.parse(safe_unicode(search_term)) log.debug('Filtering using parsed query %r', qry) diff --git a/rhodecode/lib/utils.py b/rhodecode/lib/utils.py --- a/rhodecode/lib/utils.py +++ b/rhodecode/lib/utils.py @@ -39,7 +39,7 @@ from os.path import join as jn import paste import pkg_resources -from webhelpers.text import collapse, remove_formatting, strip_tags +from webhelpers2.text import collapse, remove_formatting from mako import exceptions from pyramid.threadlocal import get_current_registry from rhodecode.lib.request import Request diff --git a/rhodecode/lib/utils2.py b/rhodecode/lib/utils2.py --- a/rhodecode/lib/utils2.py +++ b/rhodecode/lib/utils2.py @@ -35,6 +35,7 @@ import urllib import urlobject import uuid import getpass +from functools import update_wrapper, partial import pygments.lexers import sqlalchemy @@ -206,7 +207,7 @@ def safe_int(val, default=None): return val -def safe_unicode(str_, from_encoding=None): +def safe_unicode(str_, from_encoding=None, use_chardet=False): """ safe unicode function. Does few trick to turn str_ into unicode @@ -239,17 +240,19 @@ def safe_unicode(str_, from_encoding=Non except UnicodeDecodeError: pass - try: - import chardet - encoding = chardet.detect(str_)['encoding'] - if encoding is None: - raise Exception() - return str_.decode(encoding) - except (ImportError, UnicodeDecodeError, Exception): + if use_chardet: + try: + import chardet + encoding = chardet.detect(str_)['encoding'] + if encoding is None: + raise Exception() + return str_.decode(encoding) + except (ImportError, UnicodeDecodeError, Exception): + return unicode(str_, from_encoding[0], 'replace') + else: return unicode(str_, from_encoding[0], 'replace') - -def safe_str(unicode_, to_encoding=None): +def safe_str(unicode_, to_encoding=None, use_chardet=False): """ safe str function. 
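
For illustration, the reworked audit-log schema above is only used to parse the search expression; the parsed tree is then translated into SQLAlchemy filters. A parsing-only sketch, assuming whoosh is installed and mirroring the imports used in this module:

    from whoosh.fields import Schema, TEXT, DATETIME, KEYWORD
    from whoosh.qparser.default import QueryParser
    from whoosh.qparser.dateparse import DateParserPlugin

    schema = Schema(username=KEYWORD(), repository=KEYWORD(),
                    date=DATETIME(), action=TEXT(), ip=TEXT())
    qp = QueryParser('repository', schema=schema)
    qp.add_plugin(DateParserPlugin())

    # unqualified terms hit the default 'repository' field; KEYWORD fields match whole values
    print(qp.parse(u'username:admin some-repo'))
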
Does few trick to turn unicode_ into string @@ -282,14 +285,17 @@ def safe_str(unicode_, to_encoding=None) except UnicodeEncodeError: pass - try: - import chardet - encoding = chardet.detect(unicode_)['encoding'] - if encoding is None: - raise UnicodeEncodeError() + if use_chardet: + try: + import chardet + encoding = chardet.detect(unicode_)['encoding'] + if encoding is None: + raise UnicodeEncodeError() - return unicode_.encode(encoding) - except (ImportError, UnicodeEncodeError): + return unicode_.encode(encoding) + except (ImportError, UnicodeEncodeError): + return unicode_.encode(to_encoding[0], 'replace') + else: return unicode_.encode(to_encoding[0], 'replace') @@ -364,7 +370,7 @@ def engine_from_config(configuration, pr """Custom engine_from_config functions.""" log = logging.getLogger('sqlalchemy.engine') use_ping_connection = asbool(configuration.pop('sqlalchemy.db1.ping_connection', None)) - debug = asbool(configuration.get('debug')) + debug = asbool(configuration.pop('sqlalchemy.db1.debug_query', None)) engine = sqlalchemy.engine_from_config(configuration, prefix, **kwargs) @@ -628,9 +634,29 @@ def credentials_filter(uri): return ''.join(uri) -def get_clone_url(request, uri_tmpl, repo_name, repo_id, **override): - qualifed_home_url = request.route_url('home') - parsed_url = urlobject.URLObject(qualifed_home_url) +def get_host_info(request): + """ + Generate host info, to obtain full url e.g https://server.com + use this + `{scheme}://{netloc}` + """ + if not request: + return {} + + qualified_home_url = request.route_url('home') + parsed_url = urlobject.URLObject(qualified_home_url) + decoded_path = safe_unicode(urllib.unquote(parsed_url.path.rstrip('/'))) + + return { + 'scheme': parsed_url.scheme, + 'netloc': parsed_url.netloc+decoded_path, + 'hostname': parsed_url.hostname, + } + + +def get_clone_url(request, uri_tmpl, repo_name, repo_id, repo_type, **override): + qualified_home_url = request.route_url('home') + parsed_url = urlobject.URLObject(qualified_home_url) decoded_path = safe_unicode(urllib.unquote(parsed_url.path.rstrip('/'))) args = { @@ -642,7 +668,8 @@ def get_clone_url(request, uri_tmpl, rep 'hostname': parsed_url.hostname, 'prefix': decoded_path, 'repo': repo_name, - 'repoid': str(repo_id) + 'repoid': str(repo_id), + 'repo_type': repo_type } args.update(override) args['user'] = urllib.quote(safe_str(args['user'])) @@ -650,6 +677,10 @@ def get_clone_url(request, uri_tmpl, rep for k, v in args.items(): uri_tmpl = uri_tmpl.replace('{%s}' % k, v) + # special case for SVN clone url + if repo_type == 'svn': + uri_tmpl = uri_tmpl.replace('ssh://', 'svn+ssh://') + # remove leading @ sign if it's present. Case of empty user url_obj = urlobject.URLObject(uri_tmpl) url = url_obj.with_netloc(url_obj.netloc.lstrip('@')) @@ -1027,3 +1058,43 @@ def parse_byte_string(size_str): _parts = match.groups() num, type_ = _parts return long(num) * {'mb': 1024*1024, 'kb': 1024}[type_.lower()] + + +class CachedProperty(object): + """ + Lazy Attributes. 
With option to invalidate the cache by running a method + + class Foo(): + + @CachedProperty + def heavy_func(): + return 'super-calculation' + + foo = Foo() + foo.heavy_func() # first computions + foo.heavy_func() # fetch from cache + foo._invalidate_prop_cache('heavy_func') + # at this point calling foo.heavy_func() will be re-computed + """ + + def __init__(self, func, func_name=None): + + if func_name is None: + func_name = func.__name__ + self.data = (func, func_name) + update_wrapper(self, func) + + def __get__(self, inst, class_): + if inst is None: + return self + + func, func_name = self.data + value = func(inst) + inst.__dict__[func_name] = value + if '_invalidate_prop_cache' not in inst.__dict__: + inst.__dict__['_invalidate_prop_cache'] = partial( + self._invalidate_prop_cache, inst) + return value + + def _invalidate_prop_cache(self, inst, name): + inst.__dict__.pop(name, None) diff --git a/rhodecode/lib/vcs/__init__.py b/rhodecode/lib/vcs/__init__.py --- a/rhodecode/lib/vcs/__init__.py +++ b/rhodecode/lib/vcs/__init__.py @@ -27,6 +27,7 @@ import logging import urlparse from cStringIO import StringIO +import rhodecode from rhodecode.lib.vcs.conf import settings from rhodecode.lib.vcs.backends import get_vcs_instance, get_backend from rhodecode.lib.vcs.exceptions import ( @@ -71,11 +72,11 @@ def connect_http(server_and_port): session_factory = client_http.ThreadlocalSessionFactory() - connection.Git = client_http.RepoMaker( + connection.Git = client_http.RemoteVCSMaker( server_and_port, '/git', 'git', session_factory) - connection.Hg = client_http.RepoMaker( + connection.Hg = client_http.RemoteVCSMaker( server_and_port, '/hg', 'hg', session_factory) - connection.Svn = client_http.RepoMaker( + connection.Svn = client_http.RemoteVCSMaker( server_and_port, '/svn', 'svn', session_factory) connection.Service = client_http.ServiceConnection( server_and_port, '/_service', session_factory) @@ -106,21 +107,6 @@ def connect_vcs(server_and_port, protoco raise Exception('Invalid vcs server protocol "{}"'.format(protocol)) -def create_vcsserver_proxy(server_and_port, protocol): - if protocol == 'http': - return _create_vcsserver_proxy_http(server_and_port) - else: - raise Exception('Invalid vcs server protocol "{}"'.format(protocol)) - - -def _create_vcsserver_proxy_http(server_and_port): - from rhodecode.lib.vcs import client_http - - session = _create_http_rpc_session() - url = urlparse.urljoin('http://%s' % server_and_port, '/server') - return client_http.RemoteObject(url, session) - - class CurlSession(object): """ Modeled so that it provides a subset of the requests interface. 
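
Back to the `CachedProperty` helper added to `utils2` above: it is a non-data descriptor, so after the first access the computed value sits in the instance `__dict__` (together with an injected `_invalidate_prop_cache` callable) until it is explicitly invalidated. A small sketch with illustrative class and attribute names:

    from rhodecode.lib.utils2 import CachedProperty

    class Example(object):
        computations = 0

        @CachedProperty
        def heavy(self):
            Example.computations += 1
            return 'result-%d' % Example.computations

    obj = Example()
    assert obj.heavy == 'result-1'       # computed on first access, then cached
    assert obj.heavy == 'result-1'       # served from obj.__dict__
    obj._invalidate_prop_cache('heavy')  # helper injected on first access
    assert obj.heavy == 'result-2'       # recomputed after invalidation
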
@@ -141,6 +127,9 @@ class CurlSession(object): curl.setopt(curl.HTTPHEADER, ["Expect:"]) curl.setopt(curl.TCP_NODELAY, True) curl.setopt(curl.PROTOCOLS, curl.PROTO_HTTP) + curl.setopt(curl.USERAGENT, 'RhodeCode HTTP {}'.format(rhodecode.__version__)) + curl.setopt(curl.SSL_VERIFYPEER, 0) + curl.setopt(curl.SSL_VERIFYHOST, 0) self._curl = curl def post(self, url, data, allow_redirects=False): @@ -174,12 +163,23 @@ class CurlResponse(object): @property def content(self): - return self._response_buffer.getvalue() + try: + return self._response_buffer.getvalue() + finally: + self._response_buffer.close() @property def status_code(self): return self._status_code + def iter_content(self, chunk_size): + self._response_buffer.seek(0) + while 1: + chunk = self._response_buffer.read(chunk_size) + if not chunk: + break + yield chunk + def _create_http_rpc_session(): session = CurlSession() diff --git a/rhodecode/lib/vcs/backends/base.py b/rhodecode/lib/vcs/backends/base.py --- a/rhodecode/lib/vcs/backends/base.py +++ b/rhodecode/lib/vcs/backends/base.py @@ -33,12 +33,12 @@ import collections import warnings from zope.cachedescriptors.property import Lazy as LazyProperty -from zope.cachedescriptors.property import CachedProperty from pyramid import compat +import rhodecode from rhodecode.translation import lazy_ugettext -from rhodecode.lib.utils2 import safe_str, safe_unicode +from rhodecode.lib.utils2 import safe_str, safe_unicode, CachedProperty from rhodecode.lib.vcs import connection from rhodecode.lib.vcs.utils import author_name, author_email from rhodecode.lib.vcs.conf import settings @@ -153,7 +153,7 @@ class MergeResponse(object): u'This pull request cannot be merged because of an unhandled exception. ' u'{exception}'), MergeFailureReason.MERGE_FAILED: lazy_ugettext( - u'This pull request cannot be merged because of merge conflicts.'), + u'This pull request cannot be merged because of merge conflicts. 
{unresolved_files}'), MergeFailureReason.PUSH_FAILED: lazy_ugettext( u'This pull request could not be merged because push to ' u'target:`{target}@{merge_commit}` failed.'), @@ -264,7 +264,9 @@ class BaseRepository(object): EMPTY_COMMIT_ID = '0' * 40 path = None - _commit_ids_ver = 0 + + _is_empty = None + _commit_ids = {} def __init__(self, repo_path, config=None, create=False, **kwargs): """ @@ -386,8 +388,23 @@ class BaseRepository(object): commit = self.get_commit(commit_id) return commit.size + def _check_for_empty(self): + no_commits = len(self._commit_ids) == 0 + if no_commits: + # check on remote to be sure + return self._remote.is_empty() + else: + return False + def is_empty(self): - return self._remote.is_empty() + if rhodecode.is_test: + return self._check_for_empty() + + if self._is_empty is None: + # cache empty for production, but not tests + self._is_empty = self._check_for_empty() + + return self._is_empty @staticmethod def check_url(url, config): @@ -408,14 +425,17 @@ class BaseRepository(object): # COMMITS # ========================================================================== - @CachedProperty('_commit_ids_ver') + @CachedProperty def commit_ids(self): raise NotImplementedError def append_commit_id(self, commit_id): if commit_id not in self.commit_ids: self._rebuild_cache(self.commit_ids + [commit_id]) - self._commit_ids_ver = time.time() + + # clear cache + self._invalidate_prop_cache('commit_ids') + self._is_empty = False def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=None): """ @@ -625,24 +645,26 @@ class BaseRepository(object): """ raise NotImplementedError - def _get_legacy_shadow_repository_path(self, workspace_id): + @classmethod + def _get_legacy_shadow_repository_path(cls, repo_path, workspace_id): """ Legacy version that was used before. We still need it for backward compat """ return os.path.join( - os.path.dirname(self.path), - '.__shadow_%s_%s' % (os.path.basename(self.path), workspace_id)) + os.path.dirname(repo_path), + '.__shadow_%s_%s' % (os.path.basename(repo_path), workspace_id)) - def _get_shadow_repository_path(self, repo_id, workspace_id): + @classmethod + def _get_shadow_repository_path(cls, repo_path, repo_id, workspace_id): # The name of the shadow repository must start with '.', so it is # skipped by 'rhodecode.lib.utils.get_filesystem_repos'. - legacy_repository_path = self._get_legacy_shadow_repository_path(workspace_id) + legacy_repository_path = cls._get_legacy_shadow_repository_path(repo_path, workspace_id) if os.path.exists(legacy_repository_path): return legacy_repository_path else: return os.path.join( - os.path.dirname(self.path), + os.path.dirname(repo_path), '.__shadow_repo_%s_%s' % (repo_id, workspace_id)) def cleanup_merge_workspace(self, repo_id, workspace_id): @@ -654,7 +676,8 @@ class BaseRepository(object): :param workspace_id: `workspace_id` unique identifier. 
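
To illustrate the shadow-repository naming used by the two classmethods above (the legacy location is only honoured when it already exists on disk), with hypothetical paths and identifiers:

    import os

    repo_path = '/srv/repos/my-repo'     # hypothetical repository location
    repo_id, workspace_id = 42, 'pr-7'   # hypothetical identifiers

    legacy_path = os.path.join(
        os.path.dirname(repo_path),
        '.__shadow_%s_%s' % (os.path.basename(repo_path), workspace_id))
    new_path = os.path.join(
        os.path.dirname(repo_path),
        '.__shadow_repo_%s_%s' % (repo_id, workspace_id))

    # legacy_path -> '/srv/repos/.__shadow_my-repo_pr-7'
    # new_path    -> '/srv/repos/.__shadow_repo_42_pr-7'
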
""" - shadow_repository_path = self._get_shadow_repository_path(repo_id, workspace_id) + shadow_repository_path = self._get_shadow_repository_path( + self.path, repo_id, workspace_id) shadow_repository_path_del = '{}.{}.delete'.format( shadow_repository_path, time.time()) @@ -712,7 +735,7 @@ class BaseRepository(object): def _validate_commit_id(self, commit_id): if not isinstance(commit_id, compat.string_types): - raise TypeError("commit_id must be a string value") + raise TypeError("commit_id must be a string value got {} instead".format(type(commit_id))) def _validate_commit_idx(self, commit_idx): if not isinstance(commit_idx, (int, long)): @@ -1034,12 +1057,24 @@ class BaseCommit(object): """ raise NotImplementedError + def is_node_binary(self, path): + """ + Returns ``True`` is given path is a binary file + """ + raise NotImplementedError + def get_file_content(self, path): """ Returns content of the file at the given `path`. """ raise NotImplementedError + def get_file_content_streamed(self, path): + """ + returns a streaming response from vcsserver with file content + """ + raise NotImplementedError + def get_file_size(self, path): """ Returns size of the file at the given `path`. @@ -1611,6 +1646,9 @@ class EmptyCommit(BaseCommit): def get_file_content(self, path): return u'' + def get_file_content_streamed(self, path): + yield self.get_file_content() + def get_file_size(self, path): return 0 diff --git a/rhodecode/lib/vcs/backends/git/commit.py b/rhodecode/lib/vcs/backends/git/commit.py --- a/rhodecode/lib/vcs/backends/git/commit.py +++ b/rhodecode/lib/vcs/backends/git/commit.py @@ -46,18 +46,10 @@ class GitCommit(base.BaseCommit): """ Represents state of the repository at single commit id. """ - _author_property = 'author' - _committer_property = 'committer' - _date_property = 'commit_time' - _date_tz_property = 'commit_timezone' - _message_property = 'message' - _parents_property = 'parents' _filter_pre_load = [ # done through a more complex tree walk on parents "affected_files", - # based on repository cached property - "branch", # done through subprocess not remote call "children", # done through a more complex tree walk on parents @@ -88,6 +80,7 @@ class GitCommit(base.BaseCommit): self._submodules = None def _set_bulk_properties(self, pre_load): + if not pre_load: return pre_load = [entry for entry in pre_load @@ -104,6 +97,8 @@ class GitCommit(base.BaseCommit): value = utcdate_fromtimestamp(*value) elif attr == "parents": value = self._make_commits(value) + elif attr == "branch": + value = value[0] if value else None self.__dict__[attr] = value @LazyProperty @@ -124,23 +119,19 @@ class GitCommit(base.BaseCommit): @LazyProperty def message(self): - return safe_unicode( - self._remote.commit_attribute(self.id, self._message_property)) + return safe_unicode(self._remote.message(self.id)) @LazyProperty def committer(self): - return safe_unicode( - self._remote.commit_attribute(self.id, self._committer_property)) + return safe_unicode(self._remote.author(self.id)) @LazyProperty def author(self): - return safe_unicode( - self._remote.commit_attribute(self.id, self._author_property)) + return safe_unicode(self._remote.author(self.id)) @LazyProperty def date(self): - unix_ts, tz = self._remote.get_object_attrs( - self.raw_id, self._date_property, self._date_tz_property) + unix_ts, tz = self._remote.date(self.raw_id) return utcdate_fromtimestamp(unix_ts, tz) @LazyProperty @@ -158,13 +149,22 @@ class GitCommit(base.BaseCommit): return tags @LazyProperty - def branch(self): + def 
commit_branches(self): + branches = [] for name, commit_id in self.repository.branches.iteritems(): if commit_id == self.raw_id: - return safe_unicode(name) - return None + branches.append(name) + return branches - def _get_id_for_path(self, path): + @LazyProperty + def branch(self): + branches = self._remote.branch(self.raw_id) + + if branches: + # actually commit can have multiple branches in git + return safe_unicode(branches[0]) + + def _get_tree_id_for_path(self, path): path = safe_str(path) if path in self._paths: return self._paths[path] @@ -177,56 +177,26 @@ class GitCommit(base.BaseCommit): self._paths[''] = data return data - parts = path.split('/') - dirs, name = parts[:-1], parts[-1] - cur_dir = '' - - # initially extract things from root dir - tree_items = self._remote.tree_items(tree_id) - self._process_tree_items(tree_items, cur_dir) + tree_id, tree_type, tree_mode = \ + self._remote.tree_and_type_for_path(self.raw_id, path) + if tree_id is None: + raise self.no_node_at_path(path) - for dir in dirs: - if cur_dir: - cur_dir = '/'.join((cur_dir, dir)) - else: - cur_dir = dir - dir_id = None - for item, stat_, id_, type_ in tree_items: - if item == dir: - dir_id = id_ - break - if dir_id: - if type_ != "tree": - raise CommitError('%s is not a directory' % cur_dir) - # update tree - tree_items = self._remote.tree_items(dir_id) - else: - raise CommitError('%s have not been found' % cur_dir) - - # cache all items from the given traversed tree - self._process_tree_items(tree_items, cur_dir) + self._paths[path] = [tree_id, tree_type] + self._stat_modes[path] = tree_mode if path not in self._paths: raise self.no_node_at_path(path) return self._paths[path] - def _process_tree_items(self, items, cur_dir): - for item, stat_, id_, type_ in items: - if cur_dir: - name = '/'.join((cur_dir, item)) - else: - name = item - self._paths[name] = [id_, type_] - self._stat_modes[name] = stat_ - def _get_kind(self, path): - path_id, type_ = self._get_id_for_path(path) + tree_id, type_ = self._get_tree_id_for_path(path) if type_ == 'blob': return NodeKind.FILE elif type_ == 'tree': return NodeKind.DIR - elif type == 'link': + elif type_ == 'link': return NodeKind.SUBMODULE return None @@ -245,8 +215,7 @@ class GitCommit(base.BaseCommit): """ Returns list of parent commits. """ - parent_ids = self._remote.commit_attribute( - self.id, self._parents_property) + parent_ids = self._remote.parents(self.id) return self._make_commits(parent_ids) @LazyProperty @@ -254,23 +223,15 @@ class GitCommit(base.BaseCommit): """ Returns list of child commits. 
""" - rev_filter = settings.GIT_REV_FILTER - output, __ = self.repository.run_git_command( - ['rev-list', '--children'] + rev_filter) + + children = self._remote.children(self.raw_id) + return self._make_commits(children) - child_ids = [] - pat = re.compile(r'^%s' % self.raw_id) - for l in output.splitlines(): - if pat.match(l): - found_ids = l.split(' ')[1:] - child_ids.extend(found_ids) - return self._make_commits(child_ids) + def _make_commits(self, commit_ids): + def commit_maker(_commit_id): + return self.repository.get_commit(commit_id=commit_id) - def _make_commits(self, commit_ids, pre_load=None): - return [ - self.repository.get_commit(commit_id=commit_id, pre_load=pre_load, - translate_tag=False) - for commit_id in commit_ids] + return [commit_maker(commit_id) for commit_id in commit_ids] def get_file_mode(self, path): """ @@ -278,92 +239,79 @@ class GitCommit(base.BaseCommit): """ path = safe_str(path) # ensure path is traversed - self._get_id_for_path(path) + self._get_tree_id_for_path(path) return self._stat_modes[path] def is_link(self, path): return stat.S_ISLNK(self.get_file_mode(path)) + def is_node_binary(self, path): + tree_id, _ = self._get_tree_id_for_path(path) + return self._remote.is_binary(tree_id) + def get_file_content(self, path): """ Returns content of the file at given `path`. """ - id_, _ = self._get_id_for_path(path) - return self._remote.blob_as_pretty_string(id_) + tree_id, _ = self._get_tree_id_for_path(path) + return self._remote.blob_as_pretty_string(tree_id) + + def get_file_content_streamed(self, path): + tree_id, _ = self._get_tree_id_for_path(path) + stream_method = getattr(self._remote, 'stream:blob_as_pretty_string') + return stream_method(tree_id) def get_file_size(self, path): """ Returns size of the file at given `path`. """ - id_, _ = self._get_id_for_path(path) - return self._remote.blob_raw_length(id_) + tree_id, _ = self._get_tree_id_for_path(path) + return self._remote.blob_raw_length(tree_id) def get_path_history(self, path, limit=None, pre_load=None): """ Returns history of file as reversed list of `GitCommit` objects for which file at given `path` has been modified. - - TODO: This function now uses an underlying 'git' command which works - quickly but ideally we should replace with an algorithm. """ - self._get_filectx(path) - f_path = safe_str(path) - # optimize for n==1, rev-list is much faster for that use-case - if limit == 1: - cmd = ['rev-list', '-1', self.raw_id, '--', f_path] - else: - cmd = ['log'] - if limit: - cmd.extend(['-n', str(safe_int(limit, 0))]) - cmd.extend(['--pretty=format: %H', '-s', self.raw_id, '--', f_path]) - - output, __ = self.repository.run_git_command(cmd) - commit_ids = re.findall(r'[0-9a-fA-F]{40}', output) - + path = self._get_filectx(path) + hist = self._remote.node_history(self.raw_id, path, limit) return [ self.repository.get_commit(commit_id=commit_id, pre_load=pre_load) - for commit_id in commit_ids] + for commit_id in hist] def get_file_annotate(self, path, pre_load=None): """ Returns a generator of four element tuples with lineno, commit_id, commit lazy loader and line + """ - TODO: This function now uses os underlying 'git' command which is - generally not good. Should be replaced with algorithm iterating - commits. 
- """ - cmd = ['blame', '-l', '--root', '-r', self.raw_id, '--', path] - # -l ==> outputs long shas (and we need all 40 characters) - # --root ==> doesn't put '^' character for bounderies - # -r commit_id ==> blames for the given commit - output, __ = self.repository.run_git_command(cmd) + result = self._remote.node_annotate(self.raw_id, path) - for i, blame_line in enumerate(output.split('\n')[:-1]): - line_no = i + 1 - commit_id, line = re.split(r' ', blame_line, 1) + for ln_no, commit_id, content in result: yield ( - line_no, commit_id, - lambda: self.repository.get_commit(commit_id=commit_id, - pre_load=pre_load), - line) + ln_no, commit_id, + lambda: self.repository.get_commit(commit_id=commit_id, pre_load=pre_load), + content) def get_nodes(self, path): + if self._get_kind(path) != NodeKind.DIR: raise CommitError( "Directory does not exist for commit %s at '%s'" % (self.raw_id, path)) path = self._fix_path(path) - id_, _ = self._get_id_for_path(path) - tree_id = self._remote[id_]['id'] + + tree_id, _ = self._get_tree_id_for_path(path) + dirnodes = [] filenodes = [] - alias = self.repository.alias + + # extracted tree ID gives us our files... for name, stat_, id_, type_ in self._remote.tree_items(tree_id): if type_ == 'link': url = self._get_submodule_url('/'.join((path, name))) dirnodes.append(SubModuleNode( - name, url=url, commit=id_, alias=alias)) + name, url=url, commit=id_, alias=self.repository.alias)) continue if path != '': @@ -394,15 +342,15 @@ class GitCommit(base.BaseCommit): path = self._fix_path(path) if path not in self.nodes: try: - id_, type_ = self._get_id_for_path(path) + tree_id, type_ = self._get_tree_id_for_path(path) except CommitError: raise NodeDoesNotExistError( "Cannot find one of parents' directories for a given " "path: %s" % path) - if type_ == 'link': + if type_ in ['link', 'commit']: url = self._get_submodule_url(path) - node = SubModuleNode(path, url=url, commit=id_, + node = SubModuleNode(path, url=url, commit=tree_id, alias=self.repository.alias) elif type_ == 'tree': if path == '': @@ -411,16 +359,18 @@ class GitCommit(base.BaseCommit): node = DirNode(path, commit=self) elif type_ == 'blob': node = FileNode(path, commit=self, pre_load=pre_load) + self._stat_modes[path] = node.mode else: raise self.no_node_at_path(path) # cache node self.nodes[path] = node + return self.nodes[path] def get_largefile_node(self, path): - id_, _ = self._get_id_for_path(path) - pointer_spec = self._remote.is_large_file(id_) + tree_id, _ = self._get_tree_id_for_path(path) + pointer_spec = self._remote.is_large_file(tree_id) if pointer_spec: # content of that file regular FileNode is the hash of largefile @@ -516,13 +466,14 @@ class GitCommit(base.BaseCommit): except NodeDoesNotExistError: return None - content = submodules_node.content - - # ConfigParser fails if there are whitespaces - content = '\n'.join(l.strip() for l in content.split('\n')) + # ConfigParser fails if there are whitespaces, also it needs an iterable + # file like content + def iter_content(_content): + for line in _content.splitlines(): + yield line parser = configparser.ConfigParser() - parser.readfp(StringIO(content)) + parser.read_file(iter_content(submodules_node.content)) for section in parser.sections(): path = parser.get(section, 'path') diff --git a/rhodecode/lib/vcs/backends/git/inmemory.py b/rhodecode/lib/vcs/backends/git/inmemory.py --- a/rhodecode/lib/vcs/backends/git/inmemory.py +++ b/rhodecode/lib/vcs/backends/git/inmemory.py @@ -58,10 +58,12 @@ class GitInMemoryCommit(base.BaseInMemor 
updated = [] for node in self.added + self.changed: - if not node.is_binary: + + if node.is_binary: + content = node.content + else: content = node.content.encode(ENCODING) - else: - content = node.content + updated.append({ 'path': node.path, 'node_path': node.name.encode(ENCODING), diff --git a/rhodecode/lib/vcs/backends/git/repository.py b/rhodecode/lib/vcs/backends/git/repository.py --- a/rhodecode/lib/vcs/backends/git/repository.py +++ b/rhodecode/lib/vcs/backends/git/repository.py @@ -25,15 +25,14 @@ GIT repository module import logging import os import re -import time from zope.cachedescriptors.property import Lazy as LazyProperty -from zope.cachedescriptors.property import CachedProperty from rhodecode.lib.compat import OrderedDict from rhodecode.lib.datelib import ( utcdate_fromtimestamp, makedate, date_astimestamp) from rhodecode.lib.utils import safe_unicode, safe_str +from rhodecode.lib.utils2 import CachedProperty from rhodecode.lib.vcs import connection, path as vcspath from rhodecode.lib.vcs.backends.base import ( BaseRepository, CollectionGenerator, Config, MergeResponse, @@ -43,7 +42,7 @@ from rhodecode.lib.vcs.backends.git.diff from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit from rhodecode.lib.vcs.exceptions import ( CommitDoesNotExistError, EmptyRepositoryError, - RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError) + RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError, UnresolvedFilesInRepo) SHA_PATTERN = re.compile(r'^[[0-9a-fA-F]{12}|[0-9a-fA-F]{40}]$') @@ -64,19 +63,17 @@ class GitRepository(BaseRepository): self.path = safe_str(os.path.abspath(repo_path)) self.config = config if config else self.get_default_config() - self.with_wire = with_wire + self.with_wire = with_wire or {"cache": False} # default should not use cache self._init_repo(create, src_url, do_workspace_checkout, bare) # caches self._commit_ids = {} - # dependent that trigger re-computation of commit_ids - self._commit_ids_ver = 0 - @LazyProperty def _remote(self): - return connection.Git(self.path, self.config, with_wire=self.with_wire) + repo_id = self.path + return connection.Git(self.path, repo_id, self.config, with_wire=self.with_wire) @LazyProperty def bare(self): @@ -86,7 +83,7 @@ class GitRepository(BaseRepository): def head(self): return self._remote.head() - @CachedProperty('_commit_ids_ver') + @CachedProperty def commit_ids(self): """ Returns list of commit ids, in ascending order. Being lazy @@ -190,12 +187,16 @@ class GitRepository(BaseRepository): except OSError as err: raise RepositoryError(err) - def _get_all_commit_ids(self, filters=None): + def _get_all_commit_ids(self): + return self._remote.get_all_commit_ids() + + def _get_commit_ids(self, filters=None): # we must check if this repo is not empty, since later command # fails if it is. 
And it's cheaper to ask than throw the subprocess # errors head = self._remote.head(show_exc=False) + if not head: return [] @@ -208,7 +209,7 @@ class GitRepository(BaseRepository): if filters.get('until'): extra_filter.append('--until=%s' % (filters['until'])) if filters.get('branch_name'): - rev_filter = ['--tags'] + rev_filter = [] extra_filter.append(filters['branch_name']) rev_filter.extend(extra_filter) @@ -233,6 +234,8 @@ class GitRepository(BaseRepository): if commit_id_or_idx in (None, '', 'tip', 'HEAD', 'head', -1): return self.commit_ids[-1] + commit_missing_err = "Commit {} does not exist for `{}`".format( + *map(safe_str, [commit_id_or_idx, self.name])) is_bstr = isinstance(commit_id_or_idx, (str, unicode)) if ((is_bstr and commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12) @@ -240,30 +243,15 @@ class GitRepository(BaseRepository): try: commit_id_or_idx = self.commit_ids[int(commit_id_or_idx)] except Exception: - msg = "Commit {} does not exist for `{}`".format(commit_id_or_idx, self.name) - raise CommitDoesNotExistError(msg) + raise CommitDoesNotExistError(commit_missing_err) elif is_bstr: - # check full path ref, eg. refs/heads/master - ref_id = self._refs.get(commit_id_or_idx) - if ref_id: - return ref_id - - # check branch name - branch_ids = self.branches.values() - ref_id = self._refs.get('refs/heads/%s' % commit_id_or_idx) - if ref_id: - return ref_id - - # check tag name - ref_id = self._refs.get('refs/tags/%s' % commit_id_or_idx) - if ref_id: - return ref_id - - if (not SHA_PATTERN.match(commit_id_or_idx) or - commit_id_or_idx not in self.commit_ids): - msg = "Commit {} does not exist for `{}`".format(commit_id_or_idx, self.name) - raise CommitDoesNotExistError(msg) + # Need to call remote to translate id for tagging scenario + try: + remote_data = self._remote.get_object(commit_id_or_idx) + commit_id_or_idx = remote_data["commit_id"] + except (CommitDoesNotExistError,): + raise CommitDoesNotExistError(commit_missing_err) # Ensure we return full id if not SHA_PATTERN.match(str(commit_id_or_idx)): @@ -327,32 +315,31 @@ class GitRepository(BaseRepository): def _get_branches(self): return self._get_refs_entries(prefix='refs/heads/', strip_prefix=True) - @LazyProperty + @CachedProperty def branches(self): return self._get_branches() - @LazyProperty + @CachedProperty def branches_closed(self): return {} - @LazyProperty + @CachedProperty def bookmarks(self): return {} - @LazyProperty + @CachedProperty def branches_all(self): all_branches = {} all_branches.update(self.branches) all_branches.update(self.branches_closed) return all_branches - @LazyProperty + @CachedProperty def tags(self): return self._get_tags() def _get_tags(self): - return self._get_refs_entries( - prefix='refs/tags/', strip_prefix=True, reverse=True) + return self._get_refs_entries(prefix='refs/tags/', strip_prefix=True, reverse=True) def tag(self, name, user, commit_id=None, message=None, date=None, **kwargs): @@ -371,12 +358,13 @@ class GitRepository(BaseRepository): if name in self.tags: raise TagAlreadyExistError("Tag %s already exists" % name) commit = self.get_commit(commit_id=commit_id) - message = message or "Added tag %s for commit %s" % ( - name, commit.raw_id) - self._remote.set_refs('refs/tags/%s' % name, commit._commit['id']) + message = message or "Added tag %s for commit %s" % (name, commit.raw_id) + + self._remote.set_refs('refs/tags/%s' % name, commit.raw_id) - self._refs = self._get_refs() - self.tags = self._get_tags() + self._invalidate_prop_cache('tags') + 
self._invalidate_prop_cache('_refs') + return commit def remove_tag(self, name, user, message=None, date=None): @@ -392,19 +380,15 @@ class GitRepository(BaseRepository): """ if name not in self.tags: raise TagDoesNotExistError("Tag %s does not exist" % name) - tagpath = vcspath.join( - self._remote.get_refs_path(), 'refs', 'tags', name) - try: - os.remove(tagpath) - self._refs = self._get_refs() - self.tags = self._get_tags() - except OSError as e: - raise RepositoryError(e.strerror) + + self._remote.tag_remove(name) + self._invalidate_prop_cache('tags') + self._invalidate_prop_cache('_refs') def _get_refs(self): return self._remote.get_refs() - @LazyProperty + @CachedProperty def _refs(self): return self._get_refs() @@ -455,18 +439,13 @@ class GitRepository(BaseRepository): else: commit_id = "tip" - commit_id = self._lookup_commit(commit_id) - remote_idx = None if translate_tag: - # Need to call remote to translate id for tagging scenario - remote_data = self._remote.get_object(commit_id) - commit_id = remote_data["commit_id"] - remote_idx = remote_data["idx"] + commit_id = self._lookup_commit(commit_id) try: idx = self._commit_ids[commit_id] except KeyError: - idx = remote_idx or 0 + idx = -1 return GitCommit(self, commit_id, idx, pre_load=pre_load) @@ -539,14 +518,8 @@ class GitRepository(BaseRepository): 'start': start_pos, 'end': end_pos, } - commit_ids = self._get_all_commit_ids(filters=revfilters) + commit_ids = self._get_commit_ids(filters=revfilters) - # pure python stuff, it's slow due to walker walking whole repo - # def get_revs(walker): - # for walker_entry in walker: - # yield walker_entry.commit.id - # revfilters = {} - # commit_ids = list(reversed(list(get_revs(self._repo.get_walker(**revfilters))))) else: commit_ids = self.commit_ids @@ -576,33 +549,16 @@ class GitRepository(BaseRepository): if path1 is not None and path1 != path: raise ValueError("Diff of two different paths not supported.") - flags = [ - '-U%s' % context, '--full-index', '--binary', '-p', - '-M', '--abbrev=40'] - if ignore_whitespace: - flags.append('-w') - - if commit1 == self.EMPTY_COMMIT: - cmd = ['show'] + flags + [commit2.raw_id] + if path: + file_filter = path else: - cmd = ['diff'] + flags + [commit1.raw_id, commit2.raw_id] - - if path: - cmd.extend(['--', path]) + file_filter = None - stdout, __ = self.run_git_command(cmd) - # If we used 'show' command, strip first few lines (until actual diff - # starts) - if commit1 == self.EMPTY_COMMIT: - lines = stdout.splitlines() - x = 0 - for line in lines: - if line.startswith('diff'): - break - x += 1 - # Append new line just like 'diff' command do - stdout = '\n'.join(lines[x:]) + '\n' - return GitDiff(stdout) + diff = self._remote.diff( + commit1.raw_id, commit2.raw_id, file_filter=file_filter, + opt_ignorews=ignore_whitespace, + context=context) + return GitDiff(diff) def strip(self, commit_id, branch_name): commit = self.get_commit(commit_id=commit_id) @@ -613,8 +569,11 @@ class GitRepository(BaseRepository): commit = commit.parents[0] self._remote.set_refs('refs/heads/%s' % branch_name, commit.raw_id) - self._commit_ids_ver = time.time() - # we updated _commit_ids_ver so accessing self.commit_ids will re-compute it + # clear cached properties + self._invalidate_prop_cache('commit_ids') + self._invalidate_prop_cache('_refs') + self._invalidate_prop_cache('branches') + return len(self.commit_ids) def get_common_ancestor(self, commit_id1, commit_id2, repo2): @@ -697,9 +656,11 @@ class GitRepository(BaseRepository): def set_refs(self, ref_name, 
commit_id): self._remote.set_refs(ref_name, commit_id) + self._invalidate_prop_cache('_refs') def remove_ref(self, ref_name): self._remote.remove_ref(ref_name) + self._invalidate_prop_cache('_refs') def _update_server_info(self): """ @@ -744,6 +705,12 @@ class GitRepository(BaseRepository): cmd.append(branch_name) self.run_git_command(cmd, fail_on_stderr=False) + def _create_branch(self, branch_name, commit_id): + """ + creates a branch in a GIT repo + """ + self._remote.create_branch(branch_name, commit_id) + def _identify(self): """ Return the current state of the working directory. @@ -820,8 +787,8 @@ class GitRepository(BaseRepository): return heads - def _get_shadow_instance(self, shadow_repository_path, enable_hooks=False): - return GitRepository(shadow_repository_path) + def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False): + return GitRepository(shadow_repository_path, with_wire={"cache": cache}) def _local_pull(self, repository_path, branch_name, ff_only=True): """ @@ -859,9 +826,10 @@ class GitRepository(BaseRepository): return if self.is_empty(): - # TODO(skreft): do somehting more robust in this case. + # TODO(skreft): do something more robust in this case. raise RepositoryError( 'Do not know how to merge into empty repositories yet') + unresolved = None # N.B.(skreft): the --no-ff option is used to enforce the creation of a # commit message. We also specify the user who is doing the merge. @@ -872,9 +840,18 @@ class GitRepository(BaseRepository): try: output = self.run_git_command(cmd, fail_on_stderr=False) except RepositoryError: + files = self.run_git_command(['diff', '--name-only', '--diff-filter', 'U'], + fail_on_stderr=False)[0].splitlines() + # NOTE(marcink): we add U notation for consistent with HG backend output + unresolved = ['U {}'.format(f) for f in files] + # Cleanup any merge leftovers self.run_git_command(['merge', '--abort'], fail_on_stderr=False) - raise + + if unresolved: + raise UnresolvedFilesInRepo(unresolved) + else: + raise def _local_push( self, source_branch, repository_path, target_branch, @@ -925,12 +902,12 @@ class GitRepository(BaseRepository): def _maybe_prepare_merge_workspace( self, repo_id, workspace_id, target_ref, source_ref): shadow_repository_path = self._get_shadow_repository_path( - repo_id, workspace_id) + self.path, repo_id, workspace_id) if not os.path.exists(shadow_repository_path): self._local_clone( shadow_repository_path, target_ref.name, source_ref.name) - log.debug( - 'Prepared shadow repository in %s', shadow_repository_path) + log.debug('Prepared %s shadow repository in %s', + self.alias, shadow_repository_path) return shadow_repository_path @@ -950,7 +927,7 @@ class GitRepository(BaseRepository): shadow_repository_path = self._maybe_prepare_merge_workspace( repo_id, workspace_id, target_ref, source_ref) - shadow_repo = self._get_shadow_instance(shadow_repository_path) + shadow_repo = self.get_shadow_instance(shadow_repository_path) # checkout source, if it's different. Otherwise we could not # fetch proper commits for merge testing @@ -968,7 +945,7 @@ class GitRepository(BaseRepository): # Need to reload repo to invalidate the cache, or otherwise we cannot # retrieve the last target commit. 
- shadow_repo = self._get_shadow_instance(shadow_repository_path) + shadow_repo = self.get_shadow_instance(shadow_repository_path) if target_ref.commit_id != shadow_repo.branches[target_ref.name]: log.warning('Shadow Target ref %s commit mismatch %s vs %s', target_ref, target_ref.commit_id, @@ -1000,9 +977,9 @@ class GitRepository(BaseRepository): [source_ref.commit_id]) merge_possible = True - # Need to reload repo to invalidate the cache, or otherwise we + # Need to invalidate the cache, or otherwise we # cannot retrieve the merge commit. - shadow_repo = GitRepository(shadow_repository_path) + shadow_repo = shadow_repo.get_shadow_instance(shadow_repository_path) merge_commit_id = shadow_repo.branches[pr_branch] # Set a reference pointing to the merge commit. This reference may @@ -1010,8 +987,11 @@ class GitRepository(BaseRepository): # the shadow repository. shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id) merge_ref = Reference('branch', 'pr-merge', merge_commit_id) - except RepositoryError: + except RepositoryError as e: log.exception('Failure when doing local merge on git shadow repo') + if isinstance(e, UnresolvedFilesInRepo): + metadata['unresolved_files'] = '\n* conflict: ' + ('\n * conflict: '.join(e.args[0])) + merge_possible = False merge_failure_reason = MergeFailureReason.MERGE_FAILED diff --git a/rhodecode/lib/vcs/backends/hg/commit.py b/rhodecode/lib/vcs/backends/hg/commit.py --- a/rhodecode/lib/vcs/backends/hg/commit.py +++ b/rhodecode/lib/vcs/backends/hg/commit.py @@ -71,7 +71,7 @@ class MercurialCommit(base.BaseCommit): if not pre_load: return - result = self._remote.bulk_request(self.idx, pre_load) + result = self._remote.bulk_request(self.raw_id, pre_load) for attr, value in result.items(): if attr in ["author", "branch", "message"]: value = safe_unicode(value) @@ -93,7 +93,7 @@ class MercurialCommit(base.BaseCommit): @LazyProperty def branch(self): - return safe_unicode(self._remote.ctx_branch(self.idx)) + return safe_unicode(self._remote.ctx_branch(self.raw_id)) @LazyProperty def bookmarks(self): @@ -104,7 +104,7 @@ class MercurialCommit(base.BaseCommit): @LazyProperty def message(self): - return safe_unicode(self._remote.ctx_description(self.idx)) + return safe_unicode(self._remote.ctx_description(self.raw_id)) @LazyProperty def committer(self): @@ -112,22 +112,22 @@ class MercurialCommit(base.BaseCommit): @LazyProperty def author(self): - return safe_unicode(self._remote.ctx_user(self.idx)) + return safe_unicode(self._remote.ctx_user(self.raw_id)) @LazyProperty def date(self): - return utcdate_fromtimestamp(*self._remote.ctx_date(self.idx)) + return utcdate_fromtimestamp(*self._remote.ctx_date(self.raw_id)) @LazyProperty def status(self): """ Returns modified, added, removed, deleted files for current commit """ - return self._remote.ctx_status(self.idx) + return self._remote.ctx_status(self.raw_id) @LazyProperty def _file_paths(self): - return self._remote.ctx_list(self.idx) + return self._remote.ctx_list(self.raw_id) @LazyProperty def _dir_paths(self): @@ -149,16 +149,16 @@ class MercurialCommit(base.BaseCommit): def short_id(self): return self.raw_id[:12] - def _make_commits(self, indexes, pre_load=None): - return [self.repository.get_commit(commit_idx=idx, pre_load=pre_load) - for idx in indexes if idx >= 0] + def _make_commits(self, commit_ids, pre_load=None): + return [self.repository.get_commit(commit_id=commit_id, pre_load=pre_load) + for commit_id in commit_ids] @LazyProperty def parents(self): """ Returns list of parent commits. 
""" - parents = self._remote.ctx_parents(self.idx) + parents = self._remote.ctx_parents(self.raw_id) return self._make_commits(parents) def _get_phase_text(self, phase_id): @@ -170,19 +170,19 @@ class MercurialCommit(base.BaseCommit): @LazyProperty def phase(self): - phase_id = self._remote.ctx_phase(self.idx) + phase_id = self._remote.ctx_phase(self.raw_id) phase_text = self._get_phase_text(phase_id) return safe_unicode(phase_text) @LazyProperty def obsolete(self): - obsolete = self._remote.ctx_obsolete(self.idx) + obsolete = self._remote.ctx_obsolete(self.raw_id) return obsolete @LazyProperty def hidden(self): - hidden = self._remote.ctx_hidden(self.idx) + hidden = self._remote.ctx_hidden(self.raw_id) return hidden @LazyProperty @@ -190,7 +190,7 @@ class MercurialCommit(base.BaseCommit): """ Returns list of child commits. """ - children = self._remote.ctx_children(self.idx) + children = self._remote.ctx_children(self.raw_id) return self._make_commits(children) def _fix_path(self, path): @@ -222,28 +222,37 @@ class MercurialCommit(base.BaseCommit): Returns stat mode of the file at the given ``path``. """ path = self._get_filectx(path) - if 'x' in self._remote.fctx_flags(self.idx, path): + if 'x' in self._remote.fctx_flags(self.raw_id, path): return base.FILEMODE_EXECUTABLE else: return base.FILEMODE_DEFAULT def is_link(self, path): path = self._get_filectx(path) - return 'l' in self._remote.fctx_flags(self.idx, path) + return 'l' in self._remote.fctx_flags(self.raw_id, path) + + def is_node_binary(self, path): + path = self._get_filectx(path) + return self._remote.is_binary(self.raw_id, path) def get_file_content(self, path): """ Returns content of the file at given ``path``. """ path = self._get_filectx(path) - return self._remote.fctx_data(self.idx, path) + return self._remote.fctx_node_data(self.raw_id, path) + + def get_file_content_streamed(self, path): + path = self._get_filectx(path) + stream_method = getattr(self._remote, 'stream:fctx_node_data') + return stream_method(self.raw_id, path) def get_file_size(self, path): """ Returns size of the file at given ``path``. """ path = self._get_filectx(path) - return self._remote.fctx_size(self.idx, path) + return self._remote.fctx_size(self.raw_id, path) def get_path_history(self, path, limit=None, pre_load=None): """ @@ -251,7 +260,7 @@ class MercurialCommit(base.BaseCommit): for which file at given ``path`` has been modified. 
""" path = self._get_filectx(path) - hist = self._remote.node_history(self.idx, path, limit) + hist = self._remote.node_history(self.raw_id, path, limit) return [ self.repository.get_commit(commit_id=commit_id, pre_load=pre_load) for commit_id in hist] @@ -261,13 +270,12 @@ class MercurialCommit(base.BaseCommit): Returns a generator of four element tuples with lineno, commit_id, commit lazy loader and line """ - result = self._remote.fctx_annotate(self.idx, path) + result = self._remote.fctx_annotate(self.raw_id, path) for ln_no, commit_id, content in result: yield ( ln_no, commit_id, - lambda: self.repository.get_commit(commit_id=commit_id, - pre_load=pre_load), + lambda: self.repository.get_commit(commit_id=commit_id, pre_load=pre_load), content) def get_nodes(self, path): @@ -279,8 +287,7 @@ class MercurialCommit(base.BaseCommit): if self._get_kind(path) != NodeKind.DIR: raise CommitError( - "Directory does not exist for idx %s at '%s'" % - (self.idx, path)) + "Directory does not exist for idx %s at '%s'" % (self.raw_id, path)) path = self._fix_path(path) filenodes = [ @@ -300,10 +307,11 @@ class MercurialCommit(base.BaseCommit): loc = vals[0] commit = vals[1] dirnodes.append(SubModuleNode(k, url=loc, commit=commit, alias=alias)) + nodes = dirnodes + filenodes - # cache nodes for node in nodes: - self.nodes[node.path] = node + if node.path not in self.nodes: + self.nodes[node.path] = node nodes.sort() return nodes @@ -331,8 +339,8 @@ class MercurialCommit(base.BaseCommit): return self.nodes[path] def get_largefile_node(self, path): - - if self._remote.is_large_file(path): + pointer_spec = self._remote.is_large_file(self.raw_id, path) + if pointer_spec: # content of that file regular FileNode is the hash of largefile file_id = self.get_file_content(path).strip() @@ -350,14 +358,14 @@ class MercurialCommit(base.BaseCommit): Returns a dictionary with submodule information from substate file of hg repository. 
""" - return self._remote.ctx_substate(self.idx) + return self._remote.ctx_substate(self.raw_id) @LazyProperty def affected_files(self): """ Gets a fast accessible file changes for given commit """ - return self._remote.ctx_files(self.idx) + return self._remote.ctx_files(self.raw_id) @property def added(self): diff --git a/rhodecode/lib/vcs/backends/hg/repository.py b/rhodecode/lib/vcs/backends/hg/repository.py --- a/rhodecode/lib/vcs/backends/hg/repository.py +++ b/rhodecode/lib/vcs/backends/hg/repository.py @@ -24,16 +24,15 @@ HG repository module import os import logging import binascii -import time import urllib from zope.cachedescriptors.property import Lazy as LazyProperty -from zope.cachedescriptors.property import CachedProperty from rhodecode.lib.compat import OrderedDict from rhodecode.lib.datelib import ( date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate) from rhodecode.lib.utils import safe_unicode, safe_str +from rhodecode.lib.utils2 import CachedProperty from rhodecode.lib.vcs import connection, exceptions from rhodecode.lib.vcs.backends.base import ( BaseRepository, CollectionGenerator, Config, MergeResponse, @@ -43,7 +42,7 @@ from rhodecode.lib.vcs.backends.hg.diff from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit from rhodecode.lib.vcs.exceptions import ( EmptyRepositoryError, RepositoryError, TagAlreadyExistError, - TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError) + TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError, UnresolvedFilesInRepo) from rhodecode.lib.vcs.compat import configparser hexlify = binascii.hexlify @@ -80,21 +79,19 @@ class MercurialRepository(BaseRepository # special requirements self.config = config if config else self.get_default_config( default=[('extensions', 'largefiles', '1')]) - self.with_wire = with_wire + self.with_wire = with_wire or {"cache": False} # default should not use cache self._init_repo(create, src_url, do_workspace_checkout) # caches self._commit_ids = {} - # dependent that trigger re-computation of commit_ids - self._commit_ids_ver = 0 - @LazyProperty def _remote(self): - return connection.Hg(self.path, self.config, with_wire=self.with_wire) + repo_id = self.path + return connection.Hg(self.path, repo_id, self.config, with_wire=self.with_wire) - @CachedProperty('_commit_ids_ver') + @CachedProperty def commit_ids(self): """ Returns list of commit ids, in ascending order. 
Being lazy @@ -108,15 +105,15 @@ class MercurialRepository(BaseRepository self._commit_ids = dict((commit_id, index) for index, commit_id in enumerate(commit_ids)) - @LazyProperty + @CachedProperty def branches(self): return self._get_branches() - @LazyProperty + @CachedProperty def branches_closed(self): return self._get_branches(active=False, closed=True) - @LazyProperty + @CachedProperty def branches_all(self): all_branches = {} all_branches.update(self.branches) @@ -143,7 +140,7 @@ class MercurialRepository(BaseRepository return OrderedDict(sorted(_branches, key=get_name, reverse=False)) - @LazyProperty + @CachedProperty def tags(self): """ Gets tags for this repository @@ -189,7 +186,7 @@ class MercurialRepository(BaseRepository self._remote.invalidate_vcs_cache() # Reinitialize tags - self.tags = self._get_tags() + self._invalidate_prop_cache('tags') tag_id = self.tags[name] return self.get_commit(commit_id=tag_id) @@ -216,7 +213,7 @@ class MercurialRepository(BaseRepository self._remote.tag(name, nullid, message, local, user, date, tz) self._remote.invalidate_vcs_cache() - self.tags = self._get_tags() + self._invalidate_prop_cache('tags') @LazyProperty def bookmarks(self): @@ -276,8 +273,9 @@ class MercurialRepository(BaseRepository self._remote.strip(commit_id, update=False, backup="none") self._remote.invalidate_vcs_cache() - self._commit_ids_ver = time.time() - # we updated _commit_ids_ver so accessing self.commit_ids will re-compute it + # clear cache + self._invalidate_prop_cache('commit_ids') + return len(self.commit_ids) def verify(self): @@ -286,6 +284,12 @@ class MercurialRepository(BaseRepository self._remote.invalidate_vcs_cache() return verify + def hg_update_cache(self): + update_cache = self._remote.hg_update_cache() + + self._remote.invalidate_vcs_cache() + return update_cache + def get_common_ancestor(self, commit_id1, commit_id2, repo2): if commit_id1 == commit_id2: return commit_id1 @@ -362,7 +366,6 @@ class MercurialRepository(BaseRepository if create: os.makedirs(self.path, mode=0o755) - self._remote.localrepository(create) @LazyProperty @@ -631,6 +634,7 @@ class MercurialRepository(BaseRepository # In this case we should force a commit message return source_ref.commit_id, True + unresolved = None if use_rebase: try: bookmark_name = 'rcbook%s%s' % (source_ref.commit_id, @@ -641,17 +645,23 @@ class MercurialRepository(BaseRepository self._remote.invalidate_vcs_cache() self._update(bookmark_name, clean=True) return self._identify(), True - except RepositoryError: + except RepositoryError as e: # The rebase-abort may raise another exception which 'hides' # the original one, therefore we log it here. log.exception('Error while rebasing shadow repo during merge.') + if 'unresolved conflicts' in safe_str(e): + unresolved = self._remote.get_unresolved_files() + log.debug('unresolved files: %s', unresolved) # Cleanup any rebase leftovers self._remote.invalidate_vcs_cache() self._remote.rebase(abort=True) self._remote.invalidate_vcs_cache() self._remote.update(clean=True) - raise + if unresolved: + raise UnresolvedFilesInRepo(unresolved) + else: + raise else: try: self._remote.merge(source_ref.commit_id) @@ -661,10 +671,20 @@ class MercurialRepository(BaseRepository username=safe_str('%s <%s>' % (user_name, user_email))) self._remote.invalidate_vcs_cache() return self._identify(), True - except RepositoryError: + except RepositoryError as e: + # The merge-abort may raise another exception which 'hides' + # the original one, therefore we log it here. 
+ log.exception('Error while merging shadow repo during merge.') + if 'unresolved merge conflicts' in safe_str(e): + unresolved = self._remote.get_unresolved_files() + log.debug('unresolved files: %s', unresolved) + # Cleanup any merge leftovers self._remote.update(clean=True) - raise + if unresolved: + raise UnresolvedFilesInRepo(unresolved) + else: + raise def _local_close(self, target_ref, user_name, user_email, source_ref, close_message=''): @@ -701,7 +721,7 @@ class MercurialRepository(BaseRepository def _maybe_prepare_merge_workspace( self, repo_id, workspace_id, unused_target_ref, unused_source_ref): shadow_repository_path = self._get_shadow_repository_path( - repo_id, workspace_id) + self.path, repo_id, workspace_id) if not os.path.exists(shadow_repository_path): self._local_clone(shadow_repository_path) log.debug( @@ -741,7 +761,7 @@ class MercurialRepository(BaseRepository shadow_repository_path = self._maybe_prepare_merge_workspace( repo_id, workspace_id, target_ref, source_ref) - shadow_repo = self._get_shadow_instance(shadow_repository_path) + shadow_repo = self.get_shadow_instance(shadow_repository_path) log.debug('Pulling in target reference %s', target_ref) self._validate_pull_reference(target_ref) @@ -807,8 +827,11 @@ class MercurialRepository(BaseRepository merge_possible = False merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED needs_push = False - except RepositoryError: + except RepositoryError as e: log.exception('Failure when doing local merge on hg shadow repo') + if isinstance(e, UnresolvedFilesInRepo): + metadata['unresolved_files'] = '\n* conflict: ' + ('\n * conflict: '.join(e.args[0])) + merge_possible = False merge_failure_reason = MergeFailureReason.MERGE_FAILED needs_push = False @@ -821,7 +844,7 @@ class MercurialRepository(BaseRepository shadow_repo.bookmark( target_ref.name, revision=merge_commit_id) try: - shadow_repo_with_hooks = self._get_shadow_instance( + shadow_repo_with_hooks = self.get_shadow_instance( shadow_repository_path, enable_hooks=True) # This is the actual merge action, we push from shadow @@ -857,11 +880,11 @@ class MercurialRepository(BaseRepository merge_possible, merge_succeeded, merge_ref, merge_failure_reason, metadata=metadata) - def _get_shadow_instance(self, shadow_repository_path, enable_hooks=False): + def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False): config = self.config.copy() if not enable_hooks: config.clear_section('hooks') - return MercurialRepository(shadow_repository_path, config) + return MercurialRepository(shadow_repository_path, config, with_wire={"cache": cache}) def _validate_pull_reference(self, reference): if not (reference.name in self.bookmarks or diff --git a/rhodecode/lib/vcs/backends/svn/commit.py b/rhodecode/lib/vcs/backends/svn/commit.py --- a/rhodecode/lib/vcs/backends/svn/commit.py +++ b/rhodecode/lib/vcs/backends/svn/commit.py @@ -108,6 +108,10 @@ class SubversionCommit(base.BaseCommit): return self.get_file_content(path).startswith('link') return False + def is_node_binary(self, path): + path = self._fix_path(path) + return self._remote.is_binary(self._svn_rev, safe_str(path)) + def _get_file_property(self, path, name): file_properties = self._remote.node_properties( safe_str(path), self._svn_rev) @@ -117,6 +121,11 @@ class SubversionCommit(base.BaseCommit): path = self._fix_path(path) return self._remote.get_file_content(safe_str(path), self._svn_rev) + def get_file_content_streamed(self, path): + path = self._fix_path(path) + stream_method = 
getattr(self._remote, 'stream:get_file_content') + return stream_method(safe_str(path), self._svn_rev) + def get_file_size(self, path): path = self._fix_path(path) return self._remote.get_file_size(safe_str(path), self._svn_rev) diff --git a/rhodecode/lib/vcs/backends/svn/repository.py b/rhodecode/lib/vcs/backends/svn/repository.py --- a/rhodecode/lib/vcs/backends/svn/repository.py +++ b/rhodecode/lib/vcs/backends/svn/repository.py @@ -27,11 +27,11 @@ import os import urllib from zope.cachedescriptors.property import Lazy as LazyProperty -from zope.cachedescriptors.property import CachedProperty from rhodecode.lib.compat import OrderedDict from rhodecode.lib.datelib import date_astimestamp from rhodecode.lib.utils import safe_str, safe_unicode +from rhodecode.lib.utils2 import CachedProperty from rhodecode.lib.vcs import connection, path as vcspath from rhodecode.lib.vcs.backends import base from rhodecode.lib.vcs.backends.svn.commit import ( @@ -69,19 +69,21 @@ class SubversionRepository(base.BaseRepo contact = base.BaseRepository.DEFAULT_CONTACT description = base.BaseRepository.DEFAULT_DESCRIPTION - def __init__(self, repo_path, config=None, create=False, src_url=None, bare=False, - **kwargs): + def __init__(self, repo_path, config=None, create=False, src_url=None, with_wire=None, + bare=False, **kwargs): self.path = safe_str(os.path.abspath(repo_path)) self.config = config if config else self.get_default_config() + self.with_wire = with_wire or {"cache": False} # default should not use cache self._init_repo(create, src_url) - # dependent that trigger re-computation of commit_ids - self._commit_ids_ver = 0 + # caches + self._commit_ids = {} @LazyProperty def _remote(self): - return connection.Svn(self.path, self.config) + repo_id = self.path + return connection.Svn(self.path, repo_id, self.config, with_wire=self.with_wire) def _init_repo(self, create, src_url): if create and os.path.exists(self.path): @@ -97,7 +99,7 @@ class SubversionRepository(base.BaseRepo else: self._check_path() - @CachedProperty('_commit_ids_ver') + @CachedProperty def commit_ids(self): head = self._remote.lookup(None) return [str(r) for r in xrange(1, head + 1)] diff --git a/rhodecode/lib/vcs/client_http.py b/rhodecode/lib/vcs/client_http.py --- a/rhodecode/lib/vcs/client_http.py +++ b/rhodecode/lib/vcs/client_http.py @@ -25,6 +25,7 @@ Client for the VCSServer implemented bas import copy import logging import threading +import time import urllib2 import urlparse import uuid @@ -39,7 +40,6 @@ import rhodecode from rhodecode.lib.system_info import get_cert_path from rhodecode.lib.vcs import exceptions, CurlSession - log = logging.getLogger(__name__) @@ -51,155 +51,6 @@ EXCEPTIONS_MAP = { } -class RepoMaker(object): - - def __init__(self, server_and_port, backend_endpoint, backend_type, session_factory): - self.url = urlparse.urljoin( - 'http://%s' % server_and_port, backend_endpoint) - self._session_factory = session_factory - self.backend_type = backend_type - - def __call__(self, path, config, with_wire=None): - log.debug('RepoMaker call on %s', path) - return RemoteRepo( - path, config, self.url, self._session_factory(), - with_wire=with_wire) - - def __getattr__(self, name): - def f(*args, **kwargs): - return self._call(name, *args, **kwargs) - return f - - @exceptions.map_vcs_exceptions - def _call(self, name, *args, **kwargs): - payload = { - 'id': str(uuid.uuid4()), - 'method': name, - 'backend': self.backend_type, - 'params': {'args': args, 'kwargs': kwargs} - } - return _remote_call( - self.url, payload, 
EXCEPTIONS_MAP, self._session_factory()) - - -class ServiceConnection(object): - def __init__(self, server_and_port, backend_endpoint, session_factory): - self.url = urlparse.urljoin( - 'http://%s' % server_and_port, backend_endpoint) - self._session_factory = session_factory - - def __getattr__(self, name): - def f(*args, **kwargs): - return self._call(name, *args, **kwargs) - - return f - - @exceptions.map_vcs_exceptions - def _call(self, name, *args, **kwargs): - payload = { - 'id': str(uuid.uuid4()), - 'method': name, - 'params': {'args': args, 'kwargs': kwargs} - } - return _remote_call( - self.url, payload, EXCEPTIONS_MAP, self._session_factory()) - - -class RemoteRepo(object): - - def __init__(self, path, config, url, session, with_wire=None): - self.url = url - self._session = session - self._wire = { - "path": path, - "config": config, - "context": self._create_vcs_cache_context(), - } - if with_wire: - self._wire.update(with_wire) - - # johbo: Trading complexity for performance. Avoiding the call to - # log.debug brings a few percent gain even if is is not active. - if log.isEnabledFor(logging.DEBUG): - self._call = self._call_with_logging - - self.cert_dir = get_cert_path(rhodecode.CONFIG.get('__file__')) - - def __getattr__(self, name): - def f(*args, **kwargs): - return self._call(name, *args, **kwargs) - return f - - @exceptions.map_vcs_exceptions - def _call(self, name, *args, **kwargs): - # TODO: oliver: This is currently necessary pre-call since the - # config object is being changed for hooking scenarios - wire = copy.deepcopy(self._wire) - wire["config"] = wire["config"].serialize() - - wire["config"].append(('vcs', 'ssl_dir', self.cert_dir)) - payload = { - 'id': str(uuid.uuid4()), - 'method': name, - 'params': {'wire': wire, 'args': args, 'kwargs': kwargs} - } - return _remote_call(self.url, payload, EXCEPTIONS_MAP, self._session) - - def _call_with_logging(self, name, *args, **kwargs): - context_uid = self._wire.get('context') - log.debug('Calling %s@%s with args:%.10240r. wire_context: %s', - self.url, name, args, context_uid) - return RemoteRepo._call(self, name, *args, **kwargs) - - def __getitem__(self, key): - return self.revision(key) - - def _create_vcs_cache_context(self): - """ - Creates a unique string which is passed to the VCSServer on every - remote call. It is used as cache key in the VCSServer. - """ - return str(uuid.uuid4()) - - def invalidate_vcs_cache(self): - """ - This invalidates the context which is sent to the VCSServer on every - call to a remote method. It forces the VCSServer to create a fresh - repository instance on the next call to a remote method. - """ - self._wire['context'] = self._create_vcs_cache_context() - - -class RemoteObject(object): - - def __init__(self, url, session): - self._url = url - self._session = session - - # johbo: Trading complexity for performance. Avoiding the call to - # log.debug brings a few percent gain even if is is not active. 
- if log.isEnabledFor(logging.DEBUG): - self._call = self._call_with_logging - - def __getattr__(self, name): - def f(*args, **kwargs): - return self._call(name, *args, **kwargs) - return f - - @exceptions.map_vcs_exceptions - def _call(self, name, *args, **kwargs): - payload = { - 'id': str(uuid.uuid4()), - 'method': name, - 'params': {'args': args, 'kwargs': kwargs} - } - return _remote_call(self._url, payload, EXCEPTIONS_MAP, self._session) - - def _call_with_logging(self, name, *args, **kwargs): - log.debug('Calling %s@%s', self._url, name) - return RemoteObject._call(self, name, *args, **kwargs) - - def _remote_call(url, payload, exceptions_map, session): try: response = session.post(url, data=msgpack.packb(payload)) @@ -246,6 +97,191 @@ def _remote_call(url, payload, exception return response.get('result') +def _streaming_remote_call(url, payload, exceptions_map, session, chunk_size): + try: + response = session.post(url, data=msgpack.packb(payload)) + except pycurl.error as e: + msg = '{}. \npycurl traceback: {}'.format(e, traceback.format_exc()) + raise exceptions.HttpVCSCommunicationError(msg) + except Exception as e: + message = getattr(e, 'message', '') + if 'Failed to connect' in message: + # gevent doesn't return proper pycurl errors + raise exceptions.HttpVCSCommunicationError(e) + else: + raise + + if response.status_code >= 400: + log.error('Call to %s returned non 200 HTTP code: %s', + url, response.status_code) + raise exceptions.HttpVCSCommunicationError(repr(response.content)) + + return response.iter_content(chunk_size=chunk_size) + + +class ServiceConnection(object): + def __init__(self, server_and_port, backend_endpoint, session_factory): + self.url = urlparse.urljoin('http://%s' % server_and_port, backend_endpoint) + self._session_factory = session_factory + + def __getattr__(self, name): + def f(*args, **kwargs): + return self._call(name, *args, **kwargs) + + return f + + @exceptions.map_vcs_exceptions + def _call(self, name, *args, **kwargs): + payload = { + 'id': str(uuid.uuid4()), + 'method': name, + 'params': {'args': args, 'kwargs': kwargs} + } + return _remote_call( + self.url, payload, EXCEPTIONS_MAP, self._session_factory()) + + +class RemoteVCSMaker(object): + + def __init__(self, server_and_port, backend_endpoint, backend_type, session_factory): + self.url = urlparse.urljoin('http://%s' % server_and_port, backend_endpoint) + self.stream_url = urlparse.urljoin('http://%s' % server_and_port, backend_endpoint+'/stream') + + self._session_factory = session_factory + self.backend_type = backend_type + + def __call__(self, path, repo_id, config, with_wire=None): + log.debug('%s RepoMaker call on %s', self.backend_type.upper(), path) + return RemoteRepo(path, repo_id, config, self, with_wire=with_wire) + + def __getattr__(self, name): + def remote_attr(*args, **kwargs): + return self._call(name, *args, **kwargs) + return remote_attr + + @exceptions.map_vcs_exceptions + def _call(self, func_name, *args, **kwargs): + payload = { + 'id': str(uuid.uuid4()), + 'method': func_name, + 'backend': self.backend_type, + 'params': {'args': args, 'kwargs': kwargs} + } + url = self.url + return _remote_call(url, payload, EXCEPTIONS_MAP, self._session_factory()) + + +class RemoteRepo(object): + CHUNK_SIZE = 16384 + + def __init__(self, path, repo_id, config, remote_maker, with_wire=None): + self.url = remote_maker.url + self.stream_url = remote_maker.stream_url + self._session = remote_maker._session_factory() + + with_wire = with_wire or {} + + repo_state_uid = 
with_wire.get('repo_state_uid') or 'state' + self._wire = { + "path": path, # repo path + "repo_id": repo_id, + "config": config, + "repo_state_uid": repo_state_uid, + "context": self._create_vcs_cache_context(path, repo_state_uid) + } + + if with_wire: + self._wire.update(with_wire) + + # NOTE(johbo): Trading complexity for performance. Avoiding the call to + # log.debug brings a few percent gain even if is is not active. + if log.isEnabledFor(logging.DEBUG): + self._call_with_logging = True + + self.cert_dir = get_cert_path(rhodecode.CONFIG.get('__file__')) + + def __getattr__(self, name): + + if name.startswith('stream:'): + def repo_remote_attr(*args, **kwargs): + return self._call_stream(name, *args, **kwargs) + else: + def repo_remote_attr(*args, **kwargs): + return self._call(name, *args, **kwargs) + + return repo_remote_attr + + def _base_call(self, name, *args, **kwargs): + # TODO: oliver: This is currently necessary pre-call since the + # config object is being changed for hooking scenarios + wire = copy.deepcopy(self._wire) + wire["config"] = wire["config"].serialize() + wire["config"].append(('vcs', 'ssl_dir', self.cert_dir)) + + payload = { + 'id': str(uuid.uuid4()), + 'method': name, + 'params': {'wire': wire, 'args': args, 'kwargs': kwargs} + } + + context_uid = wire.get('context') + return context_uid, payload + + @exceptions.map_vcs_exceptions + def _call(self, name, *args, **kwargs): + context_uid, payload = self._base_call(name, *args, **kwargs) + url = self.url + + start = time.time() + if self._call_with_logging: + log.debug('Calling %s@%s with args:%.10240r. wire_context: %s', + url, name, args, context_uid) + + result = _remote_call(url, payload, EXCEPTIONS_MAP, self._session) + if self._call_with_logging: + log.debug('Call %s@%s took: %.4fs. wire_context: %s', + url, name, time.time()-start, context_uid) + return result + + @exceptions.map_vcs_exceptions + def _call_stream(self, name, *args, **kwargs): + context_uid, payload = self._base_call(name, *args, **kwargs) + payload['chunk_size'] = self.CHUNK_SIZE + url = self.stream_url + + start = time.time() + if self._call_with_logging: + log.debug('Calling %s@%s with args:%.10240r. wire_context: %s', + url, name, args, context_uid) + + result = _streaming_remote_call(url, payload, EXCEPTIONS_MAP, self._session, + self.CHUNK_SIZE) + + if self._call_with_logging: + log.debug('Call %s@%s took: %.4fs. wire_context: %s', + url, name, time.time()-start, context_uid) + return result + + def __getitem__(self, key): + return self.revision(key) + + def _create_vcs_cache_context(self, *args): + """ + Creates a unique string which is passed to the VCSServer on every + remote call. It is used as cache key in the VCSServer. + """ + hash_key = '-'.join(map(str, args)) + return str(uuid.uuid5(uuid.NAMESPACE_URL, hash_key)) + + def invalidate_vcs_cache(self): + """ + This invalidates the context which is sent to the VCSServer on every + call to a remote method. It forces the VCSServer to create a fresh + repository instance on the next call to a remote method. 
+ """ + self._wire['context'] = str(uuid.uuid4()) + + class VcsHttpProxy(object): CHUNK_SIZE = 16384 @@ -254,8 +290,7 @@ class VcsHttpProxy(object): retries = Retry(total=5, connect=None, read=None, redirect=None) adapter = requests.adapters.HTTPAdapter(max_retries=retries) - self.base_url = urlparse.urljoin( - 'http://%s' % server_and_port, backend_endpoint) + self.base_url = urlparse.urljoin('http://%s' % server_and_port, backend_endpoint) self.session = requests.Session() self.session.mount('http://', adapter) diff --git a/rhodecode/lib/vcs/conf/settings.py b/rhodecode/lib/vcs/conf/settings.py --- a/rhodecode/lib/vcs/conf/settings.py +++ b/rhodecode/lib/vcs/conf/settings.py @@ -25,9 +25,6 @@ Internal settings for vcs-lib # list of default encoding used in safe_unicode/safe_str methods DEFAULT_ENCODINGS = ['utf8'] -# Optional arguments to rev-filter, it has to be a list -# It can also be ['--branches', '--tags'] -GIT_REV_FILTER = ['--all'] # Compatibility version when creating SVN repositories. None means newest. # Other available options are: pre-1.4-compatible, pre-1.5-compatible, @@ -59,7 +56,7 @@ HOOKS_HOST = '127.0.0.1' MERGE_MESSAGE_TMPL = ( - u'Merge pull request #{pr_id} from {source_repo} {source_ref_name}\n\n ' + u'Merge pull request !{pr_id} from {source_repo} {source_ref_name}\n\n ' u'{pr_title}') MERGE_DRY_RUN_MESSAGE = 'dry_run_merge_message_from_rhodecode' MERGE_DRY_RUN_USER = 'Dry-Run User' diff --git a/rhodecode/lib/vcs/exceptions.py b/rhodecode/lib/vcs/exceptions.py --- a/rhodecode/lib/vcs/exceptions.py +++ b/rhodecode/lib/vcs/exceptions.py @@ -50,6 +50,10 @@ class RepositoryRequirementError(Reposit pass +class UnresolvedFilesInRepo(RepositoryError): + pass + + class VCSBackendNotSupportedError(VCSError): """ Exception raised when VCSServer does not support requested backend diff --git a/rhodecode/lib/vcs/nodes.py b/rhodecode/lib/vcs/nodes.py --- a/rhodecode/lib/vcs/nodes.py +++ b/rhodecode/lib/vcs/nodes.py @@ -27,6 +27,7 @@ import stat from zope.cachedescriptors.property import Lazy as LazyProperty +import rhodecode from rhodecode.config.conf import LANGUAGES_EXTENSIONS_MAP from rhodecode.lib.utils import safe_unicode, safe_str from rhodecode.lib.utils2 import md5 @@ -369,6 +370,15 @@ class FileNode(Node): content = self._content return content + def stream_bytes(self): + """ + Returns an iterator that will stream the content of the file directly from + vcsserver without loading it to memory. + """ + if self.commit: + return self.commit.get_file_content_streamed(self.path) + raise NodeError("Cannot retrieve stream_bytes without related commit attribute") + @LazyProperty def md5(self): """ @@ -432,7 +442,7 @@ class FileNode(Node): @LazyProperty def last_commit(self): if self.commit: - pre_load = ["author", "date", "message"] + pre_load = ["author", "date", "message", "parents"] return self.commit.get_path_commit(self.path, pre_load=pre_load) raise NodeError( "Cannot retrieve last commit of the file without " @@ -548,7 +558,7 @@ class FileNode(Node): """ if self.commit is None: raise NodeError('Unable to get commit for this FileNode') - pre_load = ["author", "date", "message"] + pre_load = ["author", "date", "message", "parents"] return self.commit.get_file_annotate(self.path, pre_load=pre_load) @LazyProperty @@ -569,8 +579,11 @@ class FileNode(Node): """ Returns True if file has binary content. 
""" - _bin = self.raw_bytes and '\0' in self.raw_bytes - return _bin + if self.commit: + return self.commit.is_node_binary(self.path) + else: + raw_bytes = self._content + return raw_bytes and '\0' in raw_bytes @LazyProperty def extension(self): @@ -594,27 +607,32 @@ class FileNode(Node): if self.commit: return self.commit.get_largefile_node(self.path) + def count_lines(self, content, count_empty=False): + + if count_empty: + all_lines = 0 + empty_lines = 0 + for line in content.splitlines(True): + if line == '\n': + empty_lines += 1 + all_lines += 1 + + return all_lines, all_lines - empty_lines + else: + # fast method + empty_lines = all_lines = content.count('\n') + if all_lines == 0 and content: + # one-line without a newline + empty_lines = all_lines = 1 + + return all_lines, empty_lines + def lines(self, count_empty=False): all_lines, empty_lines = 0, 0 if not self.is_binary: content = self.content - if count_empty: - all_lines = 0 - empty_lines = 0 - for line in content.splitlines(True): - if line == '\n': - empty_lines += 1 - all_lines += 1 - - return all_lines, all_lines - empty_lines - else: - # fast method - empty_lines = all_lines = content.count('\n') - if all_lines == 0 and content: - # one-line without a newline - empty_lines = all_lines = 1 - + all_lines, empty_lines = self.count_lines(content, count_empty=count_empty) return all_lines, empty_lines def __repr__(self): @@ -654,7 +672,7 @@ class DirNode(Node): """ DirNode stores list of files and directories within this node. Nodes may be used standalone but within repository context they - lazily fetch data within same repositorty's commit. + lazily fetch data within same repository's commit. """ def __init__(self, path, nodes=(), commit=None): @@ -730,8 +748,7 @@ class DirNode(Node): return self._nodes_dict[path] elif len(paths) > 1: if self.commit is None: - raise NodeError( - "Cannot access deeper nodes without commit") + raise NodeError("Cannot access deeper nodes without commit") else: path1, path2 = paths[0], '/'.join(paths[1:]) return self.get_node(path1).get_node(path2) @@ -756,7 +773,7 @@ class DirNode(Node): @LazyProperty def last_commit(self): if self.commit: - pre_load = ["author", "date", "message"] + pre_load = ["author", "date", "message", "parents"] return self.commit.get_path_commit(self.path, pre_load=pre_load) raise NodeError( "Cannot retrieve last commit of the file without " @@ -848,3 +865,11 @@ class LargeFileNode(FileNode): Overwrites name to be the org lf path """ return self.org_path + + def stream_bytes(self): + with open(self.path, 'rb') as stream: + while True: + data = stream.read(16 * 1024) + if not data: + break + yield data diff --git a/rhodecode/model/comment.py b/rhodecode/model/comment.py --- a/rhodecode/model/comment.py +++ b/rhodecode/model/comment.py @@ -170,6 +170,23 @@ class CommentsModel(BaseModel): return todos + def get_pull_request_resolved_todos(self, pull_request, show_outdated=True): + + todos = Session().query(ChangesetComment) \ + .filter(ChangesetComment.pull_request == pull_request) \ + .filter(ChangesetComment.resolved_by != None) \ + .filter(ChangesetComment.comment_type + == ChangesetComment.COMMENT_TYPE_TODO) + + if not show_outdated: + todos = todos.filter( + coalesce(ChangesetComment.display_state, '') != + ChangesetComment.COMMENT_OUTDATED) + + todos = todos.all() + + return todos + def get_commit_unresolved_todos(self, commit_id, show_outdated=True): todos = Session().query(ChangesetComment) \ @@ -187,6 +204,23 @@ class CommentsModel(BaseModel): return todos + def 
get_commit_resolved_todos(self, commit_id, show_outdated=True): + + todos = Session().query(ChangesetComment) \ + .filter(ChangesetComment.revision == commit_id) \ + .filter(ChangesetComment.resolved_by != None) \ + .filter(ChangesetComment.comment_type + == ChangesetComment.COMMENT_TYPE_TODO) + + if not show_outdated: + todos = todos.filter( + coalesce(ChangesetComment.display_state, '') != + ChangesetComment.COMMENT_OUTDATED) + + todos = todos.all() + + return todos + def _log_audit_action(self, action, action_data, auth_user, comment): audit_logger.store( action=action, @@ -198,7 +232,7 @@ class CommentsModel(BaseModel): f_path=None, line_no=None, status_change=None, status_change_type=None, comment_type=None, resolves_comment_id=None, closing_pr=False, send_email=True, - renderer=None, auth_user=None): + renderer=None, auth_user=None, extra_recipients=None): """ Creates new comment for commit or pull request. IF status_change is not none this comment is associated with a @@ -213,10 +247,13 @@ class CommentsModel(BaseModel): :param line_no: :param status_change: Label for status change :param comment_type: Type of comment + :param resolves_comment_id: id of comment which this one will resolve :param status_change_type: type of status change :param closing_pr: :param send_email: :param renderer: pick renderer for this comment + :param auth_user: current authenticated user calling this method + :param extra_recipients: list of extra users to be added to recipients """ if not text: @@ -302,7 +339,8 @@ class CommentsModel(BaseModel): 'comment_body': text, 'comment_file': f_path, 'comment_line': line_no, - 'comment_type': comment_type or 'note' + 'comment_type': comment_type or 'note', + 'comment_id': comment.comment_id } if commit_obj: @@ -316,6 +354,9 @@ class CommentsModel(BaseModel): recipients += [cs_author] commit_comment_url = self.get_url(comment, request=request) + commit_comment_reply_url = self.get_url( + comment, request=request, + anchor='comment-{}/?/ReplyToComment'.format(comment.comment_id)) target_repo_url = h.link_to( repo.repo_name, @@ -325,8 +366,9 @@ class CommentsModel(BaseModel): kwargs.update({ 'commit': commit_obj, 'commit_message': commit_obj.message, - 'commit_target_repo': target_repo_url, + 'commit_target_repo_url': target_repo_url, 'commit_comment_url': commit_comment_url, + 'commit_comment_reply_url': commit_comment_reply_url }) elif pull_request_obj: @@ -342,11 +384,15 @@ class CommentsModel(BaseModel): pr_target_repo = pull_request_obj.target_repo pr_source_repo = pull_request_obj.source_repo - pr_comment_url = h.route_url( + pr_comment_url = self.get_url(comment, request=request) + pr_comment_reply_url = self.get_url( + comment, request=request, + anchor='comment-{}/?/ReplyToComment'.format(comment.comment_id)) + + pr_url = h.route_url( 'pullrequest_show', repo_name=pr_target_repo.repo_name, - pull_request_id=pull_request_obj.pull_request_id, - _anchor='comment-%s' % comment.comment_id) + pull_request_id=pull_request_obj.pull_request_id, ) # set some variables for email notification pr_target_repo_url = h.route_url( @@ -359,13 +405,18 @@ class CommentsModel(BaseModel): kwargs.update({ 'pull_request': pull_request_obj, 'pr_id': pull_request_obj.pull_request_id, - 'pr_target_repo': pr_target_repo, - 'pr_target_repo_url': pr_target_repo_url, - 'pr_source_repo': pr_source_repo, - 'pr_source_repo_url': pr_source_repo_url, + 'pull_request_url': pr_url, + 'pull_request_target_repo': pr_target_repo, + 'pull_request_target_repo_url': pr_target_repo_url, + 
'pull_request_source_repo': pr_source_repo, + 'pull_request_source_repo_url': pr_source_repo_url, 'pr_comment_url': pr_comment_url, + 'pr_comment_reply_url': pr_comment_reply_url, 'pr_closing': closing_pr, }) + + recipients += [self._get_user(u) for u in (extra_recipients or [])] + if send_email: # pre-generate the subject for notification itself (subject, @@ -459,24 +510,27 @@ class CommentsModel(BaseModel): q = q.order_by(ChangesetComment.created_on) return q.all() - def get_url(self, comment, request=None, permalink=False): + def get_url(self, comment, request=None, permalink=False, anchor=None): if not request: request = get_current_request() comment = self.__get_commit_comment(comment) + if anchor is None: + anchor = 'comment-{}'.format(comment.comment_id) + if comment.pull_request: pull_request = comment.pull_request if permalink: return request.route_url( 'pull_requests_global', pull_request_id=pull_request.pull_request_id, - _anchor='comment-%s' % comment.comment_id) + _anchor=anchor) else: return request.route_url( 'pullrequest_show', repo_name=safe_str(pull_request.target_repo.repo_name), pull_request_id=pull_request.pull_request_id, - _anchor='comment-%s' % comment.comment_id) + _anchor=anchor) else: repo = comment.repo @@ -486,13 +540,13 @@ class CommentsModel(BaseModel): return request.route_url( 'repo_commit', repo_name=safe_str(repo.repo_id), commit_id=commit_id, - _anchor='comment-%s' % comment.comment_id) + _anchor=anchor) else: return request.route_url( 'repo_commit', repo_name=safe_str(repo.repo_name), commit_id=commit_id, - _anchor='comment-%s' % comment.comment_id) + _anchor=anchor) def get_comments(self, repo_id, revision=None, pull_request=None): """ diff --git a/rhodecode/model/db.py b/rhodecode/model/db.py --- a/rhodecode/model/db.py +++ b/rhodecode/model/db.py @@ -29,6 +29,7 @@ import string import hashlib import logging import datetime +import uuid import warnings import ipaddress import functools @@ -36,10 +37,10 @@ import traceback import collections from sqlalchemy import ( - or_, and_, not_, func, TypeDecorator, event, + or_, and_, not_, func, cast, TypeDecorator, event, Index, Sequence, UniqueConstraint, ForeignKey, CheckConstraint, Column, Boolean, String, Unicode, UnicodeText, DateTime, Integer, LargeBinary, - Text, Float, PickleType) + Text, Float, PickleType, BigInteger) from sqlalchemy.sql.expression import true, false, case from sqlalchemy.sql.functions import coalesce, count # pragma: no cover from sqlalchemy.orm import ( @@ -51,10 +52,10 @@ from sqlalchemy.dialects.mysql import LO from zope.cachedescriptors.property import Lazy as LazyProperty from pyramid import compat from pyramid.threadlocal import get_current_request -from webhelpers.text import collapse, remove_formatting +from webhelpers2.text import remove_formatting from rhodecode.translation import _ -from rhodecode.lib.vcs import get_vcs_instance +from rhodecode.lib.vcs import get_vcs_instance, VCSError from rhodecode.lib.vcs.backends.base import EmptyCommit, Reference from rhodecode.lib.utils2 import ( str2bool, safe_str, get_commit_safe, safe_unicode, sha1_safe, @@ -66,6 +67,8 @@ from rhodecode.lib.ext_json import json from rhodecode.lib.caching_query import FromCache from rhodecode.lib.encrypt import AESCipher, validate_and_get_enc_data from rhodecode.lib.encrypt2 import Encryptor +from rhodecode.lib.exceptions import ( + ArtifactMetadataDuplicate, ArtifactMetadataBadValueType) from rhodecode.model.meta import Base, Session URL_SEP = '/' @@ -574,6 +577,7 @@ class User(Base, BaseModel): 
_email = Column("email", String(255), nullable=True, unique=None, default=None) last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None) last_activity = Column('last_activity', DateTime(timezone=False), nullable=True, unique=None, default=None) + description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql')) extern_type = Column("extern_type", String(255), nullable=True, unique=None, default=None) extern_name = Column("extern_name", String(255), nullable=True, unique=None, default=None) @@ -583,7 +587,7 @@ class User(Base, BaseModel): _user_data = Column("user_data", LargeBinary(), nullable=True) # JSON data user_log = relationship('UserLog') - user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all') + user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all, delete-orphan') repositories = relationship('Repository') repository_groups = relationship('RepoGroup') @@ -592,9 +596,9 @@ class User(Base, BaseModel): user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all') followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all') - repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all') - repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all') - user_group_to_perm = relationship('UserUserGroupToPerm', primaryjoin='UserUserGroupToPerm.user_id==User.user_id', cascade='all') + repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all, delete-orphan') + repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all, delete-orphan') + user_group_to_perm = relationship('UserUserGroupToPerm', primaryjoin='UserUserGroupToPerm.user_id==User.user_id', cascade='all, delete-orphan') group_member = relationship('UserGroupMember', cascade='all') @@ -614,13 +618,19 @@ class User(Base, BaseModel): # user pull requests user_pull_requests = relationship('PullRequest', cascade='all') # external identities - extenal_identities = relationship( + external_identities = relationship( 'ExternalIdentity', primaryjoin="User.user_id==ExternalIdentity.local_user_id", cascade='all') # review rules user_review_rules = relationship('RepoReviewRuleUser', cascade='all') + # artifacts owned + artifacts = relationship('FileStore', primaryjoin='FileStore.user_id==User.user_id') + + # no cascade, set NULL + scope_artifacts = relationship('FileStore', primaryjoin='FileStore.scope_user_id==User.user_id') + def __unicode__(self): return u"<%s('id:%s:%s')>" % (self.__class__.__name__, self.user_id, self.username) @@ -687,6 +697,17 @@ class User(Base, BaseModel): .all() return [self.email] + [x.email for x in other] + def emails_cached(self): + emails = UserEmailMap.query()\ + .filter(UserEmailMap.user == self) \ + .order_by(UserEmailMap.email_id.asc()) + + emails = emails.options( + FromCache("sql_cache_short", "get_user_{}_emails".format(self.user_id)) + ) + + return [self.email] + [x.email for x in emails] + @property def auth_tokens(self): auth_tokens = self.get_auth_tokens() @@ -715,6 +736,23 @@ class User(Base, BaseModel): return feed_tokens[0].api_key return 'NO_FEED_TOKEN_AVAILABLE' + @LazyProperty + def 
artifact_token(self): + return self.get_artifact_token() + + def get_artifact_token(self, cache=True): + artifacts_tokens = UserApiKeys.query()\ + .filter(UserApiKeys.user == self)\ + .filter(UserApiKeys.role == UserApiKeys.ROLE_ARTIFACT_DOWNLOAD) + if cache: + artifacts_tokens = artifacts_tokens.options( + FromCache("sql_cache_short", "get_user_artifact_token_%s" % self.user_id)) + + artifacts_tokens = artifacts_tokens.all() + if artifacts_tokens: + return artifacts_tokens[0].api_key + return 'NO_ARTIFACT_TOKEN_AVAILABLE' + @classmethod def get(cls, user_id, cache=False): if not user_id: @@ -762,7 +800,7 @@ class User(Base, BaseModel): else: plain_token_map[token.api_key] = token log.debug( - 'Found %s plain and %s encrypted user tokens to check for authentication', + 'Found %s plain and %s encrypted tokens to check for authentication for this user', len(plain_token_map), len(enc_token_map)) # plain token match comes first @@ -828,6 +866,10 @@ class User(Base, BaseModel): def is_admin(self): return self.admin + @property + def language(self): + return self.user_data.get('language') + def AuthUser(self, **kwargs): """ Returns instance of AuthUser for this user @@ -947,7 +989,7 @@ class User(Base, BaseModel): old.update(**kwargs) usr.user_data = old Session().add(usr) - log.debug('updated userdata with ', kwargs) + log.debug('updated userdata with %s', kwargs) def update_lastlogin(self): """Update user lastlogin""" @@ -1020,6 +1062,7 @@ class User(Base, BaseModel): 'username': user.username, 'firstname': user.name, 'lastname': user.lastname, + 'description': user.description, 'email': user.email, 'emails': user.emails, } @@ -1060,7 +1103,7 @@ class User(Base, BaseModel): class UserApiKeys(Base, BaseModel): __tablename__ = 'user_api_keys' __table_args__ = ( - Index('uak_api_key_idx', 'api_key', unique=True), + Index('uak_api_key_idx', 'api_key'), Index('uak_api_key_expires_idx', 'api_key', 'expires'), base_table_args ) @@ -1072,9 +1115,10 @@ class UserApiKeys(Base, BaseModel): ROLE_VCS = 'token_role_vcs' ROLE_API = 'token_role_api' ROLE_FEED = 'token_role_feed' + ROLE_ARTIFACT_DOWNLOAD = 'role_artifact_download' ROLE_PASSWORD_RESET = 'token_password_reset' - ROLES = [ROLE_ALL, ROLE_HTTP, ROLE_VCS, ROLE_API, ROLE_FEED] + ROLES = [ROLE_ALL, ROLE_HTTP, ROLE_VCS, ROLE_API, ROLE_FEED, ROLE_ARTIFACT_DOWNLOAD] user_api_key_id = Column("user_api_key_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None) @@ -1136,6 +1180,7 @@ class UserApiKeys(Base, BaseModel): cls.ROLE_VCS: _('vcs (git/hg/svn protocol)'), cls.ROLE_API: _('api calls'), cls.ROLE_FEED: _('feed access'), + cls.ROLE_ARTIFACT_DOWNLOAD: _('artifacts downloads'), }.get(role, role) @property @@ -1327,7 +1372,7 @@ class UserGroup(Base, BaseModel): created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) _group_data = Column("group_data", LargeBinary(), nullable=True) # JSON data - members = relationship('UserGroupMember', cascade="all, delete, delete-orphan", lazy="joined") + members = relationship('UserGroupMember', cascade="all, delete-orphan", lazy="joined") users_group_to_perm = relationship('UserGroupToPerm', cascade='all') users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all') users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all') @@ -1601,7 +1646,7 @@ class Repository(Base, BaseModel): 
primary_key=True) _repo_name = Column( "repo_name", Text(), nullable=False, default=None) - _repo_name_hash = Column( + repo_name_hash = Column( "repo_name_hash", String(255), nullable=False, unique=True) repo_state = Column("repo_state", String(255), nullable=True) @@ -1664,25 +1709,26 @@ class Repository(Base, BaseModel): primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id', cascade='all') extra_fields = relationship( - 'RepositoryField', cascade="all, delete, delete-orphan") + 'RepositoryField', cascade="all, delete-orphan") logs = relationship('UserLog') comments = relationship( - 'ChangesetComment', cascade="all, delete, delete-orphan") + 'ChangesetComment', cascade="all, delete-orphan") pull_requests_source = relationship( 'PullRequest', primaryjoin='PullRequest.source_repo_id==Repository.repo_id', - cascade="all, delete, delete-orphan") + cascade="all, delete-orphan") pull_requests_target = relationship( 'PullRequest', primaryjoin='PullRequest.target_repo_id==Repository.repo_id', - cascade="all, delete, delete-orphan") + cascade="all, delete-orphan") ui = relationship('RepoRhodeCodeUi', cascade="all") settings = relationship('RepoRhodeCodeSetting', cascade="all") - integrations = relationship('Integration', cascade="all, delete, delete-orphan") + integrations = relationship('Integration', cascade="all, delete-orphan") scoped_tokens = relationship('UserApiKeys', cascade="all") - artifacts = relationship('FileStore', cascade="all") + # no cascade, set NULL + artifacts = relationship('FileStore', primaryjoin='FileStore.scope_repo_id==Repository.repo_id') def __unicode__(self): return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id, @@ -1726,22 +1772,26 @@ class Repository(Base, BaseModel): else: self._locked = None - @hybrid_property - def changeset_cache(self): + @classmethod + def _load_changeset_cache(cls, repo_id, changeset_cache_raw): from rhodecode.lib.vcs.backends.base import EmptyCommit dummy = EmptyCommit().__json__() - if not self._changeset_cache: - dummy['source_repo_id'] = self.repo_id + if not changeset_cache_raw: + dummy['source_repo_id'] = repo_id return json.loads(json.dumps(dummy)) try: - return json.loads(self._changeset_cache) + return json.loads(changeset_cache_raw) except TypeError: return dummy except Exception: log.error(traceback.format_exc()) return dummy + @hybrid_property + def changeset_cache(self): + return self._load_changeset_cache(self.repo_id, self._changeset_cache) + @changeset_cache.setter def changeset_cache(self, val): try: @@ -1756,7 +1806,7 @@ class Repository(Base, BaseModel): @repo_name.setter def repo_name(self, value): self._repo_name = value - self._repo_name_hash = hashlib.sha1(safe_str(value)).hexdigest() + self.repo_name_hash = hashlib.sha1(safe_str(value)).hexdigest() @classmethod def normalize_repo_name(cls, repo_name): @@ -2193,17 +2243,21 @@ class Repository(Base, BaseModel): def last_commit_cache_update_diff(self): return time.time() - (safe_int(self.changeset_cache.get('updated_on')) or 0) - @property - def last_commit_change(self): + @classmethod + def _load_commit_change(cls, last_commit_cache): from rhodecode.lib.vcs.utils.helpers import parse_datetime empty_date = datetime.datetime.fromtimestamp(0) - date_latest = self.changeset_cache.get('date', empty_date) + date_latest = last_commit_cache.get('date', empty_date) try: return parse_datetime(date_latest) except Exception: return empty_date @property + def last_commit_change(self): + return self._load_commit_change(self.changeset_cache) + + @property def 
last_db_change(self): return self.updated_on @@ -2245,20 +2299,27 @@ class Repository(Base, BaseModel): del override['ssh'] # we didn't override our tmpl from **overrides + request = get_current_request() if not uri_tmpl: - rc_config = SettingsModel().get_all_settings(cache=True) + if hasattr(request, 'call_context') and hasattr(request.call_context, 'rc_config'): + rc_config = request.call_context.rc_config + else: + rc_config = SettingsModel().get_all_settings(cache=True) + if ssh: uri_tmpl = rc_config.get( 'rhodecode_clone_uri_ssh_tmpl') or self.DEFAULT_CLONE_URI_SSH + else: uri_tmpl = rc_config.get( 'rhodecode_clone_uri_tmpl') or self.DEFAULT_CLONE_URI - request = get_current_request() return get_clone_url(request=request, uri_tmpl=uri_tmpl, repo_name=self.repo_name, - repo_id=self.repo_id, **override) + repo_id=self.repo_id, + repo_type=self.repo_type, + **override) def set_state(self, state): self.repo_state = state @@ -2292,9 +2353,14 @@ class Repository(Base, BaseModel): return self.get_commit() return commit + def flush_commit_cache(self): + self.update_commit_cache(cs_cache={'raw_id':'0'}) + self.update_commit_cache() + def update_commit_cache(self, cs_cache=None, config=None): """ - Update cache of last changeset for repository, keys should be:: + Update cache of last commit for repository + cache_keys should be:: source_repo_id short_id @@ -2308,14 +2374,20 @@ class Repository(Base, BaseModel): """ from rhodecode.lib.vcs.backends.base import BaseChangeset + from rhodecode.lib.vcs.utils.helpers import parse_datetime + empty_date = datetime.datetime.fromtimestamp(0) + if cs_cache is None: # use no-cache version here - scm_repo = self.scm_instance(cache=False, config=config) - + try: + scm_repo = self.scm_instance(cache=False, config=config) + except VCSError: + scm_repo = None empty = scm_repo is None or scm_repo.is_empty() + if not empty: cs_cache = scm_repo.get_commit( - pre_load=["author", "date", "message", "parents"]) + pre_load=["author", "date", "message", "parents", "branch"]) else: cs_cache = EmptyCommit() @@ -2330,32 +2402,39 @@ class Repository(Base, BaseModel): # check if we have maybe already latest cached revision if is_outdated(cs_cache) or not self.changeset_cache: - _default = datetime.datetime.utcnow() - last_change = cs_cache.get('date') or _default + _current_datetime = datetime.datetime.utcnow() + last_change = cs_cache.get('date') or _current_datetime # we check if last update is newer than the new value # if yes, we use the current timestamp instead. Imagine you get # old commit pushed 1y ago, we'd set last update 1y to ago. 
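For illustration only (these lines are not part of the changeset): the comment above describes the scenario the following block guards against, namely a commit with an old author date being pushed today. A hypothetical timeline, using only the standard library:

    import datetime

    # hypothetical dates for the scenario described in the comment above
    commit_author_date = datetime.datetime(2019, 1, 8)    # commit authored a year ago
    push_time = datetime.datetime.utcnow()                # but pushed only now
    # the cache update should not report the repository as last changed a year
    # ago merely because the newly pushed commit carries an old author date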
last_change_timestamp = datetime_to_time(last_change) current_timestamp = datetime_to_time(last_change) - if last_change_timestamp > current_timestamp: - cs_cache['date'] = _default + if last_change_timestamp > current_timestamp and not empty: + cs_cache['date'] = _current_datetime + + _date_latest = parse_datetime(cs_cache.get('date') or empty_date) + cs_cache['updated_on'] = time.time() + self.changeset_cache = cs_cache + self.updated_on = last_change + Session().add(self) + Session().commit() + + else: + if empty: + cs_cache = EmptyCommit().__json__() + else: + cs_cache = self.changeset_cache + + _date_latest = parse_datetime(cs_cache.get('date') or empty_date) cs_cache['updated_on'] = time.time() self.changeset_cache = cs_cache + self.updated_on = _date_latest Session().add(self) Session().commit() - log.debug('updated repo %s with new commit cache %s', - self.repo_name, cs_cache) - else: - cs_cache = self.changeset_cache - cs_cache['updated_on'] = time.time() - self.changeset_cache = cs_cache - Session().add(self) - Session().commit() - - log.debug('Skipping update_commit_cache for repo:`%s` ' - 'commit already with latest changes', self.repo_name) + log.debug('updated repo `%s` with new commit cache %s, and last update_date: %s', + self.repo_name, cs_cache, _date_latest) @property def tip(self): @@ -2437,11 +2516,18 @@ class Repository(Base, BaseModel): # for repo2dbmapper config = kwargs.pop('config', None) cache = kwargs.pop('cache', None) - full_cache = str2bool(rhodecode.CONFIG.get('vcs_full_cache')) + vcs_full_cache = kwargs.pop('vcs_full_cache', None) + if vcs_full_cache is not None: + # allows override global config + full_cache = vcs_full_cache + else: + full_cache = str2bool(rhodecode.CONFIG.get('vcs_full_cache')) # if cache is NOT defined use default global, else we have a full # control over cache behaviour if cache is None and full_cache and not config: + log.debug('Initializing pure cached instance for %s', self.repo_path) return self._get_instance_cached() + # cache here is sent to the "vcs server" return self._get_instance(cache=bool(cache), config=config) @@ -2454,8 +2540,8 @@ class Repository(Base, BaseModel): region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid) @region.conditional_cache_on_arguments(namespace=cache_namespace_uid) - def get_instance_cached(repo_id, context_id): - return self._get_instance() + def get_instance_cached(repo_id, context_id, _cache_state_uid): + return self._get_instance(repo_state_uid=_cache_state_uid) # we must use thread scoped cache here, # because each thread of gevent needs it's own not shared connection and cache @@ -2464,20 +2550,25 @@ class Repository(Base, BaseModel): uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace, thread_scoped=True) with inv_context_manager as invalidation_context: - args = (self.repo_id, inv_context_manager.cache_key) + cache_state_uid = invalidation_context.cache_data['cache_state_uid'] + args = (self.repo_id, inv_context_manager.cache_key, cache_state_uid) + # re-compute and store cache if we get invalidate signal if invalidation_context.should_invalidate(): instance = get_instance_cached.refresh(*args) else: instance = get_instance_cached(*args) - log.debug('Repo instance fetched in %.3fs', inv_context_manager.compute_time) + log.debug('Repo instance fetched in %.4fs', inv_context_manager.compute_time) return instance - def _get_instance(self, cache=True, config=None): + def _get_instance(self, cache=True, config=None, repo_state_uid=None): + 
log.debug('Initializing %s instance `%s` with cache flag set to: %s', + self.repo_type, self.repo_path, cache) config = config or self._config custom_wire = { - 'cache': cache # controls the vcs.remote cache + 'cache': cache, # controls the vcs.remote cache + 'repo_state_uid': repo_state_uid } repo = get_vcs_instance( repo_path=safe_str(self.repo_full_path), @@ -2489,6 +2580,12 @@ class Repository(Base, BaseModel): repo.count() # cache rebuild return repo + def get_shadow_repository_path(self, workspace_id): + from rhodecode.lib.vcs.backends.base import BaseRepository + shadow_repo_path = BaseRepository._get_shadow_repository_path( + self.repo_full_path, self.repo_id, workspace_id) + return shadow_repo_path + def __json__(self): return {'landing_rev': self.landing_rev} @@ -2522,14 +2619,16 @@ class RepoGroup(Base, BaseModel): created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now) personal = Column('personal', Boolean(), nullable=True, unique=None, default=None) - _changeset_cache = Column( - "changeset_cache", LargeBinary(), nullable=True) # JSON data + _changeset_cache = Column("changeset_cache", LargeBinary(), nullable=True) # JSON data repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id') users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all') parent_group = relationship('RepoGroup', remote_side=group_id) user = relationship('User') - integrations = relationship('Integration', cascade="all, delete, delete-orphan") + integrations = relationship('Integration', cascade="all, delete-orphan") + + # no cascade, set NULL + scope_artifacts = relationship('FileStore', primaryjoin='FileStore.scope_repo_group_id==RepoGroup.group_id') def __init__(self, group_name='', parent_group=None): self.group_name = group_name @@ -2548,22 +2647,26 @@ class RepoGroup(Base, BaseModel): self._group_name = value self.group_name_hash = self.hash_repo_group_name(value) - @hybrid_property - def changeset_cache(self): + @classmethod + def _load_changeset_cache(cls, repo_id, changeset_cache_raw): from rhodecode.lib.vcs.backends.base import EmptyCommit dummy = EmptyCommit().__json__() - if not self._changeset_cache: - dummy['source_repo_id'] = '' + if not changeset_cache_raw: + dummy['source_repo_id'] = repo_id return json.loads(json.dumps(dummy)) try: - return json.loads(self._changeset_cache) + return json.loads(changeset_cache_raw) except TypeError: return dummy except Exception: log.error(traceback.format_exc()) return dummy + @hybrid_property + def changeset_cache(self): + return self._load_changeset_cache('', self._changeset_cache) + @changeset_cache.setter def changeset_cache(self, val): try: @@ -2600,7 +2703,7 @@ class RepoGroup(Base, BaseModel): @classmethod def _generate_choice(cls, repo_group): - from webhelpers.html import literal as _literal + from webhelpers2.html import literal as _literal _name = lambda k: _literal(cls.CHOICES_SEPARATOR.join(k)) return repo_group.group_id, _name(repo_group.full_path_splitted) @@ -2666,7 +2769,7 @@ class RepoGroup(Base, BaseModel): return q.all() @property - def parents(self, parents_recursion_limit = 10): + def parents(self, parents_recursion_limit=10): groups = [] if self.parent_group is None: return groups @@ -2692,17 +2795,21 @@ class RepoGroup(Base, BaseModel): def last_commit_cache_update_diff(self): return 
time.time() - (safe_int(self.changeset_cache.get('updated_on')) or 0) - @property - def last_commit_change(self): + @classmethod + def _load_commit_change(cls, last_commit_cache): from rhodecode.lib.vcs.utils.helpers import parse_datetime empty_date = datetime.datetime.fromtimestamp(0) - date_latest = self.changeset_cache.get('date', empty_date) + date_latest = last_commit_cache.get('date', empty_date) try: return parse_datetime(date_latest) except Exception: return empty_date @property + def last_commit_change(self): + return self._load_commit_change(self.changeset_cache) + + @property def last_db_change(self): return self.updated_on @@ -2792,7 +2899,8 @@ class RepoGroup(Base, BaseModel): def update_commit_cache(self, config=None): """ - Update cache of last changeset for newest repository inside this group, keys should be:: + Update cache of last commit for newest repository inside this repository group. + cache_keys should be:: source_repo_id short_id @@ -2805,47 +2913,37 @@ class RepoGroup(Base, BaseModel): """ from rhodecode.lib.vcs.utils.helpers import parse_datetime - - def repo_groups_and_repos(): - all_entries = OrderedDefaultDict(list) - - def _get_members(root_gr, pos=0): - - for repo in root_gr.repositories: - all_entries[root_gr].append(repo) - - # fill in all parent positions - for parent_group in root_gr.parents: - all_entries[parent_group].extend(all_entries[root_gr]) - - children_groups = root_gr.children.all() - if children_groups: - for cnt, gr in enumerate(children_groups, 1): - _get_members(gr, pos=pos+cnt) - - _get_members(root_gr=self) - return all_entries - empty_date = datetime.datetime.fromtimestamp(0) - for repo_group, repos in repo_groups_and_repos().items(): - - latest_repo_cs_cache = {} - for repo in repos: - repo_cs_cache = repo.changeset_cache - date_latest = latest_repo_cs_cache.get('date', empty_date) - date_current = repo_cs_cache.get('date', empty_date) - current_timestamp = datetime_to_time(parse_datetime(date_latest)) - if current_timestamp < datetime_to_time(parse_datetime(date_current)): - latest_repo_cs_cache = repo_cs_cache - latest_repo_cs_cache['source_repo_id'] = repo.repo_id - - latest_repo_cs_cache['updated_on'] = time.time() - repo_group.changeset_cache = latest_repo_cs_cache - Session().add(repo_group) - Session().commit() - - log.debug('updated repo group %s with new commit cache %s', - repo_group.group_name, latest_repo_cs_cache) + + def repo_groups_and_repos(root_gr): + for _repo in root_gr.repositories: + yield _repo + for child_group in root_gr.children.all(): + yield child_group + + latest_repo_cs_cache = {} + for obj in repo_groups_and_repos(self): + repo_cs_cache = obj.changeset_cache + date_latest = latest_repo_cs_cache.get('date', empty_date) + date_current = repo_cs_cache.get('date', empty_date) + current_timestamp = datetime_to_time(parse_datetime(date_latest)) + if current_timestamp < datetime_to_time(parse_datetime(date_current)): + latest_repo_cs_cache = repo_cs_cache + if hasattr(obj, 'repo_id'): + latest_repo_cs_cache['source_repo_id'] = obj.repo_id + else: + latest_repo_cs_cache['source_repo_id'] = repo_cs_cache.get('source_repo_id') + + _date_latest = parse_datetime(latest_repo_cs_cache.get('date') or empty_date) + + latest_repo_cs_cache['updated_on'] = time.time() + self.changeset_cache = latest_repo_cs_cache + self.updated_on = _date_latest + Session().add(self) + Session().commit() + + log.debug('updated repo group `%s` with new commit cache %s, and last update_date: %s', + self.group_name, latest_repo_cs_cache, 
_date_latest) def permissions(self, with_admins=True, with_owner=True, expand_from_user_groups=False): @@ -3248,7 +3346,7 @@ class UserRepoToPerm(Base, BaseModel): repository = relationship('Repository') permission = relationship('Permission') - branch_perm_entry = relationship('UserToRepoBranchPermission', cascade="all, delete, delete-orphan", lazy='joined') + branch_perm_entry = relationship('UserToRepoBranchPermission', cascade="all, delete-orphan", lazy='joined') @classmethod def create(cls, user, repository, permission): @@ -3489,7 +3587,7 @@ class CacheKey(Base, BaseModel): ) CACHE_TYPE_FEED = 'FEED' - CACHE_TYPE_README = 'README' + # namespaces used to register process/thread aware caches REPO_INVALIDATION_NAMESPACE = 'repo_cache:{repo_id}' SETTINGS_INVALIDATION_NAMESPACE = 'system_settings' @@ -3497,12 +3595,15 @@ class CacheKey(Base, BaseModel): cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None) cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None) + cache_state_uid = Column("cache_state_uid", String(255), nullable=True, unique=None, default=None) cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False) - def __init__(self, cache_key, cache_args=''): + def __init__(self, cache_key, cache_args='', cache_state_uid=None): self.cache_key = cache_key self.cache_args = cache_args self.cache_active = False + # first key should be same for all entries, since all workers should share it + self.cache_state_uid = cache_state_uid or self.generate_new_state_uid() def __unicode__(self): return u"<%s('%s:%s[%s]')>" % ( @@ -3531,6 +3632,13 @@ class CacheKey(Base, BaseModel): return self._cache_key_partition()[2] @classmethod + def generate_new_state_uid(cls, based_on=None): + if based_on: + return str(uuid.uuid5(uuid.NAMESPACE_URL, safe_str(based_on))) + else: + return str(uuid.uuid4()) + + @classmethod def delete_all_cache(cls): """ Delete all cache keys from database. @@ -3553,7 +3661,8 @@ class CacheKey(Base, BaseModel): log.debug('cache objects deleted for cache args %s', safe_str(cache_uid)) else: - qry.update({"cache_active": False}) + qry.update({"cache_active": False, + "cache_state_uid": cls.generate_new_state_uid()}) log.debug('cache objects marked as invalid for cache args %s', safe_str(cache_uid)) @@ -3571,6 +3680,12 @@ class CacheKey(Base, BaseModel): return inv_obj return None + @classmethod + def get_namespace_map(cls, namespace): + return { + x.cache_key: x + for x in cls.query().filter(cls.cache_args == namespace)} + class ChangesetComment(Base, BaseModel): __tablename__ = 'changeset_comments' @@ -3607,7 +3722,7 @@ class ChangesetComment(Base, BaseModel): author = relationship('User', lazy='joined') repo = relationship('Repository') - status_change = relationship('ChangesetStatus', cascade="all, delete, delete-orphan", lazy='joined') + status_change = relationship('ChangesetStatus', cascade="all, delete-orphan", lazy='joined') pull_request = relationship('PullRequest', lazy='joined') pull_request_version = relationship('PullRequestVersion') @@ -3802,6 +3917,7 @@ class _SetState(object): log.exception('Failed to set PullRequest %s state to %s', self._pr, pr_state) raise + class _PullRequestBase(BaseModel): """ Common attributes of pull request and version entries. 
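The CacheKey changes a few hunks above add a per-namespace cache_state_uid that is regenerated whenever cached entries are marked invalid. A minimal standalone sketch of the two generation modes it uses (deterministic when seeded, random otherwise), written against the standard library only:

    import uuid

    def generate_new_state_uid(based_on=None):
        # seeded: stable uid for the same input, useful for reproducibility
        if based_on:
            return str(uuid.uuid5(uuid.NAMESPACE_URL, str(based_on)))
        # unseeded: a fresh random uid, forcing dependent caches to recompute
        return str(uuid.uuid4())

    assert generate_new_state_uid('repo-1') == generate_new_state_uid('repo-1')
    assert generate_new_state_uid() != generate_new_state_uid()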
@@ -3906,6 +4022,14 @@ class _PullRequestBase(BaseModel): def reviewer_data_json(self): return json.dumps(self.reviewer_data) + @property + def work_in_progress(self): + """checks if pull request is work in progress by checking the title""" + title = self.title.upper() + if re.match(r'^(\[WIP\]\s*|WIP:\s*|WIP\s+)', title): + return True + return False + @hybrid_property def description_safe(self): from rhodecode.lib import helpers as h @@ -3917,7 +4041,7 @@ class _PullRequestBase(BaseModel): @revisions.setter def revisions(self, val): - self._revisions = ':'.join(val) + self._revisions = u':'.join(val) @hybrid_property def last_merge_status(self): @@ -4080,14 +4204,10 @@ class PullRequest(Base, _PullRequestBase else: return '' % id(self) - reviewers = relationship('PullRequestReviewers', - cascade="all, delete, delete-orphan") - statuses = relationship('ChangesetStatus', - cascade="all, delete, delete-orphan") - comments = relationship('ChangesetComment', - cascade="all, delete, delete-orphan") - versions = relationship('PullRequestVersion', - cascade="all, delete, delete-orphan", + reviewers = relationship('PullRequestReviewers', cascade="all, delete-orphan") + statuses = relationship('ChangesetStatus', cascade="all, delete-orphan") + comments = relationship('ChangesetComment', cascade="all, delete-orphan") + versions = relationship('PullRequestVersion', cascade="all, delete-orphan", lazy='dynamic') @classmethod @@ -4125,6 +4245,9 @@ class PullRequest(Base, _PullRequestBase def is_closed(self): return pull_request_obj.is_closed() + def is_state_changing(self): + return pull_request_obj.is_state_changing() + @property def pull_request_version_id(self): return getattr(pull_request_obj, 'pull_request_version_id', None) @@ -4156,6 +4279,9 @@ class PullRequest(Base, _PullRequestBase def is_closed(self): return self.status == self.STATUS_CLOSED + def is_state_changing(self): + return self.pull_request_state != PullRequest.STATE_CREATED + def __json__(self): return { 'revisions': self.revisions, @@ -4176,11 +4302,10 @@ class PullRequest(Base, _PullRequestBase def get_shadow_repo(self): workspace_id = self.workspace_id - vcs_obj = self.target_repo.scm_instance() - shadow_repository_path = vcs_obj._get_shadow_repository_path( - self.target_repo.repo_id, workspace_id) + shadow_repository_path = self.target_repo.get_shadow_repository_path(workspace_id) if os.path.isdir(shadow_repository_path): - return vcs_obj._get_shadow_instance(shadow_repository_path) + vcs_obj = self.target_repo.scm_instance() + return vcs_obj.get_shadow_instance(shadow_repository_path) class PullRequestVersion(Base, _PullRequestBase): @@ -4214,6 +4339,9 @@ class PullRequestVersion(Base, _PullRequ # calculate from original return self.pull_request.status == self.STATUS_CLOSED + def is_state_changing(self): + return self.pull_request.pull_request_state != PullRequest.STATE_CREATED + def calculated_review_status(self): return self.pull_request.calculated_review_status() @@ -4293,6 +4421,7 @@ class Notification(Base, BaseModel): TYPE_REGISTRATION = u'registration' TYPE_PULL_REQUEST = u'pull_request' TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment' + TYPE_PULL_REQUEST_UPDATE = u'pull_request_update' notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True) subject = Column('subject', Unicode(512), nullable=True) @@ -4303,7 +4432,7 @@ class Notification(Base, BaseModel): created_by_user = relationship('User') notifications_to_users = relationship('UserNotification', lazy='joined', - cascade="all, 
delete, delete-orphan") + cascade="all, delete-orphan") @property def recipients(self): @@ -4958,8 +5087,7 @@ class _BaseBranchPerms(BaseModel): class UserToRepoBranchPermission(Base, _BaseBranchPerms): __tablename__ = 'user_to_repo_branch_permissions' __table_args__ = ( - {'extend_existing': True, 'mysql_engine': 'InnoDB', - 'mysql_charset': 'utf8', 'sqlite_autoincrement': True,} + base_table_args ) branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True) @@ -4985,8 +5113,7 @@ class UserToRepoBranchPermission(Base, _ class UserGroupToRepoBranchPermission(Base, _BaseBranchPerms): __tablename__ = 'user_group_to_repo_branch_permissions' __table_args__ = ( - {'extend_existing': True, 'mysql_engine': 'InnoDB', - 'mysql_charset': 'utf8', 'sqlite_autoincrement': True,} + base_table_args ) branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True) @@ -5040,16 +5167,22 @@ class UserBookmark(Base, BaseModel): .filter(UserBookmark.position == position).scalar() @classmethod - def get_bookmarks_for_user(cls, user_id): - return cls.query() \ + def get_bookmarks_for_user(cls, user_id, cache=True): + bookmarks = cls.query() \ .filter(UserBookmark.user_id == user_id) \ .options(joinedload(UserBookmark.repository)) \ .options(joinedload(UserBookmark.repository_group)) \ - .order_by(UserBookmark.position.asc()) \ - .all() + .order_by(UserBookmark.position.asc()) + + if cache: + bookmarks = bookmarks.options( + FromCache("sql_cache_short", "get_user_{}_bookmarks".format(user_id)) + ) + + return bookmarks.all() def __unicode__(self): - return u'' % (self.position, self.redirect_url) + return u'' % (self.position, self.redirect_url) class FileStore(Base, BaseModel): @@ -5066,7 +5199,7 @@ class FileStore(Base, BaseModel): # sha256 hash file_hash = Column('file_hash', String(512), nullable=False) - file_size = Column('file_size', Integer(), nullable=False) + file_size = Column('file_size', BigInteger(), nullable=False) created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True) @@ -5077,9 +5210,14 @@ class FileStore(Base, BaseModel): # if repo/repo_group reference is set, check for permissions check_acl = Column('check_acl', Boolean(), nullable=False, default=True) + # hidden defines an attachment that should be hidden from showing in artifact listing + hidden = Column('hidden', Boolean(), nullable=False, default=False) + user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False) upload_user = relationship('User', lazy='joined', primaryjoin='User.user_id==FileStore.user_id') + file_metadata = relationship('FileStoreMetadata', lazy='joined') + # scope limited to user, which requester have access to scope_user_id = Column( 'scope_user_id', Integer(), ForeignKey('users.user_id'), @@ -5105,9 +5243,13 @@ class FileStore(Base, BaseModel): repo_group = relationship('RepoGroup', lazy='joined') @classmethod + def get_by_store_uid(cls, file_store_uid): + return FileStore.query().filter(FileStore.file_uid == file_store_uid).scalar() + + @classmethod def create(cls, file_uid, filename, file_hash, file_size, file_display_name='', - file_description='', enabled=True, check_acl=True, user_id=None, - scope_user_id=None, scope_repo_id=None, scope_repo_group_id=None): + file_description='', enabled=True, hidden=False, check_acl=True, + user_id=None, scope_user_id=None, scope_repo_id=None, scope_repo_group_id=None): store_entry = FileStore() 
store_entry.file_uid = file_uid @@ -5119,14 +5261,52 @@ class FileStore(Base, BaseModel): store_entry.check_acl = check_acl store_entry.enabled = enabled + store_entry.hidden = hidden store_entry.user_id = user_id store_entry.scope_user_id = scope_user_id store_entry.scope_repo_id = scope_repo_id store_entry.scope_repo_group_id = scope_repo_group_id + return store_entry @classmethod + def store_metadata(cls, file_store_id, args, commit=True): + file_store = FileStore.get(file_store_id) + if file_store is None: + return + + for section, key, value, value_type in args: + has_key = FileStoreMetadata().query() \ + .filter(FileStoreMetadata.file_store_id == file_store.file_store_id) \ + .filter(FileStoreMetadata.file_store_meta_section == section) \ + .filter(FileStoreMetadata.file_store_meta_key == key) \ + .scalar() + if has_key: + msg = 'key `{}` already defined under section `{}` for this file.'\ + .format(key, section) + raise ArtifactMetadataDuplicate(msg, err_section=section, err_key=key) + + # NOTE(marcink): raises ArtifactMetadataBadValueType + FileStoreMetadata.valid_value_type(value_type) + + meta_entry = FileStoreMetadata() + meta_entry.file_store = file_store + meta_entry.file_store_meta_section = section + meta_entry.file_store_meta_key = key + meta_entry.file_store_meta_value_type = value_type + meta_entry.file_store_meta_value = value + + Session().add(meta_entry) + + try: + if commit: + Session().commit() + except IntegrityError: + Session().rollback() + raise ArtifactMetadataDuplicate('Duplicate section/key found for this file.') + + @classmethod def bump_access_counter(cls, file_uid, commit=True): FileStore().query()\ .filter(FileStore.file_uid == file_uid)\ @@ -5135,10 +5315,145 @@ class FileStore(Base, BaseModel): if commit: Session().commit() + def __json__(self): + data = { + 'filename': self.file_display_name, + 'filename_org': self.file_org_name, + 'file_uid': self.file_uid, + 'description': self.file_description, + 'hidden': self.hidden, + 'size': self.file_size, + 'created_on': self.created_on, + 'uploaded_by': self.upload_user.get_api_data(details='basic'), + 'downloaded_times': self.accessed_count, + 'sha256': self.file_hash, + 'metadata': self.file_metadata, + } + + return data + def __repr__(self): return ''.format(self.file_store_id) +class FileStoreMetadata(Base, BaseModel): + __tablename__ = 'file_store_metadata' + __table_args__ = ( + UniqueConstraint('file_store_id', 'file_store_meta_section_hash', 'file_store_meta_key_hash'), + Index('file_store_meta_section_idx', 'file_store_meta_section', mysql_length=255), + Index('file_store_meta_key_idx', 'file_store_meta_key', mysql_length=255), + base_table_args + ) + SETTINGS_TYPES = { + 'str': safe_str, + 'int': safe_int, + 'unicode': safe_unicode, + 'bool': str2bool, + 'list': functools.partial(aslist, sep=',') + } + + file_store_meta_id = Column( + "file_store_meta_id", Integer(), nullable=False, unique=True, default=None, + primary_key=True) + _file_store_meta_section = Column( + "file_store_meta_section", UnicodeText().with_variant(UnicodeText(1024), 'mysql'), + nullable=True, unique=None, default=None) + _file_store_meta_section_hash = Column( + "file_store_meta_section_hash", String(255), + nullable=True, unique=None, default=None) + _file_store_meta_key = Column( + "file_store_meta_key", UnicodeText().with_variant(UnicodeText(1024), 'mysql'), + nullable=True, unique=None, default=None) + _file_store_meta_key_hash = Column( + "file_store_meta_key_hash", String(255), nullable=True, unique=None, default=None) + 
_file_store_meta_value = Column( + "file_store_meta_value", UnicodeText().with_variant(UnicodeText(20480), 'mysql'), + nullable=True, unique=None, default=None) + _file_store_meta_value_type = Column( + "file_store_meta_value_type", String(255), nullable=True, unique=None, + default='unicode') + + file_store_id = Column( + 'file_store_id', Integer(), ForeignKey('file_store.file_store_id'), + nullable=True, unique=None, default=None) + + file_store = relationship('FileStore', lazy='joined') + + @classmethod + def valid_value_type(cls, value): + if value.split('.')[0] not in cls.SETTINGS_TYPES: + raise ArtifactMetadataBadValueType( + 'value_type must be one of %s got %s' % (cls.SETTINGS_TYPES.keys(), value)) + + @hybrid_property + def file_store_meta_section(self): + return self._file_store_meta_section + + @file_store_meta_section.setter + def file_store_meta_section(self, value): + self._file_store_meta_section = value + self._file_store_meta_section_hash = _hash_key(value) + + @hybrid_property + def file_store_meta_key(self): + return self._file_store_meta_key + + @file_store_meta_key.setter + def file_store_meta_key(self, value): + self._file_store_meta_key = value + self._file_store_meta_key_hash = _hash_key(value) + + @hybrid_property + def file_store_meta_value(self): + val = self._file_store_meta_value + + if self._file_store_meta_value_type: + # e.g unicode.encrypted == unicode + _type = self._file_store_meta_value_type.split('.')[0] + # decode the encrypted value if it's encrypted field type + if '.encrypted' in self._file_store_meta_value_type: + cipher = EncryptedTextValue() + val = safe_unicode(cipher.process_result_value(val, None)) + # do final type conversion + converter = self.SETTINGS_TYPES.get(_type) or self.SETTINGS_TYPES['unicode'] + val = converter(val) + + return val + + @file_store_meta_value.setter + def file_store_meta_value(self, val): + val = safe_unicode(val) + # encode the encrypted value + if '.encrypted' in self.file_store_meta_value_type: + cipher = EncryptedTextValue() + val = safe_unicode(cipher.process_bind_param(val, None)) + self._file_store_meta_value = val + + @hybrid_property + def file_store_meta_value_type(self): + return self._file_store_meta_value_type + + @file_store_meta_value_type.setter + def file_store_meta_value_type(self, val): + # e.g unicode.encrypted + self.valid_value_type(val) + self._file_store_meta_value_type = val + + def __json__(self): + data = { + 'artifact': self.file_store.file_uid, + 'section': self.file_store_meta_section, + 'key': self.file_store_meta_key, + 'value': self.file_store_meta_value, + } + + return data + + def __repr__(self): + return '<%s[%s]%s=>%s]>' % (self.__class__.__name__, self.file_store_meta_section, + self.file_store_meta_key, self.file_store_meta_value) + + class DbMigrateVersion(Base, BaseModel): __tablename__ = 'db_migrate_version' __table_args__ = ( diff --git a/rhodecode/model/forms.py b/rhodecode/model/forms.py --- a/rhodecode/model/forms.py +++ b/rhodecode/model/forms.py @@ -143,6 +143,8 @@ def UserForm(localizer, edit=False, avai firstname = v.UnicodeString(strip=True, min=1, not_empty=False) lastname = v.UnicodeString(strip=True, min=1, not_empty=False) email = All(v.UniqSystemEmail(localizer, old_data), v.Email(not_empty=True)) + description = v.UnicodeString(strip=True, min=1, max=250, not_empty=False, + if_missing='') extern_name = v.UnicodeString(strip=True) extern_type = v.UnicodeString(strip=True) language = v.OneOf(available_languages, hideList=False, @@ -244,12 +246,10 @@ def 
PasswordResetForm(localizer): return _PasswordResetForm -def RepoForm(localizer, edit=False, old_data=None, repo_groups=None, - landing_revs=None, allow_disabled=False): +def RepoForm(localizer, edit=False, old_data=None, repo_groups=None, allow_disabled=False): _ = localizer old_data = old_data or {} repo_groups = repo_groups or [] - landing_revs = landing_revs or [] supported_backends = BACKENDS.keys() class _RepoForm(formencode.Schema): @@ -263,7 +263,6 @@ def RepoForm(localizer, edit=False, old_ if_missing=old_data.get('repo_type')) repo_description = v.UnicodeString(strip=True, min=1, not_empty=False) repo_private = v.StringBoolean(if_missing=False) - repo_landing_rev = v.OneOf(landing_revs, hideList=True) repo_copy_permissions = v.StringBoolean(if_missing=False) clone_uri = All(v.UnicodeString(strip=True, min=1, not_empty=False)) @@ -333,12 +332,10 @@ def RepoFieldForm(localizer): def RepoForkForm(localizer, edit=False, old_data=None, - supported_backends=BACKENDS.keys(), repo_groups=None, - landing_revs=None): + supported_backends=BACKENDS.keys(), repo_groups=None): _ = localizer old_data = old_data or {} repo_groups = repo_groups or [] - landing_revs = landing_revs or [] class _RepoForkForm(formencode.Schema): allow_extra_fields = True @@ -353,7 +350,6 @@ def RepoForkForm(localizer, edit=False, copy_permissions = v.StringBoolean(if_missing=False) fork_parent_id = v.UnicodeString() chained_validators = [v.ValidForkName(localizer, edit, old_data)] - landing_rev = v.OneOf(landing_revs, hideList=True) return _RepoForkForm diff --git a/rhodecode/model/notification.py b/rhodecode/model/notification.py --- a/rhodecode/model/notification.py +++ b/rhodecode/model/notification.py @@ -111,6 +111,7 @@ class NotificationModel(BaseModel): # add mentioned users into recipients final_recipients = set(recipients_objs).union(mention_recipients) + notification = Notification.create( created_by=created_by_obj, subject=notification_subject, body=notification_body, recipients=final_recipients, @@ -260,6 +261,10 @@ class NotificationModel(BaseModel): _('%(user)s opened new pull request %(date_or_age)s'), _('%(user)s opened new pull request at %(date_or_age)s'), ], + notification.TYPE_PULL_REQUEST_UPDATE: [ + _('%(user)s updated pull request %(date_or_age)s'), + _('%(user)s updated pull request at %(date_or_age)s'), + ], notification.TYPE_PULL_REQUEST_COMMENT: [ _('%(user)s commented on pull request %(date_or_age)s'), _('%(user)s commented on pull request at %(date_or_age)s'), @@ -292,6 +297,7 @@ class EmailNotificationModel(BaseModel): TYPE_REGISTRATION = Notification.TYPE_REGISTRATION TYPE_PULL_REQUEST = Notification.TYPE_PULL_REQUEST TYPE_PULL_REQUEST_COMMENT = Notification.TYPE_PULL_REQUEST_COMMENT + TYPE_PULL_REQUEST_UPDATE = Notification.TYPE_PULL_REQUEST_UPDATE TYPE_MAIN = Notification.TYPE_MESSAGE TYPE_PASSWORD_RESET = 'password_reset' @@ -318,6 +324,8 @@ class EmailNotificationModel(BaseModel): 'rhodecode:templates/email_templates/pull_request_review.mako', TYPE_PULL_REQUEST_COMMENT: 'rhodecode:templates/email_templates/pull_request_comment.mako', + TYPE_PULL_REQUEST_UPDATE: + 'rhodecode:templates/email_templates/pull_request_update.mako', } def __init__(self): @@ -340,6 +348,7 @@ class EmailNotificationModel(BaseModel): """ kwargs['rhodecode_instance_name'] = self.rhodecode_instance_name + kwargs['rhodecode_version'] = rhodecode.__version__ instance_url = h.route_url('home') _kwargs = { 'instance_url': instance_url, diff --git a/rhodecode/model/pull_request.py b/rhodecode/model/pull_request.py 
--- a/rhodecode/model/pull_request.py +++ b/rhodecode/model/pull_request.py @@ -51,7 +51,7 @@ from rhodecode.model import BaseModel from rhodecode.model.changeset_status import ChangesetStatusModel from rhodecode.model.comment import CommentsModel from rhodecode.model.db import ( - or_, PullRequest, PullRequestReviewers, ChangesetStatus, + or_, String, cast, PullRequest, PullRequestReviewers, ChangesetStatus, PullRequestVersion, ChangesetComment, Repository, RepoReviewRule) from rhodecode.model.meta import Session from rhodecode.model.notification import NotificationModel, \ @@ -65,9 +65,19 @@ log = logging.getLogger(__name__) # Data structure to hold the response data when updating commits during a pull # request update. -UpdateResponse = collections.namedtuple('UpdateResponse', [ - 'executed', 'reason', 'new', 'old', 'changes', - 'source_changed', 'target_changed']) +class UpdateResponse(object): + + def __init__(self, executed, reason, new, old, common_ancestor_id, + commit_changes, source_changed, target_changed): + + self.executed = executed + self.reason = reason + self.new = new + self.old = old + self.common_ancestor_id = common_ancestor_id + self.changes = commit_changes + self.source_changed = source_changed + self.target_changed = target_changed class PullRequestModel(BaseModel): @@ -137,8 +147,8 @@ class PullRequestModel(BaseModel): def get(self, pull_request): return self.__get_pull_request(pull_request) - def _prepare_get_all_query(self, repo_name, source=False, statuses=None, - opened_by=None, order_by=None, + def _prepare_get_all_query(self, repo_name, search_q=None, source=False, + statuses=None, opened_by=None, order_by=None, order_dir='desc', only_created=False): repo = None if repo_name: @@ -146,6 +156,14 @@ class PullRequestModel(BaseModel): q = PullRequest.query() + if search_q: + like_expression = u'%{}%'.format(safe_unicode(search_q)) + q = q.filter(or_( + cast(PullRequest.pull_request_id, String).ilike(like_expression), + PullRequest.title.ilike(like_expression), + PullRequest.description.ilike(like_expression), + )) + # source or target if repo and source: q = q.filter(PullRequest.source_repo == repo) @@ -179,28 +197,31 @@ class PullRequestModel(BaseModel): return q - def count_all(self, repo_name, source=False, statuses=None, + def count_all(self, repo_name, search_q=None, source=False, statuses=None, opened_by=None): """ Count the number of pull requests for a specific repository. :param repo_name: target or source repo + :param search_q: filter by text :param source: boolean flag to specify if repo_name refers to source :param statuses: list of pull request statuses :param opened_by: author user of the pull request :returns: int number of pull requests """ q = self._prepare_get_all_query( - repo_name, source=source, statuses=statuses, opened_by=opened_by) + repo_name, search_q=search_q, source=source, statuses=statuses, + opened_by=opened_by) return q.count() - def get_all(self, repo_name, source=False, statuses=None, opened_by=None, - offset=0, length=None, order_by=None, order_dir='desc'): + def get_all(self, repo_name, search_q=None, source=False, statuses=None, + opened_by=None, offset=0, length=None, order_by=None, order_dir='desc'): """ Get all pull requests for a specific repository. 
:param repo_name: target or source repo + :param search_q: filter by text :param source: boolean flag to specify if repo_name refers to source :param statuses: list of pull request statuses :param opened_by: author user of the pull request @@ -211,8 +232,8 @@ class PullRequestModel(BaseModel): :returns: list of pull requests """ q = self._prepare_get_all_query( - repo_name, source=source, statuses=statuses, opened_by=opened_by, - order_by=order_by, order_dir=order_dir) + repo_name, search_q=search_q, source=source, statuses=statuses, + opened_by=opened_by, order_by=order_by, order_dir=order_dir) if length: pull_requests = q.limit(length).offset(offset).all() @@ -221,24 +242,25 @@ class PullRequestModel(BaseModel): return pull_requests - def count_awaiting_review(self, repo_name, source=False, statuses=None, + def count_awaiting_review(self, repo_name, search_q=None, source=False, statuses=None, opened_by=None): """ Count the number of pull requests for a specific repository that are awaiting review. :param repo_name: target or source repo + :param search_q: filter by text :param source: boolean flag to specify if repo_name refers to source :param statuses: list of pull request statuses :param opened_by: author user of the pull request :returns: int number of pull requests """ pull_requests = self.get_awaiting_review( - repo_name, source=source, statuses=statuses, opened_by=opened_by) + repo_name, search_q=search_q, source=source, statuses=statuses, opened_by=opened_by) return len(pull_requests) - def get_awaiting_review(self, repo_name, source=False, statuses=None, + def get_awaiting_review(self, repo_name, search_q=None, source=False, statuses=None, opened_by=None, offset=0, length=None, order_by=None, order_dir='desc'): """ @@ -246,6 +268,7 @@ class PullRequestModel(BaseModel): review. :param repo_name: target or source repo + :param search_q: filter by text :param source: boolean flag to specify if repo_name refers to source :param statuses: list of pull request statuses :param opened_by: author user of the pull request @@ -256,8 +279,8 @@ class PullRequestModel(BaseModel): :returns: list of pull requests """ pull_requests = self.get_all( - repo_name, source=source, statuses=statuses, opened_by=opened_by, - order_by=order_by, order_dir=order_dir) + repo_name, search_q=search_q, source=source, statuses=statuses, + opened_by=opened_by, order_by=order_by, order_dir=order_dir) _filtered_pull_requests = [] for pr in pull_requests: @@ -270,13 +293,14 @@ class PullRequestModel(BaseModel): else: return _filtered_pull_requests - def count_awaiting_my_review(self, repo_name, source=False, statuses=None, + def count_awaiting_my_review(self, repo_name, search_q=None, source=False, statuses=None, opened_by=None, user_id=None): """ Count the number of pull requests for a specific repository that are awaiting review from a specific user. 
:param repo_name: target or source repo + :param search_q: filter by text :param source: boolean flag to specify if repo_name refers to source :param statuses: list of pull request statuses :param opened_by: author user of the pull request @@ -284,12 +308,12 @@ class PullRequestModel(BaseModel): :returns: int number of pull requests """ pull_requests = self.get_awaiting_my_review( - repo_name, source=source, statuses=statuses, opened_by=opened_by, - user_id=user_id) + repo_name, search_q=search_q, source=source, statuses=statuses, + opened_by=opened_by, user_id=user_id) return len(pull_requests) - def get_awaiting_my_review(self, repo_name, source=False, statuses=None, + def get_awaiting_my_review(self, repo_name, search_q=None, source=False, statuses=None, opened_by=None, user_id=None, offset=0, length=None, order_by=None, order_dir='desc'): """ @@ -297,6 +321,7 @@ class PullRequestModel(BaseModel): review from a specific user. :param repo_name: target or source repo + :param search_q: filter by text :param source: boolean flag to specify if repo_name refers to source :param statuses: list of pull request statuses :param opened_by: author user of the pull request @@ -308,8 +333,8 @@ class PullRequestModel(BaseModel): :returns: list of pull requests """ pull_requests = self.get_all( - repo_name, source=source, statuses=statuses, opened_by=opened_by, - order_by=order_by, order_dir=order_dir) + repo_name, search_q=search_q, source=source, statuses=statuses, + opened_by=opened_by, order_by=order_by, order_dir=order_dir) _my = PullRequestModel().get_not_reviewed(user_id) my_participation = [] @@ -657,11 +682,13 @@ class PullRequestModel(BaseModel): source_ref_type = pull_request.source_ref_parts.type return source_ref_type in self.REF_TYPES - def update_commits(self, pull_request): + def update_commits(self, pull_request, updating_user): """ Get the updated list of commits for the pull request and return the new pull request version and the list of commits processed by this update action + + updating_user is the user_object who triggered the update """ pull_request = self.__get_pull_request(pull_request) source_ref_type = pull_request.source_ref_parts.type @@ -678,7 +705,7 @@ class PullRequestModel(BaseModel): return UpdateResponse( executed=False, reason=UpdateFailureReason.WRONG_REF_TYPE, - old=pull_request, new=None, changes=None, + old=pull_request, new=None, common_ancestor_id=None, commit_changes=None, source_changed=False, target_changed=False) # source repo @@ -690,7 +717,7 @@ class PullRequestModel(BaseModel): return UpdateResponse( executed=False, reason=UpdateFailureReason.MISSING_SOURCE_REF, - old=pull_request, new=None, changes=None, + old=pull_request, new=None, common_ancestor_id=None, commit_changes=None, source_changed=False, target_changed=False) source_changed = source_ref_id != source_commit.raw_id @@ -704,7 +731,7 @@ class PullRequestModel(BaseModel): return UpdateResponse( executed=False, reason=UpdateFailureReason.MISSING_TARGET_REF, - old=pull_request, new=None, changes=None, + old=pull_request, new=None, common_ancestor_id=None, commit_changes=None, source_changed=False, target_changed=False) target_changed = target_ref_id != target_commit.raw_id @@ -713,7 +740,7 @@ class PullRequestModel(BaseModel): return UpdateResponse( executed=False, reason=UpdateFailureReason.NO_CHANGE, - old=pull_request, new=None, changes=None, + old=pull_request, new=None, common_ancestor_id=None, commit_changes=None, source_changed=target_changed, target_changed=source_changed) 
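The namedtuple-to-class change for UpdateResponse earlier in this file's diff keeps the old attribute surface: the constructor takes commit_changes but still exposes it as .changes. A hedged sketch of how the early-return values above might be consumed by hypothetical calling code:

    # hypothetical caller of PullRequestModel().update_commits(...)
    resp = PullRequestModel().update_commits(pull_request, updating_user)
    if not resp.executed:
        log.debug('update skipped, reason: %s', resp.reason)
    else:
        # commit_changes passed to the constructor is read back via .changes
        log.debug('added: %s removed: %s (ancestor %s)',
                  resp.changes.added, resp.changes.removed, resp.common_ancestor_id)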
change_in_found = 'target repo' if target_changed else 'source repo' @@ -744,23 +771,23 @@ class PullRequestModel(BaseModel): return UpdateResponse( executed=False, reason=UpdateFailureReason.MISSING_TARGET_REF, - old=pull_request, new=None, changes=None, + old=pull_request, new=None, common_ancestor_id=None, commit_changes=None, source_changed=source_changed, target_changed=target_changed) # re-compute commit ids old_commit_ids = pull_request.revisions - pre_load = ["author", "branch", "date", "message"] + pre_load = ["author", "date", "message", "branch"] commit_ranges = target_repo.compare( target_commit.raw_id, source_commit.raw_id, source_repo, merge=True, pre_load=pre_load) - ancestor = source_repo.get_common_ancestor( + ancestor_commit_id = source_repo.get_common_ancestor( source_commit.raw_id, target_commit.raw_id, target_repo) pull_request.source_ref = '%s:%s:%s' % ( source_ref_type, source_ref_name, source_commit.raw_id) pull_request.target_ref = '%s:%s:%s' % ( - target_ref_type, target_ref_name, ancestor) + target_ref_type, target_ref_name, ancestor_commit_id) pull_request.revisions = [ commit.raw_id for commit in reversed(commit_ranges)] @@ -772,7 +799,7 @@ class PullRequestModel(BaseModel): pull_request, pull_request_version) # calculate commit and file changes - changes = self._calculate_commit_id_changes( + commit_changes = self._calculate_commit_id_changes( old_commit_ids, new_commit_ids) file_changes = self._calculate_file_changes( old_diff_data, new_diff_data) @@ -782,23 +809,23 @@ class PullRequestModel(BaseModel): pull_request, old_diff_data=old_diff_data, new_diff_data=new_diff_data) - commit_changes = (changes.added or changes.removed) + valid_commit_changes = (commit_changes.added or commit_changes.removed) file_node_changes = ( file_changes.added or file_changes.modified or file_changes.removed) - pr_has_changes = commit_changes or file_node_changes + pr_has_changes = valid_commit_changes or file_node_changes # Add an automatic comment to the pull request, in case # anything has changed if pr_has_changes: update_comment = CommentsModel().create( - text=self._render_update_message(changes, file_changes), + text=self._render_update_message(ancestor_commit_id, commit_changes, file_changes), repo=pull_request.target_repo, user=pull_request.author, pull_request=pull_request, send_email=False, renderer=DEFAULT_COMMENTS_RENDERER) # Update status to "Under Review" for added commits - for commit_id in changes.added: + for commit_id in commit_changes.added: ChangesetStatusModel().set_status( repo=pull_request.source_repo, status=ChangesetStatus.STATUS_UNDER_REVIEW, @@ -807,10 +834,19 @@ class PullRequestModel(BaseModel): pull_request=pull_request, revision=commit_id) + # send update email to users + try: + self.notify_users(pull_request=pull_request, updating_user=updating_user, + ancestor_commit_id=ancestor_commit_id, + commit_changes=commit_changes, + file_changes=file_changes) + except Exception: + log.exception('Failed to send email notification to users') + log.debug( 'Updated pull request %s, added_ids: %s, common_ids: %s, ' 'removed_ids: %s', pull_request.pull_request_id, - changes.added, changes.common, changes.removed) + commit_changes.added, commit_changes.common, commit_changes.removed) log.debug( 'Updated pull request with the following file changes: %s', file_changes) @@ -826,7 +862,8 @@ class PullRequestModel(BaseModel): return UpdateResponse( executed=True, reason=UpdateFailureReason.NONE, - old=pull_request, new=pull_request_version, changes=changes, + 
old=pull_request, new=pull_request_version, + common_ancestor_id=ancestor_commit_id, commit_changes=commit_changes, source_changed=source_changed, target_changed=target_changed) def _create_version_from_snapshot(self, pull_request): @@ -933,8 +970,13 @@ class PullRequestModel(BaseModel): old_hash = old_files.get(new_filename) if not old_hash: - # file is not present in old diff, means it's added - added_files.append(new_filename) + # file is not present in old diff, we have to figure out from parsed diff + # operation ADD/REMOVE + operations_dict = diff_data['stats']['ops'] + if diffs.DEL_FILENODE in operations_dict: + removed_files.append(new_filename) + else: + added_files.append(new_filename) else: if new_hash != old_hash: modified_files.append(new_filename) @@ -948,12 +990,13 @@ class PullRequestModel(BaseModel): return FileChangeTuple(added_files, modified_files, removed_files) - def _render_update_message(self, changes, file_changes): + def _render_update_message(self, ancestor_commit_id, changes, file_changes): """ render the message using DEFAULT_COMMENTS_RENDERER (RST renderer), so it's always looking the same disregarding on which default renderer system is using. + :param ancestor_commit_id: ancestor raw_id :param changes: changes named tuple :param file_changes: file changes named tuple @@ -972,6 +1015,7 @@ class PullRequestModel(BaseModel): 'added_files': file_changes.added, 'modified_files': file_changes.modified, 'removed_files': file_changes.removed, + 'ancestor_commit_id': ancestor_commit_id } renderer = RstTemplateRenderer() return renderer.render('pull_request_update.mako', **params) @@ -1110,8 +1154,8 @@ class PullRequestModel(BaseModel): pr_target_repo = pull_request_obj.target_repo pr_url = h.route_url('pullrequest_show', - repo_name=pr_target_repo.repo_name, - pull_request_id=pull_request_obj.pull_request_id,) + repo_name=pr_target_repo.repo_name, + pull_request_id=pull_request_obj.pull_request_id,) # set some variables for email notification pr_target_repo_url = h.route_url( @@ -1155,6 +1199,75 @@ class PullRequestModel(BaseModel): email_kwargs=kwargs, ) + def notify_users(self, pull_request, updating_user, ancestor_commit_id, + commit_changes, file_changes): + + updating_user_id = updating_user.user_id + reviewers = set([x.user.user_id for x in pull_request.reviewers]) + # NOTE(marcink): send notification to all other users except to + # person who updated the PR + recipients = reviewers.difference(set([updating_user_id])) + + log.debug('Notify following recipients about pull-request update %s', recipients) + + pull_request_obj = pull_request + + # send email about the update + changed_files = ( + file_changes.added + file_changes.modified + file_changes.removed) + + pr_source_repo = pull_request_obj.source_repo + pr_target_repo = pull_request_obj.target_repo + + pr_url = h.route_url('pullrequest_show', + repo_name=pr_target_repo.repo_name, + pull_request_id=pull_request_obj.pull_request_id,) + + # set some variables for email notification + pr_target_repo_url = h.route_url( + 'repo_summary', repo_name=pr_target_repo.repo_name) + + pr_source_repo_url = h.route_url( + 'repo_summary', repo_name=pr_source_repo.repo_name) + + email_kwargs = { + 'date': datetime.datetime.now(), + 'updating_user': updating_user, + + 'pull_request': pull_request_obj, + + 'pull_request_target_repo': pr_target_repo, + 'pull_request_target_repo_url': pr_target_repo_url, + + 'pull_request_source_repo': pr_source_repo, + 'pull_request_source_repo_url': pr_source_repo_url, + + 
'pull_request_url': pr_url, + + 'ancestor_commit_id': ancestor_commit_id, + 'added_commits': commit_changes.added, + 'removed_commits': commit_changes.removed, + 'changed_files': changed_files, + 'added_files': file_changes.added, + 'modified_files': file_changes.modified, + 'removed_files': file_changes.removed, + } + + (subject, + _h, _e, # we don't care about those + body_plaintext) = EmailNotificationModel().render_email( + EmailNotificationModel.TYPE_PULL_REQUEST_UPDATE, **email_kwargs) + + # create notification objects, and emails + NotificationModel().create( + created_by=updating_user, + notification_subject=subject, + notification_body=body_plaintext, + notification_type=EmailNotificationModel.TYPE_PULL_REQUEST_UPDATE, + recipients=recipients, + email_kwargs=email_kwargs, + ) + def delete(self, pull_request, user): pull_request = self.__get_pull_request(pull_request) old_data = pull_request.get_api_data(with_merge_state=False) @@ -1320,6 +1433,7 @@ class PullRequestModel(BaseModel): else: possible = pull_request.last_merge_status == MergeFailureReason.NONE metadata = { + 'unresolved_files': '', 'target_ref': pull_request.target_ref_parts, 'source_ref': pull_request.source_ref_parts, } @@ -1598,6 +1712,7 @@ class MergeCheck(object): PERM_CHECK = 'perm' REVIEW_CHECK = 'review' MERGE_CHECK = 'merge' + WIP_CHECK = 'wip' def __init__(self): self.review_status = None @@ -1622,6 +1737,15 @@ class MergeCheck(object): _ = translator merge_check = cls() + # title has WIP: + if pull_request.work_in_progress: + log.debug("MergeCheck: cannot merge, title has wip: marker.") + + msg = _('WIP marker in title prevents from accidental merge.') + merge_check.push_error('error', msg, cls.WIP_CHECK, pull_request.title) + if fail_early: + return merge_check + # permissions to merge user_allowed_to_merge = PullRequestModel().check_user_merge( pull_request, auth_user) diff --git a/rhodecode/model/repo.py b/rhodecode/model/repo.py --- a/rhodecode/model/repo.py +++ b/rhodecode/model/repo.py @@ -43,13 +43,12 @@ from rhodecode.lib.utils2 import ( from rhodecode.lib.vcs.backends import get_backend from rhodecode.model import BaseModel from rhodecode.model.db import ( - _hash_key, joinedload, or_, Repository, UserRepoToPerm, UserGroupRepoToPerm, + _hash_key, func, case, joinedload, or_, in_filter_generator, + Session, Repository, UserRepoToPerm, UserGroupRepoToPerm, UserRepoGroupToPerm, UserGroupRepoGroupToPerm, User, Permission, Statistics, UserGroup, RepoGroup, RepositoryField, UserLog) - from rhodecode.model.settings import VcsSettingsModel - log = logging.getLogger(__name__) @@ -217,8 +216,11 @@ class RepoModel(BaseModel): def last_change(last_change): if admin and isinstance(last_change, datetime.datetime) and not last_change.tzinfo: - last_change = last_change + datetime.timedelta(seconds= - (datetime.datetime.now() - datetime.datetime.utcnow()).seconds) + ts = time.time() + utc_offset = (datetime.datetime.fromtimestamp(ts) + - datetime.datetime.utcfromtimestamp(ts)).total_seconds() + last_change = last_change + datetime.timedelta(seconds=utc_offset) + return _render("last_change", last_change) def rss_lnk(repo_name): @@ -246,26 +248,27 @@ class RepoModel(BaseModel): repos_data = [] for repo in repo_list: - cs_cache = repo.changeset_cache + # NOTE(marcink): because we use only raw column we need to load it like that + changeset_cache = Repository._load_changeset_cache( + repo.repo_id, repo._changeset_cache) + row = { "menu": quick_menu(repo.repo_name), "name": repo_lnk(repo.repo_name, repo.repo_type, 
repo.repo_state, repo.private, repo.archived, repo.fork), - "name_raw": repo.repo_name.lower(), - "last_change": last_change(repo.last_commit_change), - "last_change_raw": datetime_to_time(repo.last_commit_change), + "desc": desc(repo.description), + + "last_change": last_change(repo.updated_on), - "last_changeset": last_rev(repo.repo_name, cs_cache), - "last_changeset_raw": cs_cache.get('revision'), + "last_changeset": last_rev(repo.repo_name, changeset_cache), + "last_changeset_raw": changeset_cache.get('revision'), - "desc": desc(repo.description_safe), - "owner": user_profile(repo.user.username), + "owner": user_profile(repo.User.username), "state": state(repo.repo_state), "rss": rss_lnk(repo.repo_name), - "atom": atom_lnk(repo.repo_name), } if admin: @@ -276,6 +279,87 @@ class RepoModel(BaseModel): return repos_data + def get_repos_data_table( + self, draw, start, limit, + search_q, order_by, order_dir, + auth_user, repo_group_id): + from rhodecode.model.scm import RepoList + + _perms = ['repository.read', 'repository.write', 'repository.admin'] + + repos = Repository.query() \ + .filter(Repository.group_id == repo_group_id) \ + .all() + auth_repo_list = RepoList( + repos, perm_set=_perms, + extra_kwargs=dict(user=auth_user)) + + allowed_ids = [-1] + for repo in auth_repo_list: + allowed_ids.append(repo.repo_id) + + repos_data_total_count = Repository.query() \ + .filter(Repository.group_id == repo_group_id) \ + .filter(or_( + # generate multiple IN to fix limitation problems + *in_filter_generator(Repository.repo_id, allowed_ids)) + ) \ + .count() + + base_q = Session.query( + Repository.repo_id, + Repository.repo_name, + Repository.description, + Repository.repo_type, + Repository.repo_state, + Repository.private, + Repository.archived, + Repository.fork, + Repository.updated_on, + Repository._changeset_cache, + User, + ) \ + .filter(Repository.group_id == repo_group_id) \ + .filter(or_( + # generate multiple IN to fix limitation problems + *in_filter_generator(Repository.repo_id, allowed_ids)) + ) \ + .join(User, User.user_id == Repository.user_id) \ + .group_by(Repository, User) + + repos_data_total_filtered_count = base_q.count() + + sort_defined = False + if order_by == 'repo_name': + sort_col = func.lower(Repository.repo_name) + sort_defined = True + elif order_by == 'user_username': + sort_col = User.username + else: + sort_col = getattr(Repository, order_by, None) + + if sort_defined or sort_col: + if order_dir == 'asc': + sort_col = sort_col.asc() + else: + sort_col = sort_col.desc() + + base_q = base_q.order_by(sort_col) + base_q = base_q.offset(start).limit(limit) + + repos_list = base_q.all() + + repos_data = RepoModel().get_repos_as_dict( + repo_list=repos_list, admin=False) + + data = ({ + 'draw': draw, + 'data': repos_data, + 'recordsTotal': repos_data_total_count, + 'recordsFiltered': repos_data_total_filtered_count, + }) + return data + def _get_defaults(self, repo_name): """ Gets information about repository, and returns a dict for @@ -381,7 +465,7 @@ class RepoModel(BaseModel): if ex_field: ex_field.field_value = kwargs[field] self.sa.add(ex_field) - cur_repo.updated_on = datetime.datetime.now() + self.sa.add(cur_repo) if source_repo_name != new_name: @@ -824,7 +908,7 @@ class RepoModel(BaseModel): def _create_filesystem_repo(self, repo_name, repo_type, repo_group, clone_uri=None, repo_store_location=None, - use_global_config=False): + use_global_config=False, install_hooks=True): """ makes repository on filesystem. 
It's group aware means it'll create a repository within a group, and alter the paths accordingly of @@ -883,13 +967,15 @@ class RepoModel(BaseModel): # TODO: johbo: Unify this, hardcoded "bare=True" does not look nice if repo_type == 'git': repo = backend( - repo_path, config=config, create=True, src_url=clone_uri, - bare=True) + repo_path, config=config, create=True, src_url=clone_uri, bare=True, + with_wire={"cache": False}) else: repo = backend( - repo_path, config=config, create=True, src_url=clone_uri) + repo_path, config=config, create=True, src_url=clone_uri, + with_wire={"cache": False}) - repo.install_hooks() + if install_hooks: + repo.install_hooks() log.debug('Created repo %s with %s backend', safe_unicode(repo_name), safe_unicode(repo_type)) diff --git a/rhodecode/model/repo_group.py b/rhodecode/model/repo_group.py --- a/rhodecode/model/repo_group.py +++ b/rhodecode/model/repo_group.py @@ -28,6 +28,7 @@ import datetime import itertools import logging import shutil +import time import traceback import string @@ -35,8 +36,8 @@ from zope.cachedescriptors.property impo from rhodecode import events from rhodecode.model import BaseModel -from rhodecode.model.db import (_hash_key, - RepoGroup, UserRepoGroupToPerm, User, Permission, UserGroupRepoGroupToPerm, +from rhodecode.model.db import (_hash_key, func, or_, in_filter_generator, + Session, RepoGroup, UserRepoGroupToPerm, User, Permission, UserGroupRepoGroupToPerm, UserGroup, Repository) from rhodecode.model.settings import VcsSettingsModel, SettingsModel from rhodecode.lib.caching_query import FromCache @@ -519,7 +520,7 @@ class RepoGroupModel(BaseModel): if 'user' in form_data: repo_group.user = User.get_by_username(form_data['user']) - repo_group.updated_on = datetime.datetime.now() + self.sa.add(repo_group) # iterate over all members of this groups and do fixes @@ -536,7 +537,7 @@ class RepoGroupModel(BaseModel): log.debug('Fixing group %s to new name %s', obj.group_name, new_name) obj.group_name = new_name - obj.updated_on = datetime.datetime.now() + elif isinstance(obj, Repository): # we need to get all repositories from this new group and # rename them accordingly to new group path @@ -544,7 +545,7 @@ class RepoGroupModel(BaseModel): log.debug('Fixing repo %s to new name %s', obj.repo_name, new_name) obj.repo_name = new_name - obj.updated_on = datetime.datetime.now() + self.sa.add(obj) self._rename_group(old_path, new_path) @@ -697,6 +698,8 @@ class RepoGroupModel(BaseModel): for repo_group in repo_groups: repo_group.update_commit_cache() + + def get_repo_groups_as_dict(self, repo_group_list=None, admin=False, super_user_actions=False): @@ -714,15 +717,12 @@ class RepoGroupModel(BaseModel): def last_change(last_change): if admin and isinstance(last_change, datetime.datetime) and not last_change.tzinfo: - last_change = last_change + datetime.timedelta(seconds= - (datetime.datetime.now() - datetime.datetime.utcnow()).seconds) + ts = time.time() + utc_offset = (datetime.datetime.fromtimestamp(ts) + - datetime.datetime.utcfromtimestamp(ts)).total_seconds() + last_change = last_change + datetime.timedelta(seconds=utc_offset) return _render("last_change", last_change) - def last_rev(repo_name, cs_cache): - return _render('revision', repo_name, cs_cache.get('revision'), - cs_cache.get('raw_id'), cs_cache.get('author'), - cs_cache.get('message'), cs_cache.get('date')) - def desc(desc, personal): return _render( 'repo_group_desc', desc, personal, c.visual.stylify_metatags) @@ -739,22 +739,23 @@ class RepoGroupModel(BaseModel): 
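For reference, a small runnable sketch (not part of the patch) of the timezone-offset computation that replaces the old `(now() - utcnow()).seconds` expression in the RepoModel and RepoGroupModel `last_change()` helpers above; `total_seconds()` keeps the sign of the offset, whereas `timedelta.seconds` alone is always non-negative and so misreports zones west of UTC.

    import datetime
    import time

    def local_utc_offset_seconds(ts=None):
        # naive local time minus naive UTC time == current UTC offset (incl. DST)
        ts = time.time() if ts is None else ts
        delta = (datetime.datetime.fromtimestamp(ts)
                 - datetime.datetime.utcfromtimestamp(ts))
        # total_seconds() preserves the sign; the old `.seconds` attribute
        # wraps negative offsets to a large positive value
        return delta.total_seconds()

    utc_last_change = datetime.datetime(2020, 1, 8, 13, 48, 49)
    local_last_change = utc_last_change + datetime.timedelta(
        seconds=local_utc_offset_seconds())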
repo_group_data = [] for group in repo_group_list: - cs_cache = group.changeset_cache - last_repo_name = cs_cache.get('source_repo_name') - + # NOTE(marcink): because we use only raw column we need to load it like that + changeset_cache = RepoGroup._load_changeset_cache( + '', group._changeset_cache) + last_commit_change = RepoGroup._load_commit_change(changeset_cache) row = { "menu": quick_menu(group.group_name), "name": repo_group_lnk(group.group_name), "name_raw": group.group_name, - "last_change": last_change(group.last_commit_change), - "last_change_raw": datetime_to_time(group.last_commit_change), + + "last_change": last_change(last_commit_change), "last_changeset": "", "last_changeset_raw": "", - "desc": desc(group.description_safe, group.personal), + "desc": desc(group.group_description, group.personal), "top_level_repos": 0, - "owner": user_profile(group.user.username) + "owner": user_profile(group.User.username) } if admin: repo_count = group.repositories.count() @@ -773,6 +774,84 @@ class RepoGroupModel(BaseModel): return repo_group_data + def get_repo_groups_data_table( + self, draw, start, limit, + search_q, order_by, order_dir, + auth_user, repo_group_id): + from rhodecode.model.scm import RepoGroupList + + _perms = ['group.read', 'group.write', 'group.admin'] + repo_groups = RepoGroup.query() \ + .filter(RepoGroup.group_parent_id == repo_group_id) \ + .all() + auth_repo_group_list = RepoGroupList( + repo_groups, perm_set=_perms, + extra_kwargs=dict(user=auth_user)) + + allowed_ids = [-1] + for repo_group in auth_repo_group_list: + allowed_ids.append(repo_group.group_id) + + repo_groups_data_total_count = RepoGroup.query() \ + .filter(RepoGroup.group_parent_id == repo_group_id) \ + .filter(or_( + # generate multiple IN to fix limitation problems + *in_filter_generator(RepoGroup.group_id, allowed_ids)) + ) \ + .count() + + base_q = Session.query( + RepoGroup.group_name, + RepoGroup.group_name_hash, + RepoGroup.group_description, + RepoGroup.group_id, + RepoGroup.personal, + RepoGroup.updated_on, + RepoGroup._changeset_cache, + User, + ) \ + .filter(RepoGroup.group_parent_id == repo_group_id) \ + .filter(or_( + # generate multiple IN to fix limitation problems + *in_filter_generator(RepoGroup.group_id, allowed_ids)) + ) \ + .join(User, User.user_id == RepoGroup.user_id) \ + .group_by(RepoGroup, User) + + repo_groups_data_total_filtered_count = base_q.count() + + sort_defined = False + + if order_by == 'group_name': + sort_col = func.lower(RepoGroup.group_name) + sort_defined = True + elif order_by == 'user_username': + sort_col = User.username + else: + sort_col = getattr(RepoGroup, order_by, None) + + if sort_defined or sort_col: + if order_dir == 'asc': + sort_col = sort_col.asc() + else: + sort_col = sort_col.desc() + + base_q = base_q.order_by(sort_col) + base_q = base_q.offset(start).limit(limit) + + repo_group_list = base_q.all() + + repo_groups_data = RepoGroupModel().get_repo_groups_as_dict( + repo_group_list=repo_group_list, admin=False) + + data = ({ + 'draw': draw, + 'data': repo_groups_data, + 'recordsTotal': repo_groups_data_total_count, + 'recordsFiltered': repo_groups_data_total_filtered_count, + }) + return data + def _get_defaults(self, repo_group_name): repo_group = RepoGroup.get_by_group_name(repo_group_name) diff --git a/rhodecode/model/scm.py b/rhodecode/model/scm.py --- a/rhodecode/model/scm.py +++ b/rhodecode/model/scm.py @@ -47,8 +47,9 @@ from rhodecode.lib.utils2 import (safe_s from rhodecode.lib.system_info import get_system_info from rhodecode.model 
import BaseModel from rhodecode.model.db import ( + or_, false, Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup, - PullRequest) + PullRequest, FileStore) from rhodecode.model.settings import VcsSettingsModel from rhodecode.model.validation_schema.validators import url_validator, InvalidCloneUrl @@ -125,7 +126,7 @@ class _PermCheckIterator(object): self.obj_list = obj_list self.obj_attr = obj_attr self.perm_set = perm_set - self.perm_checker = perm_checker + self.perm_checker = perm_checker(*self.perm_set) self.extra_kwargs = extra_kwargs or {} def __len__(self): @@ -135,11 +136,11 @@ class _PermCheckIterator(object): return '<%s (%s)>' % (self.__class__.__name__, self.__len__()) def __iter__(self): - checker = self.perm_checker(*self.perm_set) for db_obj in self.obj_list: # check permission at this level - name = getattr(db_obj, self.obj_attr, None) - if not checker(name, self.__class__.__name__, **self.extra_kwargs): + # NOTE(marcink): the __dict__.get() is ~4x faster then getattr() + name = db_obj.__dict__.get(self.obj_attr, None) + if not self.perm_checker(name, self.__class__.__name__, **self.extra_kwargs): continue yield db_obj @@ -149,12 +150,11 @@ class RepoList(_PermCheckIterator): def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None): if not perm_set: - perm_set = [ - 'repository.read', 'repository.write', 'repository.admin'] + perm_set = ['repository.read', 'repository.write', 'repository.admin'] super(RepoList, self).__init__( obj_list=db_repo_list, - obj_attr='repo_name', perm_set=perm_set, + obj_attr='_repo_name', perm_set=perm_set, perm_checker=HasRepoPermissionAny, extra_kwargs=extra_kwargs) @@ -167,7 +167,7 @@ class RepoGroupList(_PermCheckIterator): super(RepoGroupList, self).__init__( obj_list=db_repo_group_list, - obj_attr='group_name', perm_set=perm_set, + obj_attr='_group_name', perm_set=perm_set, perm_checker=HasRepoGroupPermissionAny, extra_kwargs=extra_kwargs) @@ -226,8 +226,9 @@ class ScmModel(BaseModel): raise RepositoryError('Duplicate repository name %s ' 'found in %s' % (name, path)) elif path[0] in rhodecode.BACKENDS: - klass = get_backend(path[0]) - repos[name] = klass(path[1], config=config) + backend = get_backend(path[0]) + repos[name] = backend(path[1], config=config, + with_wire={"cache": False}) except OSError: continue log.debug('found %s paths with repositories', len(repos)) @@ -363,6 +364,12 @@ class ScmModel(BaseModel): .filter(PullRequest.target_repo == repo)\ .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count() + def get_artifacts(self, repo): + repo = self._get_repo(repo) + return self.sa.query(FileStore)\ + .filter(FileStore.repo == repo)\ + .filter(or_(FileStore.hidden == None, FileStore.hidden == false())).count() + def mark_as_fork(self, repo, fork, user): repo = self._get_repo(repo) fork = self._get_repo(fork) @@ -582,6 +589,42 @@ class ScmModel(BaseModel): return _dirs, _files + def get_quick_filter_nodes(self, repo_name, commit_id, root_path='/'): + """ + Generate files for quick filter in files view + """ + + _files = list() + _dirs = list() + try: + _repo = self._get_repo(repo_name) + commit = _repo.scm_instance().get_commit(commit_id=commit_id) + root_path = root_path.lstrip('/') + for __, dirs, files in commit.walk(root_path): + + for f in files: + + _data = { + "name": h.escape(f.unicode_path), + "type": "file", + } + + _files.append(_data) + + for d in dirs: + + _data = { + "name": h.escape(d.unicode_path), + "type": "dir", + } + + _dirs.append(_data) + except RepositoryError: + 
log.exception("Exception in get_quick_filter_nodes") + raise + + return _dirs, _files + def get_node(self, repo_name, commit_id, file_path, extended_info=False, content=False, max_file_bytes=None, cache=True): """ @@ -628,11 +671,14 @@ class ScmModel(BaseModel): size = file_node.size over_size_limit = (max_file_bytes is not None and size > max_file_bytes) full_content = None + all_lines = 0 if not file_node.is_binary and not over_size_limit: full_content = safe_unicode(file_node.content) + all_lines, empty_lines = file_node.count_lines(full_content) file_data.update({ "content": full_content, + "lines": all_lines }) elif content: # get content *without* cache @@ -641,11 +687,14 @@ class ScmModel(BaseModel): over_size_limit = (max_file_bytes is not None and size > max_file_bytes) full_content = None + all_lines = 0 if not is_binary and not over_size_limit: full_content = safe_unicode(_content) + all_lines, empty_lines = file_node.count_lines(full_content) file_data.update({ "content": full_content, + "lines": all_lines }) except RepositoryError: @@ -890,6 +939,21 @@ class ScmModel(BaseModel): def get_unread_journal(self): return self.sa.query(UserLog).count() + @classmethod + def backend_landing_ref(cls, repo_type): + """ + Return a default landing ref based on a repository type. + """ + + landing_ref = { + 'hg': ('branch:default', 'default'), + 'git': ('branch:master', 'master'), + 'svn': ('rev:tip', 'latest tip'), + 'default': ('rev:tip', 'latest tip'), + } + + return landing_ref.get(repo_type) or landing_ref['default'] + def get_repo_landing_revs(self, translator, repo=None): """ Generates select option with tags branches and bookmarks (for hg only) @@ -900,41 +964,56 @@ class ScmModel(BaseModel): _ = translator repo = self._get_repo(repo) - hist_l = [ - ['rev:tip', _('latest tip')] + if repo: + repo_type = repo.repo_type + else: + repo_type = 'default' + + default_landing_ref, landing_ref_lbl = self.backend_landing_ref(repo_type) + + default_ref_options = [ + [default_landing_ref, landing_ref_lbl] ] - choices = [ - 'rev:tip' + default_choices = [ + default_landing_ref ] if not repo: - return choices, hist_l + return default_choices, default_ref_options repo = repo.scm_instance() - branches_group = ( - [(u'branch:%s' % safe_unicode(b), safe_unicode(b)) - for b in repo.branches], - _("Branches")) - hist_l.append(branches_group) + ref_options = [('rev:tip', 'latest tip')] + choices = ['rev:tip'] + + # branches + branch_group = [(u'branch:%s' % safe_unicode(b), safe_unicode(b)) for b in repo.branches] + if not branch_group: + # new repo, or without maybe a branch? 
+ branch_group = default_ref_options + + branches_group = (branch_group, _("Branches")) + ref_options.append(branches_group) choices.extend([x[0] for x in branches_group[0]]) + # bookmarks for HG if repo.alias == 'hg': bookmarks_group = ( [(u'book:%s' % safe_unicode(b), safe_unicode(b)) for b in repo.bookmarks], _("Bookmarks")) - hist_l.append(bookmarks_group) + ref_options.append(bookmarks_group) choices.extend([x[0] for x in bookmarks_group[0]]) + # tags tags_group = ( [(u'tag:%s' % safe_unicode(t), safe_unicode(t)) for t in repo.tags], _("Tags")) - hist_l.append(tags_group) + ref_options.append(tags_group) choices.extend([x[0] for x in tags_group[0]]) - return choices, hist_l + return choices, ref_options def get_server_info(self, environ=None): server_info = get_system_info(environ) diff --git a/rhodecode/model/settings.py b/rhodecode/model/settings.py --- a/rhodecode/model/settings.py +++ b/rhodecode/model/settings.py @@ -242,7 +242,7 @@ class SettingsModel(BaseModel): region.invalidate() result = _get_all_settings('rhodecode_settings', key) - log.debug('Fetching app settings for key: %s took: %.3fs', key, + log.debug('Fetching app settings for key: %s took: %.4fs', key, inv_context_manager.compute_time) return result @@ -521,6 +521,12 @@ class VcsSettingsModel(object): return self._collect_all_settings(global_=False) @assert_repo_settings + def get_repo_settings_inherited(self): + global_settings = self.get_global_settings() + global_settings.update(self.get_repo_settings()) + return global_settings + + @assert_repo_settings def create_or_update_repo_settings( self, data, inherit_global_settings=False): from rhodecode.model.scm import ScmModel diff --git a/rhodecode/model/user.py b/rhodecode/model/user.py --- a/rhodecode/model/user.py +++ b/rhodecode/model/user.py @@ -37,7 +37,7 @@ from rhodecode.lib.utils2 import ( AttributeDict, str2bool) from rhodecode.lib.exceptions import ( DefaultUserException, UserOwnsReposException, UserOwnsRepoGroupsException, - UserOwnsUserGroupsException, NotAllowedToCreateUserError) + UserOwnsUserGroupsException, NotAllowedToCreateUserError, UserOwnsArtifactsException) from rhodecode.lib.caching_query import FromCache from rhodecode.model import BaseModel from rhodecode.model.auth_token import AuthTokenModel @@ -179,6 +179,7 @@ class UserModel(BaseModel): 'email': user.email, 'firstname': user.name, 'lastname': user.lastname, + 'description': user.description, 'active': user.active, 'admin': user.admin, 'extern_name': user.extern_name, @@ -225,7 +226,8 @@ class UserModel(BaseModel): active=True, admin=False, extern_type=None, extern_name=None, cur_user=None, plugin=None, force_password_change=False, allow_to_create_user=True, create_repo_group=None, - updating_user_id=None, language=None, strict_creation_check=True): + updating_user_id=None, language=None, description='', + strict_creation_check=True): """ Creates a new instance if not found, or updates current one @@ -249,6 +251,8 @@ class UserModel(BaseModel): :param updating_user_id: if we set it up this is the user we want to update this allows to editing username. :param language: language of user from interface. + :param description: user description + :param strict_creation_check: checks for allowed creation license wise etc. :returns: new User object with injected `is_new_user` attribute. 
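A tiny sketch (assuming, as the method body above suggests, that get_global_settings() and get_repo_settings() return plain dicts keyed by setting name; the keys below are made up) of what the new VcsSettingsModel.get_repo_settings_inherited() effectively returns: global settings overlaid by any per-repository overrides.

    def settings_inherited(global_settings, repo_settings):
        merged = dict(global_settings)
        merged.update(repo_settings)  # repository-level values win over global ones
        return merged

    merged = settings_inherited(
        {'example_hook_enabled': True, 'example_publish': True},  # hypothetical keys
        {'example_publish': False})
    assert merged == {'example_hook_enabled': True, 'example_publish': False}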
""" @@ -328,7 +332,7 @@ class UserModel(BaseModel): new_user = User() edit = False else: - log.debug('updating user %s', username) + log.debug('updating user `%s`', username) events.trigger(events.UserPreUpdate(user, user_data)) new_user = user edit = True @@ -356,6 +360,7 @@ class UserModel(BaseModel): new_user.extern_type = safe_unicode(extern_type) new_user.name = firstname new_user.lastname = lastname + new_user.description = description # set password only if creating an user or password is changed if not edit or _password_change(new_user, password): @@ -407,6 +412,10 @@ class UserModel(BaseModel): self.sa.flush() user_data = new_user.get_dict() + user_data.update({ + 'first_name': user_data.get('firstname'), + 'last_name': user_data.get('lastname'), + }) kwargs = { # use SQLALCHEMY safe dump of user data 'user': AttributeDict(user_data), @@ -505,11 +514,37 @@ class UserModel(BaseModel): # if nothing is done we have left overs left return left_overs + def _handle_user_artifacts(self, username, artifacts, handle_mode=None): + _superadmin = self.cls.get_first_super_admin() + left_overs = True + + if handle_mode == 'detach': + for a in artifacts: + a.upload_user = _superadmin + # set description we know why we super admin now owns + # additional artifacts that were orphaned ! + a.file_description += ' \n::detached artifact from deleted user: %s' % (username,) + self.sa.add(a) + left_overs = False + elif handle_mode == 'delete': + from rhodecode.apps.file_store import utils as store_utils + storage = store_utils.get_file_storage(self.request.registry.settings) + for a in artifacts: + file_uid = a.file_uid + storage.delete(file_uid) + self.sa.delete(a) + + left_overs = False + + # if nothing is done we have left overs left + return left_overs + def delete(self, user, cur_user=None, handle_repos=None, - handle_repo_groups=None, handle_user_groups=None): + handle_repo_groups=None, handle_user_groups=None, handle_artifacts=None): + from rhodecode.lib.hooks_base import log_delete_user + if not cur_user: - cur_user = getattr( - get_current_rhodecode_user(), 'username', None) + cur_user = getattr(get_current_rhodecode_user(), 'username', None) user = self._get_user(user) try: @@ -547,12 +582,23 @@ class UserModel(BaseModel): u'removed. Switch owners or remove those user groups:%s' % (user.username, len(user_groups), ', '.join(user_groups))) + left_overs = self._handle_user_artifacts( + user.username, user.artifacts, handle_artifacts) + if left_overs and user.artifacts: + artifacts = [x.file_uid for x in user.artifacts] + raise UserOwnsArtifactsException( + u'user "%s" still owns %s artifacts and cannot be ' + u'removed. Switch owners or remove those artifacts:%s' + % (user.username, len(artifacts), ', '.join(artifacts))) + + user_data = user.get_dict() # fetch user data before expire + # we might change the user data with detach/delete, make sure # the object is marked as expired before actually deleting ! 
self.sa.expire(user) self.sa.delete(user) - from rhodecode.lib.hooks_base import log_delete_user - log_delete_user(deleted_by=cur_user, **user.get_dict()) + + log_delete_user(deleted_by=cur_user, **user_data) except Exception: log.error(traceback.format_exc()) raise @@ -570,7 +616,8 @@ class UserModel(BaseModel): 'password_reset_url': pwd_reset_url, 'user': user, 'email': user_email, - 'date': datetime.datetime.now() + 'date': datetime.datetime.now(), + 'first_admin_email': User.get_first_super_admin().email } (subject, headers, email_body, @@ -628,7 +675,8 @@ class UserModel(BaseModel): 'new_password': new_passwd, 'user': user, 'email': user_email, - 'date': datetime.datetime.now() + 'date': datetime.datetime.now(), + 'first_admin_email': User.get_first_super_admin().email } (subject, headers, email_body, @@ -697,19 +745,40 @@ class UserModel(BaseModel): return False log.debug('AuthUser: filling found user:%s data', dbuser) - user_data = dbuser.get_dict() - user_data.update({ - # set explicit the safe escaped values + attrs = { + 'user_id': dbuser.user_id, + 'username': dbuser.username, + 'name': dbuser.name, 'first_name': dbuser.first_name, + 'firstname': dbuser.firstname, 'last_name': dbuser.last_name, - }) + 'lastname': dbuser.lastname, + 'admin': dbuser.admin, + 'active': dbuser.active, + + 'email': dbuser.email, + 'emails': dbuser.emails_cached(), + 'short_contact': dbuser.short_contact, + 'full_contact': dbuser.full_contact, + 'full_name': dbuser.full_name, + 'full_name_or_username': dbuser.full_name_or_username, - for k, v in user_data.items(): - # properties of auth user we dont update - if k not in ['auth_tokens', 'permissions']: - setattr(auth_user, k, v) + '_api_key': dbuser._api_key, + '_user_data': dbuser._user_data, + + 'created_on': dbuser.created_on, + 'extern_name': dbuser.extern_name, + 'extern_type': dbuser.extern_type, + 'inherit_default_permissions': dbuser.inherit_default_permissions, + + 'language': dbuser.language, + 'last_activity': dbuser.last_activity, + 'last_login': dbuser.last_login, + 'password': dbuser.password, + } + auth_user.__dict__.update(attrs) except Exception: log.error(traceback.format_exc()) auth_user.is_authenticated = False diff --git a/rhodecode/model/validation_schema/schemas/repo_schema.py b/rhodecode/model/validation_schema/schemas/repo_schema.py --- a/rhodecode/model/validation_schema/schemas/repo_schema.py +++ b/rhodecode/model/validation_schema/schemas/repo_schema.py @@ -26,6 +26,11 @@ from rhodecode.model.validation_schema.u from rhodecode.model.validation_schema import validators, preparers, types DEFAULT_LANDING_REF = 'rev:tip' +DEFAULT_BACKEND_LANDING_REF = { + 'hg': 'branch:default', + 'git': 'branch:master', + 'svn': 'rev:tip', +} def get_group_and_repo(repo_name): @@ -74,8 +79,14 @@ def deferred_sync_uri_validator(node, kw @colander.deferred def deferred_landing_ref_widget(node, kw): - items = kw.get( - 'repo_ref_items', [(DEFAULT_LANDING_REF, DEFAULT_LANDING_REF)]) + repo_type = kw.get('repo_type') + default_opts = [] + if repo_type: + default_opts.append( + (DEFAULT_BACKEND_LANDING_REF[repo_type], + DEFAULT_BACKEND_LANDING_REF[repo_type])) + + items = kw.get('repo_ref_items', default_opts) items = convert_to_optgroup(items) return deform.widget.Select2Widget(values=items) diff --git a/rhodecode/model/validation_schema/schemas/search_schema.py b/rhodecode/model/validation_schema/schemas/search_schema.py --- a/rhodecode/model/validation_schema/schemas/search_schema.py +++ b/rhodecode/model/validation_schema/schemas/search_schema.py 
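The search_schema.py hunk that follows widens search_sort from the fixed oldfirst/newfirst pair to direction-prefixed values; a short standalone sketch of the equivalent acceptance rule (illustrative only, not the colander validator itself):

    import re

    def is_valid_search_sort(value):
        # legacy keywords, or an explicit direction prefix such as asc:date / desc:date
        return value in ('oldfirst', 'newfirst') or bool(re.match(r'^(asc|desc):', value))

    assert is_valid_search_sort('newfirst')
    assert is_valid_search_sort('desc:date')   # the new default
    assert not is_valid_search_sort('random')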
@@ -22,6 +22,18 @@ import colander +def sort_validator(node, value): + if value in ['oldfirst', 'newfirst']: + return value + if value.startswith('asc:'): + return value + if value.startswith('desc:'): + return value + + msg = u'Invalid search sort, must be `oldfirst`, `newfirst`, or start with asc: or desc:' + raise colander.Invalid(node, msg) + + class SearchParamsSchema(colander.MappingSchema): search_query = colander.SchemaNode( colander.String(), @@ -32,8 +44,8 @@ class SearchParamsSchema(colander.Mappin validator=colander.OneOf(['content', 'path', 'commit', 'repository'])) search_sort = colander.SchemaNode( colander.String(), - missing='newfirst', - validator=colander.OneOf(['oldfirst', 'newfirst'])) + missing='desc:date', + validator=sort_validator) search_max_lines = colander.SchemaNode( colander.Integer(), missing=10) diff --git a/rhodecode/model/validation_schema/schemas/user_schema.py b/rhodecode/model/validation_schema/schemas/user_schema.py --- a/rhodecode/model/validation_schema/schemas/user_schema.py +++ b/rhodecode/model/validation_schema/schemas/user_schema.py @@ -103,6 +103,9 @@ class UserSchema(colander.Schema): last_name = colander.SchemaNode( colander.String(), missing='') + description = colander.SchemaNode( + colander.String(), missing='') + active = colander.SchemaNode( types.StringBooleanType(), missing=False) @@ -167,6 +170,12 @@ class UserProfileSchema(colander.Schema) lastname = colander.SchemaNode( colander.String(), missing='', title='Last name') + description = colander.SchemaNode( + colander.String(), missing='', title='Personal Description', + widget=forms.widget.TextAreaWidget(), + validator=colander.Length(max=250) + ) + email = colander.SchemaNode( colander.String(), widget=deferred_user_email_in_emails_widget, validator=deferred_user_email_in_emails_validator, @@ -176,6 +185,7 @@ class UserProfileSchema(colander.Schema) ) + class AddEmailSchema(colander.Schema): current_password = colander.SchemaNode( colander.String(), diff --git a/rhodecode/public/502.html b/rhodecode/public/502.html --- a/rhodecode/public/502.html +++ b/rhodecode/public/502.html @@ -2,6 +2,7 @@ Error - 502 Bad Gateway + diff --git a/rhodecode/public/css/buttons.less b/rhodecode/public/css/buttons.less --- a/rhodecode/public/css/buttons.less +++ b/rhodecode/public/css/buttons.less @@ -47,10 +47,16 @@ input[type="button"] { &:active { outline:none; } + &:hover { color: @rcdarkblue; - background-color: @white; - .border ( @border-thickness, @grey4 ); + background-color: @grey6; + + } + + &.btn-active { + color: @rcdarkblue; + background-color: @grey6; } .icon-remove { @@ -71,6 +77,8 @@ input[type="button"] { margin: 0 0 0 0; } + + } @@ -97,6 +105,11 @@ input[type="button"] { .border ( @border-thickness-buttons, @grey5 ); background-color: transparent; } + &.btn-active { + color: @rcdarkblue; + background-color: @white; + .border ( @border-thickness, @rcdarkblue ); + } } .btn-primary, @@ -214,10 +227,78 @@ input[type="button"] { display: inline-block; .btn { float: left; - margin: 0 0 0 -1px; + margin: 0 0 0 0; + // first item + &:first-of-type:not(:last-of-type) { + border-radius: @border-radius 0 0 @border-radius; + + } + // middle elements + &:not(:first-of-type):not(:last-of-type) { + border-radius: 0; + border-left-width: 0; + border-right-width: 0; + } + // last item + &:last-of-type:not(:first-of-type) { + border-radius: 0 @border-radius @border-radius 0; + } + + &:only-child { + border-radius: @border-radius; + } + } + +} + + +.btn-group-actions { + position: relative; + z-index: 
100; + + &:not(.open) .btn-action-switcher-container { + display: none; } } + +.btn-action-switcher-container{ + position: absolute; + top: 30px; + left: 0px; +} + +.btn-action-switcher { + display: block; + position: relative; + z-index: 300; + min-width: 240px; + max-width: 500px; + margin-top: 4px; + margin-bottom: 24px; + font-size: 14px; + font-weight: 400; + padding: 8px 0; + background-color: #fff; + border: 1px solid @grey4; + border-radius: 3px; + box-shadow: @dropdown-shadow; + + li { + display: block; + text-align: left; + list-style: none; + padding: 5px 10px; + } + + li .action-help-block { + font-size: 10px; + line-height: normal; + color: @grey4; + } + +} + .btn-link { background: transparent; border: none; diff --git a/rhodecode/public/css/code-block.less b/rhodecode/public/css/code-block.less --- a/rhodecode/public/css/code-block.less +++ b/rhodecode/public/css/code-block.less @@ -302,11 +302,15 @@ table.code-difftable { // Comments - -div.comment:target { +.comment-selected-hl { border-left: 6px solid @comment-highlight-color !important; - padding-left: 3px; - margin-left: -9px; + padding-left: 3px !important; + margin-left: -7px !important; +} + +div.comment:target, +div.comment-outdated:target { + .comment-selected-hl; } //TODO: anderson: can't get an absolute number out of anything, so had to put the @@ -414,7 +418,16 @@ div.codeblock { } .gist_url { - padding: 0px 0px 10px 0px; + padding: 0px 0px 35px 0px; + } + + .gist-desc { + clear: both; + margin: 0 0 10px 0; + code { + white-space: pre-line; + line-height: inherit + } } .author { @@ -682,13 +695,14 @@ input.filediff-collapse-state { display: none } .filediff-collapse-indicator { - width: 0; - height: 0; - border-style: solid; - border-width: 4.5px 0 4.5px 9.3px; - border-color: transparent transparent transparent #aaa; - margin: 6px 0px; + float: left; + cursor: pointer; + margin: 1px -5px; } + .filediff-collapse-indicator:before { + content: '\f105'; + } + .filediff-menu { display: none; } @@ -696,18 +710,20 @@ input.filediff-collapse-state { } &+ .filediff { /* file diff is expanded */ + .filediff-collapse-indicator { - width: 0; - height: 0; - border-style: solid; - border-width: 9.3px 4.5px 0 4.5px; - border-color: #aaa transparent transparent transparent; - margin: 6px 0px; + float: left; + cursor: pointer; + margin: 1px -5px; + } + .filediff-collapse-indicator:before { + content: '\f107'; + } - } .filediff-menu { display: block; } + margin: 10px 0; &:nth-child(2) { margin: 0; @@ -735,14 +751,14 @@ input.filediff-collapse-state { #diff-file-sticky{ will-change: min-height; + height: 80px; } .sidebar__inner{ transform: translate(0, 0); /* For browsers don't support translate3d. 
*/ transform: translate3d(0, 0, 0); will-change: position, transform; - height: 70px; - z-index: 30; + height: 65px; background-color: #fff; padding: 5px 0px; } @@ -757,28 +773,41 @@ input.filediff-collapse-state { } .is-affixed { + + .sidebar__inner { + z-index: 30; + } + .sidebar_inner_shadow { position: fixed; top: 75px; right: -100%; left: -100%; - z-index: 28; + z-index: 30; display: block; height: 5px; content: ""; background: linear-gradient(rgba(0, 0, 0, 0.075), rgba(0, 0, 0, 0.001)) repeat-x 0 0; border-top: 1px solid rgba(0, 0, 0, 0.15); } + .fpath-placeholder { visibility: visible !important; } } .diffset-menu { - margin-bottom: 20px; + } + +#todo-box { + clear:both; + display: none; + text-align: right +} + .diffset { - margin: 20px auto; + margin: 0px auto; .diffset-heading { border: 1px solid @grey5; margin-bottom: -1px; @@ -826,6 +855,8 @@ input.filediff-collapse-state { margin-right: 3px; font-size: 12px; font-weight: normal; + min-width: 30px; + text-align: center; &:first-child { border-radius: @border-radius 0 0 @border-radius; @@ -877,18 +908,10 @@ input.filediff-collapse-state { } } - .filediff-collapse-indicator { - border-style: solid; - float: left; - margin: 4px 0px 0 0; - cursor: pointer; - } - .filediff-heading { - background: @grey7; cursor: pointer; display: block; - padding: 5px 10px; + padding: 10px 10px; } .filediff-heading:after { content: ""; @@ -900,9 +923,9 @@ input.filediff-collapse-state { } .filediff-menu { - float: right; text-align: right; padding: 5px 5px 5px 0px; + background: @grey7; &> a, &> span { @@ -958,8 +981,16 @@ input.filediff-collapse-state { } +.op-added { + color: @alert1; +} + +.op-deleted { + color: @alert2; +} .filediff, .filelist { + .pill { &[op="name"] { background: none; diff --git a/rhodecode/public/css/codemirror.less b/rhodecode/public/css/codemirror.less --- a/rhodecode/public/css/codemirror.less +++ b/rhodecode/public/css/codemirror.less @@ -5,6 +5,7 @@ font-family: monospace; height: 300px; color: black; + direction: ltr; border-radius: @border-radius; border: @border-thickness solid @grey6; margin: 0 0 @padding; @@ -15,7 +16,8 @@ .CodeMirror-lines { padding: 4px 0; /* Vertical padding around content */ } -.CodeMirror pre { +.CodeMirror pre.CodeMirror-line, +.CodeMirror pre.CodeMirror-line-like { padding: 0 4px; /* Horizontal padding of content */ } @@ -44,28 +46,36 @@ /* CURSOR */ -.CodeMirror div.CodeMirror-cursor { +.CodeMirror-cursor { border-left: 1px solid black; + border-right: none; + width: 0; } /* Shown when moving in bi-directional text */ .CodeMirror div.CodeMirror-secondarycursor { border-left: 1px solid silver; } -.CodeMirror.cm-fat-cursor div.CodeMirror-cursor { +.cm-fat-cursor .CodeMirror-cursor { width: auto; - border: 0; + border: 0 !important; background: @grey6; } -.CodeMirror.cm-fat-cursor div.CodeMirror-cursors { +.cm-fat-cursor div.CodeMirror-cursors { z-index: 1; } - +.cm-fat-cursor-mark { + background-color: rgba(20, 255, 20, 0.5); + -webkit-animation: blink 1.06s steps(1) infinite; + -moz-animation: blink 1.06s steps(1) infinite; + animation: blink 1.06s steps(1) infinite; +} .cm-animate-fat-cursor { width: auto; border: 0; -webkit-animation: blink 1.06s steps(1) infinite; -moz-animation: blink 1.06s steps(1) infinite; animation: blink 1.06s steps(1) infinite; + background-color: #7e7; } @-moz-keyframes blink { 0% { background: #7e7; } @@ -84,12 +94,18 @@ } /* Can style cursor different in overwrite (non-insert) mode */ -div.CodeMirror-overwrite div.CodeMirror-cursor {} +.CodeMirror-overwrite 
.CodeMirror-cursor {} .cm-tab { display: inline-block; text-decoration: inherit; } +.CodeMirror-rulers { + position: absolute; + left: 0; right: 0; top: -50px; bottom: 0; + overflow: hidden; +} .CodeMirror-ruler { border-left: 1px solid #ccc; + top: 0; bottom: 0; position: absolute; } @@ -113,7 +129,7 @@ div.CodeMirror-overwrite div.CodeMirror- .cm-s-default .cm-property, .cm-s-default .cm-operator {} .cm-s-default .cm-variable-2 {color: #05a;} -.cm-s-default .cm-variable-3 {color: #085;} +.cm-s-default .cm-variable-3, .cm-s-default .cm-type {color: #085;} .cm-s-default .cm-comment {color: #a50;} .cm-s-default .cm-string {color: #a11;} .cm-s-default .cm-string-2 {color: #f50;} @@ -133,8 +149,8 @@ div.CodeMirror-overwrite div.CodeMirror- /* Default styles for common addons */ -div.CodeMirror span.CodeMirror-matchingbracket {color: #0f0;} -div.CodeMirror span.CodeMirror-nonmatchingbracket {color: #f22;} +div.CodeMirror span.CodeMirror-matchingbracket {color: #0b0;} +div.CodeMirror span.CodeMirror-nonmatchingbracket {color: #a22;} .CodeMirror-matchingtag { background: rgba(255, 150, 0, .3); } .CodeMirror-activeline-background {background: #e8f2ff;} @@ -191,20 +207,21 @@ div.CodeMirror span.CodeMirror-nonmatchi .CodeMirror-gutters { position: absolute; left: 0; top: 0; + min-height: 100%; z-index: 3; } .CodeMirror-gutter { white-space: normal; height: 100%; display: inline-block; + vertical-align: top; margin-bottom: -30px; - /* Hack to make IE7 behave */ - *zoom:1; - *display:inline; } .CodeMirror-gutter-wrapper { position: absolute; z-index: 4; + background: none !important; + border: none !important; height: 100%; } .CodeMirror-gutter-background { @@ -227,7 +244,8 @@ div.CodeMirror span.CodeMirror-nonmatchi cursor: text; min-height: 1px; /* prevents collapsing before first draw */ } -.CodeMirror pre { +.CodeMirror pre.CodeMirror-line, +.CodeMirror pre.CodeMirror-line-like { /* Reset some styles that the rest of the page might have set */ -moz-border-radius: 0; -webkit-border-radius: 0; border-radius: 0; border-width: 0; @@ -243,8 +261,11 @@ div.CodeMirror span.CodeMirror-nonmatchi position: relative; overflow: visible; -webkit-tap-highlight-color: transparent; + -webkit-font-variant-ligatures: contextual; + font-variant-ligatures: contextual; } -.CodeMirror-wrap pre { +.CodeMirror-wrap pre.CodeMirror-line, +.CodeMirror-wrap pre.CodeMirror-line-like { word-wrap: break-word; white-space: pre-wrap; word-break: normal; @@ -259,11 +280,14 @@ div.CodeMirror span.CodeMirror-nonmatchi .CodeMirror-linewidget { position: relative; z-index: 2; + padding: 0.1px; /* Force widget margins to stay inside of the container */ overflow: auto; } .CodeMirror-widget {} +.CodeMirror-rtl pre { direction: rtl; } + .CodeMirror-code { outline: none; } @@ -274,8 +298,9 @@ div.CodeMirror span.CodeMirror-nonmatchi .CodeMirror-gutter, .CodeMirror-gutters, .CodeMirror-linenumber { - -moz-box-sizing: content-box; - box-sizing: content-box; + /* RhodeCode added !important, to fix diffs rule */ + -moz-box-sizing: content-box !important; + box-sizing: content-box !important; } .CodeMirror-measure { @@ -286,13 +311,12 @@ div.CodeMirror span.CodeMirror-nonmatchi visibility: hidden; } - -.CodeMirror div.CodeMirror-cursor { +.CodeMirror-cursor { position: absolute; + pointer-events: none; border-right: none; width: 0; } - .CodeMirror-measure pre { position: static; } div.CodeMirror-cursors { @@ -315,13 +339,10 @@ div.CodeMirror-dragcursors { .CodeMirror-line::-moz-selection, .CodeMirror-line > span::-moz-selection, 
.CodeMirror-line > span > span::-moz-selection { background: #d7d4f0; } .cm-searching { - background: #ffa; - background: rgba(255, 255, 0, .4); + background-color: #ffa; + background-color: rgba(255, 255, 0, .4); } -/* IE7 hack to prevent it from returning funny offsetTops on the spans */ -.CodeMirror span { *vertical-align: text-bottom; } - /* Used to force a border model for a node */ .cm-force-border { padding-right: .1px; } diff --git a/rhodecode/public/css/comments.less b/rhodecode/public/css/comments.less --- a/rhodecode/public/css/comments.less +++ b/rhodecode/public/css/comments.less @@ -10,6 +10,18 @@ width: 100%; } +.comments-heading { + margin-bottom: -1px; + background: @grey6; + display: block; + padding: 10px 0px; + font-size: 18px +} + +#comment-tr-show { + padding: 5px 0; +} + tr.inline-comments div { max-width: 100%; @@ -219,11 +231,6 @@ tr.inline-comments div { } } - .flag_status { - display: inline-block; - margin: -2px .5em 0 .25em - } - .delete-comment { display: inline-block; color: @rcblue; @@ -384,8 +391,12 @@ form.comment-form { display: inline-block; } + .status_box { + margin-left: 10px; + } + .action-buttons { - float: right; + float: left; display: inline-block; } @@ -418,7 +429,7 @@ form.comment-form { } .comment-footer { - margin-bottom: 110px; + margin-bottom: 50px; margin-top: 10px; } } @@ -514,7 +525,7 @@ form.comment-form { } .comment-area { - padding: 8px 12px; + padding: 6px 8px; border: 1px solid @grey5; .border-radius(@border-radius); @@ -524,6 +535,14 @@ form.comment-form { } +comment-area-text { + color: @grey3; +} + +.comment-area-header { + height: 35px; +} + .comment-area-header .nav-links { display: flex; flex-flow: row wrap; @@ -532,13 +551,40 @@ form.comment-form { } .comment-area-footer { - display: flex; + min-height: 30px; } .comment-footer .toolbar { } +.comment-attachment-uploader { + border: 1px dashed white; + border-radius: @border-radius; + margin-top: -10px; + line-height: 30px; + &.dz-drag-hover { + border-color: @grey3; + } + + .dz-error-message { + padding-top: 0; + } +} + +.comment-attachment-text { + clear: both; + font-size: 11px; + color: #8F8F8F; + width: 100%; + .pick-attachment { + color: #8F8F8F; + } + .pick-attachment:hover { + color: @rcblue; + } +} + .nav-links { padding: 0; margin: 0; @@ -571,8 +617,13 @@ form.comment-form { } .toolbar-text { - float: left; - margin: -5px 0px 0px 0px; - font-size: 12px; + float: right; + font-size: 11px; + color: @grey4; + text-align: right; + + a { + color: @grey4; + } } diff --git a/rhodecode/public/css/deform.less b/rhodecode/public/css/deform.less --- a/rhodecode/public/css/deform.less +++ b/rhodecode/public/css/deform.less @@ -11,18 +11,20 @@ } .control-label { - width: 200px; + width: 220px; padding: 10px 0px; float: left; } .control-inputs { - width: 400px; + min-width: 400px; float: left; } - .form-group .radio, .form-group .checkbox { + .form-group .radio, + .form-group .checkbox { position: relative; display: block; - /* margin-bottom: 10px; */ + padding: 10px 0; + margin-bottom: 0; } .form-group { @@ -51,6 +53,10 @@ cursor: not-allowed; } + .alert { + margin: 10px 0; + } + .error-block { color: red; margin: 0; @@ -87,7 +93,7 @@ } .form-control.select2-container { - height: 40px; + } .deform-full-field-sequence.control-inputs { diff --git a/rhodecode/public/css/forms.less b/rhodecode/public/css/forms.less --- a/rhodecode/public/css/forms.less +++ b/rhodecode/public/css/forms.less @@ -273,6 +273,85 @@ form.rcform { } +.rcform-element { + + label { display: inline; 
border:none; padding:0; } + .label { display: none; } + + label:not(#ie) { + cursor: pointer; + display: inline-block; + position: relative; + background: white; + border-radius: 4px; + box-shadow: none; + + &:hover::after { + opacity: 0.5; + } + } + + input[type="radio"], + input[type="checkbox"] { + padding: 0; + border: none; + } + + input[type="radio"]:not(#ie), + input[type="checkbox"]:not(#ie) { + // Hide the input, but have it still be clickable + opacity: 0; + float: left; + height: 0; + width: 0; + margin: 0; + padding: 0; + } + input[type='radio'] + label:not(#ie), + input[type='checkbox'] + label:not(#ie) { + margin: 0; + clear: none; + } + + input[type='radio'] + label:not(#ie) { + .circle (@form-radio-width,white); + float: left; + display: inline-block; + height: @form-radio-width; + width: @form-radio-width; + margin: 2px 2px 2px 0; + border: 1px solid @grey4; + background-color: white; + box-shadow: none; + text-indent: -9999px; + transition: none; + + & + .label { + float: left; + margin-top: 7px + } + } + + input[type='radio']:checked + label:not(#ie) { + margin: 0 0px 0 -2px; + padding: 3px; + border-style: double; + border-color: white; + border-width: thick; + background-color: @rcblue; + box-shadow: none; + } + + fieldset { + .label:not(#ie) { + display: inline; + margin: 0 1em 0 .5em; + line-height: 1em; + } + } + +} + .badged-field { .user-badge { line-height: 25px; diff --git a/rhodecode/public/css/helpers.less b/rhodecode/public/css/helpers.less --- a/rhodecode/public/css/helpers.less +++ b/rhodecode/public/css/helpers.less @@ -27,6 +27,10 @@ a { cursor: pointer; } display: block; } +.clear-both { + clear: both; +} + .pull-right { float: right !important; } @@ -48,6 +52,16 @@ a { cursor: pointer; } } } +.noselect { + -webkit-touch-callout: none; /* iOS Safari */ + -webkit-user-select: none; /* Safari */ + -khtml-user-select: none; /* Konqueror HTML */ + -moz-user-select: none; /* Firefox */ + -ms-user-select: none; /* Internet Explorer/Edge */ + user-select: none; /* Non-prefixed version, currently + supported by Chrome and Opera */ +} + //--- DEVICE-SPECIFIC CLASSES ---------------// //regular tablet and up @media (min-width:768px) { diff --git a/rhodecode/public/css/legacy_code_styles.less b/rhodecode/public/css/legacy_code_styles.less --- a/rhodecode/public/css/legacy_code_styles.less +++ b/rhodecode/public/css/legacy_code_styles.less @@ -204,6 +204,7 @@ div.markdown-block img { border-style: none; background-color: #fff; padding-right: 20px; + max-width: 100%; } diff --git a/rhodecode/public/css/login.less b/rhodecode/public/css/login.less --- a/rhodecode/public/css/login.less +++ b/rhodecode/public/css/login.less @@ -213,12 +213,9 @@ a { padding: 0 !important; - color: @rcblue !important; } } - a.bookmark-item { - color: @rcblue !important; - } + } @@ -281,7 +278,7 @@ color: @grey4; &:hover { - color: @grey2; + color: @rcblue; } } diff --git a/rhodecode/public/css/main.less b/rhodecode/public/css/main.less --- a/rhodecode/public/css/main.less +++ b/rhodecode/public/css/main.less @@ -26,6 +26,7 @@ @import 'panels-bootstrap'; @import 'panels'; @import 'deform'; +@import 'tooltips'; //--- BASE ------------------// .noscript-error { @@ -105,13 +106,17 @@ input + .action-link, .action-link.first cursor: inherit; } + .clipboard-action { cursor: pointer; - color: @grey4; margin-left: 5px; - &:hover { - color: @grey2; + &:not(.no-grey) { + + &:hover { + color: @grey2; + } + color: @grey4; } } @@ -239,6 +244,7 @@ input.inline[type="file"] { // for the header and then 
remove the min-height. I chose a smaller value // intentionally here to avoid rendering issues in the main navigation. min-height: 49px; + min-width: 1024px; position: relative; vertical-align: bottom; @@ -373,8 +379,9 @@ ul.auth_plugins { font-family: @text-bold; } -.pr-origininfo, .pr-targetinfo { +.pr-commit-flow { position: relative; + font-weight: 600; .tag { display: inline-block; @@ -407,17 +414,42 @@ ul.auth_plugins { padding: 0 0; } - -#pr-title-input { - width: 72%; +.pr-title-input { + width: 80%; font-size: 1em; - margin: 0; - padding: 0 0 0 @padding/4; + margin: 0 0 4px 0; + padding: 0; line-height: 1.7em; color: @text-color; letter-spacing: .02em; font-weight: @text-bold-weight; font-family: @text-bold; + + &:hover { + box-shadow: none; + } +} + +#pr-title { + input { + border: 1px transparent; + color: black; + opacity: 1; + background: #fff; + } +} + +.pr-title-closed-tag { + font-size: 16px; +} + +#pr-desc { + padding: 10px 0; + + .markdown-block { + padding: 0; + margin-bottom: -30px; + } } #pullrequest_title { @@ -433,6 +465,31 @@ ul.auth_plugins { overflow: hidden; } +.pr-details-title { + height: 16px +} + +.pr-details-title-author-pref { + padding-right: 10px +} + +.label-pr-detail { + display: table-cell; + width: 120px; + padding-top: 7.5px; + padding-bottom: 7.5px; + padding-right: 7.5px; +} + +.source-details ul { + padding: 10px 16px; +} + +.source-details-action { + color: @grey4; + font-size: 11px +} + .pr-submit-button { float: right; margin: 0 0 0 5px; @@ -451,6 +508,23 @@ ul.auth_plugins { vertical-align: top; } +#open_edit_pullrequest { + padding: 0; +} + +#close_edit_pullrequest { + +} + +#delete_pullrequest { + clear: inherit; + + form { + display: inline; + } + +} + .perms_section_head { min-width: 625px; @@ -755,6 +829,11 @@ label { padding: 0 0 0 .17em; line-height: 1em; } + + & + .no-margin { + margin: 0 + } + } .user-inline-data { @@ -771,7 +850,7 @@ label { max-width: 200px; min-height: (@gravatar-size + @border-thickness * 2); // account for border display: block; - padding: 0 0 0 (@gravatar-size + @basefontsize/2 + @border-thickness * 2); + padding: 0 0 0 (@gravatar-size + @basefontsize/4); .gravatar { @@ -800,6 +879,16 @@ label { } } +.gist-type-fields { + line-height: 30px; + height: 30px; + + .gist-type-fields-wrapper { + vertical-align: middle; + display: inline-block; + line-height: 25px; + } +} // ADMIN SETTINGS @@ -850,7 +939,6 @@ label { .notification-list { div { - display: inline-block; vertical-align: middle; } @@ -982,9 +1070,6 @@ label { } #graph { - .flag_status { - margin: 0; - } .pagination-left { float: left; @@ -1069,6 +1154,9 @@ label { a { color: @grey5 } + @media screen and (max-width: 1200px) { + display: none; + } } img { @@ -1086,26 +1174,52 @@ label { } } -.flag_status { - margin: 2px; +.review-status { &.under_review { - .circle(5px, @alert3); + color: @alert3; } &.approved { - .circle(5px, @alert1); + color: @alert1; } &.rejected, &.forced_closed{ - .circle(5px, @alert2); + color: @alert2; } &.not_reviewed { - .circle(5px, @grey5); + color: @grey5; } } -.flag_status_comment_box { - margin: 5px 6px 0px 2px; +.review-status-under_review { + color: @alert3; +} +.status-tag-under_review { + border-color: @alert3; +} + +.review-status-approved { + color: @alert1; +} +.status-tag-approved { + border-color: @alert1; } + +.review-status-rejected, +.review-status-forced_closed { + color: @alert2; +} +.status-tag-rejected, +.status-tag-forced_closed { + border-color: @alert2; +} + +.review-status-not_reviewed { + color: @grey5; +} 
+.status-tag-not_reviewed { + border-color: @grey5; +} + .test_pattern_preview { margin: @space 0; @@ -1394,11 +1508,9 @@ table.integrations { } .reviewer_status { display: inline-block; - vertical-align: top; width: 25px; min-width: 25px; height: 1.2em; - margin-top: 3px; line-height: 1em; } @@ -1468,14 +1580,52 @@ table.integrations { } } .pr-details-content { - margin-top: @textmargin; - margin-bottom: @textmargin; + margin-top: @textmargin - 5; + margin-bottom: @textmargin - 5; } .pr-reviewer-rules { padding: 10px 0px 20px 0px; } +.todo-resolved { + text-decoration: line-through; +} + +.todo-table { + width: 100%; + + td { + padding: 5px 0px; + } + + .td-todo-number { + text-align: left; + white-space: nowrap; + width: 15%; + } + + .td-todo-gravatar { + width: 5%; + + img { + margin: -3px 0; + } + } + +} + +.todo-comment-text-wrapper { + display: inline-grid; +} + +.todo-comment-text { + margin-left: 5px; + white-space: nowrap; + overflow: hidden; + text-overflow: ellipsis; +} + .group_members { margin-top: 0; padding: 0; @@ -1509,10 +1659,6 @@ table.integrations { margin: -15px 0px 0px 0px; } -.comments-summary-td { - border-top: 1px dashed @grey5; -} - // new entry in group_members .td-author-new-entry { background-color: rgba(red(@alert1), green(@alert1), blue(@alert1), 0.3); @@ -1774,6 +1920,13 @@ BIN_FILENODE = 7 margin-bottom: 20px; } +.pull-request-merge-refresh { + margin: 2px 7px; + a { + color: @grey3; + } +} + .pull-request-merge ul { padding: 0px 0px; } @@ -1826,19 +1979,16 @@ BIN_FILENODE = 7 .pr-versions { font-size: 1.1em; + padding: 7.5px; table { - padding: 0px 5px; + } td { line-height: 15px; } - .flag_status { - margin: 0; - } - .compare-radio-button { position: relative; top: -3px; @@ -1880,10 +2030,6 @@ BIN_FILENODE = 7 .table_disp { .status { width: auto; - - .flag_status { - float: left; - } } } @@ -1926,10 +2072,6 @@ BIN_FILENODE = 7 text-align: left; } - .desc{ - width: 1163px; - } - .delete-notifications, .read-notifications{ width: 35px; min-width: 35px; //fixes when only one button is displayed @@ -2758,31 +2900,31 @@ form.markup-form { .dropzone, .dropzone-pure { - border: 2px dashed @grey5; - border-radius: 5px; - background: white; - min-height: 200px; - padding: 54px; + border: 2px dashed @grey5; + border-radius: 5px; + background: white; + min-height: 200px; + padding: 54px; .dz-message { - font-weight: 700; - text-align: center; - margin: 2em 0; + font-weight: 700; + text-align: center; + margin: 2em 0; } } .dz-preview { - margin: 10px 0 !important; - position: relative; - vertical-align: top; - padding: 10px; - border-bottom: 1px solid @grey5; + margin: 10px 0 !important; + position: relative; + vertical-align: top; + padding: 10px; + border-bottom: 1px solid @grey5; } .dz-filename { - font-weight: 700; - float:left; + font-weight: 700; + float: left; } .dz-sending { @@ -2790,15 +2932,106 @@ form.markup-form { } .dz-response { - clear:both + clear: both } .dz-filename-size { - float:right + float: right } .dz-error-message { - color: @alert2; - padding-top: 10px; - clear: both; + color: @alert2; + padding-top: 10px; + clear: both; +} + + +.user-hovercard { + padding: 5px; +} + +.user-hovercard-icon { + display: inline; + padding: 0; + box-sizing: content-box; + border-radius: 50%; + float: left; +} + +.user-hovercard-name { + float: right; + vertical-align: top; + padding-left: 10px; + min-width: 150px; +} + +.user-hovercard-bio { + clear: both; + padding-top: 10px; +} + +.user-hovercard-header { + clear: both; + min-height: 10px; +} + 
+.user-hovercard-footer { + clear: both; + min-height: 10px; +} + +.user-group-hovercard { + padding: 5px; } + +.user-group-hovercard-icon { + display: inline; + padding: 0; + box-sizing: content-box; + border-radius: 50%; + float: left; +} + +.user-group-hovercard-name { + float: left; + vertical-align: top; + padding-left: 10px; + min-width: 150px; +} + +.user-group-hovercard-icon i { + border: 1px solid @grey4; + border-radius: 4px; +} + +.user-group-hovercard-bio { + clear: both; + padding-top: 10px; + line-height: 1.0em; +} + +.user-group-hovercard-header { + clear: both; + min-height: 10px; +} + +.user-group-hovercard-footer { + clear: both; + min-height: 10px; +} + +.pr-hovercard-header { + clear: both; + display: block; + line-height: 20px; +} + +.pr-hovercard-user { + display: flex; + align-items: center; + padding-left: 5px; +} + +.pr-hovercard-title { + padding-top: 5px; +} \ No newline at end of file diff --git a/rhodecode/public/css/navigation.less b/rhodecode/public/css/navigation.less --- a/rhodecode/public/css/navigation.less +++ b/rhodecode/public/css/navigation.less @@ -128,6 +128,16 @@ // for this specifically we do not use a variable } + .menulink-counter { + border: 1px solid @grey2; + border-radius: @border-radius; + background: @grey7; + display: inline-block; + padding: 0px 4px; + text-align: center; + font-size: 12px; + } + .pr_notifications { padding-left: .5em; } @@ -171,7 +181,7 @@ border-bottom: none; color: @grey2; - &:hover { color: @grey1; } + &:hover { color: @rcblue; } } } @@ -200,6 +210,7 @@ &#quick_login_li { padding-left: .5em; + margin-right: 0px; &:hover #quick_login_link { color: inherit; @@ -426,6 +437,56 @@ } } + +// new objects main action +.action-menu { + left: auto; + right: 0; + padding: 12px; + z-index: 999; + overflow: hidden; + background-color: #fff; + border: 1px solid @grey5; + color: @grey2; + box-shadow: @dropdown-shadow; + + .submenu-title { + font-weight: bold; + } + + .submenu-title:not(:first-of-type) { + padding-top: 10px; + } + + &.submenu { + min-width: 200px; + + ol { + padding:0; + } + + li { + display: block; + margin: 0; + padding: .2em .5em; + line-height: 1em; + + background-color: #fff; + list-style-type: none; + + a { + padding: 4px; + color: @grey4 !important; + border-bottom: none; + } + } + li:not(.submenu-title) a:hover{ + color: @grey2 !important; + } + } +} + + // Header Repository Switcher // Select2 Dropdown #select2-drop.select2-drop.repo-switcher-dropdown { @@ -562,10 +623,12 @@ ul#context-pages { font-size: 1.1em; position: relative; top: 95px; + height: 0; } -.dataTables_paginate, .pagination-wh { - text-align: left; +.dataTables_paginate, +.pagination-wh { + text-align: center; display: inline-block; border-left: 1px solid @grey5; float: none; @@ -577,10 +640,15 @@ ul#context-pages { display: inline-block; padding: @menupadding/4 @menupadding; border: 1px solid @grey5; - border-left: 0; + margin-left: -1px; color: @grey2; cursor: pointer; float: left; + font-weight: 600; + white-space: nowrap; + vertical-align: middle; + user-select: none; + min-width: 15px; &:hover { color: @rcdarkblue; @@ -687,7 +755,10 @@ ul#context-pages { background-color: @black !important; margin-right: 0; } - + .searchTagIcon { + margin: 0; + background: @black !important; + } .searchTagHelp { background-color: @grey1 !important; margin: 0; diff --git a/rhodecode/public/css/polymer.less b/rhodecode/public/css/polymer.less --- a/rhodecode/public/css/polymer.less +++ b/rhodecode/public/css/polymer.less @@ -19,7 +19,3 @@ 
--paper-spinner-layer-4-color: @grey3; } -.paper-toggle-button { - display: inline; -} - diff --git a/rhodecode/public/css/rcicons.less b/rhodecode/public/css/rcicons.less --- a/rhodecode/public/css/rcicons.less +++ b/rhodecode/public/css/rcicons.less @@ -1,12 +1,12 @@ @font-face { font-family: 'rcicons'; - src: url('../fonts/RCIcons/rcicons.eot?92789106'); - src: url('../fonts/RCIcons/rcicons.eot?92789106#iefix') format('embedded-opentype'), - url('../fonts/RCIcons/rcicons.woff2?92789106') format('woff2'), - url('../fonts/RCIcons/rcicons.woff?92789106') format('woff'), - url('../fonts/RCIcons/rcicons.ttf?92789106') format('truetype'), - url('../fonts/RCIcons/rcicons.svg?92789106#rcicons') format('svg'); + src: url('../fonts/RCIcons/rcicons.eot?44705679'); + src: url('../fonts/RCIcons/rcicons.eot?44705679#iefix') format('embedded-opentype'), + url('../fonts/RCIcons/rcicons.woff2?44705679') format('woff2'), + url('../fonts/RCIcons/rcicons.woff?44705679') format('woff'), + url('../fonts/RCIcons/rcicons.ttf?44705679') format('truetype'), + url('../fonts/RCIcons/rcicons.svg?44705679#rcicons') format('svg'); font-weight: normal; font-style: normal; @@ -163,8 +163,6 @@ .icon-down:before { content: '\e80b'; } /* '' */ .icon-folder:before { content: '\e80c'; } /* '' */ .icon-folder-open:before { content: '\e80d'; } /* '' */ -.icon-folder-empty:before { content: '\f114'; } /* '' */ -.icon-folder-open-empty:before { content: '\f115'; } /* '' */ .icon-trash-empty:before { content: '\e80e'; } /* '' */ .icon-group:before { content: '\e80f'; } /* '' */ .icon-remove:before { content: '\e810'; } /* '' */ @@ -187,6 +185,7 @@ .icon-info-circled:before { content: '\e821'; } /* '' */ .icon-upload:before { content: '\e822'; } /* '' */ .icon-home:before { content: '\e823'; } /* '' */ +.icon-flag-filled:before { content: '\e824'; } /* '' */ .icon-git:before { content: '\e82a'; } /* '' */ .icon-hg:before { content: '\e82d'; } /* '' */ .icon-svn:before { content: '\e82e'; } /* '' */ @@ -195,11 +194,24 @@ .icon-rhodecode:before { content: '\e831'; } /* '' */ .icon-up:before { content: '\e832'; } /* '' */ .icon-merge:before { content: '\e833'; } /* '' */ +.icon-spin-alt:before { content: '\e834'; } /* '' */ +.icon-spin:before { content: '\e838'; } /* '' */ .icon-docs:before { content: '\f0c5'; } /* '' */ .icon-menu:before { content: '\f0c9'; } /* '' */ +.icon-sort:before { content: '\f0dc'; } /* '' */ .icon-paste:before { content: '\f0ea'; } /* '' */ .icon-doc-text:before { content: '\f0f6'; } /* '' */ .icon-plus-squared:before { content: '\f0fe'; } /* '' */ +.icon-angle-left:before { content: '\f104'; } /* '' */ +.icon-angle-right:before { content: '\f105'; } /* '' */ +.icon-angle-up:before { content: '\f106'; } /* '' */ +.icon-angle-down:before { content: '\f107'; } /* '' */ +.icon-circle-empty:before { content: '\f10c'; } /* '' */ +.icon-circle:before { content: '\f111'; } /* '' */ +.icon-folder-empty:before { content: '\f114'; } /* '' */ +.icon-folder-open-empty:before { content: '\f115'; } /* '' */ +.icon-code:before { content: '\f121'; } /* '' */ +.icon-info:before { content: '\f129'; } /* '' */ .icon-minus-squared:before { content: '\f146'; } /* '' */ .icon-minus-squared-alt:before { content: '\f147'; } /* '' */ .icon-doc-inv:before { content: '\f15b'; } /* '' */ @@ -207,10 +219,9 @@ .icon-plus-squared-alt:before { content: '\f196'; } /* '' */ .icon-file-code:before { content: '\f1c9'; } /* '' */ .icon-history:before { content: '\f1da'; } /* '' */ 
+.icon-circle-thin:before { content: '\f1db'; } /* '' */ .icon-sliders:before { content: '\f1de'; } /* '' */ .icon-trash:before { content: '\f1f8'; } /* '' */ -.icon-spin-alt:before { content: '\e834'; } /* '' */ -.icon-spin:before { content: '\e838'; } /* '' */ // MERGED ICONS BASED ON CURRENT ONES @@ -233,10 +244,13 @@ .icon-false:before { &:extend(.icon-delete:before); } .icon-expand-linked:before { &:extend(.icon-down:before); } .icon-pr-merge-fail:before { &:extend(.icon-delete:before); } +.icon-wide-mode:before { &:extend(.icon-sort:before); } +.icon-flag-filled-red:before { &:extend(.icon-flag-filled:before); } +.icon-user-group-alt:before { &:extend(.icon-group:before); } // TRANSFORM - .icon-merge:before {transform: rotate(180deg);} +.icon-wide-mode:before {transform: rotate(90deg);} // -- END ICON CLASSES -- // @@ -254,6 +268,7 @@ .icon-false { color: @grey5 } .icon-expand-linked { cursor: pointer; color: @grey3; font-size: 14px } .icon-more-linked { cursor: pointer; color: @grey3 } +.icon-flag-filled-red { color: @color5 !important; } .repo-switcher-dropdown .select2-result-label { .icon-git:before { diff --git a/rhodecode/public/css/readme-box.less b/rhodecode/public/css/readme-box.less --- a/rhodecode/public/css/readme-box.less +++ b/rhodecode/public/css/readme-box.less @@ -1,4 +1,13 @@ /** README styling **/ + +.readme-title { + border: 1px solid @grey6; + padding: 10px 5px; + font-weight: 600; + margin-top: 30px; + margin-bottom: -1px; +} + div.readme_box { clear: both; overflow: hidden; @@ -94,6 +103,7 @@ div.readme_box img { border-style: none; background-color: #fff; padding-right: 20px; + max-width: 100%; } diff --git a/rhodecode/public/css/select2.less b/rhodecode/public/css/select2.less --- a/rhodecode/public/css/select2.less +++ b/rhodecode/public/css/select2.less @@ -108,10 +108,6 @@ select.select2{height:28px;visibility:hi .drop-menu-dropdown { .drop-menu-core; - - .flag_status { - margin-top: 0; - } } .drop-menu-base { @@ -122,10 +118,6 @@ select.select2{height:28px;visibility:hi z-index: 2; cursor: pointer; - .flag_status { - margin-top: 0; - } - a { display:block; padding: .7em; @@ -152,6 +144,8 @@ select.select2{height:28px;visibility:hi .drop-menu-no-width { .drop-menu-base; width: auto; + min-width: 0; + margin: 0; } .field-sm .drop-menu { diff --git a/rhodecode/public/css/summary.less b/rhodecode/public/css/summary.less --- a/rhodecode/public/css/summary.less +++ b/rhodecode/public/css/summary.less @@ -153,6 +153,7 @@ text-align: center; color: #949494; font-size: 11px; + line-height: 1.3em; &:hover { background: #f1f1f1; @@ -199,6 +200,10 @@ .fieldset { margin-bottom: 0; } + + .tags-main { + margin-bottom: 5px; + } } .fieldset { @@ -228,7 +233,8 @@ .right-label-summary { float: left; - margin-top: 7px; + margin-top: 0px; + line-height: 2em; width: ~"calc(100% - 160px)"; } } @@ -250,7 +256,8 @@ .right-label-summary { float: left; - margin-top: 7px; + margin-top: 0px; + line-height: 2em; } } diff --git a/rhodecode/public/css/tables.less b/rhodecode/public/css/tables.less --- a/rhodecode/public/css/tables.less +++ b/rhodecode/public/css/tables.less @@ -29,6 +29,16 @@ table.dataTable { font-style: normal; } + .td-user { + .rc-user { + white-space: nowrap; + } + } + + .td-email { + white-space: nowrap; + } + th, td { height: auto; @@ -118,6 +128,10 @@ table.dataTable { } } + &.td-sha { + white-space: nowrap; + } + &.td-graphbox { width: 100px; max-width: 100px; @@ -177,6 +191,24 @@ table.dataTable { padding-left: .65em; } + &.td-issue-tracker-name { + width: 
180px; + input { + width: 180px; + } + + } + + &.td-issue-tracker-regex { + white-space: nowrap; + + min-width: 300px; + input { + min-width: 300px; + } + + } + &.td-url { white-space: nowrap; } diff --git a/rhodecode/public/css/tags.less b/rhodecode/public/css/tags.less --- a/rhodecode/public/css/tags.less +++ b/rhodecode/public/css/tags.less @@ -11,7 +11,7 @@ padding: .25em; text-align: center; font-size: (-1 + @basefontsize); //fit in tables - line-height: .9em; + line-height: 1.1em; border: none; box-shadow: @button-shadow; .border-radius(@border-radius); @@ -49,6 +49,36 @@ .tag7 { .border ( @border-thickness-tags, @color7 ); color:@color7; } .tag8 { .border ( @border-thickness-tags, @color8 ); color:@color8; } + +.tag-gist-public { + .border (@border-thickness-tags, @color1); + color: @color1; +} + + +.tag-gist-private { + .border (@border-thickness-tags, @color2); + color: @color2; +} + + +.tag-merge-state-created { + color: @color1; +} + +.tag-merge-state-creating { + color: @color1; +} + +.tag-merge-state-merging { + color: @color3; +} + +.tag-merge-state-updating { + color: @color3; +} + + .metatag-list { margin: 0; padding: 0; diff --git a/rhodecode/public/css/tooltips.less b/rhodecode/public/css/tooltips.less new file mode 100644 --- /dev/null +++ b/rhodecode/public/css/tooltips.less @@ -0,0 +1,460 @@ +/* This is the core CSS of Tooltipster */ + +/* GENERAL STRUCTURE RULES (do not edit this section) */ + +.tooltipster-base { + /* this ensures that a constrained height set by functionPosition, + if greater that the natural height of the tooltip, will be enforced + in browsers that support display:flex */ + display: flex; + pointer-events: none; + /* this may be overriden in JS for fixed position origins */ + position: absolute; +} + +.tooltipster-box { + /* see .tooltipster-base. 
flex-shrink 1 is only necessary for IE10- + and flex-basis auto for IE11- (at least) */ + flex: 1 1 auto; +} + +.tooltipster-content { + /* prevents an overflow if the user adds padding to the div */ + box-sizing: border-box; + /* these make sure we'll be able to detect any overflow */ + max-height: 100%; + max-width: 100%; + overflow: auto; +} + +.tooltipster-ruler { + /* these let us test the size of the tooltip without overflowing the window */ + bottom: 0; + left: 0; + overflow: hidden; + position: fixed; + right: 0; + top: 0; + visibility: hidden; +} + +/* ANIMATIONS */ + +/* Open/close animations */ + +/* fade */ + +.tooltipster-fade { + opacity: 0; + -webkit-transition-property: opacity; + -moz-transition-property: opacity; + -o-transition-property: opacity; + -ms-transition-property: opacity; + transition-property: opacity; +} +.tooltipster-fade.tooltipster-show { + opacity: 1; +} + +/* grow */ + +.tooltipster-grow { + -webkit-transform: scale(0,0); + -moz-transform: scale(0,0); + -o-transform: scale(0,0); + -ms-transform: scale(0,0); + transform: scale(0,0); + -webkit-transition-property: -webkit-transform; + -moz-transition-property: -moz-transform; + -o-transition-property: -o-transform; + -ms-transition-property: -ms-transform; + transition-property: transform; + -webkit-backface-visibility: hidden; +} +.tooltipster-grow.tooltipster-show { + -webkit-transform: scale(1,1); + -moz-transform: scale(1,1); + -o-transform: scale(1,1); + -ms-transform: scale(1,1); + transform: scale(1,1); + -webkit-transition-timing-function: cubic-bezier(0.175, 0.885, 0.320, 1); + -webkit-transition-timing-function: cubic-bezier(0.175, 0.885, 0.320, 1.15); + -moz-transition-timing-function: cubic-bezier(0.175, 0.885, 0.320, 1.15); + -ms-transition-timing-function: cubic-bezier(0.175, 0.885, 0.320, 1.15); + -o-transition-timing-function: cubic-bezier(0.175, 0.885, 0.320, 1.15); + transition-timing-function: cubic-bezier(0.175, 0.885, 0.320, 1.15); +} + +/* swing */ + +.tooltipster-swing { + opacity: 0; + -webkit-transform: rotateZ(4deg); + -moz-transform: rotateZ(4deg); + -o-transform: rotateZ(4deg); + -ms-transform: rotateZ(4deg); + transform: rotateZ(4deg); + -webkit-transition-property: -webkit-transform, opacity; + -moz-transition-property: -moz-transform; + -o-transition-property: -o-transform; + -ms-transition-property: -ms-transform; + transition-property: transform; +} +.tooltipster-swing.tooltipster-show { + opacity: 1; + -webkit-transform: rotateZ(0deg); + -moz-transform: rotateZ(0deg); + -o-transform: rotateZ(0deg); + -ms-transform: rotateZ(0deg); + transform: rotateZ(0deg); + -webkit-transition-timing-function: cubic-bezier(0.230, 0.635, 0.495, 1); + -webkit-transition-timing-function: cubic-bezier(0.230, 0.635, 0.495, 2.4); + -moz-transition-timing-function: cubic-bezier(0.230, 0.635, 0.495, 2.4); + -ms-transition-timing-function: cubic-bezier(0.230, 0.635, 0.495, 2.4); + -o-transition-timing-function: cubic-bezier(0.230, 0.635, 0.495, 2.4); + transition-timing-function: cubic-bezier(0.230, 0.635, 0.495, 2.4); +} + +/* fall */ + +.tooltipster-fall { + -webkit-transition-property: top; + -moz-transition-property: top; + -o-transition-property: top; + -ms-transition-property: top; + transition-property: top; + -webkit-transition-timing-function: cubic-bezier(0.175, 0.885, 0.320, 1); + -webkit-transition-timing-function: cubic-bezier(0.175, 0.885, 0.320, 1.15); + -moz-transition-timing-function: cubic-bezier(0.175, 0.885, 0.320, 1.15); + -ms-transition-timing-function: cubic-bezier(0.175, 
0.885, 0.320, 1.15); + -o-transition-timing-function: cubic-bezier(0.175, 0.885, 0.320, 1.15); + transition-timing-function: cubic-bezier(0.175, 0.885, 0.320, 1.15); +} +.tooltipster-fall.tooltipster-initial { + top: 0 !important; +} +.tooltipster-fall.tooltipster-show { +} +.tooltipster-fall.tooltipster-dying { + -webkit-transition-property: all; + -moz-transition-property: all; + -o-transition-property: all; + -ms-transition-property: all; + transition-property: all; + top: 0 !important; + opacity: 0; +} + +/* slide */ + +.tooltipster-slide { + -webkit-transition-property: left; + -moz-transition-property: left; + -o-transition-property: left; + -ms-transition-property: left; + transition-property: left; + -webkit-transition-timing-function: cubic-bezier(0.175, 0.885, 0.320, 1); + -webkit-transition-timing-function: cubic-bezier(0.175, 0.885, 0.320, 1.15); + -moz-transition-timing-function: cubic-bezier(0.175, 0.885, 0.320, 1.15); + -ms-transition-timing-function: cubic-bezier(0.175, 0.885, 0.320, 1.15); + -o-transition-timing-function: cubic-bezier(0.175, 0.885, 0.320, 1.15); + transition-timing-function: cubic-bezier(0.175, 0.885, 0.320, 1.15); +} +.tooltipster-slide.tooltipster-initial { + left: -40px !important; +} +.tooltipster-slide.tooltipster-show { +} +.tooltipster-slide.tooltipster-dying { + -webkit-transition-property: all; + -moz-transition-property: all; + -o-transition-property: all; + -ms-transition-property: all; + transition-property: all; + left: 0 !important; + opacity: 0; +} + +/* Update animations */ + +/* We use animations rather than transitions here because + transition durations may be specified in the style tag due to + animationDuration, and we try to avoid collisions and the use + of !important */ + +/* fade */ + +@keyframes tooltipster-fading { + 0% { + opacity: 0; + } + 100% { + opacity: 1; + } +} + +.tooltipster-update-fade { + animation: tooltipster-fading 400ms; +} + +/* rotate */ + +@keyframes tooltipster-rotating { + 25% { + transform: rotate(-2deg); + } + 75% { + transform: rotate(2deg); + } + 100% { + transform: rotate(0); + } +} + +.tooltipster-update-rotate { + animation: tooltipster-rotating 600ms; +} + +/* scale */ + +@keyframes tooltipster-scaling { + 50% { + transform: scale(1.1); + } + 100% { + transform: scale(1); + } +} + +.tooltipster-update-scale { + animation: tooltipster-scaling 600ms; +} + +/** + * DEFAULT STYLE OF THE SIDETIP PLUGIN + * + * All styles are "namespaced" with .tooltipster-sidetip to prevent + * conflicts between plugins. 
+ */ + +/* .tooltipster-box */ + +.tooltipster-sidetip .tooltipster-box { + background: #565656; + border: 2px solid black; + border-radius: 4px; +} + +.tooltipster-sidetip.tooltipster-bottom .tooltipster-box { + margin-top: 8px; +} + +.tooltipster-sidetip.tooltipster-left .tooltipster-box { + margin-right: 8px; +} + +.tooltipster-sidetip.tooltipster-right .tooltipster-box { + margin-left: 8px; +} + +.tooltipster-sidetip.tooltipster-top .tooltipster-box { + margin-bottom: 8px; +} + +/* .tooltipster-content */ + +.tooltipster-sidetip .tooltipster-content { + color: white; + line-height: 18px; + padding: 6px 14px; +} + +/* .tooltipster-arrow : will keep only the zone of .tooltipster-arrow-uncropped that +corresponds to the arrow we want to display */ + +.tooltipster-sidetip .tooltipster-arrow { + overflow: hidden; + position: absolute; +} + +.tooltipster-sidetip.tooltipster-bottom .tooltipster-arrow { + height: 10px; + /* half the width, for centering */ + margin-left: -10px; + top: 0; + width: 20px; +} + +.tooltipster-sidetip.tooltipster-left .tooltipster-arrow { + height: 20px; + margin-top: -10px; + right: 0; + /* top 0 to keep the arrow from overflowing .tooltipster-base when it has not + been positioned yet */ + top: 0; + width: 10px; +} + +.tooltipster-sidetip.tooltipster-right .tooltipster-arrow { + height: 20px; + margin-top: -10px; + left: 0; + /* same as .tooltipster-left .tooltipster-arrow */ + top: 0; + width: 10px; +} + +.tooltipster-sidetip.tooltipster-top .tooltipster-arrow { + bottom: 0; + height: 10px; + margin-left: -10px; + width: 20px; +} + +/* common rules between .tooltipster-arrow-background and .tooltipster-arrow-border */ + +.tooltipster-sidetip .tooltipster-arrow-background, .tooltipster-sidetip .tooltipster-arrow-border { + height: 0; + position: absolute; + width: 0; +} + +/* .tooltipster-arrow-background */ + +.tooltipster-sidetip .tooltipster-arrow-background { + border: 10px solid transparent; +} + +.tooltipster-sidetip.tooltipster-bottom .tooltipster-arrow-background { + border-bottom-color: #565656; + left: 0; + top: 3px; +} + +.tooltipster-sidetip.tooltipster-left .tooltipster-arrow-background { + border-left-color: #565656; + left: -3px; + top: 0; +} + +.tooltipster-sidetip.tooltipster-right .tooltipster-arrow-background { + border-right-color: #565656; + left: 3px; + top: 0; +} + +.tooltipster-sidetip.tooltipster-top .tooltipster-arrow-background { + border-top-color: #565656; + left: 0; + top: -3px; +} + +/* .tooltipster-arrow-border */ + +.tooltipster-sidetip .tooltipster-arrow-border { + border: 10px solid transparent; + left: 0; + top: 0; +} + +.tooltipster-sidetip.tooltipster-bottom .tooltipster-arrow-border { + border-bottom-color: black; +} + +.tooltipster-sidetip.tooltipster-left .tooltipster-arrow-border { + border-left-color: black; +} + +.tooltipster-sidetip.tooltipster-right .tooltipster-arrow-border { + border-right-color: black; +} + +.tooltipster-sidetip.tooltipster-top .tooltipster-arrow-border { + border-top-color: black; +} + +/* tooltipster-arrow-uncropped */ + +.tooltipster-sidetip .tooltipster-arrow-uncropped { + position: relative; +} + +.tooltipster-sidetip.tooltipster-bottom .tooltipster-arrow-uncropped { + top: -10px; +} + +.tooltipster-sidetip.tooltipster-right .tooltipster-arrow-uncropped { + left: -10px; +} + +.tooltipster-sidetip.tooltipster-shadow .tooltipster-box { + border: none; + border-radius: 5px; + background: #fff; + box-shadow: 0 0 5px 3px rgba(0, 0, 0, .1) +} + 
+.tooltipster-sidetip.tooltipster-shadow.tooltipster-bottom .tooltipster-box { + margin-top: 6px +} + +.tooltipster-sidetip.tooltipster-shadow.tooltipster-left .tooltipster-box { + margin-right: 6px +} + +.tooltipster-sidetip.tooltipster-shadow.tooltipster-right .tooltipster-box { + margin-left: 6px +} + +.tooltipster-sidetip.tooltipster-shadow.tooltipster-top .tooltipster-box { + margin-bottom: 6px +} + +.tooltipster-sidetip.tooltipster-shadow .tooltipster-content { + color: #8d8d8d +} + +.tooltipster-sidetip.tooltipster-shadow .tooltipster-arrow { + height: 6px; + margin-left: -6px; + width: 12px +} + +.tooltipster-sidetip.tooltipster-shadow.tooltipster-left .tooltipster-arrow, .tooltipster-sidetip.tooltipster-shadow.tooltipster-right .tooltipster-arrow { + height: 12px; + margin-left: 0; + margin-top: -6px; + width: 6px +} + +.tooltipster-sidetip.tooltipster-shadow .tooltipster-arrow-background { + display: none +} + +.tooltipster-sidetip.tooltipster-shadow .tooltipster-arrow-border { + border: 6px solid transparent +} + +.tooltipster-sidetip.tooltipster-shadow.tooltipster-bottom .tooltipster-arrow-border { + border-bottom-color: #fff +} + +.tooltipster-sidetip.tooltipster-shadow.tooltipster-left .tooltipster-arrow-border { + border-left-color: #fff +} + +.tooltipster-sidetip.tooltipster-shadow.tooltipster-right .tooltipster-arrow-border { + border-right-color: #fff +} + +.tooltipster-sidetip.tooltipster-shadow.tooltipster-top .tooltipster-arrow-border { + border-top-color: #fff +} + +.tooltipster-sidetip.tooltipster-shadow.tooltipster-bottom .tooltipster-arrow-uncropped { + top: -6px +} + +.tooltipster-sidetip.tooltipster-shadow.tooltipster-right .tooltipster-arrow-uncropped { + left: -6px +} \ No newline at end of file diff --git a/rhodecode/public/css/type.less b/rhodecode/public/css/type.less --- a/rhodecode/public/css/type.less +++ b/rhodecode/public/css/type.less @@ -468,6 +468,42 @@ dd { margin-left: 20px; } } + + &.dt-300 { + dt { + width: 300px; + } + } + + &.dt-400 { + dt { + width: 400px; + } + } + + &.dt-500 { + dt { + width: 500px; + } + } + + &.dt-600 { + dt { + width: 600px; + } + } + + &.dt-700 { + dt { + width: 700px; + } + } + + &.dt-800 { + dt { + width: 800px; + } + } } diff --git a/rhodecode/public/fonts/RCIcons/config.json b/rhodecode/public/fonts/RCIcons/config.json --- a/rhodecode/public/fonts/RCIcons/config.json +++ b/rhodecode/public/fonts/RCIcons/config.json @@ -564,6 +564,72 @@ "src": "fontawesome" }, { + "uid": "7034e4d22866af82bef811f52fb1ba46", + "css": "code", + "code": 61729, + "src": "fontawesome" + }, + { + "uid": "f3f90c8c89795da30f7444634476ea4f", + "css": "angle-left", + "code": 61700, + "src": "fontawesome" + }, + { + "uid": "7bf14281af5633a597f85b061ef1cfb9", + "css": "angle-right", + "code": 61701, + "src": "fontawesome" + }, + { + "uid": "5de9370846a26947e03f63142a3f1c07", + "css": "angle-up", + "code": 61702, + "src": "fontawesome" + }, + { + "uid": "e4dde1992f787163e2e2b534b8c8067d", + "css": "angle-down", + "code": 61703, + "src": "fontawesome" + }, + { + "uid": "4i0s2bklai5fywieqm4dqqngfz9ptfab", + "css": "flag-filled", + "code": 59428, + "src": "typicons" + }, + { + "uid": "3d4ea8a78dc34efe891f3a0f3d961274", + "css": "info", + "code": 61737, + "src": "fontawesome" + }, + { + "uid": "56a21935a5d4d79b2e91ec00f760b369", + "css": "sort", + "code": 61660, + "src": "fontawesome" + }, + { + "uid": "130380e481a7defc690dfb24123a1f0c", + "css": "circle", + "code": 61713, + "src": "fontawesome" + }, + { + "uid": "422e07e5afb80258a9c4ed1706498f8a", + 
"css": "circle-empty", + "code": 61708, + "src": "fontawesome" + }, + { + "uid": "5774d0a0e50f6eefc8be01bd761e5dd3", + "css": "circle-thin", + "code": 61915, + "src": "fontawesome" + }, + { "uid": "c43db6645e7515889fc2193294f50767", "css": "plus", "code": 59411, diff --git a/rhodecode/public/fonts/RCIcons/rcicons.eot b/rhodecode/public/fonts/RCIcons/rcicons.eot index 041a625cef455f924ff6020314e110f7355ddc65..a90776c9222c7300ddafe5d11867c2d70e988e1e GIT binary patch literal 19568 zc%0pQdwg8gdEj~6_q}uH-kE#vJTw}OW~3RttjCOI#t&Jx@k6$-jj$bqZEz5lY|EA< zSC)AN2m#6hr8cE3ZAqJiED8N(!;;``NV-W$384vLw{DxJ5EDW}+6;7;SY0+h;#%YV z&b@c!k>dpV`|V$=(Rc58pYMFel!q~UZyrIH{9*t7 z-3yOxy)+LY9nGM9XaZmwO`&6M$~H8P4xr;`5{*JGLJryqDKn5e;g(9FKGcnRFI(I1 zP(^nkih75(f8>hP@3h_qiAy1M$5q>V@`F9wMTkEH@tymQj2``u6K{PIA*vxH7zZYA z-T%j@m)wnz{AUOa^d1}^9dk0H2cYbm0HuSFAl^yefbiD=S`HqWy@mYv!VjUnp8~W^ zPVXBnS6lxa>L(!ou_L3m9K|<~zk|Fe#8XqFN5-#v^KYL(Xg!oCe|dEJ*z7l(H+&7D zjeiO_G&9`kwhM<4;(9KeyKs){Ss-8SZ`e%%*!tLo?|w8m@PCj>D=g!+BPZUey`kj6kD?@jA0JOiDzw5%h3+83Qtj@g`&XdDjk*n%F?snKKtFXub+MA>@Ut%=fpW}&YZL7n&;Z*hUTuDdu;Bz zb7$t>n0x;M@K6OojU|`V;M^LPTVuoA9=FC1Yc>8|e;6>k1wQv(@_ESpoNzx6qetA& zqww*R6os+yz>Oksr3~X#f%^lXHiF1NCJLc2vXBi5CW4|U2E5*Y5~vX+Q4>(18KqGM zwV+m%MLE=l+EEAU1bQt2<L5JSD>|M z9a@hzpdqvoZ32{pf9I(GfBbXx553)wZDO3`0prJt4@^H*d|>=r@qzJh#RtaQ6(5+! zsrUf1jVeAc{Z#P*H~>|AU_4*(0k{HHd|=pBd;m^C6(4|SP{jw}9#ruG_y|>e0FFWx zAAq+|#RuRrRPh1$4OM&q&O;R+fCo{<2jE6j@d5Y}ReS&rMHL@_S5d_W;96Ai0r(eH zd;m^H6(4}7QN;(~ZdCCB_#9Py0FFl$AAt8!#RrfARPh1i0abhe$pD%60AvJJmUsZt zf-2n}fZU);j|U(@sM6~JV)gnw0I5Qiya!BARSF&;)?U8{h_$!W1CTy+_S+sXH{k4d zJphSBXJ7XKWD}iz#{-a3boLh>fV`r!RS!UN(H!Ri$S|7YJpgG&bAksT*Jw`k03;mE zX&!*AqdC(9ka{#{c>wZ{=4=l@6QH?f4?riNxpoggJD|A@9)O-eb3-10#z1p>JOJH+ z=C1Pqv(s0U2eTnz6+FQIqwjrcJ# zOx~e8=-u>7^kw=#xo+;8{Ac*rg%ROV;T7>y5|Rd_*Ok0-zpATK>etn?+EVSHcAxfY zU}<11@UY&de<662vDBEcdaaGtSM9WOt@D)#isT~?M!D!n^tR}e(O06sh_%E9V|T^= zIX)ae5r3gUYgpd!WP&6fOPp)$YW%}w)RXoL@6mh7FF-zkvv67zM8S$U5hq_Rl}r7( zY%UwNgiLcfyAt=83e9=PCiLL0YYN3;;hJ5icYA@|A2amoF)d{1_;OI4zdUG`~4C0`(x<0>(}**g>(YC+m=PN2Et-S1 z1Sev`Gl}z%lFb!MY!b?YxHa!sFy#JHm_1ko*HY-=%;4}~CS)eGP5CR+t!}I}ne+lJ zO>6o$7kB8%)@s^{#oD4#%Zf(ZVhwl$i^dY<+Sb8LW^iLuBDA-^MaRp`Wy36C(<-y7 znX5Lj@bI9yT(%lZJMK1DeH$vl|M{1_H+g}*bMYE4eTsgACP3eV_QkC*L1C*21Iv^# zuoCXZqK9+d!LL*w34{Uy#i!M%f^SgOY8R&-YAd$47u%>MnR@jRQQ@mSih@U3iNB}9 zn{k%e>h|*_-9V%0cJu)BlPUJ+`g<_^7qgijT*~J9v&C)4iD`i{z zOXWguUm=MbaWh>~;xGqeqB=)8j-xWsB!N)Dpd6v9z)?dLv_OL+Hw2AFgFvDr5^D1B zRwN=*PLK%3yo50&1an+pKsQt}8PKU-&9ClB#d}v*e}=#C`?^dqFQ_~r4ZNUngrh_! 
zn#@BzLFB2x$&^xA=HN*&mK02>DnSv3h6YW`AwoqNI-{J<|GuGCds#>LbU;7cccr3S z*>|`m2meTOc`9NO$Jj-gb)qz<~fOSyvT{h zGEFBOhk@RZ!byzcoFWrKWs%WV;W#1y)kD0bN*O9DBGw4kJ=E5zYPw7!sz!tO)7N*b zfto;1sKHa#3a@jVDusg@M>P?1nk*1(h@2!54mgBzvVuYPvEHB#4?S^&aDmmk`mPTs z)uXI!{IINha{nWGKY0uMr5M<0=KnU=53d_zCJhB9%8_)o2s$9cUL~Lg#2D4LtEzmj_J3?uCCW&&Bg1=(e|c@dd=|0&Q8+ubga2t z+;jb;n>KO7-iO=TTgY4K==@Y91Jxgirswx}tZMHV>`>E@@^!^bB;L~1vGK}lBI$Hw z^7=i+a%&;e)^_sx(L?c!%M)Lr3e}m92AEi&8?6X-nDC@?U?f1&^W@JqfFk^{B;ht$ zUJpi~Iw{NeA<3cqdeHXOH)UI@{sW}1hvY{k8}ekI-xk{Db8cJxIp6TGKA^8O-aZWN zHbeWbLfg;~epHsLx5&{otRu*9Sg$r@$<;~L^{njkGAO8*@INtM7mKbzbJaQd=F>um zJ}oFJ-VNBibVd-)NRdd5|9?rJpeplOvFTY~Fg0u%fa9SuePXyUzpF4jTsT!2#y108 zsNT$?B+8zlVMH!mxbRCZ25p3x_s5isYr?wmD(qS{CucK*MsnG5eal=d^xvX8v-3}c z6WIKEbZ0C1XgK-JYHx8S^L0=W*z2*Ue1p@*%wYBJgH=<5_aYT_1h=nL}2!hdP2UMN|rkU@00tO_uT4gqaI((VJ31l=e6v5u#g>b$oGSrjV|NKC*iT#U~l7MJQ#fP z$>33k1wQw=;I9@8cC!NBY8?FbDEU|5n-#D&*}{CgEGT);wPVveh`}aiJm(ISH*+gs zE`0|N7?4&e3A+xYGSkC4Y+ z{zKuR>f?DBsB6yB~1SoShvEVJ081v;=68JjVlAFdXxY&eSy;YRRmt6BM z3Ly1`JRQVtOvfx1juaXDx{4+Po^+14Mc3)#%g~=lqPjyusSp=Gg&Pds*eHg@>c2hT0~zaNMgcG!gF{Y3%9hS>FM#ki@sO^P<_4+}hbZ;B@sgZtZGlpsla+T#KC3 zwnT>$%~o_{vblOZF&wd)*#qC)ELtiT2S-oFcZcwrmb0siJXkm!d^YpbErj?o$n5ks zm?IPA*zQ6r@P&?nFUku6(B~pEQbo3Uj<8iMGvJP=DY8tbdk(H6+J!uM8AORh zQX%3bq8n1q`R~EZr5sGJY~IVL5@Za*{Gr~(1NU3focN+cC`8(ZQdON%vBm1|e@Qir= zOJpErzkUAm-kR;pbT{}05<2ARE+ALF#LORC4>`=m({YXo7qdjJLjW8Ns@1bxKv}>E zm>a0aOrD=u0NyTnrZVrY5kT`s{KiDb-91>b?Urq1=j=|Lki>(E8g3K#;7C9ex5VPI z$Qidwn$ZxsN-%^C4#!JvN>JJ_N?73!N_wQ#D=Te@#$}0`w~HDMCL&kyL2;wSak5uf z!FyNi*m2X29k;N2BWX9}1zoU5Fh3BGHYW^4JRoZWys$dS>w=aKCK`fR6Wy}0cxs8L ziPlJ6QFR5+uTA(x;|A7hzSRuZLpK5aLZ|`VhQ5O4d>dc^9r^w&NHPmimIMp?lDL?( zm!)j+B4L?B^?`)7-Nma*SFkS7oWq_O)^<7v6!)5hCZILuq%f_*)*X>D(@<^$3?H+; zE~qV5_d{02?=ec>v;#lgwX_R&Ebqep`&Eg%M$p1d9H*@__|>)}h+N>HpajB^E4Y9# z!tpIqK>P?VN&HSx^~x!NusR%twH7Ogq2wAqz+cTpO(`IJL=f=)?ShgZ>o9LOs+uZx zQM`^eD&qF-qS8qF4a{{&V8$XGSxXW!B>Sa1B)2s88w=N%=B>sq($>`2(Lr{>V?{^D ze?I`Y=;261D=fLqE4aKW3^W8JKuF7T+?7F|m%6m56TraE{6b0IfRc(d5O+(eeXJxW zB<(;$SR+#PueZulP+z4J($>(5vD%N@2!VJ|lDEpT5nwq1GHqC?FoZJ%a)Sep!v#z2x_7Pd@C~N1^AWe zN#I{#1uoU%`lwx)i@=iSBX_02sHX4w$dzwm?kCmf40Xep;TY?B!E(Q*Zmqsw{bBX} zt*W{eOE`;ql&Al`C-;Xyz*|GJ^uJ^7pUI~Xk@%ZDP2xqOk z9w(o=lWiiilZ^}C0$jh0WYC9$;8|S*TD{y23LQ{&eg)66WebEENsnEbvGt@S!c4B~ zO*>&wr(q|;SRB@1u7Ksr%oyWRXYW!BBif4()s5!m74NJ_@QS!jj*Z-xSJj>8{~(`l z;$eZRwy0RPuif+~+&R_B?Rfq6_M5-6e$Dln;*BYF;;PJ{m28|3;{)}hJkH^X>h6iW zoD&qWqve(@W=H6rKU7MxAXozCtMgm$YQWLh-f*~O$qk2wm3t?~R(gGm{w+QD!>5g3uDsb}_-r%+g!sg9_uniR2md%iO zwHu$=_Tc9q8X-IG|H6GcKQ{AHj_>bg*iLrbjs4ci~w zK}Jq|_5{>@aQpEWXJ=o0hdn%>t%iRKJE3Up?*UBVkxd5ycecd_OCymzm{oR7b7>LM zpHRlf6{P^Cc6?k-DlirBR`W^{-jG;OUaiCQO`YX7s{u*h>P^=;SKzwM_wDOS9m z25b%yTM~#Fc;vuM7HxP$fSJn$ctjXC7hRUFBf(SPUO4&?_zTR-u!0vO&(@6{FW<;~ z5Z9|XX$9*eg%gUN@X=yC5OSav*TZeJc3;Eo+|?yIr5Notcul=*BCrypYg$IEsNEhIKF=F`Vklx~}0;^<~S#CC9*z7>2)x zM_!}6+ir=SmN18=8;hQyG5cMAB~R{(tqzX-WTfZLLb4;pLivZte~l%Yt9$L>$Vj); z_O6{Gi3s^Ti{@Q={}%lT-3BNmE3s5jChpEyZ*X)cM;6=>gtA}z!xOb#^ zLuy3)8Qq+VpMT2hBc7W-6O051M++R`bN8s6#J2V$nM5+~B#aof49(D~$nm@&DMFLT z2@*#s2?tHvZqS@Vc*U}66_aJ2t)HQ9f?pa0uk_kWsnwqa1DFwEq++35h%m~zbgPjq z=q;s-{oek>!m^dotDEFpYO#F``swrkxgncvpdW!}m25D6%iNHIr!n*(XGe{IEej2T zz#lXMnjA`6fka3JW%s6rS*$F50HZ5xjEsmSwVvQyk$Gf!jm~ zxX=7D9B36){s@L8aBc}t2_aJuTukKZupm=O;CNY;1BT;RsjwhOyey@74%(ue<*2Gm zMOhN$m}$4^dPtCUsgZ~FC>5z70Vd*Wc$p+Vt;_L7U3ePUm*VPQ@xlw7#0k&gWf*gW zyr6J@;jeXHr(Yv)qJFd+J;-*Viyqb4Mo4pdVb@cLm=@@Y65A>Vdb(%5?#4>7l%u9C zxQ?9Yod>&B%>F?FW&HVswY|3?U9%2Cot1FvHm6Dpn^s@Nq8g7iTc)VZe>1Mx!KQeW z9jgU+O=Rao!pejw5`mOsf$TCfjPZJQkft`7^M3^77C0bdFc63~nPjUbq#c+bjDK(s 
zm1LUdfjRHwfsP;N6^Z5rczj%tB@8oX8CE75S4DQLBnBE9o2`&+&cCFK?7)uWguwy)o+kDJBB*upV0$=^brnYh~aH4=@a;;nS81GoO{a%eEK`dFVR8tHQa*l#qW`2 zWE;7QJWf7KUP34=c*m{qZ};}Od*J?7coXr3)C)I(x6x;LxE6a0<&w)Cp463lvuU<& zcDL($sJA%*!b@B|Zv~0Xn&CWA_1dIf1S+#8*nfYaufG%fRkPmtCCS##G1*?8rzC^e z_R(Zmkvxl0E8-*X9gjBQe9=>)zWH%qZZ~W%Ka=ed7P#N-!40%}9r_6#QJ_nz;~;TQ z6Cw)~0d3^&Ec-)h!i7aWl?Mq3O^%(B)9l4V1*W#x7&1PUYGY#`Hf;C27FJvRS;;E zRN_Ms$&d_A6GIxsm=}VoX(V*;wpy~eh#^Fx@jx`3P8obqq@5R%*#Zh%zi?#-$S8D;a8W^0Ba$b-L7itf{lblsTEFP+KA}ATc1Z9b2<` ziEX4~#YRC38X^H5K}9hUYH5fCqFT%`qjpoHqyVmj0_D_|njQ?AmTd>G2qhAx3dw+x zE%0)Ks0IY8nW=avs$0O&`xM!f+t7a37X%7_(ma<^*-~qq@YgPFSsb%rM9DXiETVeF4J$ zc<|Q1=(BqW6s!av!UD_y5ha1N11)rHF%2(3szi9g?r%VEw{lWawe+y;VES=UcZ`6e zn&8PZ<4R;jGQu*jm1TkXXLBl~@ge9>v6R+9NmszONJ}A2mW;-@35>;StC(kiTjkJUo-vj#Q-fCoK@;?-6lzp; z$qWTdna1=;cUq#dF`|GT9d^_ZM?-R^S=3=NO>poUpgHD#D1sH>xF}ddCINyj8;T~% z7Idu&0a0V5gsA}M*$CT`p=+`g2s@e_R_w5zj5-BioPeaM0w;n844w>?3813~B#H$= zl!JClHe~6MOfIU4LA9kg5C=BOM|oM(!2E#lagksu8q>g2O4u!_6&sq{20DQqg-~;X zi+{bNskI^5*c1zGG~>~L;xdgPaB@u0fUyE*OH(}E&{A&G#Be+i4}?SDNnwHG7=r=R z1|^wIw!nOOa1A7oZQEr|`0$#eW3Dx0g`D0v;RQA`L}YFN!CZhRBI9phRud0nST|uD zgM9Geg{@i*`V*F|h{8_-7PWyVTaWQu~L3JXGc z&`H4eAnrT`;f4g~F?UNCv)X(7ZvEh-w`r76|dMDaD*vRvQ!Hrhzn z!wS3%tJ_Tol^3q1G3{S}O|Aa=nq9LleC;Tc){Zi9E#*6zfeizh<_2v;-%w+V8_6V- z88^_K8Sf1yTl4*@xNYk^^SErCl4Jsbk!Bj%|fdlHwq@qNCvlN zAKtsC%149X%HbB$lHIuHV>jP=$0#d&7z(HOR!Pu9bX#L`*?QYbrmnhT=k`-qw>KEr zq(Nc#w{~6saJIGjt^ux|8{4?2B^`}jU02p_)V0%Hn-PgjIFyAB;eccO>U zljy7HWyHRvv-nUp6Jef(YqFU&XGWP%vl}W+U>%e06%}@=j-~bX>8?G6(SmU8dG@Z{I@GEXl560JgWjJ5~`qtV`O){{yriw}BT${?7z% z4<-Bnr;t4$7%Opg1fHO|_kh5-Z%W2F@+Y90GgRjOgsn|7^!#5-Gy}yy1=YMa`72Ho z_~&4~$)E3G4S$6N$ySl3aeYBD{}Lr$D@5VkW(EA$9PzqTysl~WL6N%Xa5A|LWO(2t zYjOi?l5T@;{Jvnl$Qt%~;AP;Sx@KK$h5v*X2Etkw1%b7BamfE7@EiQLdH3nk<&T5~ zf-np)j~8%btqHf`i^O342}D>JA;V*-i=N2A!`q+tc&!PgQ7bC2d*A-0A@7*gKe%Kw z=xv^s3PD`JVj)s4dV8!UyHbeFjJxY<(r4YhuKBmw+2!B3r>Z$4{ zpL*)Zmwu3pyp59%$^Jt!QX#i5>?3|A<=~s0Qt+pSlX9Mm-24>2tzy_f(t|u0CEn2iZ+ttled3tvefb_b2Q2Dn)lo$ zt6I<^n!6pS{O_Q`j+d1rbom|VgB65m?H+Q)JtvQ%26oGzZ}!fTGiA0HUAF-oF68)2 zr;=eJ+TJ1K)A&6`+aH`z#Olpp(G>|lASx%HdI~4&=P>1vj{?;-3FhT0ydUQ5i55}8 zn$W`PRDbWOcdHM$#df7^@~#_tPLyEYYiFzVXW|947H$7uuhja*LeuxbhTZ z$x{|yHD9@6FTP<7iL%$i{Yku+T+QypWABKm{>mqCCEhEXIU@+{guHrQ0LPa%xP(B` zBLDgsL5Fm3TOkh?I1s+r=H^( zjt!bs1&mn3_&EHG ziggB@JgHmYxq?y5hx*;`+UEZlRAuU^jTyvl|rt7d>o? 
zbq)k?Z7yDl7XM^2yi; zc_;R{=(g7RCu%seYqf`y{i=ko3`foH1!OC3d`}zOT{ntq%QMOB?o8@?$?U~Z6n4iO z{JU~r;~etm?ss{XdFQotg5pUJrnH)svSIC8PFzj$h*a{yE%Y zD_OXBI?9ZG!84NL!Uf>GgU2nS`j)F*G8y;v19)bF&PMm96g+}foptp@)n)V_vxnHTj+fDSH4}V^n)CdE@WMT5_uR7n8&_fXYevvB zNJQikiEw+q2W&yFYZp#S%8-A1xzvJBxAt$`ySCcP(7>m!xB)*}yZWqDFR_q6|=3&6$qBnTvbbB|Ee{-A`0G;;(Qg#tOsp-p^&P&5&qZnX# zSO2meP6P&LhyrT zc%ihYe47Qo{hMd|Qh%q6u0(r2q`z}fu?V#rjm-|zj7-HamBKVAOzTB`BZB_PeSqKd zZO-5mBHh2ABcK>`{!U3ED<$bpP8Y$lbAGx(Uk4qu1-!hB_ZB~dEA*a7ZBRG&-w(~& zocO0I{Qapw__{{v0cAA#3anF{1iT(@nLpmSSzEEJb_Z$318XmfX4>lR~)qo0e0LmQV3 zuABYT@Hti~Be~45$PX}>&kue8yRSq0FZBa9-Fw4QGH~7fC+^$3eEHt{7F}5&-&%UZ zy^q|xcYy2PfBWONAMCAudXYaMd$fYJ4h61?fY;A7+bWQZA%x0yrr7Fv!Zn(#!gg~p z4C(AmnZIAVsQ^nKd`X;#a$zX<;*0nJSEI8>rJB6s4!Wy4euujjImGkV{QvE@SgL>Xp}xAZ?KdetKTPr;@O2f$zF+^w!f&G#Yo0GWMTmJ}0^tEaOd-71 z4|5P6_QO0<(bayK{Yu1jepo@BXrCWefpgqn%i!2osScqh{4hp3R{b!6aLf-=2pq6X-Jxw z8l6my9-lopJ##FzI<N&`SOiWuGCnmc-8emc_{iwYVd2J^(W!k0>FnqMZgP6xVd3}` zdvW{6$H%zI@%^*B%hlZ2^vzSk{^`lF@fp>7Etx(#KBdmij2=6A;fF z$yqvn>$nK8WP1Pp;6A`=d~(UYiJ5(q<70V!ahy9kdHk5h9<`Lfk%_5`G2V@76I1)A zYe~ZKqpVZz;PjDkb^qk(fhGGVCLx_3n3yF8571*bO{qR{mW+;#>9z3e^nnAD9O&B0Oa^l{>b>ufpO{B(TORBH^%}%pnb7bV)3E;p0dBd`2eCL*ApW;6*f_{BD|AHy2(C$B^XM$bFLd zy_$<+3~!NPH!*dSR>P7-jCxF2DH+H#zJxU@9-KHfJ3Vu&>LWFKaAHb4HaP+PALCs< Ia7Ez%0xHMQ;s5{u diff --git a/rhodecode/public/fonts/RCIcons/rcicons.svg b/rhodecode/public/fonts/RCIcons/rcicons.svg --- a/rhodecode/public/fonts/RCIcons/rcicons.svg +++ b/rhodecode/public/fonts/RCIcons/rcicons.svg @@ -78,6 +78,8 @@ + + @@ -102,16 +104,34 @@ + + + + + + + + + + + + + + + + + + @@ -126,6 +146,8 @@ + + diff --git a/rhodecode/public/fonts/RCIcons/rcicons.ttf b/rhodecode/public/fonts/RCIcons/rcicons.ttf index 567538e5a7fbcac3d5306bd158d27bf86f0a0721..ff76b7751c85c82f1b21a34d8ded7b9dee20cc58 GIT binary patch literal 19404 zc%0pQdwg8gdEj~6_q}uH-kE#vJTw}OW~3RttjCOI#t&Jx@k6$-jjdpV`|V$=nRD-Xe&;*i`CjKSLKq>WqPq}9y+b=baz*NQT5m%LFNNHlSMBJ@ z5BBU3A^i}fckMqqa_m2jzx7FksD_YW9GtlIz#pGkayLTqpCL5RduVKA)X9t-gt~7+ zC>??f@lN^%#J>)q<+e+ck?3ZZRcYX3;N+WP0vJ^|&A9UZyl7`}o09h5~O zothjuI(FTgfBOVN>!CjR%VSf=XTI6I;cEzO{8PZ883FEe`}|>qxSsj*^XIu9mb2(@ z*v)~k^|ATyel$4ne~?Nmtl+hyC*P^Pq3vF-hwgx+goxjgwHqdXg)}$CauxU>G+$S) zpk6eH$hr-icOrdaWM&e@5m#4UAIB5p2S)(DpMo;ZeM=C8S}y%Kei-r4-ED+X>>=-s z-$Z?wFeG@2F8jCb!1__Nx`Jlkcj>VQ#_~BICj7Ax|GUFyT&m7f(@&fAWsqN)KhNsW z-Ch9NUo77}Kj#MXGI7@C-uVmUwGV;A+pM1&Oc?(PKkfI4$Gkx9_QFD6pYiZvft9=U z&5P|8#oabu9{ll)ToAgv*&~m7G{h$%7rXhCaRKCW_(n+gq7>9KDlL_cN`GbPxo@BQ z?zz{`y>spt=c=>ftTt=T+Oy5G?XyF(*UdgQ``y{Ivv174KMy=qLC|8!B`r9&h2^%` zFuT`n@xxk+f7c%d%x;0teV2S5az7{C&%@{u_wy)xSPKGUlVfTvN#55V21;s@Y! zRPh6FJgWEscpp{#08)S|egJtu6+eJvfXsUVWCT@~cmbpZRl2y#An%(OK&>d*@Ixm10L9>r}0rUx) z{jL{4v!K}@dI2$bXT5+Jyf?f6dIxN%7nrQM7~Y3oLhs@m@nd9|yhC@=yXlwc%k+P8 z-P||%&+xAc+k{7jSHw?ANE(n{SMtjJs;*9|Usum*OSMDVecG#mrGe4F!+M+kh2Txb zQe)ccwKiH`wbRbE&Q~HRl8-zX<)Yi7w?&_fz7qXKtR*%WyDRq3@!|N%_zMkM!}5kF z6D09i;(TLQ;~yrYp0v-uNAD%S0QmsU!f8x~ zk>v+j1xZ>iK~=d!2z#Cxcog6#MVR!I&Hh0Q15RSwv~&z45Q2bFHk=l0v&=|QDw_S- zO!GaNffoF4Edv>NR}CuD>v>bVC)wE}ibPCywx_E68vMRr{wBP6dYu>`LmjE=SdZAz zgj2~*UklHV(PeZy(@#9krbR4t* z63TaAQ*1L`8ydm)!pq*9yujYMc#W4nMZZB4pzlHZ;#QcTu+@ZtWy%;>33p@B!#VHZSE`Q$ zLIHu|Gip@9H>hg0i&GD`72Dg3ZPb!Xz50l#@YNng!6U53-&5huI4f**`+1UXpiy)? 
zP6vIU1(AKbfxhp*__%Fmix{GppNYMBd+!pplmrS6uRGr>o+CTl07NH`12lY;BokBn zMn8Ho3~8AJm-2rbRc__h+OlWxIS(SqmUaH0g!PlxN63kUQ?G4u^(e?RmnHbOWcC6Y6Hb zsSD5YB$@`|NfNZ6nzp@;f*Zwq;IAKwouT=83MQX$0m}7g#C)Tw4Sc=-o|rl+h$8qG zWmgoXq}u0ehb5Dli~u!0R<&kfwc_hkumz*8iO?mjyd88s;(}Z}Fbi$8 z4;=>b0Y89UzqmP0{P?Ne>w`;UWPIvrCeCUEMi!#m`5VK0DW6vEG6?CTWf8NxXOmX5 z%WDlR<$YF~H|dape6Et)aH2;!DAU_JSrvxY(^?dJ@-Ox=o{loA0={{Eo5 zF$z3G=(GSfY@%Rp7925jmd#MeKSfVJjOXgfBItuRYO?g%ywcD2e{9*7|E?|4cHJUA z3o{Qu8R;mGZg#~lCpG$`H6GI@=Pcr`Jm@rjSoZn3??0mHTsH4#AM69HSD>$ zvC$w6Ajr)hbCb!AVm(@`TuhM>vQYtEBO3e{WM~HK@>s5+=T+9*+83p z*#r9f`sC>i`~Jr++(??j63GaI`Sbb{M;ghM60-4_VqEIu6JaD+`!A^=)92^6Ey@8T z7SV^ksjt8d;WzmnY$amf)G#0iFLVLUD6k<1{ijf9FUo(71yNZ6mGV-a|F>yT01yrs z0bGKaf~A4Wf}cXDLj*(AK}`A#&JQIm}jF+s@PH>LNS7<@^KAws2Dfv2qikZ z2^)Nm_)b2`g4Rjn9WF~T)*q9Ms~q2>nQ17eI2Ilre~3#A{x)N`J@{vtHrX85)v?(D z_a+eeX5I(&<`4xq5ZeJX1Fv|IJP%DWfLxQ6<`m{fqfaIL=V*K>OGRQ_ktIq7)jO z5=0$nhn7gYmAp78rRZdYjkE$!wcGzn z141CtOtop7%00Y%j`I`9tYsn!ZVzM=`=w-uN8`Eo*wnreyVm0UQ2)-S+Aw)-Yj* z_{Byctba~Z_GOSuTQ2x%r?3V!KR?b0w4Vcw{<|OhWSC!>-%!KeV`;hcoqetZ$qpCJ ziqj`+cZXC*z|mxQu6t3~yj-|3reQDij2|~(3a`tI7Uw@#iAS9Jb_YTd02uOUD=E zr_<4%Y`;%eaS*Icy_}_?np8@s(gn-G(wrs>(~^=Xb6S+luYW|OMocrn(?r?3Q9sqd zw5^g9>0REI=Vp0ZXAx)RZq9NxC8@FUnbN7MqGRcjeX~Bo^&_l`c1k>XREMsk>sPvk z{z1Iq63|#NtLWs2kBe;vaGC diff --git a/rhodecode/public/fonts/RCIcons/rcicons.woff2 b/rhodecode/public/fonts/RCIcons/rcicons.woff2 index 57e99161f3477a5f5ec4bc8f8ef04091d5e743ec..593ab60a90db9ffd9c426f41b5954e866a06bac0 GIT binary patch literal 9748 zc$@(gChOUEPew8T0RR91045Xw3jhEB087jO042i!0RR9100000000000000000000 z0000SR0dW6j1UU=Fx@;0fffJ(HUcCAf+z$a1%*Keh9MjIJT>E(mB7XU;CtAbh{{nf zqJp%B?En9ez>Oi{n^@h#2Jl%&J(VL23l8BB%G%1|QaW#+``w{qW|piB;{TU3E_CJi zuD$9LRfHdW%C8eMNlb)8Y&VL#J6=E4u*(q|`y}^(bgS8tY)Crk)R02Agd|i8kdTC) zD%}=}h>ZiKTR)%-(bEr#o(ivFt6NSj163`Yf zKPPetx9;8cj9Wp3O#@hz|7Rbg++<@gU2+g{d-tqcOOU&JdE_UI6ArLiYk@!+505@s zvr@8WcB>?NXRk`FHY{9{8RHQ^!@!XTkEc>|?2nYNl>F3c!$QF3z1bs}Wp2Ae@ObAN z5KyZGVm_6^E2;h0*P&Mn{z1;mIT}V;u9wNo7uSp7_vwt7PrhvLEGcghy@VDo`+Xw5`>MZzIo(Ye?0R#Z3FcGEzs?(>~ zfH`kg`2z3&93BvOp+86F6&K9{%6=0+#?g>+Mt1oYTZXIy5&&9_UIc(O@*U(kC=nFC z1`-C3ADIdh@*#E@UcJLCuDOtnSz#}}9f$-lI0-Gz4Z7a-5?PBJBl~p>s4F<4um35f z3dj0g%g)`u_18c9&;Nb-=@r2y@c9X1@B-~?PyOtj%f%mg*=CH7dFy&JdGd*m+r-|# zT(){V$9w&_gSR(5@6FTy2aiOiP-%1qlf~w6d3=FTB$h~Ja)nZ*)@XHlgVAJRX=QC= zYiIA^=;Z9;>gMj@>E-R?>t_}xo|aAmJp-dekR&+GZDhmI-D`I4K!_sZh$JG7$RhHH zBBG3_BI<}HqK)VxMiJwPNkktpjhIEuBNh?Mh*iWoVu;v8Y$J9N`-nrtG2#>vBF+() zh-<{HE^)6LJn9zDy2GpP@vaAa>Ji_1!mpn3uaOpt#x0bMuTeFgp>F(%rtvS@#wT0jk;xANO zLJM^b2LgGLSezkFzmVsG$yWxRZ&m7bQ9WijZ(07%_`)`8vCY^Ox5lBs)|XBv&n}-l z(O3N&4fzJsbsTkgH7aXu;J%w_>`c1o3t@^*Ep^AqWqk=K_q0^#?8U8ztrowJI@c~+ zPpb9PE6198ZXWK=Za4Z(eM#Gv4P&n1ez{x{g17Pc9Aj=K((_;>YkQimmUz_x=QMTS zuXZcE(&IpCzBIfTr}&jUexf2A0`UT#f&k10cfopGGmEh*g%OUlwa1B0YFa z?t@Xj7c2eYq84OcwQ6v^?u$p=1?yO)QRWBFiqrd9zMBuM(^mBRLzWQ-@{vxf&d77m zZWUnoc=&wi_7Bah{G*fff^?nkXc8jJ^nhJWN#HAa;Qfwx>Hrj@I_RQ!f`pMmRi`#t!sH|awuh`KPF+|i7W!qz9UNK{#KfR>{UDVuM@@2^ZoST7b_<{Mv zibXhO*Ku4{%Mli!4PppKd5Ezb01!{vGRs*Vr%Y={(e2A0F(#W1h>741i%@NbqD!>{ zDujfFQSPsfXPdh@H5waXRS(CdUw<7tnj%F?z`uBe1t=zlTSh~MV4++8K8xkIN&5;f zBcIiqzRLJnvu*l5lA^Vj26D4*_A&nQ%IYSTPzh2ost+~|&+q_$vw!n3Q)gP4&O#Jj zsygHq-X2085?NpWc}culmeE!8N<=?N?Uw z#`Z*Iyz#&nxkh4dc3}s|AssF^xaEBCt=V-B+(x)X5R_Bq7sYwNlGTa8I$GHK!}e~t zd!9%f-^D(j7z}LC*ag@HEWh8)S2HjS-FCV@vMOJ!eA!wdBnTkh5#)N>Jg67giVX_b zAPg7SnkYB{5{dFhUafDJylqt}oP9!}YGPY(3-MHUoppDHw})8X z>X%K@Z?+!iyWy4{pBMMZiS2I(o2v=q$+?P_x1GG%AKrEriCkxJ>lrL3zYhz&Rj(($ z3G&^dnI|F7nzxbBItj8LU2x^#{y%gkq9Jjxf 
zx4UuKovx18_4ro0jxj1tyq5%|@n!E*Fj5RRfb}$6mUF_e0IrFV`omF#p?k4)D|{$| zU?9Uez!b46iSE5*Flj&qbsFPAqPM1;RtCH8u%Nl=A2MqxN1H;(ebcR{dv_h2rXFHx z*zIhXCTldg&55pd26(Xc(x}AR??v3oxZ+C0>2r@k z{fsedJJ z^@umc0&Avfv&61Df&jT;=nkFx&kU}uE)PkUxuZgNV4zn~HV}czMlSOjMC!%qb6PMb zHF!ob!?ujYmf5iU^M2?yWxzTa)K>u-oQ8;Coh;@JFqYjUXs`l#kDvw=i4Fk`48Xc3 zZeU>9H{tXN8i7SDh=_YkpPAT@MTs%lU4~pE6iuLK# zvv>GJ-J9@L4~}v(l=cN#-YC|vIizV}t+A}iNw zVHew}S?!%ErquYjDEF=kcjxXyMb>VLn$|?z;5NV#l^=KnIw6f`4q%B&=Nqe9R71At z`_gdA!LkpzZnR9sPuC#dY5Lz|*>pHzO;Ad?D;g`(C{{w#BU^b!@c*I(fV}vuWBA|J_HAeU{rfi%bkWSP5 z&Lp)SgdvRb;qaX(zl7uG=U-N2vF2ng#g#*WRfV0Tyf935jXUp7KELrW44LVhw+96| z9#paD{|8{G$1K-^(x#L;^Nied+ik_P)hp;iZ|^FF(1tQ%5s<%;%HL;Lre^IG+$BhJ?lwA6fBRn>x}iq?F|Fn=+Y<8Ll%e+RzqgRLfp!O4d@# zIFiF#H+_|fGILbcAHSh-135hI-Z0MUVk){xl5Gn|@In4Tqr;UPy`}?fn?xHE=17ueHknxM;Lz*RIWgX!v;(MY;!rreR?Dy zWdI=4hc{|T-**WVAlUa5LJAVYlP8#a$wKWdJ1fJ4s*2;X?s7-lh3+|*TUsXC=c3L5 z4jY4K0;{vXbg%p<{_4#tZ}+NU=!W=-=91g;-jSm3o8izIJ`?j|h&BZSKkAYw%+mnF z&l3sV`VQJFvwo*SwkDSHj3bLgF;!v@B!Ev!n z-lsnRcF!ed3w1w-iaI3=f|wWhfrY(Q5l51-qi^njWnxWmD2Y<~aUePzekYR4sJI~x zZMC@m!l;jUfqR>UGo*lz5^8-kevk`CQ`3-|fUhLKU;8*dl}`>!2|4j5rw zJSO`y94X`5yyFQ=0hT|pQ>N>`9fxtV5w{<{#ktPpB!2+L9h^v3=gLW{{`0-@f>QJ* zt<9Hx-?-897DSiL^_p;OD27NKA-hH-iI=D$0V%yG%~nBXQt$JwbQ|vC8>gcDV$S(Z z5q9CmHNRAS7z0twjl1thd(Cl;S6Fu3g;)+i%$8*ODzv;IXnmb8koV0Z7a%w%yI=}1J2KC7 zK@84d?8BbEghK)eq$@HO%&bI#Gx0h08%FRJW_E-*_D6_s}KxcUNpHzDwS05cx-(m+%+atFQimejtARf$>50m&Pxv@6-4H@|T`m zPQJ5auRGMozLSTXkbjh)2$4&!<-EjJN^^7coQw5(pM#PuqlE;)-bP=E&*zelaO~jrN=!C*7# z?H^K>{KwvdqK+9e4~Y)myC1f0Yu5dq)GXoVe}@00ICv0PtkJ1LFM-RiV<)W@Bs#k%&sL&pInO zEq<5^wbWoJf*4B?6eC1@3KE7-CbcUW1GHYo7;WC(KeYe*xyq?YBN70Q_irCPWr-Lj z-rc?;FE~85(U)lH0n#e?Yx!sKw)z=nJ7#*e1TbUj!&aJA4jKbxOQ@ru00biVbf<~RLQc&66-pF@0H&+V45}T;25L8GcP(L8R$#?2>*e^iP*x2k8 z!IfLxPwegrxX+A@v~0K45Y2HlsS*=a`$0$)I_bDq$ZkMf+f)ws{IeQ+fd35cB?P;m z0fo2!%F*mzO{pqyv#mQBN1{X~s15nvXW}Rw6nQk|6|E(NP|L-{Qz1GGTR3ky^s*|n z7p*E=lK;UYRvmlXr2f<5fA(T$xB*{}2 zUPRj4_iN+jLOuJ}5BqAiZz$QBRL1j~5dDw4AL}*N6E4`8a!hgh%CNZfs zZz_0^oct;D)9yW9@6bHL73$jUbT$KY_2~G~@<%~gzc%E6wKGV3`(`?G&O*7@sI4}= zS-gKDL7w7&q6TUx>3QX;Uc2hLyh08JdHi)GqN{G#8Uezd%(DA`^iE;wye9fg^wM3g zW@>%Hyj5Q6gViiGl$Lo*!0$vp%|*+ne~NOHJ||z$!k+AtF)YTL7y23@zzYO#7}T#Q zYHRxuY1{Bk+om5$ZOz^V-aXOk%asbEqUKmy*j@G3AojBf<@^L&_7d3-t)a<>6 za&3S%uv~9?ptVoGCN)};v+A!@6Mt!+MWlv!RL{P@{9!)VVUWGkf83a|$usfWi4not zO`eADC6Pg*&%gCd+-MDol*n(OHaKGA5);AmDC_wLQ8@vrSYhsn)yukH#Ad2>Vbj#? 
z(O-`1rG7)RQxG$J9kNt+Bg7SLWowu!@Rd-Ual9;{`J`aXq!A9_;O7!xNy)5ftmI^j zdrt>!^nL%n)2GvX09dQn3tK<1ti7#FJL5yE2vlK9T~<-Ouz%*LaDY@R95cNY*k2?n ze)=i4;ONn5bcbouuy6n$Ig$<0c`;^dWz!2;Y`2zwa6Bt&iJF;3tgb!5rFLe*uj%Pm ziTsq9HJd2OB~#h8lpcarc%URL#>TcMOwCg>6KYDg{Z(DSoLNJzCAP#Cu__-gOqoX4 z4^>4NL^-TY)=c2w0X<}9;kmhK(K=SXAwC8!(KMg4s(>|f3%tpTv09f17tlTEeWCO$ z)4Lo-=n6V-9%}nQl{*#3@w_ zYkCrkq&CFV+$pfpV=IU`q^ejuWA?rXn)*sHm27<#RFP90DnU~Hxv>+q46W(K#x-?# z?Kc)1&74s%G0n|FOz*nPL^;|3gp_o12YtuflM+N5;D<7Q=vrVQ+_Ke_gAAz5+~pUqQ1;U5rWcKPp4 zU8WvTfe>A(b-O_`CphhOL@_;qo#eF^7smB`U5+Esb4K>Y59VSiImPv_W2?<(JJX&lQ*0fYsSZN|BWBBMY%4l^)9lRB8CG*s zlXWlBoSxo-?6kHtrXNIBS?x{Lmc^wvZj}w+DVu+TG*WfrLfPA%vbr0ve1O@R@7i^G zclveuX!KAwl5@!W5Y&Nk5PrxL`mDmkH5f`N{mzyMT&Cd>!I)|A;3LDresYO$kr^Hq z3Yo4#d@i6=(qBs&<90@dwIT8EPN4?Jg zM4PiH9YnZKawsPR%o?IOsZp~Zhz|Wnk(Dr0UWNPtyuM$znVP#w-_rJ~H8frHisNfO zS(V_NSw?7t10>$1Y4mc2tW}!v!>C~Fw0V`t^j1A@WobOZ5P1K-!>7XpvLRxH$+baX z3uk&iWq%j$pTKpmd%SR)V1ui*%@yi`Wv84U{dVz3)6)m#+5V{`0!H}*Z6AgEZ2p`z z5Y0`=_MGOC%*@D>bS=<*Ph zOgQJh`^A=JU4t9>Osjh3t;Qbvlxc4kKXl9S4iuw*uvg zGa6C7q0h0>`FUgtTd=rcV3!wTZ{L6U=Q3L54V1E;4VE$%PbHvzPN~-$&)Vl~=J5aP zA6wzUI4v5Nks>dr#&hDs9?a&GiXy}$V$!47l*lNz{bb+n$)c#Oc^YFkmt&hPAw|(6 z6~vx|dDk!sM&_TH;vYFKR>I|Q=3k>?{7X|HW(7K#$8zl2)I<%3nerR1a=IuE8Y!yeQwSRwp&P+BrLVV5cWJ={2MHe}lD+-*|)cp?>k!MNmVY0;;fB*0s z@B50sIcangFK26@33nOjD>M`f6k@o9rwT!eWvML5PY2-?c>_qGl zgL3e>Em$8Es1G(X*X&bD=gm`1WaGx8v;-$K{BP{;(qCm9`Llv}BOI;d3R^rJlj8&J zfB}Rq^`G4LWo=*ex~@J%ib%td%}tFi>1Jt%PJ8m>;(aIz2+V%SF5iOyBCpa3ORC>g zzfoT)6ke7h03`F^ZtTsud*txGDdzln@2v|KwSTi%!hiR#VPu{RSbt+X@ zm)K@pZg|M$sGUWwtxeB?BefkK#lPTi0C&EcIalqJA_yP|dZe;`#`>(ieaWZm40lR% z&yQ8Gk?!Yn+#~CLZ@DH!DHB-fZmRL9IB{|CEUtU05e5G`3=pKZ@@`l5bA$un=Xkto zq+rO)s=5nh%fF7JmM4u>f$|9KUCJm1F+w+dJIDeabXJH@MhJ2Hu=pSUW)KXAIdjPEXE zgwMWRk?g-93B$ile8v4d?vCx;UmpuOVF+M_jx#<}_dCC>y4ak9m!<#dm9S`2i3i5V z_>T?wz4Tj*Wvo0b+AC$UQ`q!V(}qfVCuq}%w?EV#ausmN_@%~NE{KX61Z zRGJKuEDO0VlMiDsWM@~)miSc)Yeh3n$%N-t`C9PYa#KGCyoPu|0qHDWi0etc9s^E} zH)*oFWa7Bvh_a2VJv@=}I(&(sS}MiM!mR+@`nc<1qA3_r^gmSC$y0XLCcp>FX%u6b7{|R^jNl%(aIvbHGwv#5aNkA$jrAyCjE!??7 z>SjXWi3CoxaTDm2!;Zo+cEUrsZOY8@Kc&ciI))oGxCV z@&v%zh^iS`1H21631XzR!U57g;h0I-J3A(%5)}gbr=g)2cERwX#MxIz+FM;0Z*Q}2B?+}R7LEw|$-@s? 
zcF2j2nN7q6Qm<`UGb5z1?jeeYqV>-yVNF0F# z_Eh4KBntje8xGD;>`H>I#)L!Y*9i21?iOpaIlUR#-Te|8&4*T*?akQk)aDj*_5`YH zQ#5VUu`5fhThb&LnAs#8Ajy00Nx_&3gcEa9fCAcTmb=YGm#zwLs-w}4h-a}*$WMlTA zp>cM!-B4a~s_HOHB5g%Xe;03kdlBFIv0L9BPVC@Y8CD$~JJbUjOZ>E(*PF?g1+1^;a zp-%apK_hsZ{V#g4Ft!j)$9$Be*YJ4d8N!K|y?Y5aKVjFQmFIektwjIrVLuqS1OQxY z;3~kPFiY>bSP5qBy$!pBdG+3d=`m63FIWKj^bqAlhv0T+Fe=7u2Y}v2e$%V!I$F{C z0=UY^_uI04t7>|S$x8_be3x@y)e!cs~_%4*ZYRFvJnW;g5iO?V#7DFr?0P(PAv#D9HKRF?3p@VYP7`fVEY+ILnGz zDFVDH_5Ff@h3Kq>94lACvxPuwLQN=L+e%vj?JBLJHZ8LN=;aCsYpVz>QX>~04iS;H zRh%|QmyTS@b&J%wTFQpJkcA6VEe;s$jS3c5#oB(sG)cVw7DohMxNZZ{2{&UQvuoMa zXnh-JTs)bpP%N7Tj#h1z@3oNGwJeI*4xsF_vb4CMSrL;orvx6B#^Dte z#bMl*zF(yLNjh0wF3Km8Ega=xv@n!NcHbiaZYg{-I@b@ZfD9(qB-nBL$lP0yj$&5a zwkC%A+IqG5{cIa{Bipt(@mgD*5w_bK%kzq0AWIfn&_YL>?qQlZznH_4Eln8GEgx)w ict&uMF$HBL#TGkSXyCK3EF_D}*cyeNkV+JA3AnHZz3OrR diff --git a/rhodecode/public/js/mode/apl/apl.js b/rhodecode/public/js/mode/apl/apl.js --- a/rhodecode/public/js/mode/apl/apl.js +++ b/rhodecode/public/js/mode/apl/apl.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS diff --git a/rhodecode/public/js/mode/asciiarmor/asciiarmor.js b/rhodecode/public/js/mode/asciiarmor/asciiarmor.js --- a/rhodecode/public/js/mode/asciiarmor/asciiarmor.js +++ b/rhodecode/public/js/mode/asciiarmor/asciiarmor.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS @@ -68,6 +68,7 @@ }); CodeMirror.defineMIME("application/pgp", "asciiarmor"); + CodeMirror.defineMIME("application/pgp-encrypted", "asciiarmor"); CodeMirror.defineMIME("application/pgp-keys", "asciiarmor"); CodeMirror.defineMIME("application/pgp-signature", "asciiarmor"); }); diff --git a/rhodecode/public/js/mode/asn.1/asn.1.js b/rhodecode/public/js/mode/asn.1/asn.1.js --- a/rhodecode/public/js/mode/asn.1/asn.1.js +++ b/rhodecode/public/js/mode/asn.1/asn.1.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS diff --git a/rhodecode/public/js/mode/asterisk/asterisk.js b/rhodecode/public/js/mode/asterisk/asterisk.js --- a/rhodecode/public/js/mode/asterisk/asterisk.js +++ b/rhodecode/public/js/mode/asterisk/asterisk.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE /* * ===================================================================================== @@ -9,7 +9,7 @@ * Description: CodeMirror mode for Asterisk dialplan * * Created: 05/17/2012 09:20:25 PM - * Revision: none + * Revision: 08/05/2019 AstLinux Project: Support block-comments * * Author: Stas Kobzar (stas@modulis.ca), * Company: Modulis.ca Inc. 
@@ -67,7 +67,26 @@ CodeMirror.defineMode("asterisk", functi var cur = ''; var ch = stream.next(); // comment + if (state.blockComment) { + if (ch == "-" && stream.match("-;", true)) { + state.blockComment = false; + } else if (stream.skipTo("--;")) { + stream.next(); + stream.next(); + stream.next(); + state.blockComment = false; + } else { + stream.skipToEnd(); + } + return "comment"; + } if(ch == ";") { + if (stream.match("--", true)) { + if (!stream.match("-", false)) { // Except ;--- is not a block comment + state.blockComment = true; + return "comment"; + } + } stream.skipToEnd(); return "comment"; } @@ -124,6 +143,7 @@ CodeMirror.defineMode("asterisk", functi return { startState: function() { return { + blockComment: false, extenStart: false, extenSame: false, extenInclude: false, @@ -187,7 +207,11 @@ CodeMirror.defineMode("asterisk", functi } return null; - } + }, + + blockCommentStart: ";--", + blockCommentEnd: "--;", + lineComment: ";" }; }); diff --git a/rhodecode/public/js/mode/brainfuck/brainfuck.js b/rhodecode/public/js/mode/brainfuck/brainfuck.js --- a/rhodecode/public/js/mode/brainfuck/brainfuck.js +++ b/rhodecode/public/js/mode/brainfuck/brainfuck.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE // Brainfuck mode created by Michael Kaminsky https://github.com/mkaminsky11 diff --git a/rhodecode/public/js/mode/clike/clike.js b/rhodecode/public/js/mode/clike/clike.js --- a/rhodecode/public/js/mode/clike/clike.js +++ b/rhodecode/public/js/mode/clike/clike.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS @@ -11,6 +11,41 @@ })(function(CodeMirror) { "use strict"; +function Context(indented, column, type, info, align, prev) { + this.indented = indented; + this.column = column; + this.type = type; + this.info = info; + this.align = align; + this.prev = prev; +} +function pushContext(state, col, type, info) { + var indent = state.indented; + if (state.context && state.context.type == "statement" && type != "statement") + indent = state.context.indented; + return state.context = new Context(indent, col, type, info, null, state.context); +} +function popContext(state) { + var t = state.context.type; + if (t == ")" || t == "]" || t == "}") + state.indented = state.context.indented; + return state.context = state.context.prev; +} + +function typeBefore(stream, state, pos) { + if (state.prevToken == "variable" || state.prevToken == "type") return true; + if (/\S(?:[^- ]>|[*\]])\s*$|\*$/.test(stream.string.slice(0, pos))) return true; + if (state.typeAtEndOfLine && stream.column() == stream.indentation()) return true; +} + +function isTopScope(context) { + for (;;) { + if (!context || context.type == "top") return true; + if (context.type == "}" && context.prev.info != "namespace") return false; + context = context.prev; + } +} + CodeMirror.defineMode("clike", function(config, parserConfig) { var indentUnit = config.indentUnit, statementIndentUnit = parserConfig.statementIndentUnit || indentUnit, @@ -30,7 +65,10 @@ CodeMirror.defineMode("clike", function( numberStart = parserConfig.numberStart || /[\d\.]/, number = parserConfig.number || 
/^(?:0x[a-f\d]+|0b[01]+|(?:\d+\.?\d*|\.\d+)(?:e[-+]?\d+)?)(u|ll?|l|f)?/i, isOperatorChar = parserConfig.isOperatorChar || /[+\-*&%=<>!?|\/]/, - endStatement = parserConfig.endStatement || /^[;:,]$/; + isIdentifierChar = parserConfig.isIdentifierChar || /[\w\$_\xa1-\uffff]/, + // An optional function that takes a {string} token and returns true if it + // should be treated as a builtin. + isReservedIdentifier = parserConfig.isReservedIdentifier || false; var curPunc, isDefKeyword; @@ -64,12 +102,12 @@ CodeMirror.defineMode("clike", function( } } if (isOperatorChar.test(ch)) { - stream.eatWhile(isOperatorChar); + while (!stream.match(/^\/[\/*]/, false) && stream.eat(isOperatorChar)) {} return "operator"; } - stream.eatWhile(/[\w\$_\xa1-\uffff]/); + stream.eatWhile(isIdentifierChar); if (namespaceSeparator) while (stream.match(namespaceSeparator)) - stream.eatWhile(/[\w\$_\xa1-\uffff]/); + stream.eatWhile(isIdentifierChar); var cur = stream.current(); if (contains(keywords, cur)) { @@ -77,8 +115,9 @@ CodeMirror.defineMode("clike", function( if (contains(defKeywords, cur)) isDefKeyword = true; return "keyword"; } - if (contains(types, cur)) return "variable-3"; - if (contains(builtin, cur)) { + if (contains(types, cur)) return "type"; + if (contains(builtin, cur) + || (isReservedIdentifier && isReservedIdentifier(cur))) { if (contains(blockKeywords, cur)) curPunc = "newstatement"; return "builtin"; } @@ -111,40 +150,9 @@ CodeMirror.defineMode("clike", function( return "comment"; } - function Context(indented, column, type, align, prev) { - this.indented = indented; - this.column = column; - this.type = type; - this.align = align; - this.prev = prev; - } - function isStatement(type) { - return type == "statement" || type == "switchstatement" || type == "namespace"; - } - function pushContext(state, col, type) { - var indent = state.indented; - if (state.context && isStatement(state.context.type) && !isStatement(type)) - indent = state.context.indented; - return state.context = new Context(indent, col, type, null, state.context); - } - function popContext(state) { - var t = state.context.type; - if (t == ")" || t == "]" || t == "}") - state.indented = state.context.indented; - return state.context = state.context.prev; - } - - function typeBefore(stream, state) { - if (state.prevToken == "variable" || state.prevToken == "variable-3") return true; - if (/\S(?:[^- ]>|[*\]])\s*$|\*$/.test(stream.string.slice(0, stream.start))) return true; - } - - function isTopScope(context) { - for (;;) { - if (!context || context.type == "top") return true; - if (context.type == "}" && context.prev.type != "namespace") return false; - context = context.prev; - } + function maybeEOL(stream, state) { + if (parserConfig.typeFirstDefinitions && stream.eol() && isTopScope(state.context)) + state.typeAtEndOfLine = typeBefore(stream, state, stream.pos) } // Interface @@ -153,7 +161,7 @@ CodeMirror.defineMode("clike", function( startState: function(basecolumn) { return { tokenize: null, - context: new Context((basecolumn || 0) - indentUnit, 0, "top", false), + context: new Context((basecolumn || 0) - indentUnit, 0, "top", null, false), indented: 0, startOfLine: true, prevToken: null @@ -167,36 +175,32 @@ CodeMirror.defineMode("clike", function( state.indented = stream.indentation(); state.startOfLine = true; } - if (stream.eatSpace()) return null; + if (stream.eatSpace()) { maybeEOL(stream, state); return null; } curPunc = isDefKeyword = null; var style = (state.tokenize || tokenBase)(stream, state); if (style == 
"comment" || style == "meta") return style; if (ctx.align == null) ctx.align = true; - if (endStatement.test(curPunc)) while (isStatement(state.context.type)) popContext(state); + if (curPunc == ";" || curPunc == ":" || (curPunc == "," && stream.match(/^\s*(?:\/\/.*)?$/, false))) + while (state.context.type == "statement") popContext(state); else if (curPunc == "{") pushContext(state, stream.column(), "}"); else if (curPunc == "[") pushContext(state, stream.column(), "]"); else if (curPunc == "(") pushContext(state, stream.column(), ")"); else if (curPunc == "}") { - while (isStatement(ctx.type)) ctx = popContext(state); + while (ctx.type == "statement") ctx = popContext(state); if (ctx.type == "}") ctx = popContext(state); - while (isStatement(ctx.type)) ctx = popContext(state); + while (ctx.type == "statement") ctx = popContext(state); } else if (curPunc == ctx.type) popContext(state); else if (indentStatements && (((ctx.type == "}" || ctx.type == "top") && curPunc != ";") || - (isStatement(ctx.type) && curPunc == "newstatement"))) { - var type = "statement"; - if (curPunc == "newstatement" && indentSwitch && stream.current() == "switch") - type = "switchstatement"; - else if (style == "keyword" && stream.current() == "namespace") - type = "namespace"; - pushContext(state, stream.column(), type); + (ctx.type == "statement" && curPunc == "newstatement"))) { + pushContext(state, stream.column(), "statement", stream.current()); } if (style == "variable" && ((state.prevToken == "def" || - (parserConfig.typeFirstDefinitions && typeBefore(stream, state) && + (parserConfig.typeFirstDefinitions && typeBefore(stream, state, stream.start) && isTopScope(state.context) && stream.match(/^\s*\(/, false))))) style = "def"; @@ -209,24 +213,28 @@ CodeMirror.defineMode("clike", function( state.startOfLine = false; state.prevToken = isDefKeyword ? "def" : style || curPunc; + maybeEOL(stream, state); return style; }, indent: function(state, textAfter) { - if (state.tokenize != tokenBase && state.tokenize != null) return CodeMirror.Pass; + if (state.tokenize != tokenBase && state.tokenize != null || state.typeAtEndOfLine) return CodeMirror.Pass; var ctx = state.context, firstChar = textAfter && textAfter.charAt(0); - if (isStatement(ctx.type) && firstChar == "}") ctx = ctx.prev; + var closing = firstChar == ctx.type; + if (ctx.type == "statement" && firstChar == "}") ctx = ctx.prev; + if (parserConfig.dontIndentStatements) + while (ctx.type == "statement" && parserConfig.dontIndentStatements.test(ctx.info)) + ctx = ctx.prev if (hooks.indent) { - var hook = hooks.indent(state, ctx, textAfter); + var hook = hooks.indent(state, ctx, textAfter, indentUnit); if (typeof hook == "number") return hook } - var closing = firstChar == ctx.type; - var switchBlock = ctx.prev && ctx.prev.type == "switchstatement"; + var switchBlock = ctx.prev && ctx.prev.info == "switch"; if (parserConfig.allmanIndentation && /[{(]/.test(firstChar)) { while (ctx.type != "top" && ctx.type != "}") ctx = ctx.prev return ctx.indented } - if (isStatement(ctx.type)) + if (ctx.type == "statement") return ctx.indented + (firstChar == "{" ? 0 : statementIndentUnit); if (ctx.align && (!dontAlignCalls || ctx.type != ")")) return ctx.column + (closing ? 0 : 1); @@ -240,6 +248,7 @@ CodeMirror.defineMode("clike", function( electricInput: indentSwitch ? 
/^\s*(?:case .*?:|default:|\{\}?|\})$/ : /^\s*[{}]$/, blockCommentStart: "/*", blockCommentEnd: "*/", + blockCommentContinue: " * ", lineComment: "//", fold: "brace" }; @@ -258,8 +267,52 @@ CodeMirror.defineMode("clike", function( } } var cKeywords = "auto if break case register continue return default do sizeof " + - "static else struct switch extern typedef union for goto while enum const volatile"; - var cTypes = "int long char short double float unsigned signed void size_t ptrdiff_t"; + "static else struct switch extern typedef union for goto while enum const " + + "volatile inline restrict asm fortran"; + + // Keywords from https://en.cppreference.com/w/cpp/keyword includes C++20. + var cppKeywords = "alignas alignof and and_eq audit axiom bitand bitor catch " + + "class compl concept constexpr const_cast decltype delete dynamic_cast " + + "explicit export final friend import module mutable namespace new noexcept " + + "not not_eq operator or or_eq override private protected public " + + "reinterpret_cast requires static_assert static_cast template this " + + "thread_local throw try typeid typename using virtual xor xor_eq"; + + var objCKeywords = "bycopy byref in inout oneway out self super atomic nonatomic retain copy " + + "readwrite readonly strong weak assign typeof nullable nonnull null_resettable _cmd " + + "@interface @implementation @end @protocol @encode @property @synthesize @dynamic @class " + + "@public @package @private @protected @required @optional @try @catch @finally @import " + + "@selector @encode @defs @synchronized @autoreleasepool @compatibility_alias @available"; + + var objCBuiltins = "FOUNDATION_EXPORT FOUNDATION_EXTERN NS_INLINE NS_FORMAT_FUNCTION " + + " NS_RETURNS_RETAINEDNS_ERROR_ENUM NS_RETURNS_NOT_RETAINED NS_RETURNS_INNER_POINTER " + + "NS_DESIGNATED_INITIALIZER NS_ENUM NS_OPTIONS NS_REQUIRES_NIL_TERMINATION " + + "NS_ASSUME_NONNULL_BEGIN NS_ASSUME_NONNULL_END NS_SWIFT_NAME NS_REFINED_FOR_SWIFT" + + // Do not use this. Use the cTypes function below. This is global just to avoid + // excessive calls when cTypes is being called multiple times during a parse. + var basicCTypes = words("int long char short double float unsigned signed " + + "void bool"); + + // Do not use this. Use the objCTypes function below. This is global just to avoid + // excessive calls when objCTypes is being called multiple times during a parse. + var basicObjCTypes = words("SEL instancetype id Class Protocol BOOL"); + + // Returns true if identifier is a "C" type. + // C type is defined as those that are reserved by the compiler (basicTypes), + // and those that end in _t (Reserved by POSIX for types) + // http://www.gnu.org/software/libc/manual/html_node/Reserved-Names.html + function cTypes(identifier) { + return contains(basicCTypes, identifier) || /.+_t$/.test(identifier); + } + + // Returns true if identifier is a "Objective C" type. 
+ function objCTypes(identifier) { + return cTypes(identifier) || contains(basicObjCTypes, identifier); + } + + var cBlockKeywords = "case do else for if switch while struct enum union"; + var cDefKeywords = "struct enum union"; function cppHook(stream, state) { if (!state.startOfLine) return false @@ -277,10 +330,18 @@ CodeMirror.defineMode("clike", function( } function pointerHook(_stream, state) { - if (state.prevToken == "variable-3") return "variable-3"; + if (state.prevToken == "type") return "type"; return false; } + // For C and C++ (and ObjC): identifiers starting with __ + // or _ followed by a capital letter are reserved for the compiler. + function cIsReservedIdentifier(token) { + if (!token || token.length < 2) return false; + if (token[0] != '_') return false; + return (token[1] == '_') || (token[1] !== token[1].toLowerCase()); + } + function cpp14Literal(stream) { stream.eatWhile(/[\w\.']/); return "number"; @@ -311,7 +372,7 @@ CodeMirror.defineMode("clike", function( } function cppLooksLikeConstructor(word) { - var lastTwo = /(\w+)::(\w+)$/.exec(word); + var lastTwo = /(\w+)::~?(\w+)$/.exec(word); return lastTwo && lastTwo[1] == lastTwo[2]; } @@ -363,29 +424,30 @@ CodeMirror.defineMode("clike", function( def(["text/x-csrc", "text/x-c", "text/x-chdr"], { name: "clike", keywords: words(cKeywords), - types: words(cTypes + " bool _Complex _Bool float_t double_t intptr_t intmax_t " + - "int8_t int16_t int32_t int64_t uintptr_t uintmax_t uint8_t uint16_t " + - "uint32_t uint64_t"), - blockKeywords: words("case do else for if switch while struct"), - defKeywords: words("struct"), + types: cTypes, + blockKeywords: words(cBlockKeywords), + defKeywords: words(cDefKeywords), typeFirstDefinitions: true, - atoms: words("null true false"), - hooks: {"#": cppHook, "*": pointerHook}, + atoms: words("NULL true false"), + isReservedIdentifier: cIsReservedIdentifier, + hooks: { + "#": cppHook, + "*": pointerHook, + }, modeProps: {fold: ["brace", "include"]} }); def(["text/x-c++src", "text/x-c++hdr"], { name: "clike", - keywords: words(cKeywords + " asm dynamic_cast namespace reinterpret_cast try explicit new " + - "static_cast typeid catch operator template typename class friend private " + - "this using const_cast inline public throw virtual delete mutable protected " + - "alignas alignof constexpr decltype nullptr noexcept thread_local final " + - "static_assert override"), - types: words(cTypes + " bool wchar_t"), - blockKeywords: words("catch class do else finally for if struct switch try while"), - defKeywords: words("class namespace struct enum union"), + keywords: words(cKeywords + " " + cppKeywords), + types: cTypes, + blockKeywords: words(cBlockKeywords + " class try catch"), + defKeywords: words(cDefKeywords + " class namespace"), typeFirstDefinitions: true, - atoms: words("true false null"), + atoms: words("true false NULL nullptr"), + dontIndentStatements: /^template$/, + isIdentifierChar: /[\w\$_~\xa1-\uffff]/, + isReservedIdentifier: cIsReservedIdentifier, hooks: { "#": cppHook, "*": pointerHook, @@ -418,19 +480,22 @@ CodeMirror.defineMode("clike", function( def("text/x-java", { name: "clike", keywords: words("abstract assert break case catch class const continue default " + - "do else enum extends final finally float for goto if implements import " + + "do else enum extends final finally for goto if implements import " + "instanceof interface native new package private protected public " + "return static strictfp super switch synchronized this throw throws transient " + - "try 
volatile while"), + "try volatile while @interface"), types: words("byte short int long float double boolean char void Boolean Byte Character Double Float " + "Integer Long Number Object Short String StringBuffer StringBuilder Void"), blockKeywords: words("catch class do else finally for if switch try while"), - defKeywords: words("class interface package enum"), + defKeywords: words("class interface enum @interface"), typeFirstDefinitions: true, atoms: words("true false null"), - endStatement: /^[;:]$/, + number: /^(?:0x[a-f\d_]+|0b[01_]+|(?:[\d_]+\.?\d*|\.\d+)(?:e[-+]?[\d_]+)?)(u|ll?|l|f)?/i, hooks: { "@": function(stream) { + // Don't match the @interface keyword. + if (stream.match('interface', false)) return false; + stream.eatWhile(/[\w\$_]/); return "meta"; } @@ -479,25 +544,42 @@ CodeMirror.defineMode("clike", function( return "string"; } + function tokenNestedComment(depth) { + return function (stream, state) { + var ch + while (ch = stream.next()) { + if (ch == "*" && stream.eat("/")) { + if (depth == 1) { + state.tokenize = null + break + } else { + state.tokenize = tokenNestedComment(depth - 1) + return state.tokenize(stream, state) + } + } else if (ch == "/" && stream.eat("*")) { + state.tokenize = tokenNestedComment(depth + 1) + return state.tokenize(stream, state) + } + } + return "comment" + } + } + def("text/x-scala", { name: "clike", keywords: words( - /* scala */ "abstract case catch class def do else extends final finally for forSome if " + "implicit import lazy match new null object override package private protected return " + - "sealed super this throw trait try type val var while with yield _ : = => <- <: " + - "<% >: # @ " + + "sealed super this throw trait try type val var while with yield _ " + /* package scala */ "assert assume require print println printf readLine readBoolean readByte readShort " + - "readChar readInt readLong readFloat readDouble " + - - ":: #:: " + "readChar readInt readLong readFloat readDouble" ), types: words( "AnyVal App Application Array BufferedIterator BigDecimal BigInt Char Console Either " + - "Enumeration Equiv Error Exception Fractional Function IndexedSeq Integral Iterable " + + "Enumeration Equiv Error Exception Fractional Function IndexedSeq Int Integral Iterable " + "Iterator List Map Numeric Nil NotNull Option Ordered Ordering PartialFunction PartialOrdering " + "Product Proxy Range Responder Seq Serializable Set Specializable Stream StringBuilder " + "StringContext Symbol Throwable Traversable TraversableOnce Tuple Unit Vector " + @@ -509,11 +591,12 @@ CodeMirror.defineMode("clike", function( "StringBuffer System Thread ThreadGroup ThreadLocal Throwable Triple Void" ), multiLineStrings: true, - blockKeywords: words("catch class do else finally for forSome if match switch try while"), - defKeywords: words("class def object package trait type val var"), + blockKeywords: words("catch class enum do else finally for forSome if match switch try while"), + defKeywords: words("class enum def object package trait type val var"), atoms: words("true false null"), indentStatements: false, indentSwitch: false, + isOperatorChar: /[+\-*&%=<>!?|\/#:@]/, hooks: { "@": function(stream) { stream.eatWhile(/[\w\$_]/); @@ -527,9 +610,24 @@ CodeMirror.defineMode("clike", function( "'": function(stream) { stream.eatWhile(/[\w\$_\xa1-\uffff]/); return "atom"; + }, + "=": function(stream, state) { + var cx = state.context + if (cx.type == "}" && cx.align && stream.eat(">")) { + state.context = new Context(cx.indented, cx.column, cx.type, cx.info, 
null, cx.prev) + return "operator" + } else { + return false + } + }, + + "/": function(stream, state) { + if (!stream.eat("*")) return false + state.tokenize = tokenNestedComment(1) + return state.tokenize(stream, state) } }, - modeProps: {closeBrackets: {triples: '"'}} + modeProps: {closeBrackets: {pairs: '()[]{}""', triples: '"'}} }); function tokenKotlinString(tripleString){ @@ -553,33 +651,59 @@ CodeMirror.defineMode("clike", function( name: "clike", keywords: words( /*keywords*/ - "package as typealias class interface this super val " + - "var fun for is in This throw return " + + "package as typealias class interface this super val operator " + + "var fun for is in This throw return annotation " + "break continue object if else while do try when !in !is as? " + /*soft keywords*/ "file import where by get set abstract enum open inner override private public internal " + "protected catch finally out final vararg reified dynamic companion constructor init " + "sealed field property receiver param sparam lateinit data inline noinline tailrec " + - "external annotation crossinline const operator infix" + "external annotation crossinline const operator infix suspend actual expect setparam" ), types: words( /* package java.lang */ "Boolean Byte Character CharSequence Class ClassLoader Cloneable Comparable " + "Compiler Double Exception Float Integer Long Math Number Object Package Pair Process " + "Runtime Runnable SecurityManager Short StackTraceElement StrictMath String " + - "StringBuffer System Thread ThreadGroup ThreadLocal Throwable Triple Void" + "StringBuffer System Thread ThreadGroup ThreadLocal Throwable Triple Void Annotation Any BooleanArray " + + "ByteArray Char CharArray DeprecationLevel DoubleArray Enum FloatArray Function Int IntArray Lazy " + + "LazyThreadSafetyMode LongArray Nothing ShortArray Unit" ), intendSwitch: false, indentStatements: false, multiLineStrings: true, + number: /^(?:0x[a-f\d_]+|0b[01_]+|(?:[\d_]+(\.\d+)?|\.\d+)(?:e[-+]?[\d_]+)?)(u|ll?|l|f)?/i, blockKeywords: words("catch class do else finally for if where try while enum"), - defKeywords: words("class val var object package interface fun"), + defKeywords: words("class val var object interface fun"), atoms: words("true false null this"), hooks: { + "@": function(stream) { + stream.eatWhile(/[\w\$_]/); + return "meta"; + }, + '*': function(_stream, state) { + return state.prevToken == '.' ? 'variable' : 'operator'; + }, '"': function(stream, state) { state.tokenize = tokenKotlinString(stream.match('""')); return state.tokenize(stream, state); + }, + "/": function(stream, state) { + if (!stream.eat("*")) return false; + state.tokenize = tokenNestedComment(1); + return state.tokenize(stream, state) + }, + indent: function(state, ctx, textAfter, indentUnit) { + var firstChar = textAfter && textAfter.charAt(0); + if ((state.prevToken == "}" || state.prevToken == ")") && textAfter == "") + return state.indented; + if ((state.prevToken == "operator" && textAfter != "}" && state.context.type != "}") || + state.prevToken == "variable" && firstChar == "." || + (state.prevToken == "}" || state.prevToken == ")") && firstChar == ".") + return indentUnit * 2 + ctx.indented; + if (ctx.align && ctx.type == "}") + return ctx.indented + (state.context.type == (textAfter || "").charAt(0) ? 
0 : indentUnit); } }, modeProps: {closeBrackets: {triples: '"'}} @@ -646,11 +770,11 @@ CodeMirror.defineMode("clike", function( def("text/x-nesc", { name: "clike", - keywords: words(cKeywords + "as atomic async call command component components configuration event generic " + + keywords: words(cKeywords + " as atomic async call command component components configuration event generic " + "implementation includes interface module new norace nx_struct nx_union post provides " + "signal task uses abstract extends"), - types: words(cTypes), - blockKeywords: words("case do else for if switch while struct"), + types: cTypes, + blockKeywords: words(cBlockKeywords), atoms: words("null true false"), hooks: {"#": cppHook}, modeProps: {fold: ["brace", "include"]} @@ -658,28 +782,67 @@ CodeMirror.defineMode("clike", function( def("text/x-objectivec", { name: "clike", - keywords: words(cKeywords + "inline restrict _Bool _Complex _Imaginery BOOL Class bycopy byref id IMP in " + - "inout nil oneway out Protocol SEL self super atomic nonatomic retain copy readwrite readonly"), - types: words(cTypes), - atoms: words("YES NO NULL NILL ON OFF true false"), + keywords: words(cKeywords + " " + objCKeywords), + types: objCTypes, + builtin: words(objCBuiltins), + blockKeywords: words(cBlockKeywords + " @synthesize @try @catch @finally @autoreleasepool @synchronized"), + defKeywords: words(cDefKeywords + " @interface @implementation @protocol @class"), + dontIndentStatements: /^@.*$/, + typeFirstDefinitions: true, + atoms: words("YES NO NULL Nil nil true false nullptr"), + isReservedIdentifier: cIsReservedIdentifier, hooks: { - "@": function(stream) { - stream.eatWhile(/[\w\$]/); - return "keyword"; - }, + "#": cppHook, + "*": pointerHook, + }, + modeProps: {fold: ["brace", "include"]} + }); + + def("text/x-objectivec++", { + name: "clike", + keywords: words(cKeywords + " " + objCKeywords + " " + cppKeywords), + types: objCTypes, + builtin: words(objCBuiltins), + blockKeywords: words(cBlockKeywords + " @synthesize @try @catch @finally @autoreleasepool @synchronized class try catch"), + defKeywords: words(cDefKeywords + " @interface @implementation @protocol @class class namespace"), + dontIndentStatements: /^@.*$|^template$/, + typeFirstDefinitions: true, + atoms: words("YES NO NULL Nil nil true false nullptr"), + isReservedIdentifier: cIsReservedIdentifier, + hooks: { "#": cppHook, - indent: function(_state, ctx, textAfter) { - if (ctx.type == "statement" && /^@\w/.test(textAfter)) return ctx.indented + "*": pointerHook, + "u": cpp11StringHook, + "U": cpp11StringHook, + "L": cpp11StringHook, + "R": cpp11StringHook, + "0": cpp14Literal, + "1": cpp14Literal, + "2": cpp14Literal, + "3": cpp14Literal, + "4": cpp14Literal, + "5": cpp14Literal, + "6": cpp14Literal, + "7": cpp14Literal, + "8": cpp14Literal, + "9": cpp14Literal, + token: function(stream, state, style) { + if (style == "variable" && stream.peek() == "(" && + (state.prevToken == ";" || state.prevToken == null || + state.prevToken == "}") && + cppLooksLikeConstructor(stream.current())) + return "def"; } }, - modeProps: {fold: "brace"} + namespaceSeparator: "::", + modeProps: {fold: ["brace", "include"]} }); def("text/x-squirrel", { name: "clike", keywords: words("base break clone continue const default delete enum extends function in class" + " foreach local resume return this throw typeof yield constructor instanceof static"), - types: words(cTypes), + types: cTypes, blockKeywords: words("case catch class else for foreach if switch try while"), 
defKeywords: words("function local class"), typeFirstDefinitions: true, @@ -757,7 +920,7 @@ CodeMirror.defineMode("clike", function( return "atom"; }, token: function(_stream, state, style) { - if ((style == "variable" || style == "variable-3") && + if ((style == "variable" || style == "type") && state.prevToken == ".") { return "variable-2"; } diff --git a/rhodecode/public/js/mode/clojure/clojure.js b/rhodecode/public/js/mode/clojure/clojure.js --- a/rhodecode/public/js/mode/clojure/clojure.js +++ b/rhodecode/public/js/mode/clojure/clojure.js @@ -1,15 +1,10 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE - -/** - * Author: Hans Engel - * Branched from CodeMirror's Scheme mode (by Koh Zi Han, based on implementation by Koh Zi Chun) - */ +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { - if (typeof exports == "object" && typeof module == "object") // CommonJS + if (typeof exports === "object" && typeof module === "object") // CommonJS mod(require("../../lib/codemirror")); - else if (typeof define == "function" && define.amd) // AMD + else if (typeof define === "function" && define.amd) // AMD define(["../../lib/codemirror"], mod); else // Plain browser env mod(CodeMirror); @@ -17,233 +12,281 @@ "use strict"; CodeMirror.defineMode("clojure", function (options) { - var BUILTIN = "builtin", COMMENT = "comment", STRING = "string", CHARACTER = "string-2", - ATOM = "atom", NUMBER = "number", BRACKET = "bracket", KEYWORD = "keyword", VAR = "variable"; - var INDENT_WORD_SKIP = options.indentUnit || 2; - var NORMAL_INDENT_UNIT = options.indentUnit || 2; - - function makeKeywords(str) { - var obj = {}, words = str.split(" "); - for (var i = 0; i < words.length; ++i) obj[words[i]] = true; - return obj; - } - - var atoms = makeKeywords("true false nil"); - - var keywords = makeKeywords( - "defn defn- def def- defonce defmulti defmethod defmacro defstruct deftype defprotocol defrecord defproject deftest slice defalias defhinted defmacro- defn-memo defnk defnk defonce- defunbound defunbound- defvar defvar- let letfn do case cond condp for loop recur when when-not when-let when-first if if-let if-not . .. -> ->> doto and or dosync doseq dotimes dorun doall load import unimport ns in-ns refer try catch finally throw with-open with-local-vars binding gen-class gen-and-load-class gen-and-save-class handler-case handle"); - - var builtins = makeKeywords( - "* *' *1 *2 *3 *agent* *allow-unresolved-vars* *assert* *clojure-version* *command-line-args* *compile-files* *compile-path* *compiler-options* *data-readers* *e *err* *file* *flush-on-newline* *fn-loader* *in* *math-context* *ns* *out* *print-dup* *print-length* *print-level* *print-meta* *print-readably* *read-eval* *source-path* *unchecked-math* *use-context-classloader* *verbose-defrecords* *warn-on-reflection* + +' - -' -> ->> ->ArrayChunk ->Vec ->VecNode ->VecSeq -cache-protocol-fn -reset-methods .. / < <= = == > >= EMPTY-NODE accessor aclone add-classpath add-watch agent agent-error agent-errors aget alength alias all-ns alter alter-meta! alter-var-root amap ancestors and apply areduce array-map aset aset-boolean aset-byte aset-char aset-double aset-float aset-int aset-long aset-short assert assoc assoc! assoc-in associative? 
atom await await-for await1 bases bean bigdec bigint biginteger binding bit-and bit-and-not bit-clear bit-flip bit-not bit-or bit-set bit-shift-left bit-shift-right bit-test bit-xor boolean boolean-array booleans bound-fn bound-fn* bound? butlast byte byte-array bytes case cast char char-array char-escape-string char-name-string char? chars chunk chunk-append chunk-buffer chunk-cons chunk-first chunk-next chunk-rest chunked-seq? class class? clear-agent-errors clojure-version coll? comment commute comp comparator compare compare-and-set! compile complement concat cond condp conj conj! cons constantly construct-proxy contains? count counted? create-ns create-struct cycle dec dec' decimal? declare default-data-readers definline definterface defmacro defmethod defmulti defn defn- defonce defprotocol defrecord defstruct deftype delay delay? deliver denominator deref derive descendants destructure disj disj! dissoc dissoc! distinct distinct? doall dorun doseq dosync dotimes doto double double-array doubles drop drop-last drop-while empty empty? ensure enumeration-seq error-handler error-mode eval even? every-pred every? ex-data ex-info extend extend-protocol extend-type extenders extends? false? ffirst file-seq filter filterv find find-keyword find-ns find-protocol-impl find-protocol-method find-var first flatten float float-array float? floats flush fn fn? fnext fnil for force format frequencies future future-call future-cancel future-cancelled? future-done? future? gen-class gen-interface gensym get get-in get-method get-proxy-class get-thread-bindings get-validator group-by hash hash-combine hash-map hash-set identical? identity if-let if-not ifn? import in-ns inc inc' init-proxy instance? int int-array integer? interleave intern interpose into into-array ints io! isa? iterate iterator-seq juxt keep keep-indexed key keys keyword keyword? last lazy-cat lazy-seq let letfn line-seq list list* list? load load-file load-reader load-string loaded-libs locking long long-array longs loop macroexpand macroexpand-1 make-array make-hierarchy map map-indexed map? mapcat mapv max max-key memfn memoize merge merge-with meta method-sig methods min min-key mod munge name namespace namespace-munge neg? newline next nfirst nil? nnext not not-any? not-empty not-every? not= ns ns-aliases ns-imports ns-interns ns-map ns-name ns-publics ns-refers ns-resolve ns-unalias ns-unmap nth nthnext nthrest num number? numerator object-array odd? or parents partial partition partition-all partition-by pcalls peek persistent! pmap pop pop! pop-thread-bindings pos? pr pr-str prefer-method prefers primitives-classnames print print-ctor print-dup print-method print-simple print-str printf println println-str prn prn-str promise proxy proxy-call-with-super proxy-mappings proxy-name proxy-super push-thread-bindings pvalues quot rand rand-int rand-nth range ratio? rational? rationalize re-find re-groups re-matcher re-matches re-pattern re-seq read read-line read-string realized? reduce reduce-kv reductions ref ref-history-count ref-max-history ref-min-history ref-set refer refer-clojure reify release-pending-sends rem remove remove-all-methods remove-method remove-ns remove-watch repeat repeatedly replace replicate require reset! reset-meta! resolve rest restart-agent resultset-seq reverse reversible? rseq rsubseq satisfies? second select-keys send send-off seq seq? seque sequence sequential? set set-error-handler! set-error-mode! set-validator! set? 
short short-array shorts shuffle shutdown-agents slurp some some-fn sort sort-by sorted-map sorted-map-by sorted-set sorted-set-by sorted? special-symbol? spit split-at split-with str string? struct struct-map subs subseq subvec supers swap! symbol symbol? sync take take-last take-nth take-while test the-ns thread-bound? time to-array to-array-2d trampoline transient tree-seq true? type unchecked-add unchecked-add-int unchecked-byte unchecked-char unchecked-dec unchecked-dec-int unchecked-divide-int unchecked-double unchecked-float unchecked-inc unchecked-inc-int unchecked-int unchecked-long unchecked-multiply unchecked-multiply-int unchecked-negate unchecked-negate-int unchecked-remainder-int unchecked-short unchecked-subtract unchecked-subtract-int underive unquote unquote-splicing update-in update-proxy use val vals var-get var-set var? vary-meta vec vector vector-of vector? when when-first when-let when-not while with-bindings with-bindings* with-in-str with-loading-context with-local-vars with-meta with-open with-out-str with-precision with-redefs with-redefs-fn xml-seq zero? zipmap *default-data-reader-fn* as-> cond-> cond->> reduced reduced? send-via set-agent-send-executor! set-agent-send-off-executor! some-> some->>"); - - var indentKeys = makeKeywords( - // Built-ins - "ns fn def defn defmethod bound-fn if if-not case condp when while when-not when-first do future comment doto locking proxy with-open with-precision reify deftype defrecord defprotocol extend extend-protocol extend-type try catch " + - - // Binding forms - "let letfn binding loop for doseq dotimes when-let if-let " + - - // Data structures - "defstruct struct-map assoc " + - - // clojure.test - "testing deftest " + - - // contrib - "handler-case handle dotrace deftrace"); - - var tests = { - digit: /\d/, - digit_or_colon: /[\d:]/, - hex: /[0-9a-f]/i, - sign: /[+-]/, - exponent: /e/i, - keyword_char: /[^\s\(\[\;\)\]]/, - symbol: /[\w*+!\-\._?:<>\/\xa1-\uffff]/, - block_indent: /^(?:def|with)[^\/]+$|\/(?:def|with)/ - }; - - function stateStack(indent, type, prev) { // represents a state stack object - this.indent = indent; - this.type = type; - this.prev = prev; - } + var atoms = ["false", "nil", "true"]; + var specialForms = [".", "catch", "def", "do", "if", "monitor-enter", + "monitor-exit", "new", "quote", "recur", "set!", "throw", "try", "var"]; + var coreSymbols = ["*", "*'", "*1", "*2", "*3", "*agent*", + "*allow-unresolved-vars*", "*assert*", "*clojure-version*", + "*command-line-args*", "*compile-files*", "*compile-path*", + "*compiler-options*", "*data-readers*", "*default-data-reader-fn*", "*e", + "*err*", "*file*", "*flush-on-newline*", "*fn-loader*", "*in*", + "*math-context*", "*ns*", "*out*", "*print-dup*", "*print-length*", + "*print-level*", "*print-meta*", "*print-namespace-maps*", + "*print-readably*", "*read-eval*", "*reader-resolver*", "*source-path*", + "*suppress-read*", "*unchecked-math*", "*use-context-classloader*", + "*verbose-defrecords*", "*warn-on-reflection*", "+", "+'", "-", "-'", + "->", "->>", "->ArrayChunk", "->Eduction", "->Vec", "->VecNode", + "->VecSeq", "-cache-protocol-fn", "-reset-methods", "..", "/", "<", "<=", + "=", "==", ">", ">=", "EMPTY-NODE", "Inst", "StackTraceElement->vec", + "Throwable->map", "accessor", "aclone", "add-classpath", "add-watch", + "agent", "agent-error", "agent-errors", "aget", "alength", "alias", + "all-ns", "alter", "alter-meta!", "alter-var-root", "amap", "ancestors", + "and", "any?", "apply", "areduce", "array-map", "as->", "aset", + "aset-boolean", 
"aset-byte", "aset-char", "aset-double", "aset-float", + "aset-int", "aset-long", "aset-short", "assert", "assoc", "assoc!", + "assoc-in", "associative?", "atom", "await", "await-for", "await1", + "bases", "bean", "bigdec", "bigint", "biginteger", "binding", "bit-and", + "bit-and-not", "bit-clear", "bit-flip", "bit-not", "bit-or", "bit-set", + "bit-shift-left", "bit-shift-right", "bit-test", "bit-xor", "boolean", + "boolean-array", "boolean?", "booleans", "bound-fn", "bound-fn*", + "bound?", "bounded-count", "butlast", "byte", "byte-array", "bytes", + "bytes?", "case", "cast", "cat", "char", "char-array", + "char-escape-string", "char-name-string", "char?", "chars", "chunk", + "chunk-append", "chunk-buffer", "chunk-cons", "chunk-first", "chunk-next", + "chunk-rest", "chunked-seq?", "class", "class?", "clear-agent-errors", + "clojure-version", "coll?", "comment", "commute", "comp", "comparator", + "compare", "compare-and-set!", "compile", "complement", "completing", + "concat", "cond", "cond->", "cond->>", "condp", "conj", "conj!", "cons", + "constantly", "construct-proxy", "contains?", "count", "counted?", + "create-ns", "create-struct", "cycle", "dec", "dec'", "decimal?", + "declare", "dedupe", "default-data-readers", "definline", "definterface", + "defmacro", "defmethod", "defmulti", "defn", "defn-", "defonce", + "defprotocol", "defrecord", "defstruct", "deftype", "delay", "delay?", + "deliver", "denominator", "deref", "derive", "descendants", "destructure", + "disj", "disj!", "dissoc", "dissoc!", "distinct", "distinct?", "doall", + "dorun", "doseq", "dosync", "dotimes", "doto", "double", "double-array", + "double?", "doubles", "drop", "drop-last", "drop-while", "eduction", + "empty", "empty?", "ensure", "ensure-reduced", "enumeration-seq", + "error-handler", "error-mode", "eval", "even?", "every-pred", "every?", + "ex-data", "ex-info", "extend", "extend-protocol", "extend-type", + "extenders", "extends?", "false?", "ffirst", "file-seq", "filter", + "filterv", "find", "find-keyword", "find-ns", "find-protocol-impl", + "find-protocol-method", "find-var", "first", "flatten", "float", + "float-array", "float?", "floats", "flush", "fn", "fn?", "fnext", "fnil", + "for", "force", "format", "frequencies", "future", "future-call", + "future-cancel", "future-cancelled?", "future-done?", "future?", + "gen-class", "gen-interface", "gensym", "get", "get-in", "get-method", + "get-proxy-class", "get-thread-bindings", "get-validator", "group-by", + "halt-when", "hash", "hash-combine", "hash-map", "hash-ordered-coll", + "hash-set", "hash-unordered-coll", "ident?", "identical?", "identity", + "if-let", "if-not", "if-some", "ifn?", "import", "in-ns", "inc", "inc'", + "indexed?", "init-proxy", "inst-ms", "inst-ms*", "inst?", "instance?", + "int", "int-array", "int?", "integer?", "interleave", "intern", + "interpose", "into", "into-array", "ints", "io!", "isa?", "iterate", + "iterator-seq", "juxt", "keep", "keep-indexed", "key", "keys", "keyword", + "keyword?", "last", "lazy-cat", "lazy-seq", "let", "letfn", "line-seq", + "list", "list*", "list?", "load", "load-file", "load-reader", + "load-string", "loaded-libs", "locking", "long", "long-array", "longs", + "loop", "macroexpand", "macroexpand-1", "make-array", "make-hierarchy", + "map", "map-entry?", "map-indexed", "map?", "mapcat", "mapv", "max", + "max-key", "memfn", "memoize", "merge", "merge-with", "meta", + "method-sig", "methods", "min", "min-key", "mix-collection-hash", "mod", + "munge", "name", "namespace", "namespace-munge", "nat-int?", "neg-int?", 
+ "neg?", "newline", "next", "nfirst", "nil?", "nnext", "not", "not-any?", + "not-empty", "not-every?", "not=", "ns", "ns-aliases", "ns-imports", + "ns-interns", "ns-map", "ns-name", "ns-publics", "ns-refers", + "ns-resolve", "ns-unalias", "ns-unmap", "nth", "nthnext", "nthrest", + "num", "number?", "numerator", "object-array", "odd?", "or", "parents", + "partial", "partition", "partition-all", "partition-by", "pcalls", "peek", + "persistent!", "pmap", "pop", "pop!", "pop-thread-bindings", "pos-int?", + "pos?", "pr", "pr-str", "prefer-method", "prefers", + "primitives-classnames", "print", "print-ctor", "print-dup", + "print-method", "print-simple", "print-str", "printf", "println", + "println-str", "prn", "prn-str", "promise", "proxy", + "proxy-call-with-super", "proxy-mappings", "proxy-name", "proxy-super", + "push-thread-bindings", "pvalues", "qualified-ident?", + "qualified-keyword?", "qualified-symbol?", "quot", "rand", "rand-int", + "rand-nth", "random-sample", "range", "ratio?", "rational?", + "rationalize", "re-find", "re-groups", "re-matcher", "re-matches", + "re-pattern", "re-seq", "read", "read-line", "read-string", + "reader-conditional", "reader-conditional?", "realized?", "record?", + "reduce", "reduce-kv", "reduced", "reduced?", "reductions", "ref", + "ref-history-count", "ref-max-history", "ref-min-history", "ref-set", + "refer", "refer-clojure", "reify", "release-pending-sends", "rem", + "remove", "remove-all-methods", "remove-method", "remove-ns", + "remove-watch", "repeat", "repeatedly", "replace", "replicate", "require", + "reset!", "reset-meta!", "reset-vals!", "resolve", "rest", + "restart-agent", "resultset-seq", "reverse", "reversible?", "rseq", + "rsubseq", "run!", "satisfies?", "second", "select-keys", "send", + "send-off", "send-via", "seq", "seq?", "seqable?", "seque", "sequence", + "sequential?", "set", "set-agent-send-executor!", + "set-agent-send-off-executor!", "set-error-handler!", "set-error-mode!", + "set-validator!", "set?", "short", "short-array", "shorts", "shuffle", + "shutdown-agents", "simple-ident?", "simple-keyword?", "simple-symbol?", + "slurp", "some", "some->", "some->>", "some-fn", "some?", "sort", + "sort-by", "sorted-map", "sorted-map-by", "sorted-set", "sorted-set-by", + "sorted?", "special-symbol?", "spit", "split-at", "split-with", "str", + "string?", "struct", "struct-map", "subs", "subseq", "subvec", "supers", + "swap!", "swap-vals!", "symbol", "symbol?", "sync", "tagged-literal", + "tagged-literal?", "take", "take-last", "take-nth", "take-while", "test", + "the-ns", "thread-bound?", "time", "to-array", "to-array-2d", + "trampoline", "transduce", "transient", "tree-seq", "true?", "type", + "unchecked-add", "unchecked-add-int", "unchecked-byte", "unchecked-char", + "unchecked-dec", "unchecked-dec-int", "unchecked-divide-int", + "unchecked-double", "unchecked-float", "unchecked-inc", + "unchecked-inc-int", "unchecked-int", "unchecked-long", + "unchecked-multiply", "unchecked-multiply-int", "unchecked-negate", + "unchecked-negate-int", "unchecked-remainder-int", "unchecked-short", + "unchecked-subtract", "unchecked-subtract-int", "underive", "unquote", + "unquote-splicing", "unreduced", "unsigned-bit-shift-right", "update", + "update-in", "update-proxy", "uri?", "use", "uuid?", "val", "vals", + "var-get", "var-set", "var?", "vary-meta", "vec", "vector", "vector-of", + "vector?", "volatile!", "volatile?", "vreset!", "vswap!", "when", + "when-first", "when-let", "when-not", "when-some", "while", + "with-bindings", "with-bindings*", 
"with-in-str", "with-loading-context", + "with-local-vars", "with-meta", "with-open", "with-out-str", + "with-precision", "with-redefs", "with-redefs-fn", "xml-seq", "zero?", + "zipmap"]; + var haveBodyParameter = [ + "->", "->>", "as->", "binding", "bound-fn", "case", "catch", "comment", + "cond", "cond->", "cond->>", "condp", "def", "definterface", "defmethod", + "defn", "defmacro", "defprotocol", "defrecord", "defstruct", "deftype", + "do", "doseq", "dotimes", "doto", "extend", "extend-protocol", + "extend-type", "fn", "for", "future", "if", "if-let", "if-not", "if-some", + "let", "letfn", "locking", "loop", "ns", "proxy", "reify", "struct-map", + "some->", "some->>", "try", "when", "when-first", "when-let", "when-not", + "when-some", "while", "with-bindings", "with-bindings*", "with-in-str", + "with-loading-context", "with-local-vars", "with-meta", "with-open", + "with-out-str", "with-precision", "with-redefs", "with-redefs-fn"]; - function pushStack(state, indent, type) { - state.indentStack = new stateStack(indent, type, state.indentStack); - } - - function popStack(state) { - state.indentStack = state.indentStack.prev; - } + CodeMirror.registerHelper("hintWords", "clojure", + [].concat(atoms, specialForms, coreSymbols)); - function isNumber(ch, stream){ - // hex - if ( ch === '0' && stream.eat(/x/i) ) { - stream.eatWhile(tests.hex); - return true; - } + var atom = createLookupMap(atoms); + var specialForm = createLookupMap(specialForms); + var coreSymbol = createLookupMap(coreSymbols); + var hasBodyParameter = createLookupMap(haveBodyParameter); + var delimiter = /^(?:[\\\[\]\s"(),;@^`{}~]|$)/; + var numberLiteral = /^(?:[+\-]?\d+(?:(?:N|(?:[eE][+\-]?\d+))|(?:\.?\d*(?:M|(?:[eE][+\-]?\d+))?)|\/\d+|[xX][0-9a-fA-F]+|r[0-9a-zA-Z]+)?(?=[\\\[\]\s"#'(),;@^`{}~]|$))/; + var characterLiteral = /^(?:\\(?:backspace|formfeed|newline|return|space|tab|o[0-7]{3}|u[0-9A-Fa-f]{4}|x[0-9A-Fa-f]{4}|.)?(?=[\\\[\]\s"(),;@^`{}~]|$))/; - // leading sign - if ( ( ch == '+' || ch == '-' ) && ( tests.digit.test(stream.peek()) ) ) { - stream.eat(tests.sign); - ch = stream.next(); - } - - if ( tests.digit.test(ch) ) { - stream.eat(ch); - stream.eatWhile(tests.digit); + // simple-namespace := /^[^\\\/\[\]\d\s"#'(),;@^`{}~][^\\\[\]\s"(),;@^`{}~]*/ + // simple-symbol := /^(?:\/|[^\\\/\[\]\d\s"#'(),;@^`{}~][^\\\[\]\s"(),;@^`{}~]*)/ + // qualified-symbol := ((<.>)*)? + var qualifiedSymbol = /^(?:(?:[^\\\/\[\]\d\s"#'(),;@^`{}~][^\\\[\]\s"(),;@^`{}~]*(?:\.[^\\\/\[\]\d\s"#'(),;@^`{}~][^\\\[\]\s"(),;@^`{}~]*)*\/)?(?:\/|[^\\\/\[\]\d\s"#'(),;@^`{}~][^\\\[\]\s"(),;@^`{}~]*)*(?=[\\\[\]\s"(),;@^`{}~]|$))/; - if ( '.' 
== stream.peek() ) { - stream.eat('.'); - stream.eatWhile(tests.digit); - } else if ('/' == stream.peek() ) { - stream.eat('/'); - stream.eatWhile(tests.digit); - } - - if ( stream.eat(tests.exponent) ) { - stream.eat(tests.sign); - stream.eatWhile(tests.digit); - } - - return true; - } + function base(stream, state) { + if (stream.eatSpace() || stream.eat(",")) return ["space", null]; + if (stream.match(numberLiteral)) return [null, "number"]; + if (stream.match(characterLiteral)) return [null, "string-2"]; + if (stream.eat(/^"/)) return (state.tokenize = inString)(stream, state); + if (stream.eat(/^[(\[{]/)) return ["open", "bracket"]; + if (stream.eat(/^[)\]}]/)) return ["close", "bracket"]; + if (stream.eat(/^;/)) {stream.skipToEnd(); return ["space", "comment"];} + if (stream.eat(/^[#'@^`~]/)) return [null, "meta"]; - return false; - } + var matches = stream.match(qualifiedSymbol); + var symbol = matches && matches[0]; - // Eat character that starts after backslash \ - function eatCharacter(stream) { - var first = stream.next(); - // Read special literals: backspace, newline, space, return. - // Just read all lowercase letters. - if (first && first.match(/[a-z]/) && stream.match(/[a-z]+/, true)) { - return; - } - // Read unicode character: \u1000 \uA0a1 - if (first === "u") { - stream.match(/[0-9a-z]{4}/i, true); - } + if (!symbol) { + // advance stream by at least one character so we don't get stuck. + stream.next(); + stream.eatWhile(function (c) {return !is(c, delimiter);}); + return [null, "error"]; } - return { - startState: function () { - return { - indentStack: null, - indentation: 0, - mode: false - }; - }, + if (symbol === "comment" && state.lastToken === "(") + return (state.tokenize = inComment)(stream, state); + if (is(symbol, atom) || symbol.charAt(0) === ":") return ["symbol", "atom"]; + if (is(symbol, specialForm) || is(symbol, coreSymbol)) return ["symbol", "keyword"]; + if (state.lastToken === "(") return ["symbol", "builtin"]; // other operator + + return ["symbol", "variable"]; + } - token: function (stream, state) { - if (state.indentStack == null && stream.sol()) { - // update indentation, but only if indentStack is empty - state.indentation = stream.indentation(); - } + function inString(stream, state) { + var escaped = false, next; + + while (next = stream.next()) { + if (next === "\"" && !escaped) {state.tokenize = base; break;} + escaped = !escaped && next === "\\"; + } + + return [null, "string"]; + } - // skip spaces - if (state.mode != "string" && stream.eatSpace()) { - return null; - } - var returnType = null; + function inComment(stream, state) { + var parenthesisCount = 1; + var next; - switch(state.mode){ - case "string": // multi-line string parsing mode - var next, escaped = false; - while ((next = stream.next()) != null) { - if (next == "\"" && !escaped) { + while (next = stream.next()) { + if (next === ")") parenthesisCount--; + if (next === "(") parenthesisCount++; + if (parenthesisCount === 0) { + stream.backUp(1); + state.tokenize = base; + break; + } + } - state.mode = false; - break; - } - escaped = !escaped && next == "\\"; - } - returnType = STRING; // continue on in string mode - break; - default: // default parsing mode - var ch = stream.next(); + return ["space", "comment"]; + } + + function createLookupMap(words) { + var obj = {}; + + for (var i = 0; i < words.length; ++i) obj[words[i]] = true; + + return obj; + } - if (ch == "\"") { - state.mode = "string"; - returnType = STRING; - } else if (ch == "\\") { - eatCharacter(stream); - 
returnType = CHARACTER; - } else if (ch == "'" && !( tests.digit_or_colon.test(stream.peek()) )) { - returnType = ATOM; - } else if (ch == ";") { // comment - stream.skipToEnd(); // rest of the line is a comment - returnType = COMMENT; - } else if (isNumber(ch,stream)){ - returnType = NUMBER; - } else if (ch == "(" || ch == "[" || ch == "{" ) { - var keyWord = '', indentTemp = stream.column(), letter; - /** - Either - (indent-word .. - (non-indent-word .. - (;something else, bracket, etc. - */ + function is(value, test) { + if (test instanceof RegExp) return test.test(value); + if (test instanceof Object) return test.propertyIsEnumerable(value); + } - if (ch == "(") while ((letter = stream.eat(tests.keyword_char)) != null) { - keyWord += letter; - } + return { + startState: function () { + return { + ctx: {prev: null, start: 0, indentTo: 0}, + lastToken: null, + tokenize: base + }; + }, + + token: function (stream, state) { + if (stream.sol() && (typeof state.ctx.indentTo !== "number")) + state.ctx.indentTo = state.ctx.start + 1; + + var typeStylePair = state.tokenize(stream, state); + var type = typeStylePair[0]; + var style = typeStylePair[1]; + var current = stream.current(); - if (keyWord.length > 0 && (indentKeys.propertyIsEnumerable(keyWord) || - tests.block_indent.test(keyWord))) { // indent-word - pushStack(state, indentTemp + INDENT_WORD_SKIP, ch); - } else { // non-indent word - // we continue eating the spaces - stream.eatSpace(); - if (stream.eol() || stream.peek() == ";") { - // nothing significant after - // we restart indentation the user defined spaces after - pushStack(state, indentTemp + NORMAL_INDENT_UNIT, ch); - } else { - pushStack(state, indentTemp + stream.current().length, ch); // else we match - } - } - stream.backUp(stream.current().length - 1); // undo all the eating + if (type !== "space") { + if (state.lastToken === "(" && state.ctx.indentTo === null) { + if (type === "symbol" && is(current, hasBodyParameter)) + state.ctx.indentTo = state.ctx.start + options.indentUnit; + else state.ctx.indentTo = "next"; + } else if (state.ctx.indentTo === "next") { + state.ctx.indentTo = stream.column(); + } + + state.lastToken = current; + } - returnType = BRACKET; - } else if (ch == ")" || ch == "]" || ch == "}") { - returnType = BRACKET; - if (state.indentStack != null && state.indentStack.type == (ch == ")" ? "(" : (ch == "]" ? "[" :"{"))) { - popStack(state); - } - } else if ( ch == ":" ) { - stream.eatWhile(tests.symbol); - return ATOM; - } else { - stream.eatWhile(tests.symbol); + if (type === "open") + state.ctx = {prev: state.ctx, start: stream.column(), indentTo: null}; + else if (type === "close") state.ctx = state.ctx.prev || state.ctx; + + return style; + }, - if (keywords && keywords.propertyIsEnumerable(stream.current())) { - returnType = KEYWORD; - } else if (builtins && builtins.propertyIsEnumerable(stream.current())) { - returnType = BUILTIN; - } else if (atoms && atoms.propertyIsEnumerable(stream.current())) { - returnType = ATOM; - } else { - returnType = VAR; - } - } - } + indent: function (state) { + var i = state.ctx.indentTo; - return returnType; - }, + return (typeof i === "number") ? 
+ i : + state.ctx.start + 1; + }, - indent: function (state) { - if (state.indentStack == null) return state.indentation; - return state.indentStack.indent; - }, - - closeBrackets: {pairs: "()[]{}\"\""}, - lineComment: ";;" - }; + closeBrackets: {pairs: "()[]{}\"\""}, + lineComment: ";;" + }; }); CodeMirror.defineMIME("text/x-clojure", "clojure"); CodeMirror.defineMIME("text/x-clojurescript", "clojure"); +CodeMirror.defineMIME("application/edn", "clojure"); }); diff --git a/rhodecode/public/js/mode/cmake/cmake.js b/rhodecode/public/js/mode/cmake/cmake.js --- a/rhodecode/public/js/mode/cmake/cmake.js +++ b/rhodecode/public/js/mode/cmake/cmake.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") diff --git a/rhodecode/public/js/mode/cobol/cobol.js b/rhodecode/public/js/mode/cobol/cobol.js --- a/rhodecode/public/js/mode/cobol/cobol.js +++ b/rhodecode/public/js/mode/cobol/cobol.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE /** * Author: Gautam Mehta diff --git a/rhodecode/public/js/mode/coffeescript/coffeescript.js b/rhodecode/public/js/mode/coffeescript/coffeescript.js --- a/rhodecode/public/js/mode/coffeescript/coffeescript.js +++ b/rhodecode/public/js/mode/coffeescript/coffeescript.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE /** * Link to the project's GitHub page: @@ -349,6 +349,10 @@ CodeMirror.defineMode("coffeescript", fu return external; }); +// IANA registered media type +// https://www.iana.org/assignments/media-types/ +CodeMirror.defineMIME("application/vnd.coffeescript", "coffeescript"); + CodeMirror.defineMIME("text/x-coffeescript", "coffeescript"); CodeMirror.defineMIME("text/coffeescript", "coffeescript"); diff --git a/rhodecode/public/js/mode/commonlisp/commonlisp.js b/rhodecode/public/js/mode/commonlisp/commonlisp.js --- a/rhodecode/public/js/mode/commonlisp/commonlisp.js +++ b/rhodecode/public/js/mode/commonlisp/commonlisp.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS @@ -43,11 +43,12 @@ CodeMirror.defineMode("commonlisp", func else { stream.skipToEnd(); return "error"; } } else if (ch == "#") { var ch = stream.next(); - if (ch == "[") { type = "open"; return "bracket"; } + if (ch == "(") { type = "open"; return "bracket"; } else if (/[+\-=\.']/.test(ch)) return null; else if (/\d/.test(ch) && stream.match(/^\d*#/)) return null; else if (ch == "|") return (state.tokenize = inComment)(stream, state); else if (ch == ":") { readSym(stream); return "meta"; } + else if (ch == "\\") { stream.next(); readSym(stream); return "string-2" } else return "error"; } else { var name = readSym(stream); diff --git a/rhodecode/public/js/mode/crystal/crystal.js b/rhodecode/public/js/mode/crystal/crystal.js --- a/rhodecode/public/js/mode/crystal/crystal.js +++ 
b/rhodecode/public/js/mode/crystal/crystal.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS @@ -29,26 +29,22 @@ var types = /^[A-Z_\u009F-\uFFFF][a-zA-Z0-9_\u009F-\uFFFF]*/; var keywords = wordRegExp([ "abstract", "alias", "as", "asm", "begin", "break", "case", "class", "def", "do", - "else", "elsif", "end", "ensure", "enum", "extend", "for", "fun", "if", "ifdef", + "else", "elsif", "end", "ensure", "enum", "extend", "for", "fun", "if", "include", "instance_sizeof", "lib", "macro", "module", "next", "of", "out", "pointerof", - "private", "protected", "rescue", "return", "require", "sizeof", "struct", - "super", "then", "type", "typeof", "union", "unless", "until", "when", "while", "with", - "yield", "__DIR__", "__FILE__", "__LINE__" + "private", "protected", "rescue", "return", "require", "select", "sizeof", "struct", + "super", "then", "type", "typeof", "uninitialized", "union", "unless", "until", "when", "while", "with", + "yield", "__DIR__", "__END_LINE__", "__FILE__", "__LINE__" ]); var atomWords = wordRegExp(["true", "false", "nil", "self"]); var indentKeywordsArray = [ "def", "fun", "macro", "class", "module", "struct", "lib", "enum", "union", - "if", "unless", "case", "while", "until", "begin", "then", - "do", - "for", "ifdef" + "do", "for" ]; var indentKeywords = wordRegExp(indentKeywordsArray); - var dedentKeywordsArray = [ - "end", - "else", "elsif", - "rescue", "ensure" - ]; + var indentExpressionKeywordsArray = ["if", "unless", "case", "while", "until", "begin", "then"]; + var indentExpressionKeywords = wordRegExp(indentExpressionKeywordsArray); + var dedentKeywordsArray = ["end", "else", "elsif", "rescue", "ensure"]; var dedentKeywords = wordRegExp(dedentKeywordsArray); var dedentPunctualsArray = ["\\)", "\\}", "\\]"]; var dedentPunctuals = new RegExp("^(?:" + dedentPunctualsArray.join("|") + ")$"); @@ -90,12 +86,15 @@ } else if (state.lastToken == ".") { return "property"; } else if (keywords.test(matched)) { - if (state.lastToken != "abstract" && indentKeywords.test(matched)) { - if (!(matched == "fun" && state.blocks.indexOf("lib") >= 0)) { + if (indentKeywords.test(matched)) { + if (!(matched == "fun" && state.blocks.indexOf("lib") >= 0) && !(matched == "def" && state.lastToken == "abstract")) { state.blocks.push(matched); state.currentIndent += 1; } - } else if (dedentKeywords.test(matched)) { + } else if ((state.lastStyle == "operator" || !state.lastStyle) && indentExpressionKeywords.test(matched)) { + state.blocks.push(matched); + state.currentIndent += 1; + } else if (matched == "end") { state.blocks.pop(); state.currentIndent -= 1; } @@ -124,12 +123,6 @@ return "variable-2"; } - // Global variables - if (stream.eat("$")) { - stream.eat(/[0-9]+|\?/) || stream.match(idents) || stream.match(types); - return "variable-3"; - } - // Constants and types if (stream.match(types)) { return "tag"; @@ -165,6 +158,9 @@ } else if (stream.match("%w")) { embed = false; delim = stream.next(); + } else if (stream.match("%q")) { + embed = false; + delim = stream.next(); } else { if(delim = stream.match(/^%([^\w\s=])/)) { delim = delim[1]; @@ -183,6 +179,11 @@ return chain(tokenQuote(delim, style, embed), stream, state); } + // Here Docs + if (matched = stream.match(/^<<-('?)([A-Z]\w*)\1/)) { + return chain(tokenHereDoc(matched[2], 
!matched[1]), stream, state) + } + // Characters if (stream.eat("'")) { stream.match(/^(?:[^']|\\(?:[befnrtv0'"]|[0-7]{3}|u(?:[0-9a-fA-F]{4}|\{[0-9a-fA-F]{1,6}\})))/); @@ -202,14 +203,14 @@ return "number"; } - if (stream.eat(/\d/)) { + if (stream.eat(/^\d/)) { stream.match(/^\d*(?:\.\d+)?(?:[eE][+-]?\d+)?/); return "number"; } // Operators if (stream.match(operators)) { - stream.eat("="); // Operators can follow assigin symbol. + stream.eat("="); // Operators can follow assign symbol. return "operator"; } @@ -339,7 +340,7 @@ return style; } - escaped = ch == "\\"; + escaped = embed && ch == "\\"; } else { stream.next(); escaped = false; @@ -350,12 +351,52 @@ }; } + function tokenHereDoc(phrase, embed) { + return function (stream, state) { + if (stream.sol()) { + stream.eatSpace() + if (stream.match(phrase)) { + state.tokenize.pop(); + return "string"; + } + } + + var escaped = false; + while (stream.peek()) { + if (!escaped) { + if (stream.match("{%", false)) { + state.tokenize.push(tokenMacro("%", "%")); + return "string"; + } + + if (stream.match("{{", false)) { + state.tokenize.push(tokenMacro("{", "}")); + return "string"; + } + + if (embed && stream.match("#{", false)) { + state.tokenize.push(tokenNest("#{", "}", "meta")); + return "string"; + } + + escaped = embed && stream.next() == "\\"; + } else { + stream.next(); + escaped = false; + } + } + + return "string"; + } + } + return { startState: function () { return { tokenize: [tokenBase], currentIndent: 0, lastToken: null, + lastStyle: null, blocks: [] }; }, @@ -366,6 +407,7 @@ if (style && style != "comment") { state.lastToken = token; + state.lastStyle = style; } return style; diff --git a/rhodecode/public/js/mode/css/css.js b/rhodecode/public/js/mode/css/css.js --- a/rhodecode/public/js/mode/css/css.js +++ b/rhodecode/public/js/mode/css/css.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS @@ -28,6 +28,7 @@ CodeMirror.defineMode("css", function(co colorKeywords = parserConfig.colorKeywords || {}, valueKeywords = parserConfig.valueKeywords || {}, allowNested = parserConfig.allowNested, + lineComment = parserConfig.lineComment, supportsAtComponent = parserConfig.supportsAtComponent === true; var type, override; @@ -62,7 +63,7 @@ CodeMirror.defineMode("css", function(co if (/[\d.]/.test(stream.peek())) { stream.eatWhile(/[\w.%]/); return ret("number", "unit"); - } else if (stream.match(/^-[\w\\\-]+/)) { + } else if (stream.match(/^-[\w\\\-]*/)) { stream.eatWhile(/[\w\\\-]/); if (stream.match(/^\s*:/, false)) return ret("variable-2", "variable-definition"); @@ -76,12 +77,11 @@ CodeMirror.defineMode("css", function(co return ret("qualifier", "qualifier"); } else if (/[:;{}\[\]\(\)]/.test(ch)) { return ret(null, ch); - } else if ((ch == "u" && stream.match(/rl(-prefix)?\(/)) || - (ch == "d" && stream.match("omain(")) || - (ch == "r" && stream.match("egexp("))) { - stream.backUp(1); - state.tokenize = tokenParenthesized; - return ret("property", "word"); + } else if (stream.match(/[\w-.]+(?=\()/)) { + if (/^(url(-prefix)?|domain|regexp)$/.test(stream.current().toLowerCase())) { + state.tokenize = tokenParenthesized; + } + return ret("variable callee", "variable"); } else if (/[\w\\\-]/.test(ch)) { stream.eatWhile(/[\w\\\-]/); return ret("property", "word"); @@ -161,16 +161,16 @@ 
CodeMirror.defineMode("css", function(co return pushContext(state, stream, "block"); } else if (type == "}" && state.context.prev) { return popContext(state); - } else if (supportsAtComponent && /@component/.test(type)) { + } else if (supportsAtComponent && /@component/i.test(type)) { return pushContext(state, stream, "atComponentBlock"); - } else if (/^@(-moz-)?document$/.test(type)) { + } else if (/^@(-moz-)?document$/i.test(type)) { return pushContext(state, stream, "documentTypes"); - } else if (/^@(media|supports|(-moz-)?document|import)$/.test(type)) { + } else if (/^@(media|supports|(-moz-)?document|import)$/i.test(type)) { return pushContext(state, stream, "atBlock"); - } else if (/^@(font-face|counter-style)/.test(type)) { + } else if (/^@(font-face|counter-style)/i.test(type)) { state.stateArg = type; return "restricted_atBlock_before"; - } else if (/^@(-(moz|ms|o|webkit)-)?keyframes$/.test(type)) { + } else if (/^@(-(moz|ms|o|webkit)-)?keyframes$/i.test(type)) { return "keyframes"; } else if (type && type.charAt(0) == "@") { return pushContext(state, stream, "at"); @@ -253,6 +253,8 @@ CodeMirror.defineMode("css", function(co }; states.pseudo = function(type, stream, state) { + if (type == "meta") return "pseudo"; + if (type == "word") { override = "variable-3"; return state.context.type; @@ -380,7 +382,8 @@ CodeMirror.defineMode("css", function(co style = style[0]; } override = style; - state.state = states[state.state](type, stream, state); + if (type != "comment") + state.state = states[state.state](type, stream, state); return override; }, @@ -398,7 +401,6 @@ CodeMirror.defineMode("css", function(co ch == "{" && (cx.type == "at" || cx.type == "atBlock")) { // Dedent relative to current context. indent = Math.max(0, cx.indent - indentUnit); - cx = cx.prev; } } return indent; @@ -407,6 +409,8 @@ CodeMirror.defineMode("css", function(co electricChars: "}", blockCommentStart: "/*", blockCommentEnd: "*/", + blockCommentContinue: " * ", + lineComment: lineComment, fold: "brace" }; }); @@ -414,7 +418,7 @@ CodeMirror.defineMode("css", function(co function keySet(array) { var keys = {}; for (var i = 0; i < array.length; ++i) { - keys[array[i]] = true; + keys[array[i].toLowerCase()] = true; } return keys; } @@ -468,7 +472,7 @@ CodeMirror.defineMode("css", function(co "border-top-left-radius", "border-top-right-radius", "border-top-style", "border-top-width", "border-width", "bottom", "box-decoration-break", "box-shadow", "box-sizing", "break-after", "break-before", "break-inside", - "caption-side", "clear", "clip", "color", "color-profile", "column-count", + "caption-side", "caret-color", "clear", "clip", "color", "color-profile", "column-count", "column-fill", "column-gap", "column-rule", "column-rule-color", "column-rule-style", "column-rule-width", "column-span", "column-width", "columns", "content", "counter-increment", "counter-reset", "crop", "cue", @@ -484,19 +488,19 @@ CodeMirror.defineMode("css", function(co "font-variant-alternates", "font-variant-caps", "font-variant-east-asian", "font-variant-ligatures", "font-variant-numeric", "font-variant-position", "font-weight", "grid", "grid-area", "grid-auto-columns", "grid-auto-flow", - "grid-auto-position", "grid-auto-rows", "grid-column", "grid-column-end", - "grid-column-start", "grid-row", "grid-row-end", "grid-row-start", - "grid-template", "grid-template-areas", "grid-template-columns", + "grid-auto-rows", "grid-column", "grid-column-end", "grid-column-gap", + "grid-column-start", "grid-gap", "grid-row", "grid-row-end", 
"grid-row-gap", + "grid-row-start", "grid-template", "grid-template-areas", "grid-template-columns", "grid-template-rows", "hanging-punctuation", "height", "hyphens", "icon", "image-orientation", "image-rendering", "image-resolution", - "inline-box-align", "justify-content", "left", "letter-spacing", + "inline-box-align", "justify-content", "justify-items", "justify-self", "left", "letter-spacing", "line-break", "line-height", "line-stacking", "line-stacking-ruby", "line-stacking-shift", "line-stacking-strategy", "list-style", "list-style-image", "list-style-position", "list-style-type", "margin", "margin-bottom", "margin-left", "margin-right", "margin-top", - "marker-offset", "marks", "marquee-direction", "marquee-loop", + "marks", "marquee-direction", "marquee-loop", "marquee-play-count", "marquee-speed", "marquee-style", "max-height", - "max-width", "min-height", "min-width", "move-to", "nav-down", "nav-index", + "max-width", "min-height", "min-width", "mix-blend-mode", "move-to", "nav-down", "nav-index", "nav-left", "nav-right", "nav-up", "object-fit", "object-position", "opacity", "order", "orphans", "outline", "outline-color", "outline-offset", "outline-style", "outline-width", @@ -504,7 +508,7 @@ CodeMirror.defineMode("css", function(co "padding", "padding-bottom", "padding-left", "padding-right", "padding-top", "page", "page-break-after", "page-break-before", "page-break-inside", "page-policy", "pause", "pause-after", "pause-before", "perspective", - "perspective-origin", "pitch", "pitch-range", "play-during", "position", + "perspective-origin", "pitch", "pitch-range", "place-content", "place-items", "place-self", "play-during", "position", "presentation-level", "punctuation-trim", "quotes", "region-break-after", "region-break-before", "region-break-inside", "region-fragment", "rendering-intent", "resize", "rest", "rest-after", "rest-before", "richness", @@ -522,9 +526,9 @@ CodeMirror.defineMode("css", function(co "text-wrap", "top", "transform", "transform-origin", "transform-style", "transition", "transition-delay", "transition-duration", "transition-property", "transition-timing-function", "unicode-bidi", - "vertical-align", "visibility", "voice-balance", "voice-duration", + "user-select", "vertical-align", "visibility", "voice-balance", "voice-duration", "voice-family", "voice-pitch", "voice-range", "voice-rate", "voice-stress", - "voice-volume", "volume", "white-space", "widows", "width", "word-break", + "voice-volume", "volume", "white-space", "widows", "width", "will-change", "word-break", "word-spacing", "word-wrap", "z-index", // SVG-specific "clip-path", "clip-rule", "mask", "enable-background", "filter", "flood-color", @@ -589,7 +593,7 @@ CodeMirror.defineMode("css", function(co "above", "absolute", "activeborder", "additive", "activecaption", "afar", "after-white-space", "ahead", "alias", "all", "all-scroll", "alphabetic", "alternate", "always", "amharic", "amharic-abegede", "antialiased", "appworkspace", - "arabic-indic", "armenian", "asterisks", "attr", "auto", "avoid", "avoid-column", "avoid-page", + "arabic-indic", "armenian", "asterisks", "attr", "auto", "auto-flow", "avoid", "avoid-column", "avoid-page", "avoid-region", "background", "backwards", "baseline", "below", "bidi-override", "binary", "bengali", "blink", "block", "block-axis", "bold", "bolder", "border", "border-box", "both", "bottom", "break", "break-all", "break-word", "bullets", "button", "button-bevel", @@ -598,10 +602,10 @@ CodeMirror.defineMode("css", function(co "cell", "center", "checkbox", 
"circle", "cjk-decimal", "cjk-earthly-branch", "cjk-heavenly-stem", "cjk-ideographic", "clear", "clip", "close-quote", "col-resize", "collapse", "color", "color-burn", "color-dodge", "column", "column-reverse", - "compact", "condensed", "contain", "content", + "compact", "condensed", "contain", "content", "contents", "content-box", "context-menu", "continuous", "copy", "counter", "counters", "cover", "crop", "cross", "crosshair", "currentcolor", "cursive", "cyclic", "darken", "dashed", "decimal", - "decimal-leading-zero", "default", "default-button", "destination-atop", + "decimal-leading-zero", "default", "default-button", "dense", "destination-atop", "destination-in", "destination-out", "destination-over", "devanagari", "difference", "disc", "discard", "disclosure-closed", "disclosure-open", "document", "dot-dash", "dot-dot-dash", @@ -615,13 +619,13 @@ CodeMirror.defineMode("css", function(co "ethiopic-halehame-ti-er", "ethiopic-halehame-ti-et", "ethiopic-halehame-tig", "ethiopic-numeric", "ew-resize", "exclusion", "expanded", "extends", "extra-condensed", "extra-expanded", "fantasy", "fast", "fill", "fixed", "flat", "flex", "flex-end", "flex-start", "footnotes", - "forwards", "from", "geometricPrecision", "georgian", "graytext", "groove", + "forwards", "from", "geometricPrecision", "georgian", "graytext", "grid", "groove", "gujarati", "gurmukhi", "hand", "hangul", "hangul-consonant", "hard-light", "hebrew", "help", "hidden", "hide", "higher", "highlight", "highlighttext", "hiragana", "hiragana-iroha", "horizontal", "hsl", "hsla", "hue", "icon", "ignore", "inactiveborder", "inactivecaption", "inactivecaptiontext", "infinite", "infobackground", "infotext", "inherit", "initial", "inline", "inline-axis", - "inline-block", "inline-flex", "inline-table", "inset", "inside", "intrinsic", "invert", + "inline-block", "inline-flex", "inline-grid", "inline-table", "inset", "inside", "intrinsic", "invert", "italic", "japanese-formal", "japanese-informal", "justify", "kannada", "katakana", "katakana-iroha", "keep-all", "khmer", "korean-hangul-formal", "korean-hanja-formal", "korean-hanja-informal", @@ -641,7 +645,7 @@ CodeMirror.defineMode("css", function(co "mix", "mongolian", "monospace", "move", "multiple", "multiply", "myanmar", "n-resize", "narrower", "ne-resize", "nesw-resize", "no-close-quote", "no-drop", "no-open-quote", "no-repeat", "none", "normal", "not-allowed", "nowrap", - "ns-resize", "numbers", "numeric", "nw-resize", "nwse-resize", "oblique", "octal", "open-quote", + "ns-resize", "numbers", "numeric", "nw-resize", "nwse-resize", "oblique", "octal", "opacity", "open-quote", "optimizeLegibility", "optimizeSpeed", "oriya", "oromo", "outset", "outside", "outside-shape", "overlay", "overline", "padding", "padding-box", "painted", "page", "paused", "persian", "perspective", "plus-darker", "plus-lighter", @@ -653,17 +657,17 @@ CodeMirror.defineMode("css", function(co "rgb", "rgba", "ridge", "right", "rotate", "rotate3d", "rotateX", "rotateY", "rotateZ", "round", "row", "row-resize", "row-reverse", "rtl", "run-in", "running", "s-resize", "sans-serif", "saturation", "scale", "scale3d", "scaleX", "scaleY", "scaleZ", "screen", - "scroll", "scrollbar", "se-resize", "searchfield", + "scroll", "scrollbar", "scroll-position", "se-resize", "searchfield", "searchfield-cancel-button", "searchfield-decoration", - "searchfield-results-button", "searchfield-results-decoration", + "searchfield-results-button", "searchfield-results-decoration", "self-start", "self-end", "semi-condensed", "semi-expanded", 
"separate", "serif", "show", "sidama", "simp-chinese-formal", "simp-chinese-informal", "single", "skew", "skewX", "skewY", "skip-white-space", "slide", "slider-horizontal", "slider-vertical", "sliderthumb-horizontal", "sliderthumb-vertical", "slow", "small", "small-caps", "small-caption", "smaller", "soft-light", "solid", "somali", - "source-atop", "source-in", "source-out", "source-over", "space", "space-around", "space-between", "spell-out", "square", + "source-atop", "source-in", "source-out", "source-over", "space", "space-around", "space-between", "space-evenly", "spell-out", "square", "square-button", "start", "static", "status-bar", "stretch", "stroke", "sub", - "subpixel-antialiased", "super", "sw-resize", "symbolic", "symbols", "table", + "subpixel-antialiased", "super", "sw-resize", "symbolic", "symbols", "system-ui", "table", "table-caption", "table-cell", "table-column", "table-column-group", "table-footer-group", "table-header-group", "table-row", "table-row-group", "tamil", @@ -671,9 +675,9 @@ CodeMirror.defineMode("css", function(co "thick", "thin", "threeddarkshadow", "threedface", "threedhighlight", "threedlightshadow", "threedshadow", "tibetan", "tigre", "tigrinya-er", "tigrinya-er-abegede", "tigrinya-et", "tigrinya-et-abegede", "to", "top", - "trad-chinese-formal", "trad-chinese-informal", + "trad-chinese-formal", "trad-chinese-informal", "transform", "translate", "translate3d", "translateX", "translateY", "translateZ", - "transparent", "ultra-condensed", "ultra-expanded", "underline", "up", + "transparent", "ultra-condensed", "ultra-expanded", "underline", "unset", "up", "upper-alpha", "upper-armenian", "upper-greek", "upper-hexadecimal", "upper-latin", "upper-norwegian", "upper-roman", "uppercase", "urdu", "url", "var", "vertical", "vertical-text", "visible", "visibleFill", "visiblePainted", @@ -730,6 +734,7 @@ CodeMirror.defineMode("css", function(co valueKeywords: valueKeywords, fontProperties: fontProperties, allowNested: true, + lineComment: "//", tokenHooks: { "/": function(stream, state) { if (stream.eat("/")) { @@ -743,8 +748,8 @@ CodeMirror.defineMode("css", function(co } }, ":": function(stream) { - if (stream.match(/\s*\{/)) - return [null, "{"]; + if (stream.match(/\s*\{/, false)) + return [null, null] return false; }, "$": function(stream) { @@ -772,6 +777,7 @@ CodeMirror.defineMode("css", function(co valueKeywords: valueKeywords, fontProperties: fontProperties, allowNested: true, + lineComment: "//", tokenHooks: { "/": function(stream, state) { if (stream.eat("/")) { @@ -786,7 +792,7 @@ CodeMirror.defineMode("css", function(co }, "@": function(stream) { if (stream.eat("{")) return [null, "interpolation"]; - if (stream.match(/^(charset|document|font-face|import|(-(moz|ms|o|webkit)-)?keyframes|media|namespace|page|supports)\b/, false)) return false; + if (stream.match(/^(charset|document|font-face|import|(-(moz|ms|o|webkit)-)?keyframes|media|namespace|page|supports)\b/i, false)) return false; stream.eatWhile(/[\w\\\-]/); if (stream.match(/^\s*:/, false)) return ["variable-2", "variable-definition"]; diff --git a/rhodecode/public/js/mode/cypher/cypher.js b/rhodecode/public/js/mode/cypher/cypher.js --- a/rhodecode/public/js/mode/cypher/cypher.js +++ b/rhodecode/public/js/mode/cypher/cypher.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE // By the Neo4j Team and contributors. 
// https://github.com/neo4j-contrib/CodeMirror @@ -20,8 +20,12 @@ CodeMirror.defineMode("cypher", function(config) { var tokenBase = function(stream/*, state*/) { var ch = stream.next(); - if (ch === "\"" || ch === "'") { - stream.match(/.+?["']/); + if (ch ==='"') { + stream.match(/.*?"/); + return "string"; + } + if (ch === "'") { + stream.match(/.*?'/); return "string"; } if (/[{}\(\),\.;\[\]]/.test(ch)) { @@ -62,7 +66,7 @@ var curPunc; var funcs = wordRegexp(["abs", "acos", "allShortestPaths", "asin", "atan", "atan2", "avg", "ceil", "coalesce", "collect", "cos", "cot", "count", "degrees", "e", "endnode", "exp", "extract", "filter", "floor", "haversin", "head", "id", "keys", "labels", "last", "left", "length", "log", "log10", "lower", "ltrim", "max", "min", "node", "nodes", "percentileCont", "percentileDisc", "pi", "radians", "rand", "range", "reduce", "rel", "relationship", "relationships", "replace", "reverse", "right", "round", "rtrim", "shortestPath", "sign", "sin", "size", "split", "sqrt", "startnode", "stdev", "stdevp", "str", "substring", "sum", "tail", "tan", "timestamp", "toFloat", "toInt", "toString", "trim", "type", "upper"]); var preds = wordRegexp(["all", "and", "any", "contains", "exists", "has", "in", "none", "not", "or", "single", "xor"]); - var keywords = wordRegexp(["as", "asc", "ascending", "assert", "by", "case", "commit", "constraint", "create", "csv", "cypher", "delete", "desc", "descending", "detach", "distinct", "drop", "else", "end", "ends", "explain", "false", "fieldterminator", "foreach", "from", "headers", "in", "index", "is", "join", "limit", "load", "match", "merge", "null", "on", "optional", "order", "periodic", "profile", "remove", "return", "scan", "set", "skip", "start", "starts", "then", "true", "union", "unique", "unwind", "using", "when", "where", "with"]); + var keywords = wordRegexp(["as", "asc", "ascending", "assert", "by", "case", "commit", "constraint", "create", "csv", "cypher", "delete", "desc", "descending", "detach", "distinct", "drop", "else", "end", "ends", "explain", "false", "fieldterminator", "foreach", "from", "headers", "in", "index", "is", "join", "limit", "load", "match", "merge", "null", "on", "optional", "order", "periodic", "profile", "remove", "return", "scan", "set", "skip", "start", "starts", "then", "true", "union", "unique", "unwind", "using", "when", "where", "with", "call", "yield"]); var operatorChars = /[*+\-<>=&|~%^]/; return { diff --git a/rhodecode/public/js/mode/d/d.js b/rhodecode/public/js/mode/d/d.js --- a/rhodecode/public/js/mode/d/d.js +++ b/rhodecode/public/js/mode/d/d.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS @@ -44,7 +44,7 @@ CodeMirror.defineMode("d", function(conf } if (ch == "/") { if (stream.eat("+")) { - state.tokenize = tokenComment; + state.tokenize = tokenNestedComment; return tokenNestedComment(stream, state); } if (stream.eat("*")) { @@ -182,7 +182,12 @@ CodeMirror.defineMode("d", function(conf else return ctx.indented + (closing ? 
0 : indentUnit); }, - electricChars: "{}" + electricChars: "{}", + blockCommentStart: "/*", + blockCommentEnd: "*/", + blockCommentContinue: " * ", + lineComment: "//", + fold: "brace" }; }); diff --git a/rhodecode/public/js/mode/dart/dart.js b/rhodecode/public/js/mode/dart/dart.js --- a/rhodecode/public/js/mode/dart/dart.js +++ b/rhodecode/public/js/mode/dart/dart.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS @@ -12,10 +12,10 @@ "use strict"; var keywords = ("this super static final const abstract class extends external factory " + - "implements get native operator set typedef with enum throw rethrow " + - "assert break case continue default in return new deferred async await " + + "implements mixin get native set typedef with enum throw rethrow " + + "assert break case continue default in return new deferred async await covariant " + "try catch finally do else for if switch while import library export " + - "part of show hide is as").split(" "); + "part of show hide is as extension on").split(" "); var blockKeywords = "try catch finally do else for if switch while".split(" "); var atoms = "true false null".split(" "); var builtins = "void bool num int double dynamic var String".split(" "); @@ -72,6 +72,21 @@ return null; } return false; + }, + + "/": function(stream, state) { + if (!stream.eat("*")) return false + state.tokenize = tokenNestedComment(1) + return state.tokenize(stream, state) + }, + token: function(stream, _, style) { + if (style == "variable") { + // Assume uppercase symbols are classes using variable-2 + var isUpper = RegExp('^[_$]*[A-Z][a-zA-Z0-9_$]*$','g'); + if (isUpper.test(stream.current())) { + return 'variable-2'; + } + } } } }); @@ -121,6 +136,27 @@ return "variable"; } + function tokenNestedComment(depth) { + return function (stream, state) { + var ch + while (ch = stream.next()) { + if (ch == "*" && stream.eat("/")) { + if (depth == 1) { + state.tokenize = null + break + } else { + state.tokenize = tokenNestedComment(depth - 1) + return state.tokenize(stream, state) + } + } else if (ch == "/" && stream.eat("*")) { + state.tokenize = tokenNestedComment(depth + 1) + return state.tokenize(stream, state) + } + } + return "comment" + } + } + CodeMirror.registerHelper("hintWords", "application/dart", keywords.concat(atoms).concat(builtins)); // This is needed to make loading through meta.js work. 
diff --git a/rhodecode/public/js/mode/diff/diff.js b/rhodecode/public/js/mode/diff/diff.js --- a/rhodecode/public/js/mode/diff/diff.js +++ b/rhodecode/public/js/mode/diff/diff.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS diff --git a/rhodecode/public/js/mode/django/django.js b/rhodecode/public/js/mode/django/django.js --- a/rhodecode/public/js/mode/django/django.js +++ b/rhodecode/public/js/mode/django/django.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS @@ -66,11 +66,11 @@ } // A string can be included in either single or double quotes (this is - // the delimeter). Mark everything as a string until the start delimeter + // the delimiter). Mark everything as a string until the start delimiter // occurs again. - function inString (delimeter, previousTokenizer) { + function inString (delimiter, previousTokenizer) { return function (stream, state) { - if (!state.escapeNext && stream.eat(delimeter)) { + if (!state.escapeNext && stream.eat(delimiter)) { state.tokenize = previousTokenizer; } else { if (state.escapeNext) { @@ -80,7 +80,7 @@ var ch = stream.next(); // Take into account the backslash for escaping characters, such as - // the string delimeter. + // the string delimiter. if (ch == "\\") { state.escapeNext = true; } @@ -100,7 +100,7 @@ return "null"; } - // Dot folowed by a non-word character should be considered an error. + // Dot followed by a non-word character should be considered an error. if (stream.match(/\.\W+/)) { return "error"; } else if (stream.eat(".")) { @@ -119,7 +119,7 @@ return "null"; } - // Pipe folowed by a non-word character should be considered an error. + // Pipe followed by a non-word character should be considered an error. if (stream.match(/\.\W+/)) { return "error"; } else if (stream.eat("|")) { @@ -199,7 +199,7 @@ return "null"; } - // Dot folowed by a non-word character should be considered an error. + // Dot followed by a non-word character should be considered an error. if (stream.match(/\.\W+/)) { return "error"; } else if (stream.eat(".")) { @@ -218,7 +218,7 @@ return "null"; } - // Pipe folowed by a non-word character should be considered an error. + // Pipe followed by a non-word character should be considered an error. 
if (stream.match(/\.\W+/)) { return "error"; } else if (stream.eat("|")) { diff --git a/rhodecode/public/js/mode/dockerfile/dockerfile.js b/rhodecode/public/js/mode/dockerfile/dockerfile.js --- a/rhodecode/public/js/mode/dockerfile/dockerfile.js +++ b/rhodecode/public/js/mode/dockerfile/dockerfile.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS @@ -11,30 +11,64 @@ })(function(CodeMirror) { "use strict"; + var from = "from"; + var fromRegex = new RegExp("^(\\s*)\\b(" + from + ")\\b", "i"); + + var shells = ["run", "cmd", "entrypoint", "shell"]; + var shellsAsArrayRegex = new RegExp("^(\\s*)(" + shells.join('|') + ")(\\s+\\[)", "i"); + + var expose = "expose"; + var exposeRegex = new RegExp("^(\\s*)(" + expose + ")(\\s+)", "i"); + + var others = [ + "arg", "from", "maintainer", "label", "env", + "add", "copy", "volume", "user", + "workdir", "onbuild", "stopsignal", "healthcheck", "shell" + ]; + // Collect all Dockerfile directives - var instructions = ["from", "maintainer", "run", "cmd", "expose", "env", - "add", "copy", "entrypoint", "volume", "user", - "workdir", "onbuild"], + var instructions = [from, expose].concat(shells).concat(others), instructionRegex = "(" + instructions.join('|') + ")", - instructionOnlyLine = new RegExp(instructionRegex + "\\s*$", "i"), - instructionWithArguments = new RegExp(instructionRegex + "(\\s+)", "i"); + instructionOnlyLine = new RegExp("^(\\s*)" + instructionRegex + "(\\s*)(#.*)?$", "i"), + instructionWithArguments = new RegExp("^(\\s*)" + instructionRegex + "(\\s+)", "i"); CodeMirror.defineSimpleMode("dockerfile", { start: [ // Block comment: This is a line starting with a comment { - regex: /#.*$/, + regex: /^\s*#.*$/, + sol: true, token: "comment" }, + { + regex: fromRegex, + token: [null, "keyword"], + sol: true, + next: "from" + }, // Highlight an instruction without any arguments (for convenience) { regex: instructionOnlyLine, - token: "variable-2" + token: [null, "keyword", null, "error"], + sol: true + }, + { + regex: shellsAsArrayRegex, + token: [null, "keyword", null], + sol: true, + next: "array" + }, + { + regex: exposeRegex, + token: [null, "keyword", null], + sol: true, + next: "expose" }, // Highlight an instruction followed by arguments { regex: instructionWithArguments, - token: ["variable-2", null], + token: [null, "keyword", null], + sol: true, next: "arguments" }, { @@ -42,26 +76,21 @@ token: null } ], - arguments: [ + from: [ { - // Line comment without instruction arguments is an error - regex: /#.*$/, - token: "error", - next: "start" - }, - { - regex: /[^#]+\\$/, - token: null - }, - { - // Match everything except for the inline comment - regex: /[^#]+/, + regex: /\s*$/, token: null, next: "start" }, { - regex: /$/, - token: null, + // Line comment without instruction arguments is an error + regex: /(\s*)(#.*)$/, + token: [null, "error"], + next: "start" + }, + { + regex: /(\s*\S+\s+)(as)/i, + token: [null, "keyword"], next: "start" }, // Fail safe return to start @@ -70,9 +99,112 @@ next: "start" } ], - meta: { - lineComment: "#" + single: [ + { + regex: /(?:[^\\']|\\.)/, + token: "string" + }, + { + regex: /'/, + token: "string", + pop: true + } + ], + double: [ + { + regex: /(?:[^\\"]|\\.)/, + token: "string" + }, + { + regex: /"/, + token: "string", + pop: true + } + ], + 
array: [ + { + regex: /\]/, + token: null, + next: "start" + }, + { + regex: /"(?:[^\\"]|\\.)*"?/, + token: "string" } + ], + expose: [ + { + regex: /\d+$/, + token: "number", + next: "start" + }, + { + regex: /[^\d]+$/, + token: null, + next: "start" + }, + { + regex: /\d+/, + token: "number" + }, + { + regex: /[^\d]+/, + token: null + }, + // Fail safe return to start + { + token: null, + next: "start" + } + ], + arguments: [ + { + regex: /^\s*#.*$/, + sol: true, + token: "comment" + }, + { + regex: /"(?:[^\\"]|\\.)*"?$/, + token: "string", + next: "start" + }, + { + regex: /"/, + token: "string", + push: "double" + }, + { + regex: /'(?:[^\\']|\\.)*'?$/, + token: "string", + next: "start" + }, + { + regex: /'/, + token: "string", + push: "single" + }, + { + regex: /[^#"']+[\\`]$/, + token: null + }, + { + regex: /[^#"']+$/, + token: null, + next: "start" + }, + { + regex: /[^#"']+/, + token: null + }, + // Fail safe return to start + { + token: null, + next: "start" + } + ], + meta: { + lineComment: "#" + } }); CodeMirror.defineMIME("text/x-dockerfile", "dockerfile"); diff --git a/rhodecode/public/js/mode/dtd/dtd.js b/rhodecode/public/js/mode/dtd/dtd.js --- a/rhodecode/public/js/mode/dtd/dtd.js +++ b/rhodecode/public/js/mode/dtd/dtd.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE /* DTD mode @@ -114,17 +114,17 @@ CodeMirror.defineMode("dtd", function(co if( textAfter.match(/\]\s+|\]/) )n=n-1; else if(textAfter.substr(textAfter.length-1, textAfter.length) === ">"){ - if(textAfter.substr(0,1) === "<")n; - else if( type == "doindent" && textAfter.length > 1 )n; + if(textAfter.substr(0,1) === "<") {} + else if( type == "doindent" && textAfter.length > 1 ) {} else if( type == "doindent")n--; - else if( type == ">" && textAfter.length > 1)n; - else if( type == "tag" && textAfter !== ">")n; + else if( type == ">" && textAfter.length > 1) {} + else if( type == "tag" && textAfter !== ">") {} else if( type == "tag" && state.stack[state.stack.length-1] == "rule")n--; else if( type == "tag")n++; else if( textAfter === ">" && state.stack[state.stack.length-1] == "rule" && type === ">")n--; - else if( textAfter === ">" && state.stack[state.stack.length-1] == "rule")n; + else if( textAfter === ">" && state.stack[state.stack.length-1] == "rule") {} else if( textAfter.substr(0,1) !== "<" && textAfter.substr(0,1) === ">" )n=n-1; - else if( textAfter === ">")n; + else if( textAfter === ">") {} else n=n-1; //over rule them all if(type == null || type == "]")n--; diff --git a/rhodecode/public/js/mode/dylan/dylan.js b/rhodecode/public/js/mode/dylan/dylan.js --- a/rhodecode/public/js/mode/dylan/dylan.js +++ b/rhodecode/public/js/mode/dylan/dylan.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS @@ -11,6 +11,14 @@ })(function(CodeMirror) { "use strict"; +function forEach(arr, f) { + for (var i = 0; i < arr.length; i++) f(arr[i], i) +} +function some(arr, f) { + for (var i = 0; i < arr.length; i++) if (f(arr[i], i)) return true + return false +} + CodeMirror.defineMode("dylan", function(_config) { // Words var words = { @@ -136,13 +144,13 @@ CodeMirror.defineMode("dylan", function( var wordLookup 
= {}; var styleLookup = {}; - [ + forEach([ "keyword", "definition", "simpleDefinition", "signalingCalls" - ].forEach(function(type) { - words[type].forEach(function(word) { + ], function(type) { + forEach(words[type], function(word) { wordLookup[word] = type; styleLookup[word] = styles[type]; }); @@ -169,15 +177,16 @@ CodeMirror.defineMode("dylan", function( } else if (stream.eat("/")) { stream.skipToEnd(); return "comment"; - } else { - stream.skipTo(" "); - return "operator"; } + stream.backUp(1); } // Decimal - else if (/\d/.test(ch)) { - stream.match(/^\d*(?:\.\d*)?(?:e[+\-]?\d+)?/); - return "number"; + else if (/[+\-\d\.]/.test(ch)) { + if (stream.match(/^[+-]?[0-9]*\.[0-9]*([esdx][+-]?[0-9]+)?/i) || + stream.match(/^[+-]?[0-9]+([esdx][+-]?[0-9]+)/i) || + stream.match(/^[+-]?\d+/)) { + return "number"; + } } // Hash else if (ch == "#") { @@ -186,7 +195,7 @@ CodeMirror.defineMode("dylan", function( ch = stream.peek(); if (ch == '"') { stream.next(); - return chain(stream, state, tokenString('"', "string-2")); + return chain(stream, state, tokenString('"', "string")); } // Binary number else if (ch == "b") { @@ -206,29 +215,73 @@ CodeMirror.defineMode("dylan", function( stream.eatWhile(/[0-7]/); return "number"; } + // Token concatenation in macros + else if (ch == '#') { + stream.next(); + return "punctuation"; + } + // Sequence literals + else if ((ch == '[') || (ch == '(')) { + stream.next(); + return "bracket"; // Hash symbol - else { + } else if (stream.match(/f|t|all-keys|include|key|next|rest/i)) { + return "atom"; + } else { stream.eatWhile(/[-a-zA-Z]/); - return "keyword"; + return "error"; + } + } else if (ch == "~") { + stream.next(); + ch = stream.peek(); + if (ch == "=") { + stream.next(); + ch = stream.peek(); + if (ch == "=") { + stream.next(); + return "operator"; + } + return "operator"; } + return "operator"; + } else if (ch == ":") { + stream.next(); + ch = stream.peek(); + if (ch == "=") { + stream.next(); + return "operator"; + } else if (ch == ":") { + stream.next(); + return "punctuation"; + } + } else if ("[](){}".indexOf(ch) != -1) { + stream.next(); + return "bracket"; + } else if (".,".indexOf(ch) != -1) { + stream.next(); + return "punctuation"; } else if (stream.match("end")) { return "keyword"; } for (var name in patterns) { if (patterns.hasOwnProperty(name)) { var pattern = patterns[name]; - if ((pattern instanceof Array && pattern.some(function(p) { + if ((pattern instanceof Array && some(pattern, function(p) { return stream.match(p); })) || stream.match(pattern)) return patternStyles[name]; } } + if (/[+\-*\/^=<>&|]/.test(ch)) { + stream.next(); + return "operator"; + } if (stream.match("define")) { return "def"; } else { stream.eatWhile(/[\w\-]/); // Keyword - if (wordLookup[stream.current()]) { + if (wordLookup.hasOwnProperty(stream.current())) { return styleLookup[stream.current()]; } else if (stream.current().match(symbol)) { return "variable"; @@ -240,29 +293,37 @@ CodeMirror.defineMode("dylan", function( } function tokenComment(stream, state) { - var maybeEnd = false, - ch; + var maybeEnd = false, maybeNested = false, nestedCount = 0, ch; while ((ch = stream.next())) { if (ch == "/" && maybeEnd) { - state.tokenize = tokenBase; - break; + if (nestedCount > 0) { + nestedCount--; + } else { + state.tokenize = tokenBase; + break; + } + } else if (ch == "*" && maybeNested) { + nestedCount++; } maybeEnd = (ch == "*"); + maybeNested = (ch == "/"); } return "comment"; } function tokenString(quote, style) { return function(stream, state) { - var next, end 
= false; + var escaped = false, next, end = false; while ((next = stream.next()) != null) { - if (next == quote) { + if (next == quote && !escaped) { end = true; break; } + escaped = !escaped && next == "\\"; } - if (end) + if (end || !escaped) { state.tokenize = tokenBase; + } return style; }; } diff --git a/rhodecode/public/js/mode/ebnf/ebnf.js b/rhodecode/public/js/mode/ebnf/ebnf.js --- a/rhodecode/public/js/mode/ebnf/ebnf.js +++ b/rhodecode/public/js/mode/ebnf/ebnf.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS @@ -94,7 +94,7 @@ if (bracesMode !== null && (state.braced || peek === "{")) { if (state.localState === null) - state.localState = bracesMode.startState(); + state.localState = CodeMirror.startState(bracesMode); var token = bracesMode.token(stream, state.localState), text = stream.current(); diff --git a/rhodecode/public/js/mode/ecl/ecl.js b/rhodecode/public/js/mode/ecl/ecl.js --- a/rhodecode/public/js/mode/ecl/ecl.js +++ b/rhodecode/public/js/mode/ecl/ecl.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS diff --git a/rhodecode/public/js/mode/eiffel/eiffel.js b/rhodecode/public/js/mode/eiffel/eiffel.js --- a/rhodecode/public/js/mode/eiffel/eiffel.js +++ b/rhodecode/public/js/mode/eiffel/eiffel.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS diff --git a/rhodecode/public/js/mode/elm/elm.js b/rhodecode/public/js/mode/elm/elm.js --- a/rhodecode/public/js/mode/elm/elm.js +++ b/rhodecode/public/js/mode/elm/elm.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS @@ -70,7 +70,7 @@ if (smallRE.test(ch)) { var isDef = source.pos === 1; source.eatWhile(idRE); - return isDef ? "variable-3" : "variable"; + return isDef ? 
"type" : "variable"; } if (digitRE.test(ch)) { diff --git a/rhodecode/public/js/mode/erlang/erlang.js b/rhodecode/public/js/mode/erlang/erlang.js --- a/rhodecode/public/js/mode/erlang/erlang.js +++ b/rhodecode/public/js/mode/erlang/erlang.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE /*jshint unused:true, eqnull:true, curly:true, bitwise:true */ /*jshint undef:true, latedef:true, trailing:true */ @@ -433,15 +433,16 @@ CodeMirror.defineMode("erlang", function } function maybe_drop_post(s) { + if (!s.length) return s var last = s.length-1; if (s[last].type === "dot") { return []; } - if (s[last].type === "fun" && s[last-1].token === "fun") { + if (last > 1 && s[last].type === "fun" && s[last-1].token === "fun") { return s.slice(0,last-1); } - switch (s[s.length-1].token) { + switch (s[last].token) { case "}": return d(s,{g:["{"]}); case "]": return d(s,{i:["["]}); case ")": return d(s,{i:["("]}); diff --git a/rhodecode/public/js/mode/factor/factor.js b/rhodecode/public/js/mode/factor/factor.js --- a/rhodecode/public/js/mode/factor/factor.js +++ b/rhodecode/public/js/mode/factor/factor.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE // Factor syntax highlight - simple mode // @@ -22,52 +22,54 @@ {regex: /#?!.*/, token: "comment"}, // strings """, multiline --> state {regex: /"""/, token: "string", next: "string3"}, - {regex: /"/, token: "string", next: "string"}, + {regex: /(STRING:)(\s)/, token: ["keyword", null], next: "string2"}, + {regex: /\S*?"/, token: "string", next: "string"}, // numbers: dec, hex, unicode, bin, fractional, complex - {regex: /(?:[+-]?)(?:0x[\d,a-f]+)|(?:0o[0-7]+)|(?:0b[0,1]+)|(?:\d+.?\d*)/, token: "number"}, + {regex: /(?:0x[\d,a-f]+)|(?:0o[0-7]+)|(?:0b[0,1]+)|(?:\-?\d+.?\d*)(?=\s)/, token: "number"}, //{regex: /[+-]?/} //fractional // definition: defining word, defined word, etc - {regex: /(\:)(\s+)(\S+)(\s+)(\()/, token: ["keyword", null, "def", null, "keyword"], next: "stack"}, + {regex: /((?:GENERIC)|\:?\:)(\s+)(\S+)(\s+)(\()/, token: ["keyword", null, "def", null, "bracket"], next: "stack"}, + // method definition: defining word, type, defined word, etc + {regex: /(M\:)(\s+)(\S+)(\s+)(\S+)/, token: ["keyword", null, "def", null, "tag"]}, // vocabulary using --> state {regex: /USING\:/, token: "keyword", next: "vocabulary"}, // vocabulary definition/use - {regex: /(USE\:|IN\:)(\s+)(\S+)/, token: ["keyword", null, "variable-2"]}, - // - {regex: /<\S+>/, token: "builtin"}, + {regex: /(USE\:|IN\:)(\s+)(\S+)(?=\s|$)/, token: ["keyword", null, "tag"]}, + // definition: a defining word, defined word + {regex: /(\S+\:)(\s+)(\S+)(?=\s|$)/, token: ["keyword", null, "def"]}, // "keywords", incl. ; t f . [ ] { } defining words - {regex: /;|t|f|if|\.|\[|\]|\{|\}|MAIN:/, token: "keyword"}, + {regex: /(?:;|\\|t|f|if|loop|while|until|do|PRIVATE>| and the like + {regex: /\S+[\)>\.\*\?]+(?=\s|$)/, token: "builtin"}, + {regex: /[\)><]+\S+(?=\s|$)/, token: "builtin"}, + // operators + {regex: /(?:[\+\-\=\/\*<>])(?=\s|$)/, token: "keyword"}, // any id (?) 
{regex: /\S+/, token: "variable"}, - - { - regex: /./, - token: null - } + {regex: /\s+|./, token: null} ], vocabulary: [ {regex: /;/, token: "keyword", next: "start"}, - {regex: /\S+/, token: "variable-2"}, - { - regex: /./, - token: null - } + {regex: /\S+/, token: "tag"}, + {regex: /\s+|./, token: null} ], string: [ {regex: /(?:[^\\]|\\.)*?"/, token: "string", next: "start"}, {regex: /.*/, token: "string"} ], + string2: [ + {regex: /^;/, token: "keyword", next: "start"}, + {regex: /.*/, token: "string"} + ], string3: [ {regex: /(?:[^\\]|\\.)*?"""/, token: "string", next: "start"}, {regex: /.*/, token: "string"} ], stack: [ - {regex: /\)/, token: "meta", next: "start"}, - {regex: /--/, token: "meta"}, - {regex: /\S+/, token: "variable-3"}, - { - regex: /./, - token: null - } + {regex: /\)/, token: "bracket", next: "start"}, + {regex: /--/, token: "bracket"}, + {regex: /\S+/, token: "meta"}, + {regex: /\s+|./, token: null} ], // The meta property contains global information about the mode. It // can contain properties like lineComment, which are supported by diff --git a/rhodecode/public/js/mode/fcl/fcl.js b/rhodecode/public/js/mode/fcl/fcl.js new file mode 100644 --- /dev/null +++ b/rhodecode/public/js/mode/fcl/fcl.js @@ -0,0 +1,173 @@ +// CodeMirror, copyright (c) by Marijn Haverbeke and others +// Distributed under an MIT license: https://codemirror.net/LICENSE + +(function(mod) { + if (typeof exports == "object" && typeof module == "object") // CommonJS + mod(require("../../lib/codemirror")); + else if (typeof define == "function" && define.amd) // AMD + define(["../../lib/codemirror"], mod); + else // Plain browser env + mod(CodeMirror); +})(function(CodeMirror) { +"use strict"; + +CodeMirror.defineMode("fcl", function(config) { + var indentUnit = config.indentUnit; + + var keywords = { + "term": true, + "method": true, "accu": true, + "rule": true, "then": true, "is": true, "and": true, "or": true, + "if": true, "default": true + }; + + var start_blocks = { + "var_input": true, + "var_output": true, + "fuzzify": true, + "defuzzify": true, + "function_block": true, + "ruleblock": true + }; + + var end_blocks = { + "end_ruleblock": true, + "end_defuzzify": true, + "end_function_block": true, + "end_fuzzify": true, + "end_var": true + }; + + var atoms = { + "true": true, "false": true, "nan": true, + "real": true, "min": true, "max": true, "cog": true, "cogs": true + }; + + var isOperatorChar = /[+\-*&^%:=<>!|\/]/; + + function tokenBase(stream, state) { + var ch = stream.next(); + + if (/[\d\.]/.test(ch)) { + if (ch == ".") { + stream.match(/^[0-9]+([eE][\-+]?[0-9]+)?/); + } else if (ch == "0") { + stream.match(/^[xX][0-9a-fA-F]+/) || stream.match(/^0[0-7]+/); + } else { + stream.match(/^[0-9]*\.?[0-9]*([eE][\-+]?[0-9]+)?/); + } + return "number"; + } + + if (ch == "/" || ch == "(") { + if (stream.eat("*")) { + state.tokenize = tokenComment; + return tokenComment(stream, state); + } + if (stream.eat("/")) { + stream.skipToEnd(); + return "comment"; + } + } + if (isOperatorChar.test(ch)) { + stream.eatWhile(isOperatorChar); + return "operator"; + } + stream.eatWhile(/[\w\$_\xa1-\uffff]/); + + var cur = stream.current().toLowerCase(); + if (keywords.propertyIsEnumerable(cur) || + start_blocks.propertyIsEnumerable(cur) || + end_blocks.propertyIsEnumerable(cur)) { + return "keyword"; + } + if (atoms.propertyIsEnumerable(cur)) return "atom"; + return "variable"; + } + + + function tokenComment(stream, state) { + var maybeEnd = false, ch; + while (ch = stream.next()) { + if ((ch == "/" || 
ch == ")") && maybeEnd) { + state.tokenize = tokenBase; + break; + } + maybeEnd = (ch == "*"); + } + return "comment"; + } + + function Context(indented, column, type, align, prev) { + this.indented = indented; + this.column = column; + this.type = type; + this.align = align; + this.prev = prev; + } + + function pushContext(state, col, type) { + return state.context = new Context(state.indented, col, type, null, state.context); + } + + function popContext(state) { + if (!state.context.prev) return; + var t = state.context.type; + if (t == "end_block") + state.indented = state.context.indented; + return state.context = state.context.prev; + } + + // Interface + + return { + startState: function(basecolumn) { + return { + tokenize: null, + context: new Context((basecolumn || 0) - indentUnit, 0, "top", false), + indented: 0, + startOfLine: true + }; + }, + + token: function(stream, state) { + var ctx = state.context; + if (stream.sol()) { + if (ctx.align == null) ctx.align = false; + state.indented = stream.indentation(); + state.startOfLine = true; + } + if (stream.eatSpace()) return null; + + var style = (state.tokenize || tokenBase)(stream, state); + if (style == "comment") return style; + if (ctx.align == null) ctx.align = true; + + var cur = stream.current().toLowerCase(); + + if (start_blocks.propertyIsEnumerable(cur)) pushContext(state, stream.column(), "end_block"); + else if (end_blocks.propertyIsEnumerable(cur)) popContext(state); + + state.startOfLine = false; + return style; + }, + + indent: function(state, textAfter) { + if (state.tokenize != tokenBase && state.tokenize != null) return 0; + var ctx = state.context; + + var closing = end_blocks.propertyIsEnumerable(textAfter); + if (ctx.align) return ctx.column + (closing ? 0 : 1); + else return ctx.indented + (closing ? 
0 : indentUnit); + }, + + electricChars: "ryk", + fold: "brace", + blockCommentStart: "(*", + blockCommentEnd: "*)", + lineComment: "//" + }; +}); + +CodeMirror.defineMIME("text/x-fcl", "fcl"); +}); diff --git a/rhodecode/public/js/mode/forth/forth.js b/rhodecode/public/js/mode/forth/forth.js --- a/rhodecode/public/js/mode/forth/forth.js +++ b/rhodecode/public/js/mode/forth/forth.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE // Author: Aliaksei Chapyzhenka diff --git a/rhodecode/public/js/mode/fortran/fortran.js b/rhodecode/public/js/mode/fortran/fortran.js --- a/rhodecode/public/js/mode/fortran/fortran.js +++ b/rhodecode/public/js/mode/fortran/fortran.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS diff --git a/rhodecode/public/js/mode/gas/gas.js b/rhodecode/public/js/mode/gas/gas.js --- a/rhodecode/public/js/mode/gas/gas.js +++ b/rhodecode/public/js/mode/gas/gas.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS diff --git a/rhodecode/public/js/mode/gfm/gfm.js b/rhodecode/public/js/mode/gfm/gfm.js --- a/rhodecode/public/js/mode/gfm/gfm.js +++ b/rhodecode/public/js/mode/gfm/gfm.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS @@ -81,7 +81,7 @@ CodeMirror.defineMode("gfm", function(co if (stream.sol() || state.ateSpace) { state.ateSpace = false; if (modeConfig.gitHubSpice !== false) { - if(stream.match(/^(?:[a-zA-Z0-9\-_]+\/)?(?:[a-zA-Z0-9\-_]+@)?(?:[a-f0-9]{7,40}\b)/)) { + if(stream.match(/^(?:[a-zA-Z0-9\-_]+\/)?(?:[a-zA-Z0-9\-_]+@)?(?=.{0,6}\d)(?:[a-f0-9]{7,40}\b)/)) { // User/Project@SHA // User@SHA // SHA @@ -113,10 +113,9 @@ CodeMirror.defineMode("gfm", function(co }; var markdownConfig = { - underscoresBreakWords: false, taskLists: true, - fencedCodeBlocks: '```', - strikethrough: true + strikethrough: true, + emoji: true }; for (var attr in modeConfig) { markdownConfig[attr] = modeConfig[attr]; diff --git a/rhodecode/public/js/mode/gherkin/gherkin.js b/rhodecode/public/js/mode/gherkin/gherkin.js --- a/rhodecode/public/js/mode/gherkin/gherkin.js +++ b/rhodecode/public/js/mode/gherkin/gherkin.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE /* Gherkin mode - http://www.cukes.info/ diff --git a/rhodecode/public/js/mode/go/go.js b/rhodecode/public/js/mode/go/go.js --- a/rhodecode/public/js/mode/go/go.js +++ b/rhodecode/public/js/mode/go/go.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed 
under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS @@ -23,12 +23,13 @@ CodeMirror.defineMode("go", function(con "bool":true, "byte":true, "complex64":true, "complex128":true, "float32":true, "float64":true, "int8":true, "int16":true, "int32":true, "int64":true, "string":true, "uint8":true, "uint16":true, "uint32":true, - "uint64":true, "int":true, "uint":true, "uintptr":true + "uint64":true, "int":true, "uint":true, "uintptr":true, "error": true, + "rune":true }; var atoms = { "true":true, "false":true, "iota":true, "nil":true, "append":true, - "cap":true, "close":true, "complex":true, "copy":true, "imag":true, + "cap":true, "close":true, "complex":true, "copy":true, "delete":true, "imag":true, "len":true, "make":true, "new":true, "panic":true, "print":true, "println":true, "real":true, "recover":true }; @@ -154,14 +155,14 @@ CodeMirror.defineMode("go", function(con else if (curPunc == "[") pushContext(state, stream.column(), "]"); else if (curPunc == "(") pushContext(state, stream.column(), ")"); else if (curPunc == "case") ctx.type = "case"; - else if (curPunc == "}" && ctx.type == "}") ctx = popContext(state); + else if (curPunc == "}" && ctx.type == "}") popContext(state); else if (curPunc == ctx.type) popContext(state); state.startOfLine = false; return style; }, indent: function(state, textAfter) { - if (state.tokenize != tokenBase && state.tokenize != null) return 0; + if (state.tokenize != tokenBase && state.tokenize != null) return CodeMirror.Pass; var ctx = state.context, firstChar = textAfter && textAfter.charAt(0); if (ctx.type == "case" && /^(?:case|default)\b/.test(textAfter)) { state.context.type = "}"; @@ -173,6 +174,7 @@ CodeMirror.defineMode("go", function(con }, electricChars: "{}):", + closeBrackets: "()[]{}''\"\"``", fold: "brace", blockCommentStart: "/*", blockCommentEnd: "*/", diff --git a/rhodecode/public/js/mode/groovy/groovy.js b/rhodecode/public/js/mode/groovy/groovy.js --- a/rhodecode/public/js/mode/groovy/groovy.js +++ b/rhodecode/public/js/mode/groovy/groovy.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS @@ -21,9 +21,9 @@ CodeMirror.defineMode("groovy", function "abstract as assert boolean break byte case catch char class const continue def default " + "do double else enum extends final finally float for goto if implements import in " + "instanceof int interface long native new package private protected public return " + - "short static strictfp super switch synchronized threadsafe throw throws transient " + + "short static strictfp super switch synchronized threadsafe throw throws trait transient " + "try void volatile while"); - var blockKeywords = words("catch class do else finally for if switch try while enum interface def"); + var blockKeywords = words("catch class def do else enum finally for if interface switch trait try while"); var standaloneKeywords = words("return break continue"); var atoms = words("null true false this"); @@ -210,7 +210,7 @@ CodeMirror.defineMode("groovy", function }, indent: function(state, textAfter) { - if (!state.tokenize[state.tokenize.length-1].isBase) return 0; + if (!state.tokenize[state.tokenize.length-1].isBase) return CodeMirror.Pass; var firstChar = textAfter && 
textAfter.charAt(0), ctx = state.context; if (ctx.type == "statement" && !expectExpression(state.lastToken, true)) ctx = ctx.prev; var closing = firstChar == ctx.type; @@ -221,7 +221,10 @@ CodeMirror.defineMode("groovy", function electricChars: "{}", closeBrackets: {triples: "'\""}, - fold: "brace" + fold: "brace", + blockCommentStart: "/*", + blockCommentEnd: "*/", + lineComment: "//" }; }); diff --git a/rhodecode/public/js/mode/haml/haml.js b/rhodecode/public/js/mode/haml/haml.js --- a/rhodecode/public/js/mode/haml/haml.js +++ b/rhodecode/public/js/mode/haml/haml.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS @@ -11,7 +11,7 @@ })(function(CodeMirror) { "use strict"; - // full haml mode. This handled embeded ruby and html fragments too + // full haml mode. This handled embedded ruby and html fragments too CodeMirror.defineMode("haml", function(config) { var htmlMode = CodeMirror.getMode(config, {name: "htmlmixed"}); var rubyMode = CodeMirror.getMode(config, "ruby"); @@ -98,8 +98,8 @@ return { // default to html mode startState: function() { - var htmlState = htmlMode.startState(); - var rubyState = rubyMode.startState(); + var htmlState = CodeMirror.startState(htmlMode); + var rubyState = CodeMirror.startState(rubyMode); return { htmlState: htmlState, rubyState: rubyState, diff --git a/rhodecode/public/js/mode/handlebars/handlebars.js b/rhodecode/public/js/mode/handlebars/handlebars.js --- a/rhodecode/public/js/mode/handlebars/handlebars.js +++ b/rhodecode/public/js/mode/handlebars/handlebars.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS @@ -13,10 +13,14 @@ CodeMirror.defineSimpleMode("handlebars-tags", { start: [ + { regex: /\{\{\{/, push: "handlebars_raw", token: "tag" }, { regex: /\{\{!--/, push: "dash_comment", token: "comment" }, { regex: /\{\{!/, push: "comment", token: "comment" }, { regex: /\{\{/, push: "handlebars", token: "tag" } ], + handlebars_raw: [ + { regex: /\}\}\}/, pop: true, token: "tag" }, + ], handlebars: [ { regex: /\}\}/, pop: true, token: "tag" }, @@ -46,7 +50,11 @@ comment: [ { regex: /\}\}/, pop: true, token: "comment" }, { regex: /./, token: "comment" } - ] + ], + meta: { + blockCommentStart: "{{--", + blockCommentEnd: "--}}" + } }); CodeMirror.defineMode("handlebars", function(config, parserConfig) { diff --git a/rhodecode/public/js/mode/haskell-literate/haskell-literate.js b/rhodecode/public/js/mode/haskell-literate/haskell-literate.js --- a/rhodecode/public/js/mode/haskell-literate/haskell-literate.js +++ b/rhodecode/public/js/mode/haskell-literate/haskell-literate.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function (mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS @@ -40,4 +40,4 @@ }, "haskell") CodeMirror.defineMIME("text/x-literate-haskell", "haskell-literate") -}) +}); diff --git a/rhodecode/public/js/mode/haskell/haskell.js 
b/rhodecode/public/js/mode/haskell/haskell.js --- a/rhodecode/public/js/mode/haskell/haskell.js +++ b/rhodecode/public/js/mode/haskell/haskell.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS @@ -56,7 +56,7 @@ CodeMirror.defineMode("haskell", functio if (source.eat('\'')) { return "string"; } - return "error"; + return "string error"; } if (ch == '"') { @@ -166,7 +166,7 @@ CodeMirror.defineMode("haskell", functio } } setState(normal); - return "error"; + return "string error"; } function stringGap(source, setState) { @@ -194,16 +194,17 @@ CodeMirror.defineMode("haskell", functio "module", "newtype", "of", "then", "type", "where", "_"); setType("keyword")( - "\.\.", ":", "::", "=", "\\", "\"", "<-", "->", "@", "~", "=>"); + "\.\.", ":", "::", "=", "\\", "<-", "->", "@", "~", "=>"); setType("builtin")( - "!!", "$!", "$", "&&", "+", "++", "-", ".", "/", "/=", "<", "<=", "=<<", - "==", ">", ">=", ">>", ">>=", "^", "^^", "||", "*", "**"); + "!!", "$!", "$", "&&", "+", "++", "-", ".", "/", "/=", "<", "<*", "<=", + "<$>", "<*>", "=<<", "==", ">", ">=", ">>", ">>=", "^", "^^", "||", "*", + "*>", "**"); setType("builtin")( - "Bool", "Bounded", "Char", "Double", "EQ", "Either", "Enum", "Eq", - "False", "FilePath", "Float", "Floating", "Fractional", "Functor", "GT", - "IO", "IOError", "Int", "Integer", "Integral", "Just", "LT", "Left", + "Applicative", "Bool", "Bounded", "Char", "Double", "EQ", "Either", "Enum", + "Eq", "False", "FilePath", "Float", "Floating", "Fractional", "Functor", + "GT", "IO", "IOError", "Int", "Integer", "Integral", "Just", "LT", "Left", "Maybe", "Monad", "Nothing", "Num", "Ord", "Ordering", "Rational", "Read", "ReadS", "Real", "RealFloat", "RealFrac", "Right", "Show", "ShowS", "String", "True"); @@ -223,7 +224,7 @@ CodeMirror.defineMode("haskell", functio "lcm", "length", "lex", "lines", "log", "logBase", "lookup", "map", "mapM", "mapM_", "max", "maxBound", "maximum", "maybe", "min", "minBound", "minimum", "mod", "negate", "not", "notElem", "null", "odd", "or", - "otherwise", "pi", "pred", "print", "product", "properFraction", + "otherwise", "pi", "pred", "print", "product", "properFraction", "pure", "putChar", "putStr", "putStrLn", "quot", "quotRem", "read", "readFile", "readIO", "readList", "readLn", "readParen", "reads", "readsPrec", "realToFrac", "recip", "rem", "repeat", "replicate", "return", "reverse", diff --git a/rhodecode/public/js/mode/haxe/haxe.js b/rhodecode/public/js/mode/haxe/haxe.js --- a/rhodecode/public/js/mode/haxe/haxe.js +++ b/rhodecode/public/js/mode/haxe/haxe.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS @@ -485,7 +485,7 @@ CodeMirror.defineMode("hxml", function ( if (state.inString == false && ch == "'") { state.inString = true; - ch = stream.next(); + stream.next(); } if (state.inString == true) { diff --git a/rhodecode/public/js/mode/htmlembedded/htmlembedded.js b/rhodecode/public/js/mode/htmlembedded/htmlembedded.js --- a/rhodecode/public/js/mode/htmlembedded/htmlembedded.js +++ b/rhodecode/public/js/mode/htmlembedded/htmlembedded.js @@ -1,5 
+1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS @@ -14,7 +14,16 @@ "use strict"; CodeMirror.defineMode("htmlembedded", function(config, parserConfig) { + var closeComment = parserConfig.closeComment || "--%>" return CodeMirror.multiplexingMode(CodeMirror.getMode(config, "htmlmixed"), { + open: parserConfig.openComment || "<%--", + close: closeComment, + delimStyle: "comment", + mode: {token: function(stream) { + stream.skipTo(closeComment) || stream.skipToEnd() + return "comment" + }} + }, { open: parserConfig.open || parserConfig.scriptStartRegex || "<%", close: parserConfig.close || parserConfig.scriptEndRegex || "%>", mode: CodeMirror.getMode(config, parserConfig.scriptingModeSpec) diff --git a/rhodecode/public/js/mode/htmlmixed/htmlmixed.js b/rhodecode/public/js/mode/htmlmixed/htmlmixed.js --- a/rhodecode/public/js/mode/htmlmixed/htmlmixed.js +++ b/rhodecode/public/js/mode/htmlmixed/htmlmixed.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS @@ -14,7 +14,7 @@ var defaultTags = { script: [ ["lang", /(javascript|babel)/i, "javascript"], - ["type", /^(?:text|application)\/(?:x-)?(?:java|ecma)script$|^$/i, "javascript"], + ["type", /^(?:text|application)\/(?:x-)?(?:java|ecma)script$|^module$|^$/i, "javascript"], ["type", /./, "text/plain"], [null, null, "javascript"] ], @@ -44,13 +44,9 @@ return attrRegexpCache[attr] = new RegExp("\\s+" + attr + "\\s*=\\s*('|\")?([^'\"]+)('|\")?\\s*"); } - function getAttrValue(stream, attr) { - var pos = stream.pos, match; - while (pos >= 0 && stream.string.charAt(pos) !== "<") pos--; - if (pos < 0) return pos; - if (match = stream.string.slice(pos, stream.pos).match(getAttrRegexp(attr))) - return match[2]; - return ""; + function getAttrValue(text, attr) { + var match = text.match(getAttrRegexp(attr)) + return match ? 
/^\s*(.*?)\s*$/.exec(match[2])[1] : "" } function getTagRegexp(tagName, anchored) { @@ -66,10 +62,10 @@ } } - function findMatchingMode(tagInfo, stream) { + function findMatchingMode(tagInfo, tagText) { for (var i = 0; i < tagInfo.length; i++) { var spec = tagInfo[i]; - if (!spec[0] || spec[1].test(getAttrValue(stream, spec[0]))) return spec[2]; + if (!spec[0] || spec[1].test(getAttrValue(tagText, spec[0]))) return spec[2]; } } @@ -89,15 +85,17 @@ tags.script.unshift(["type", configScript[i].matches, configScript[i].mode]) function html(stream, state) { - var tagName = state.htmlState.tagName && state.htmlState.tagName.toLowerCase(); - var tagInfo = tagName && tags.hasOwnProperty(tagName) && tags[tagName]; - - var style = htmlMode.token(stream, state.htmlState), modeSpec; - - if (tagInfo && /\btag\b/.test(style) && stream.current() === ">" && - (modeSpec = findMatchingMode(tagInfo, stream))) { - var mode = CodeMirror.getMode(config, modeSpec); - var endTagA = getTagRegexp(tagName, true), endTag = getTagRegexp(tagName, false); + var style = htmlMode.token(stream, state.htmlState), tag = /\btag\b/.test(style), tagName + if (tag && !/[<>\s\/]/.test(stream.current()) && + (tagName = state.htmlState.tagName && state.htmlState.tagName.toLowerCase()) && + tags.hasOwnProperty(tagName)) { + state.inTag = tagName + " " + } else if (state.inTag && tag && />$/.test(stream.current())) { + var inTag = /^([\S]+) (.*)/.exec(state.inTag) + state.inTag = null + var modeSpec = stream.current() == ">" && findMatchingMode(tags[inTag[1]], inTag[2]) + var mode = CodeMirror.getMode(config, modeSpec) + var endTagA = getTagRegexp(inTag[1], true), endTag = getTagRegexp(inTag[1], false); state.token = function (stream, state) { if (stream.match(endTagA, false)) { state.token = html; @@ -107,15 +105,18 @@ return maybeBackup(stream, endTag, state.localMode.token(stream, state.localState)); }; state.localMode = mode; - state.localState = CodeMirror.startState(mode, htmlMode.indent(state.htmlState, "")); + state.localState = CodeMirror.startState(mode, htmlMode.indent(state.htmlState, "", "")); + } else if (state.inTag) { + state.inTag += stream.current() + if (stream.eol()) state.inTag += " " } return style; }; return { startState: function () { - var state = htmlMode.startState(); - return {token: html, localMode: null, localState: null, htmlState: state}; + var state = CodeMirror.startState(htmlMode); + return {token: html, inTag: null, localMode: null, localState: null, htmlState: state}; }, copyState: function (state) { @@ -123,7 +124,8 @@ if (state.localState) { local = CodeMirror.copyState(state.localMode, state.localState); } - return {token: state.token, localMode: state.localMode, localState: local, + return {token: state.token, inTag: state.inTag, + localMode: state.localMode, localState: local, htmlState: CodeMirror.copyState(htmlMode, state.htmlState)}; }, @@ -131,11 +133,11 @@ return state.token(stream, state); }, - indent: function (state, textAfter) { + indent: function (state, textAfter, line) { if (!state.localMode || /^\s*<\//.test(textAfter)) - return htmlMode.indent(state.htmlState, textAfter); + return htmlMode.indent(state.htmlState, textAfter, line); else if (state.localMode.indent) - return state.localMode.indent(state.localState, textAfter); + return state.localMode.indent(state.localState, textAfter, line); else return CodeMirror.Pass; }, diff --git a/rhodecode/public/js/mode/http/http.js b/rhodecode/public/js/mode/http/http.js --- a/rhodecode/public/js/mode/http/http.js +++ 
b/rhodecode/public/js/mode/http/http.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS diff --git a/rhodecode/public/js/mode/idl/idl.js b/rhodecode/public/js/mode/idl/idl.js --- a/rhodecode/public/js/mode/idl/idl.js +++ b/rhodecode/public/js/mode/idl/idl.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS diff --git a/rhodecode/public/js/mode/javascript/javascript.js b/rhodecode/public/js/mode/javascript/javascript.js --- a/rhodecode/public/js/mode/javascript/javascript.js +++ b/rhodecode/public/js/mode/javascript/javascript.js @@ -1,7 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE - -// TODO actually recognize syntax of TypeScript constructs +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS @@ -13,11 +11,6 @@ })(function(CodeMirror) { "use strict"; -function expressionAllowed(stream, state, backUp) { - return /^(?:operator|sof|keyword c|case|new|[\[{}\(,;:]|=>)$/.test(state.lastType) || - (state.lastType == "quasi" && /\{\s*$/.test(stream.string.slice(0, stream.pos - (backUp || 0)))) -} - CodeMirror.defineMode("javascript", function(config, parserConfig) { var indentUnit = config.indentUnit; var statementIndent = parserConfig.statementIndent; @@ -30,54 +23,24 @@ CodeMirror.defineMode("javascript", func var keywords = function(){ function kw(type) {return {type: type, style: "keyword"};} - var A = kw("keyword a"), B = kw("keyword b"), C = kw("keyword c"); + var A = kw("keyword a"), B = kw("keyword b"), C = kw("keyword c"), D = kw("keyword d"); var operator = kw("operator"), atom = {type: "atom", style: "atom"}; - var jsKeywords = { + return { "if": kw("if"), "while": A, "with": A, "else": B, "do": B, "try": B, "finally": B, - "return": C, "break": C, "continue": C, "new": kw("new"), "delete": C, "throw": C, "debugger": C, - "var": kw("var"), "const": kw("var"), "let": kw("var"), + "return": D, "break": D, "continue": D, "new": kw("new"), "delete": C, "void": C, "throw": C, + "debugger": kw("debugger"), "var": kw("var"), "const": kw("var"), "let": kw("var"), "function": kw("function"), "catch": kw("catch"), "for": kw("for"), "switch": kw("switch"), "case": kw("case"), "default": kw("default"), "in": operator, "typeof": operator, "instanceof": operator, "true": atom, "false": atom, "null": atom, "undefined": atom, "NaN": atom, "Infinity": atom, "this": kw("this"), "class": kw("class"), "super": kw("atom"), - "yield": C, "export": kw("export"), "import": kw("import"), "extends": C + "yield": C, "export": kw("export"), "import": kw("import"), "extends": C, + "await": C }; - - // Extend the 'normal' keywords with the TypeScript language extensions - if (isTS) { - var type = {type: "variable", style: "variable-3"}; - var tsKeywords = { - // object-like things - "interface": kw("class"), - "implements": C, - "namespace": C, - "module": kw("module"), - "enum": kw("module"), - - // scope modifiers - 
"public": kw("modifier"), - "private": kw("modifier"), - "protected": kw("modifier"), - "abstract": kw("modifier"), - - // operators - "as": operator, - - // types - "string": type, "number": type, "boolean": type, "any": type - }; - - for (var attr in tsKeywords) { - jsKeywords[attr] = tsKeywords[attr]; - } - } - - return jsKeywords; }(); - var isOperatorChar = /[+\-*&%=<>!?|~^]/; + var isOperatorChar = /[+\-*&%=<>!?|~^@]/; var isJsonldKeyword = /^@(context|id|value|language|type|container|list|set|reverse|index|base|vocab|graph)"/; function readRegexp(stream) { @@ -104,7 +67,7 @@ CodeMirror.defineMode("javascript", func if (ch == '"' || ch == "'") { state.tokenize = tokenString(ch); return state.tokenize(stream, state); - } else if (ch == "." && stream.match(/^\d+(?:[eE][+\-]?\d+)?/)) { + } else if (ch == "." && stream.match(/^\d[\d_]*(?:[eE][+\-]?[\d_]+)?/)) { return ret("number", "number"); } else if (ch == "." && stream.match("..")) { return ret("spread", "meta"); @@ -112,17 +75,10 @@ CodeMirror.defineMode("javascript", func return ret(ch); } else if (ch == "=" && stream.eat(">")) { return ret("=>", "operator"); - } else if (ch == "0" && stream.eat(/x/i)) { - stream.eatWhile(/[\da-f]/i); - return ret("number", "number"); - } else if (ch == "0" && stream.eat(/o/i)) { - stream.eatWhile(/[0-7]/i); - return ret("number", "number"); - } else if (ch == "0" && stream.eat(/b/i)) { - stream.eatWhile(/[01]/i); + } else if (ch == "0" && stream.match(/^(?:x[\dA-Fa-f_]+|o[0-7_]+|b[01_]+)n?/)) { return ret("number", "number"); } else if (/\d/.test(ch)) { - stream.match(/^\d*(?:\.\d*)?(?:[eE][+\-]?\d+)?/); + stream.match(/^[\d_]*(?:n|(?:\.[\d_]*)?(?:[eE][+\-]?[\d_]+)?)?/); return ret("number", "number"); } else if (ch == "/") { if (stream.eat("*")) { @@ -133,10 +89,10 @@ CodeMirror.defineMode("javascript", func return ret("comment", "comment"); } else if (expressionAllowed(stream, state, 1)) { readRegexp(stream); - stream.match(/^\b(([gimyu])(?![gimyu]*\2))+\b/); + stream.match(/^\b(([gimyus])(?![gimyus]*\2))+\b/); return ret("regexp", "string-2"); } else { - stream.eatWhile(isOperatorChar); + stream.eat("="); return ret("operator", "operator", stream.current()); } } else if (ch == "`") { @@ -145,14 +101,31 @@ CodeMirror.defineMode("javascript", func } else if (ch == "#") { stream.skipToEnd(); return ret("error", "error"); + } else if (ch == "<" && stream.match("!--") || ch == "-" && stream.match("->")) { + stream.skipToEnd() + return ret("comment", "comment") } else if (isOperatorChar.test(ch)) { - stream.eatWhile(isOperatorChar); + if (ch != ">" || !state.lexical || state.lexical.type != ">") { + if (stream.eat("=")) { + if (ch == "!" || ch == "=") stream.eat("=") + } else if (/[<>*+\-]/.test(ch)) { + stream.eat(ch) + if (ch == ">") stream.eat(ch) + } + } return ret("operator", "operator", stream.current()); } else if (wordRE.test(ch)) { stream.eatWhile(wordRE); - var word = stream.current(), known = keywords.propertyIsEnumerable(word) && keywords[word]; - return (known && state.lastType != ".") ? 
ret(known.type, known.style, word) : - ret("variable", "variable", word); + var word = stream.current() + if (state.lastType != ".") { + if (keywords.propertyIsEnumerable(word)) { + var kw = keywords[word] + return ret(kw.type, kw.style, word) + } + if (word == "async" && stream.match(/^(\s|\/\*.*?\*\/)*[\[\(\w]/, false)) + return ret("async", "keyword", word) + } + return ret("variable", "variable", word) } } @@ -209,19 +182,28 @@ CodeMirror.defineMode("javascript", func var arrow = stream.string.indexOf("=>", stream.start); if (arrow < 0) return; + if (isTS) { // Try to skip TypeScript return type declarations after the arguments + var m = /:\s*(?:\w+(?:<[^>]*>|\[\])?|\{[^}]*\})\s*$/.exec(stream.string.slice(stream.start, arrow)) + if (m) arrow = m.index + } + var depth = 0, sawSomething = false; for (var pos = arrow - 1; pos >= 0; --pos) { var ch = stream.string.charAt(pos); var bracket = brackets.indexOf(ch); if (bracket >= 0 && bracket < 3) { if (!depth) { ++pos; break; } - if (--depth == 0) break; + if (--depth == 0) { if (ch == "(") sawSomething = true; break; } } else if (bracket >= 3 && bracket < 6) { ++depth; } else if (wordRE.test(ch)) { sawSomething = true; - } else if (/["'\/]/.test(ch)) { - return; + } else if (/["'\/`]/.test(ch)) { + for (;; --pos) { + if (pos == 0) return + var next = stream.string.charAt(pos - 1) + if (next == ch && stream.string.charAt(pos - 2) != "\\") { pos--; break } + } } else if (sawSomething && !depth) { ++pos; break; @@ -283,35 +265,68 @@ CodeMirror.defineMode("javascript", func pass.apply(null, arguments); return true; } + function inList(name, list) { + for (var v = list; v; v = v.next) if (v.name == name) return true + return false; + } function register(varname) { - function inList(list) { - for (var v = list; v; v = v.next) - if (v.name == varname) return true; - return false; - } var state = cx.state; cx.marked = "def"; if (state.context) { - if (inList(state.localVars)) return; - state.localVars = {name: varname, next: state.localVars}; + if (state.lexical.info == "var" && state.context && state.context.block) { + // FIXME function decls are also not block scoped + var newContext = registerVarScoped(varname, state.context) + if (newContext != null) { + state.context = newContext + return + } + } else if (!inList(varname, state.localVars)) { + state.localVars = new Var(varname, state.localVars) + return + } + } + // Fall through means this is global + if (parserConfig.globalVars && !inList(varname, state.globalVars)) + state.globalVars = new Var(varname, state.globalVars) + } + function registerVarScoped(varname, context) { + if (!context) { + return null + } else if (context.block) { + var inner = registerVarScoped(varname, context.prev) + if (!inner) return null + if (inner == context.prev) return context + return new Context(inner, context.vars, true) + } else if (inList(varname, context.vars)) { + return context } else { - if (inList(state.globalVars)) return; - if (parserConfig.globalVars) - state.globalVars = {name: varname, next: state.globalVars}; + return new Context(context.prev, new Var(varname, context.vars), false) } } + function isModifier(name) { + return name == "public" || name == "private" || name == "protected" || name == "abstract" || name == "readonly" + } + // Combinators - var defaultVars = {name: "this", next: {name: "arguments"}}; + function Context(prev, vars, block) { this.prev = prev; this.vars = vars; this.block = block } + function Var(name, next) { this.name = name; this.next = next } + + var defaultVars = new 
Var("this", new Var("arguments", null)) function pushcontext() { - cx.state.context = {prev: cx.state.context, vars: cx.state.localVars}; - cx.state.localVars = defaultVars; + cx.state.context = new Context(cx.state.context, cx.state.localVars, false) + cx.state.localVars = defaultVars + } + function pushblockcontext() { + cx.state.context = new Context(cx.state.context, cx.state.localVars, true) + cx.state.localVars = null } function popcontext() { - cx.state.localVars = cx.state.context.vars; - cx.state.context = cx.state.context.prev; + cx.state.localVars = cx.state.context.vars + cx.state.context = cx.state.context.prev } + popcontext.lex = true function pushlex(type, info) { var result = function() { var state = cx.state, indent = state.indented; @@ -336,71 +351,99 @@ CodeMirror.defineMode("javascript", func function expect(wanted) { function exp(type) { if (type == wanted) return cont(); - else if (wanted == ";") return pass(); + else if (wanted == ";" || type == "}" || type == ")" || type == "]") return pass(); else return cont(exp); }; return exp; } function statement(type, value) { - if (type == "var") return cont(pushlex("vardef", value.length), vardef, expect(";"), poplex); - if (type == "keyword a") return cont(pushlex("form"), expression, statement, poplex); + if (type == "var") return cont(pushlex("vardef", value), vardef, expect(";"), poplex); + if (type == "keyword a") return cont(pushlex("form"), parenExpr, statement, poplex); if (type == "keyword b") return cont(pushlex("form"), statement, poplex); - if (type == "{") return cont(pushlex("}"), block, poplex); + if (type == "keyword d") return cx.stream.match(/^\s*$/, false) ? cont() : cont(pushlex("stat"), maybeexpression, expect(";"), poplex); + if (type == "debugger") return cont(expect(";")); + if (type == "{") return cont(pushlex("}"), pushblockcontext, block, poplex, popcontext); if (type == ";") return cont(); if (type == "if") { if (cx.state.lexical.info == "else" && cx.state.cc[cx.state.cc.length - 1] == poplex) cx.state.cc.pop()(); - return cont(pushlex("form"), expression, statement, poplex, maybeelse); + return cont(pushlex("form"), parenExpr, statement, poplex, maybeelse); } if (type == "function") return cont(functiondef); if (type == "for") return cont(pushlex("form"), forspec, statement, poplex); - if (type == "variable") return cont(pushlex("stat"), maybelabel); - if (type == "switch") return cont(pushlex("form"), expression, pushlex("}", "switch"), expect("{"), - block, poplex, poplex); + if (type == "class" || (isTS && value == "interface")) { + cx.marked = "keyword" + return cont(pushlex("form", type == "class" ? 
type : value), className, poplex) + } + if (type == "variable") { + if (isTS && value == "declare") { + cx.marked = "keyword" + return cont(statement) + } else if (isTS && (value == "module" || value == "enum" || value == "type") && cx.stream.match(/^\s*\w/, false)) { + cx.marked = "keyword" + if (value == "enum") return cont(enumdef); + else if (value == "type") return cont(typename, expect("operator"), typeexpr, expect(";")); + else return cont(pushlex("form"), pattern, expect("{"), pushlex("}"), block, poplex, poplex) + } else if (isTS && value == "namespace") { + cx.marked = "keyword" + return cont(pushlex("form"), expression, statement, poplex) + } else if (isTS && value == "abstract") { + cx.marked = "keyword" + return cont(statement) + } else { + return cont(pushlex("stat"), maybelabel); + } + } + if (type == "switch") return cont(pushlex("form"), parenExpr, expect("{"), pushlex("}", "switch"), pushblockcontext, + block, poplex, poplex, popcontext); if (type == "case") return cont(expression, expect(":")); if (type == "default") return cont(expect(":")); - if (type == "catch") return cont(pushlex("form"), pushcontext, expect("("), funarg, expect(")"), - statement, poplex, popcontext); - if (type == "class") return cont(pushlex("form"), className, poplex); + if (type == "catch") return cont(pushlex("form"), pushcontext, maybeCatchBinding, statement, poplex, popcontext); if (type == "export") return cont(pushlex("stat"), afterExport, poplex); if (type == "import") return cont(pushlex("stat"), afterImport, poplex); - if (type == "module") return cont(pushlex("form"), pattern, pushlex("}"), expect("{"), block, poplex, poplex) + if (type == "async") return cont(statement) + if (value == "@") return cont(expression, statement) return pass(pushlex("stat"), expression, expect(";"), poplex); } - function expression(type) { - return expressionInner(type, false); + function maybeCatchBinding(type) { + if (type == "(") return cont(funarg, expect(")")) + } + function expression(type, value) { + return expressionInner(type, value, false); } - function expressionNoComma(type) { - return expressionInner(type, true); + function expressionNoComma(type, value) { + return expressionInner(type, value, true); } - function expressionInner(type, noComma) { + function parenExpr(type) { + if (type != "(") return pass() + return cont(pushlex(")"), expression, expect(")"), poplex) + } + function expressionInner(type, value, noComma) { if (cx.state.fatArrowAt == cx.stream.start) { var body = noComma ? arrowBodyNoComma : arrowBody; - if (type == "(") return cont(pushcontext, pushlex(")"), commasep(pattern, ")"), poplex, expect("=>"), body, popcontext); + if (type == "(") return cont(pushcontext, pushlex(")"), commasep(funarg, ")"), poplex, expect("=>"), body, popcontext); else if (type == "variable") return pass(pushcontext, pattern, expect("=>"), body, popcontext); } var maybeop = noComma ? maybeoperatorNoComma : maybeoperatorComma; if (atomicTypes.hasOwnProperty(type)) return cont(maybeop); if (type == "function") return cont(functiondef, maybeop); - if (type == "keyword c") return cont(noComma ? maybeexpressionNoComma : maybeexpression); - if (type == "(") return cont(pushlex(")"), maybeexpression, comprehension, expect(")"), poplex, maybeop); + if (type == "class" || (isTS && value == "interface")) { cx.marked = "keyword"; return cont(pushlex("form"), classExpression, poplex); } + if (type == "keyword c" || type == "async") return cont(noComma ? 
expressionNoComma : expression); + if (type == "(") return cont(pushlex(")"), maybeexpression, expect(")"), poplex, maybeop); if (type == "operator" || type == "spread") return cont(noComma ? expressionNoComma : expression); if (type == "[") return cont(pushlex("]"), arrayLiteral, poplex, maybeop); if (type == "{") return contCommasep(objprop, "}", null, maybeop); if (type == "quasi") return pass(quasi, maybeop); if (type == "new") return cont(maybeTarget(noComma)); + if (type == "import") return cont(expression); return cont(); } function maybeexpression(type) { if (type.match(/[;\}\)\],]/)) return pass(); return pass(expression); } - function maybeexpressionNoComma(type) { - if (type.match(/[;\}\)\],]/)) return pass(); - return pass(expressionNoComma); - } function maybeoperatorComma(type, value) { if (type == ",") return cont(expression); @@ -411,7 +454,9 @@ CodeMirror.defineMode("javascript", func var expr = noComma == false ? expression : expressionNoComma; if (type == "=>") return cont(pushcontext, noComma ? arrowBodyNoComma : arrowBody, popcontext); if (type == "operator") { - if (/\+\+|--/.test(value)) return cont(me); + if (/\+\+|--/.test(value) || isTS && value == "!") return cont(me); + if (isTS && value == "<" && cx.stream.match(/^([^>]|<.*?>)*>\s*\(/, false)) + return cont(pushlex(">"), commasep(typeexpr, ">"), poplex, me); if (value == "?") return cont(expression, expect(":"), expr); return cont(expr); } @@ -420,6 +465,12 @@ CodeMirror.defineMode("javascript", func if (type == "(") return contCommasep(expressionNoComma, ")", "call", me); if (type == ".") return cont(property, me); if (type == "[") return cont(pushlex("]"), maybeexpression, expect("]"), poplex, me); + if (isTS && value == "as") { cx.marked = "keyword"; return cont(typeexpr, me) } + if (type == "regexp") { + cx.state.lastType = cx.marked = "operator" + cx.stream.backUp(cx.stream.pos - cx.stream.start - 1) + return cont(expr) + } } function quasi(type, value) { if (type != "quasi") return pass(); @@ -444,6 +495,7 @@ CodeMirror.defineMode("javascript", func function maybeTarget(noComma) { return function(type) { if (type == ".") return cont(noComma ? targetNoComma : target); + else if (type == "variable" && isTS) return cont(maybeTypeArgs, noComma ? maybeoperatorNoComma : maybeoperatorComma) else return pass(noComma ? expressionNoComma : expression); }; } @@ -461,21 +513,33 @@ CodeMirror.defineMode("javascript", func if (type == "variable") {cx.marked = "property"; return cont();} } function objprop(type, value) { - if (type == "variable" || cx.style == "keyword") { + if (type == "async") { + cx.marked = "property"; + return cont(objprop); + } else if (type == "variable" || cx.style == "keyword") { cx.marked = "property"; if (value == "get" || value == "set") return cont(getterSetter); + var m // Work around fat-arrow-detection complication for detecting typescript typed arrow params + if (isTS && cx.state.fatArrowAt == cx.stream.start && (m = cx.stream.match(/^\s*:\s*/, false))) + cx.state.fatArrowAt = cx.stream.pos + m[0].length return cont(afterprop); } else if (type == "number" || type == "string") { cx.marked = jsonldMode ? 
"property" : (cx.style + " property"); return cont(afterprop); } else if (type == "jsonld-keyword") { return cont(afterprop); - } else if (type == "modifier") { + } else if (isTS && isModifier(value)) { + cx.marked = "keyword" return cont(objprop) } else if (type == "[") { - return cont(expression, expect("]"), afterprop); + return cont(expression, maybetype, expect("]"), afterprop); } else if (type == "spread") { - return cont(expression); + return cont(expressionNoComma, afterprop); + } else if (value == "*") { + cx.marked = "keyword"; + return cont(objprop); + } else if (type == ":") { + return pass(afterprop) } } function getterSetter(type) { @@ -487,18 +551,22 @@ CodeMirror.defineMode("javascript", func if (type == ":") return cont(expressionNoComma); if (type == "(") return pass(functiondef); } - function commasep(what, end) { - function proceed(type) { - if (type == ",") { + function commasep(what, end, sep) { + function proceed(type, value) { + if (sep ? sep.indexOf(type) > -1 : type == ",") { var lex = cx.state.lexical; if (lex.info == "call") lex.pos = (lex.pos || 0) + 1; - return cont(what, proceed); + return cont(function(type, value) { + if (type == end || value == end) return pass() + return pass(what) + }, proceed); } - if (type == end) return cont(); + if (type == end || value == end) return cont(); + if (sep && sep.indexOf(";") > -1) return pass(what) return cont(expect(end)); } - return function(type) { - if (type == end) return cont(); + return function(type, value) { + if (type == end || value == end) return cont(); return pass(what, proceed); }; } @@ -511,23 +579,91 @@ CodeMirror.defineMode("javascript", func if (type == "}") return cont(); return pass(statement, block); } - function maybetype(type) { - if (isTS && type == ":") return cont(typedef); + function maybetype(type, value) { + if (isTS) { + if (type == ":") return cont(typeexpr); + if (value == "?") return cont(maybetype); + } + } + function maybetypeOrIn(type, value) { + if (isTS && (type == ":" || value == "in")) return cont(typeexpr) + } + function mayberettype(type) { + if (isTS && type == ":") { + if (cx.stream.match(/^\s*\w+\s+is\b/, false)) return cont(expression, isKW, typeexpr) + else return cont(typeexpr) + } + } + function isKW(_, value) { + if (value == "is") { + cx.marked = "keyword" + return cont() + } + } + function typeexpr(type, value) { + if (value == "keyof" || value == "typeof" || value == "infer") { + cx.marked = "keyword" + return cont(value == "typeof" ? expressionNoComma : typeexpr) + } + if (type == "variable" || value == "void") { + cx.marked = "type" + return cont(afterType) + } + if (value == "|" || value == "&") return cont(typeexpr) + if (type == "string" || type == "number" || type == "atom") return cont(afterType); + if (type == "[") return cont(pushlex("]"), commasep(typeexpr, "]", ","), poplex, afterType) + if (type == "{") return cont(pushlex("}"), commasep(typeprop, "}", ",;"), poplex, afterType) + if (type == "(") return cont(commasep(typearg, ")"), maybeReturnType, afterType) + if (type == "<") return cont(commasep(typeexpr, ">"), typeexpr) } - function maybedefault(_, value) { - if (value == "=") return cont(expressionNoComma); + function maybeReturnType(type) { + if (type == "=>") return cont(typeexpr) + } + function typeprop(type, value) { + if (type == "variable" || cx.style == "keyword") { + cx.marked = "property" + return cont(typeprop) + } else if (value == "?" 
|| type == "number" || type == "string") { + return cont(typeprop) + } else if (type == ":") { + return cont(typeexpr) + } else if (type == "[") { + return cont(expect("variable"), maybetypeOrIn, expect("]"), typeprop) + } else if (type == "(") { + return pass(functiondecl, typeprop) + } } - function typedef(type) { - if (type == "variable") {cx.marked = "variable-3"; return cont();} + function typearg(type, value) { + if (type == "variable" && cx.stream.match(/^\s*[?:]/, false) || value == "?") return cont(typearg) + if (type == ":") return cont(typeexpr) + if (type == "spread") return cont(typearg) + return pass(typeexpr) } - function vardef() { + function afterType(type, value) { + if (value == "<") return cont(pushlex(">"), commasep(typeexpr, ">"), poplex, afterType) + if (value == "|" || type == "." || value == "&") return cont(typeexpr) + if (type == "[") return cont(typeexpr, expect("]"), afterType) + if (value == "extends" || value == "implements") { cx.marked = "keyword"; return cont(typeexpr) } + if (value == "?") return cont(typeexpr, expect(":"), typeexpr) + } + function maybeTypeArgs(_, value) { + if (value == "<") return cont(pushlex(">"), commasep(typeexpr, ">"), poplex, afterType) + } + function typeparam() { + return pass(typeexpr, maybeTypeDefault) + } + function maybeTypeDefault(_, value) { + if (value == "=") return cont(typeexpr) + } + function vardef(_, value) { + if (value == "enum") {cx.marked = "keyword"; return cont(enumdef)} return pass(pattern, maybetype, maybeAssign, vardefCont); } function pattern(type, value) { - if (type == "modifier") return cont(pattern) + if (isTS && isModifier(value)) { cx.marked = "keyword"; return cont(pattern) } if (type == "variable") { register(value); return cont(); } if (type == "spread") return cont(pattern); - if (type == "[") return contCommasep(pattern, "]"); + if (type == "[") return contCommasep(eltpattern, "]"); if (type == "{") return contCommasep(proppattern, "}"); } function proppattern(type, value) { @@ -538,8 +674,12 @@ CodeMirror.defineMode("javascript", func if (type == "variable") cx.marked = "property"; if (type == "spread") return cont(pattern); if (type == "}") return pass(); + if (type == "[") return cont(expression, expect(']'), expect(':'), proppattern); return cont(expect(":"), pattern, maybeAssign); } + function eltpattern() { + return pass(pattern, maybeAssign) + } function maybeAssign(_type, value) { if (value == "=") return cont(expressionNoComma); } @@ -549,73 +689,109 @@ CodeMirror.defineMode("javascript", func function maybeelse(type, value) { if (type == "keyword b" && value == "else") return cont(pushlex("form", "else"), statement, poplex); } - function forspec(type) { - if (type == "(") return cont(pushlex(")"), forspec1, expect(")"), poplex); + function forspec(type, value) { + if (value == "await") return cont(forspec); + if (type == "(") return cont(pushlex(")"), forspec1, poplex); } function forspec1(type) { - if (type == "var") return cont(vardef, expect(";"), forspec2); - if (type == ";") return cont(forspec2); - if (type == "variable") return cont(formaybeinof); - return pass(expression, expect(";"), forspec2); - } - function formaybeinof(_type, value) { - if (value == "in" || value == "of") { cx.marked = "keyword"; return cont(expression); } - return cont(maybeoperatorComma, forspec2); + if (type == "var") return cont(vardef, forspec2); + if (type == "variable") return cont(forspec2); + return pass(forspec2) } function forspec2(type, value) { - if (type == ";") return cont(forspec3); - if 
(value == "in" || value == "of") { cx.marked = "keyword"; return cont(expression); } - return pass(expression, expect(";"), forspec3); - } - function forspec3(type) { - if (type != ")") cont(expression); + if (type == ")") return cont() + if (type == ";") return cont(forspec2) + if (value == "in" || value == "of") { cx.marked = "keyword"; return cont(expression, forspec2) } + return pass(expression, forspec2) } function functiondef(type, value) { if (value == "*") {cx.marked = "keyword"; return cont(functiondef);} if (type == "variable") {register(value); return cont(functiondef);} - if (type == "(") return cont(pushcontext, pushlex(")"), commasep(funarg, ")"), poplex, statement, popcontext); + if (type == "(") return cont(pushcontext, pushlex(")"), commasep(funarg, ")"), poplex, mayberettype, statement, popcontext); + if (isTS && value == "<") return cont(pushlex(">"), commasep(typeparam, ">"), poplex, functiondef) + } + function functiondecl(type, value) { + if (value == "*") {cx.marked = "keyword"; return cont(functiondecl);} + if (type == "variable") {register(value); return cont(functiondecl);} + if (type == "(") return cont(pushcontext, pushlex(")"), commasep(funarg, ")"), poplex, mayberettype, popcontext); + if (isTS && value == "<") return cont(pushlex(">"), commasep(typeparam, ">"), poplex, functiondecl) } - function funarg(type) { + function typename(type, value) { + if (type == "keyword" || type == "variable") { + cx.marked = "type" + return cont(typename) + } else if (value == "<") { + return cont(pushlex(">"), commasep(typeparam, ">"), poplex) + } + } + function funarg(type, value) { + if (value == "@") cont(expression, funarg) if (type == "spread") return cont(funarg); - return pass(pattern, maybetype, maybedefault); + if (isTS && isModifier(value)) { cx.marked = "keyword"; return cont(funarg); } + if (isTS && type == "this") return cont(maybetype, maybeAssign) + return pass(pattern, maybetype, maybeAssign); + } + function classExpression(type, value) { + // Class expressions may have an optional name. + if (type == "variable") return className(type, value); + return classNameAfter(type, value); } function className(type, value) { if (type == "variable") {register(value); return cont(classNameAfter);} } function classNameAfter(type, value) { - if (value == "extends") return cont(expression, classNameAfter); + if (value == "<") return cont(pushlex(">"), commasep(typeparam, ">"), poplex, classNameAfter) + if (value == "extends" || value == "implements" || (isTS && type == ",")) { + if (value == "implements") cx.marked = "keyword"; + return cont(isTS ? typeexpr : expression, classNameAfter); + } if (type == "{") return cont(pushlex("}"), classBody, poplex); } function classBody(type, value) { + if (type == "async" || + (type == "variable" && + (value == "static" || value == "get" || value == "set" || (isTS && isModifier(value))) && + cx.stream.match(/^\s+[\w$\xa1-\uffff]/, false))) { + cx.marked = "keyword"; + return cont(classBody); + } if (type == "variable" || cx.style == "keyword") { - if (value == "static") { - cx.marked = "keyword"; - return cont(classBody); - } cx.marked = "property"; - if (value == "get" || value == "set") return cont(classGetterSetter, functiondef, classBody); - return cont(functiondef, classBody); + return cont(isTS ? classfield : functiondef, classBody); } + if (type == "number" || type == "string") return cont(isTS ? classfield : functiondef, classBody); + if (type == "[") + return cont(expression, maybetype, expect("]"), isTS ? 
classfield : functiondef, classBody) if (value == "*") { cx.marked = "keyword"; return cont(classBody); } - if (type == ";") return cont(classBody); + if (isTS && type == "(") return pass(functiondecl, classBody) + if (type == ";" || type == ",") return cont(classBody); if (type == "}") return cont(); + if (value == "@") return cont(expression, classBody) } - function classGetterSetter(type) { - if (type != "variable") return pass(); - cx.marked = "property"; - return cont(); + function classfield(type, value) { + if (value == "?") return cont(classfield) + if (type == ":") return cont(typeexpr, maybeAssign) + if (value == "=") return cont(expressionNoComma) + var context = cx.state.lexical.prev, isInterface = context && context.info == "interface" + return pass(isInterface ? functiondecl : functiondef) } - function afterExport(_type, value) { + function afterExport(type, value) { if (value == "*") { cx.marked = "keyword"; return cont(maybeFrom, expect(";")); } if (value == "default") { cx.marked = "keyword"; return cont(expression, expect(";")); } + if (type == "{") return cont(commasep(exportField, "}"), maybeFrom, expect(";")); return pass(statement); } + function exportField(type, value) { + if (value == "as") { cx.marked = "keyword"; return cont(expect("variable")); } + if (type == "variable") return pass(expressionNoComma, exportField); + } function afterImport(type) { if (type == "string") return cont(); - return pass(importSpec, maybeFrom); + if (type == "(") return pass(expression); + return pass(importSpec, maybeMoreImports, maybeFrom); } function importSpec(type, value) { if (type == "{") return contCommasep(importSpec, "}"); @@ -623,6 +799,9 @@ CodeMirror.defineMode("javascript", func if (value == "*") cx.marked = "keyword"; return cont(maybeAs); } + function maybeMoreImports(type) { + if (type == ",") return cont(importSpec, maybeMoreImports) + } function maybeAs(_type, value) { if (value == "as") { cx.marked = "keyword"; return cont(importSpec); } } @@ -631,16 +810,13 @@ CodeMirror.defineMode("javascript", func } function arrayLiteral(type) { if (type == "]") return cont(); - return pass(expressionNoComma, maybeArrayComprehension); - } - function maybeArrayComprehension(type) { - if (type == "for") return pass(comprehension, expect("]")); - if (type == ",") return cont(commasep(maybeexpressionNoComma, "]")); return pass(commasep(expressionNoComma, "]")); } - function comprehension(type) { - if (type == "for") return cont(forspec, comprehension); - if (type == "if") return cont(expression, comprehension); + function enumdef() { + return pass(pushlex("form"), pattern, expect("{"), pushlex("}"), commasep(enummember, "}"), poplex, poplex) + } + function enummember() { + return pass(pattern, maybeAssign); } function isContinuedStatement(state, textAfter) { @@ -649,6 +825,12 @@ CodeMirror.defineMode("javascript", func /[,.]/.test(textAfter.charAt(0)); } + function expressionAllowed(stream, state, backUp) { + return state.tokenize == tokenBase && + /^(?:operator|sof|keyword [bcd]|case|new|export|default|spread|[\[{}\(,;:]|=>)$/.test(state.lastType) || + (state.lastType == "quasi" && /\{\s*$/.test(stream.string.slice(0, stream.pos - (backUp || 0)))) + } + // Interface return { @@ -659,7 +841,7 @@ CodeMirror.defineMode("javascript", func cc: [], lexical: new JSLexical((basecolumn || 0) - indentUnit, 0, "block", false), localVars: parserConfig.localVars, - context: parserConfig.localVars && {vars: parserConfig.localVars}, + context: parserConfig.localVars && new Context(null, null, 
false), indented: basecolumn || 0 }; if (parserConfig.globalVars && typeof parserConfig.globalVars == "object") @@ -684,19 +866,23 @@ CodeMirror.defineMode("javascript", func indent: function(state, textAfter) { if (state.tokenize == tokenComment) return CodeMirror.Pass; if (state.tokenize != tokenBase) return 0; - var firstChar = textAfter && textAfter.charAt(0), lexical = state.lexical; + var firstChar = textAfter && textAfter.charAt(0), lexical = state.lexical, top // Kludge to prevent 'maybelse' from blocking lexical scope pops if (!/^\s*else\b/.test(textAfter)) for (var i = state.cc.length - 1; i >= 0; --i) { var c = state.cc[i]; if (c == poplex) lexical = lexical.prev; else if (c != maybeelse) break; } - if (lexical.type == "stat" && firstChar == "}") lexical = lexical.prev; + while ((lexical.type == "stat" || lexical.type == "form") && + (firstChar == "}" || ((top = state.cc[state.cc.length - 1]) && + (top == maybeoperatorComma || top == maybeoperatorNoComma) && + !/^[,\.=+\-*:?[\(]/.test(textAfter)))) + lexical = lexical.prev; if (statementIndent && lexical.type == ")" && lexical.prev.type == "stat") lexical = lexical.prev; var type = lexical.type, closing = firstChar == type; - if (type == "vardef") return lexical.indented + (state.lastType == "operator" || state.lastType == "," ? lexical.info + 1 : 0); + if (type == "vardef") return lexical.indented + (state.lastType == "operator" || state.lastType == "," ? lexical.info.length + 1 : 0); else if (type == "form" && firstChar == "{") return lexical.indented; else if (type == "form") return lexical.indented + indentUnit; else if (type == "stat") @@ -710,6 +896,7 @@ CodeMirror.defineMode("javascript", func electricInput: /^\s*(?:case .*?:|default:|\{|\})$/, blockCommentStart: jsonMode ? null : "/*", blockCommentEnd: jsonMode ? null : "*/", + blockCommentContinue: jsonMode ? null : " * ", lineComment: jsonMode ? 
null : "//", fold: "brace", closeBrackets: "()[]{}''\"\"``", @@ -719,6 +906,7 @@ CodeMirror.defineMode("javascript", func jsonMode: jsonMode, expressionAllowed: expressionAllowed, + skipExpression: function(state) { var top = state.cc[state.cc.length - 1] if (top == expression || top == expressionNoComma) state.cc.pop() diff --git a/rhodecode/public/js/mode/jinja2/jinja2.js b/rhodecode/public/js/mode/jinja2/jinja2.js --- a/rhodecode/public/js/mode/jinja2/jinja2.js +++ b/rhodecode/public/js/mode/jinja2/jinja2.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS @@ -107,7 +107,7 @@ } return "variable"; } else if (stream.eat("{")) { - if (ch = stream.eat("#")) { + if (stream.eat("#")) { state.incomment = true; if(!stream.skipTo("#}")) { stream.skipToEnd(); @@ -136,7 +136,11 @@ }, token: function (stream, state) { return state.tokenize(stream, state); - } + }, + blockCommentStart: "{#", + blockCommentEnd: "#}" }; }); + + CodeMirror.defineMIME("text/jinja2", "jinja2"); }); diff --git a/rhodecode/public/js/mode/jsx/jsx.js b/rhodecode/public/js/mode/jsx/jsx.js --- a/rhodecode/public/js/mode/jsx/jsx.js +++ b/rhodecode/public/js/mode/jsx/jsx.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS @@ -25,14 +25,14 @@ context.prev && copyContext(context.prev)) } - CodeMirror.defineMode("jsx", function(config) { - var xmlMode = CodeMirror.getMode(config, {name: "xml", allowMissing: true, multilineTagIndentPastTag: false}) - var jsMode = CodeMirror.getMode(config, "javascript") + CodeMirror.defineMode("jsx", function(config, modeConfig) { + var xmlMode = CodeMirror.getMode(config, {name: "xml", allowMissing: true, multilineTagIndentPastTag: false, allowMissingTagName: true}) + var jsMode = CodeMirror.getMode(config, modeConfig && modeConfig.base || "javascript") function flatXMLIndent(state) { var tagName = state.tagName state.tagName = null - var result = xmlMode.indent(state, "") + var result = xmlMode.indent(state, "", "") state.tagName = tagName return result } @@ -105,7 +105,7 @@ function jsToken(stream, state, cx) { if (stream.peek() == "<" && jsMode.expressionAllowed(stream, cx.state)) { jsMode.skipExpression(cx.state) - state.context = new Context(CodeMirror.startState(xmlMode, jsMode.indent(cx.state, "")), + state.context = new Context(CodeMirror.startState(xmlMode, jsMode.indent(cx.state, "", "")), xmlMode, 0, state.context) return null } @@ -144,4 +144,5 @@ }, "xml", "javascript") CodeMirror.defineMIME("text/jsx", "jsx") -}) + CodeMirror.defineMIME("text/typescript-jsx", {name: "jsx", base: {name: "javascript", typescript: true}}) +}); diff --git a/rhodecode/public/js/mode/julia/julia.js b/rhodecode/public/js/mode/julia/julia.js --- a/rhodecode/public/js/mode/julia/julia.js +++ b/rhodecode/public/js/mode/julia/julia.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == 
"object") // CommonJS @@ -11,61 +11,78 @@ })(function(CodeMirror) { "use strict"; -CodeMirror.defineMode("julia", function(_conf, parserConf) { - var ERRORCLASS = 'error'; - - function wordRegexp(words) { - return new RegExp("^((" + words.join(")|(") + "))\\b"); +CodeMirror.defineMode("julia", function(config, parserConf) { + function wordRegexp(words, end) { + if (typeof end === "undefined") { end = "\\b"; } + return new RegExp("^((" + words.join(")|(") + "))" + end); } - var operators = parserConf.operators || /^\.?[|&^\\%*+\-<>!=\/]=?|\?|~|:|\$|\.[<>]|<<=?|>>>?=?|\.[<>=]=|->?|\/\/|\bin\b(?!\()|[\u2208\u2209](?!\()/; + var octChar = "\\\\[0-7]{1,3}"; + var hexChar = "\\\\x[A-Fa-f0-9]{1,2}"; + var sChar = "\\\\[abefnrtv0%?'\"\\\\]"; + var uChar = "([^\\u0027\\u005C\\uD800-\\uDFFF]|[\\uD800-\\uDFFF][\\uDC00-\\uDFFF])"; + + var operators = parserConf.operators || wordRegexp([ + "[<>]:", "[<>=]=", "<<=?", ">>>?=?", "=>", "->", "\\/\\/", + "[\\\\%*+\\-<>!=\\/^|&\\u00F7\\u22BB]=?", "\\?", "\\$", "~", ":", + "\\u00D7", "\\u2208", "\\u2209", "\\u220B", "\\u220C", "\\u2218", + "\\u221A", "\\u221B", "\\u2229", "\\u222A", "\\u2260", "\\u2264", + "\\u2265", "\\u2286", "\\u2288", "\\u228A", "\\u22C5", + "\\b(in|isa)\\b(?!\.?\\()"], ""); var delimiters = parserConf.delimiters || /^[;,()[\]{}]/; - var identifiers = parserConf.identifiers || /^[_A-Za-z\u00A1-\uFFFF][_A-Za-z0-9\u00A1-\uFFFF]*!*/; - var blockOpeners = ["begin", "function", "type", "immutable", "let", "macro", "for", "while", "quote", "if", "else", "elseif", "try", "finally", "catch", "do"]; - var blockClosers = ["end", "else", "elseif", "catch", "finally"]; - var keywordList = ['if', 'else', 'elseif', 'while', 'for', 'begin', 'let', 'end', 'do', 'try', 'catch', 'finally', 'return', 'break', 'continue', 'global', 'local', 'const', 'export', 'import', 'importall', 'using', 'function', 'macro', 'module', 'baremodule', 'type', 'immutable', 'quote', 'typealias', 'abstract', 'bitstype']; - var builtinList = ['true', 'false', 'nothing', 'NaN', 'Inf']; + var identifiers = parserConf.identifiers || + /^[_A-Za-z\u00A1-\u2217\u2219-\uFFFF][\w\u00A1-\u2217\u2219-\uFFFF]*!*/; + + var chars = wordRegexp([octChar, hexChar, sChar, uChar], "'"); + + var openersList = ["begin", "function", "type", "struct", "immutable", "let", + "macro", "for", "while", "quote", "if", "else", "elseif", "try", + "finally", "catch", "do"]; - //var stringPrefixes = new RegExp("^[br]?('|\")") - var stringPrefixes = /^(`|'|"{3}|([brv]?"))/; - var keywords = wordRegexp(keywordList); - var builtins = wordRegexp(builtinList); - var openers = wordRegexp(blockOpeners); - var closers = wordRegexp(blockClosers); - var macro = /^@[_A-Za-z][_A-Za-z0-9]*/; - var symbol = /^:[_A-Za-z\u00A1-\uFFFF][_A-Za-z0-9\u00A1-\uFFFF]*!*/; - var typeAnnotation = /^::[^.,;"{()=$\s]+({[^}]*}+)*/; + var closersList = ["end", "else", "elseif", "catch", "finally"]; + + var keywordsList = ["if", "else", "elseif", "while", "for", "begin", "let", + "end", "do", "try", "catch", "finally", "return", "break", "continue", + "global", "local", "const", "export", "import", "importall", "using", + "function", "where", "macro", "module", "baremodule", "struct", "type", + "mutable", "immutable", "quote", "typealias", "abstract", "primitive", + "bitstype"]; + + var builtinsList = ["true", "false", "nothing", "NaN", "Inf"]; + + CodeMirror.registerHelper("hintWords", "julia", keywordsList.concat(builtinsList)); + + var openers = wordRegexp(openersList); + var closers = wordRegexp(closersList); + var keywords = 
wordRegexp(keywordsList); + var builtins = wordRegexp(builtinsList); + + var macro = /^@[_A-Za-z][\w]*/; + var symbol = /^:[_A-Za-z\u00A1-\uFFFF][\w\u00A1-\uFFFF]*!*/; + var stringPrefixes = /^(`|([_A-Za-z\u00A1-\uFFFF]*"("")?))/; function inArray(state) { - var ch = currentScope(state); - if (ch == '[') { - return true; - } - return false; + return (state.nestedArrays > 0); + } + + function inGenerator(state) { + return (state.nestedGenerators > 0); } - function currentScope(state) { - if (state.scopes.length == 0) { + function currentScope(state, n) { + if (typeof(n) === "undefined") { n = 0; } + if (state.scopes.length <= n) { return null; } - return state.scopes[state.scopes.length - 1]; + return state.scopes[state.scopes.length - (n + 1)]; } // tokenizers function tokenBase(stream, state) { - //Handle multiline comments - if (stream.match(/^#=\s*/)) { - state.scopes.push('#='); - } - if (currentScope(state) == '#=' && stream.match(/^=#/)) { - state.scopes.pop(); - return 'comment'; - } - if (state.scopes.indexOf('#=') >= 0) { - if (!stream.match(/.*?(?=(#=|=#))/)) { - stream.skipToEnd(); - } - return 'comment'; + // Handle multiline comments + if (stream.match(/^#=/, false)) { + state.tokenize = tokenComment; + return state.tokenize(stream, state); } // Handle scope changes @@ -74,14 +91,17 @@ CodeMirror.defineMode("julia", function( leavingExpr = false; } state.leavingExpr = false; + if (leavingExpr) { if (stream.match(/^'+/)) { - return 'operator'; + return "operator"; } } - if (stream.match(/^\.{2,3}/)) { - return 'operator'; + if (stream.match(/\.{4,}/)) { + return "error"; + } else if (stream.match(/\.{1,3}/)) { + return "operator"; } if (stream.eatSpace()) { @@ -93,105 +113,101 @@ CodeMirror.defineMode("julia", function( // Handle single line comments if (ch === '#') { stream.skipToEnd(); - return 'comment'; + return "comment"; } if (ch === '[') { state.scopes.push('['); + state.nestedArrays++; } - var scope = currentScope(state); + if (ch === '(') { + state.scopes.push('('); + state.nestedGenerators++; + } - if (scope == '[' && ch === ']') { + if (inArray(state) && ch === ']') { + if (currentScope(state) === "if") { state.scopes.pop(); } + while (currentScope(state) === "for") { state.scopes.pop(); } state.scopes.pop(); + state.nestedArrays--; state.leavingExpr = true; } - if (scope == '(' && ch === ')') { + if (inGenerator(state) && ch === ')') { + if (currentScope(state) === "if") { state.scopes.pop(); } + while (currentScope(state) === "for") { state.scopes.pop(); } state.scopes.pop(); + state.nestedGenerators--; state.leavingExpr = true; } + if (inArray(state)) { + if (state.lastToken == "end" && stream.match(/^:/)) { + return "operator"; + } + if (stream.match(/^end/)) { + return "number"; + } + } + var match; - if (!inArray(state) && (match=stream.match(openers, false))) { - state.scopes.push(match); + if (match = stream.match(openers, false)) { + state.scopes.push(match[0]); } - if (!inArray(state) && stream.match(closers, false)) { + if (stream.match(closers, false)) { state.scopes.pop(); } - if (inArray(state)) { - if (state.lastToken == 'end' && stream.match(/^:/)) { - return 'operator'; - } - if (stream.match(/^end/)) { - return 'number'; - } + // Handle type annotations + if (stream.match(/^::(?![:\$])/)) { + state.tokenize = tokenAnnotation; + return state.tokenize(stream, state); } - if (stream.match(/^=>/)) { - return 'operator'; + // Handle symbols + if (!leavingExpr && stream.match(symbol) || + 
stream.match(/:([<>]:|<<=?|>>>?=?|->|\/\/|\.{2,3}|[\.\\%*+\-<>!\/^|&]=?|[~\?\$])/)) { + return "builtin"; + } + + // Handle parametric types + //if (stream.match(/^{[^}]*}(?=\()/)) { + // return "builtin"; + //} + + // Handle operators and Delimiters + if (stream.match(operators)) { + return "operator"; } // Handle Number Literals - if (stream.match(/^[0-9\.]/, false)) { + if (stream.match(/^\.?\d/, false)) { var imMatcher = RegExp(/^im\b/); - var floatLiteral = false; + var numberLiteral = false; // Floats - if (stream.match(/^\d*\.(?!\.)\d+([ef][\+\-]?\d+)?/i)) { floatLiteral = true; } - if (stream.match(/^\d+\.(?!\.)\d*/)) { floatLiteral = true; } - if (stream.match(/^\.\d+/)) { floatLiteral = true; } - if (stream.match(/^0x\.[0-9a-f]+p[\+\-]?\d+/i)) { floatLiteral = true; } - if (floatLiteral) { - // Float literals may be "imaginary" - stream.match(imMatcher); - state.leavingExpr = true; - return 'number'; - } + if (stream.match(/^(?:(?:\d[_\d]*)?\.(?!\.)(?:\d[_\d]*)?|\d[_\d]*\.(?!\.)(?:\d[_\d]*))?([Eef][\+\-]?[_\d]+)?/i)) { numberLiteral = true; } + if (stream.match(/^0x\.[0-9a-f_]+p[\+\-]?[_\d]+/i)) { numberLiteral = true; } // Integers - var intLiteral = false; - // Hex - if (stream.match(/^0x[0-9a-f]+/i)) { intLiteral = true; } - // Binary - if (stream.match(/^0b[01]+/i)) { intLiteral = true; } - // Octal - if (stream.match(/^0o[0-7]+/i)) { intLiteral = true; } - // Decimal - if (stream.match(/^[1-9]\d*(e[\+\-]?\d+)?/)) { - intLiteral = true; - } + if (stream.match(/^0x[0-9a-f_]+/i)) { numberLiteral = true; } // Hex + if (stream.match(/^0b[01_]+/i)) { numberLiteral = true; } // Binary + if (stream.match(/^0o[0-7_]+/i)) { numberLiteral = true; } // Octal + if (stream.match(/^[1-9][_\d]*(e[\+\-]?\d+)?/)) { numberLiteral = true; } // Decimal // Zero by itself with no other piece of number. 
- if (stream.match(/^0(?![\dx])/i)) { intLiteral = true; } - if (intLiteral) { + if (stream.match(/^0(?![\dx])/i)) { numberLiteral = true; } + if (numberLiteral) { // Integer literals may be "long" stream.match(imMatcher); state.leavingExpr = true; - return 'number'; + return "number"; } } - if (stream.match(/^<:/)) { - return 'operator'; - } - - if (stream.match(typeAnnotation)) { - return 'builtin'; - } - - // Handle symbols - if (!leavingExpr && stream.match(symbol) || stream.match(/:\./)) { - return 'builtin'; - } - - // Handle parametric types - if (stream.match(/^{[^}]*}(?=\()/)) { - return 'builtin'; - } - - // Handle operators and Delimiters - if (stream.match(operators)) { - return 'operator'; + // Handle Chars + if (stream.match(/^'/)) { + state.tokenize = tokenChar; + return state.tokenize(stream, state); } // Handle Strings @@ -201,7 +217,7 @@ CodeMirror.defineMode("julia", function( } if (stream.match(macro)) { - return 'meta'; + return "meta"; } if (stream.match(delimiters)) { @@ -209,41 +225,40 @@ CodeMirror.defineMode("julia", function( } if (stream.match(keywords)) { - return 'keyword'; + return "keyword"; } if (stream.match(builtins)) { - return 'builtin'; + return "builtin"; } - var isDefinition = state.isDefinition || - state.lastToken == 'function' || - state.lastToken == 'macro' || - state.lastToken == 'type' || - state.lastToken == 'immutable'; + var isDefinition = state.isDefinition || state.lastToken == "function" || + state.lastToken == "macro" || state.lastToken == "type" || + state.lastToken == "struct" || state.lastToken == "immutable"; if (stream.match(identifiers)) { if (isDefinition) { if (stream.peek() === '.') { state.isDefinition = true; - return 'variable'; + return "variable"; } state.isDefinition = false; - return 'def'; + return "def"; } if (stream.match(/^({[^}]*})*\(/, false)) { - return callOrDef(stream, state); + state.tokenize = tokenCallOrDef; + return state.tokenize(stream, state); } state.leavingExpr = true; - return 'variable'; + return "variable"; } // Handle non-detected items stream.next(); - return ERRORCLASS; + return "error"; } - function callOrDef(stream, state) { + function tokenCallOrDef(stream, state) { var match = stream.match(/^(\(\s*)/); if (match) { if (state.firstParenPos < 0) @@ -255,13 +270,14 @@ CodeMirror.defineMode("julia", function( state.scopes.pop(); state.charsAdvanced += 1; if (state.scopes.length <= state.firstParenPos) { - var isDefinition = stream.match(/^\s*?=(?!=)/, false); + var isDefinition = stream.match(/^(\s*where\s+[^\s=]+)*\s*?=(?!=)/, false); stream.backUp(state.charsAdvanced); state.firstParenPos = -1; state.charsAdvanced = 0; + state.tokenize = tokenBase; if (isDefinition) - return 'def'; - return 'builtin'; + return "def"; + return "builtin"; } } // Unfortunately javascript does not support multiline strings, so we have @@ -269,48 +285,93 @@ CodeMirror.defineMode("julia", function( // over two or more lines. 
if (stream.match(/^$/g, false)) { stream.backUp(state.charsAdvanced); - while (state.scopes.length > state.firstParenPos + 1) + while (state.scopes.length > state.firstParenPos) state.scopes.pop(); state.firstParenPos = -1; state.charsAdvanced = 0; - return 'builtin'; + state.tokenize = tokenBase; + return "builtin"; } state.charsAdvanced += stream.match(/^([^()]*)/)[1].length; - return callOrDef(stream, state); + return state.tokenize(stream, state); + } + + function tokenAnnotation(stream, state) { + stream.match(/.*?(?=,|;|{|}|\(|\)|=|$|\s)/); + if (stream.match(/^{/)) { + state.nestedParameters++; + } else if (stream.match(/^}/) && state.nestedParameters > 0) { + state.nestedParameters--; + } + if (state.nestedParameters > 0) { + stream.match(/.*?(?={|})/) || stream.next(); + } else if (state.nestedParameters == 0) { + state.tokenize = tokenBase; + } + return "builtin"; + } + + function tokenComment(stream, state) { + if (stream.match(/^#=/)) { + state.nestedComments++; + } + if (!stream.match(/.*?(?=(#=|=#))/)) { + stream.skipToEnd(); + } + if (stream.match(/^=#/)) { + state.nestedComments--; + if (state.nestedComments == 0) + state.tokenize = tokenBase; + } + return "comment"; + } + + function tokenChar(stream, state) { + var isChar = false, match; + if (stream.match(chars)) { + isChar = true; + } else if (match = stream.match(/\\u([a-f0-9]{1,4})(?=')/i)) { + var value = parseInt(match[1], 16); + if (value <= 55295 || value >= 57344) { // (U+0,U+D7FF), (U+E000,U+FFFF) + isChar = true; + stream.next(); + } + } else if (match = stream.match(/\\U([A-Fa-f0-9]{5,8})(?=')/)) { + var value = parseInt(match[1], 16); + if (value <= 1114111) { // U+10FFFF + isChar = true; + stream.next(); + } + } + if (isChar) { + state.leavingExpr = true; + state.tokenize = tokenBase; + return "string"; + } + if (!stream.match(/^[^']+(?=')/)) { stream.skipToEnd(); } + if (stream.match(/^'/)) { state.tokenize = tokenBase; } + return "error"; } function tokenStringFactory(delimiter) { - while ('bruv'.indexOf(delimiter.charAt(0).toLowerCase()) >= 0) { - delimiter = delimiter.substr(1); + if (delimiter.substr(-3) === '"""') { + delimiter = '"""'; + } else if (delimiter.substr(-1) === '"') { + delimiter = '"'; } - var singleline = delimiter == "'"; - var OUTCLASS = 'string'; - function tokenString(stream, state) { - while (!stream.eol()) { - stream.eatWhile(/[^'"\\]/); - if (stream.eat('\\')) { - stream.next(); - if (singleline && stream.eol()) { - return OUTCLASS; - } - } else if (stream.match(delimiter)) { - state.tokenize = tokenBase; - return OUTCLASS; - } else { - stream.eat(/['"]/); - } + if (stream.eat('\\')) { + stream.next(); + } else if (stream.match(delimiter)) { + state.tokenize = tokenBase; + state.leavingExpr = true; + return "string"; + } else { + stream.eat(/[`"]/); } - if (singleline) { - if (parserConf.singleLineStringErrors) { - return ERRORCLASS; - } else { - state.tokenize = tokenBase; - } - } - return OUTCLASS; + stream.eatWhile(/[^\\`"]/); + return "string"; } - tokenString.isString = true; return tokenString; } @@ -322,6 +383,10 @@ CodeMirror.defineMode("julia", function( lastToken: null, leavingExpr: false, isDefinition: false, + nestedArrays: 0, + nestedComments: 0, + nestedGenerators: 0, + nestedParameters: 0, charsAdvanced: 0, firstParenPos: -1 }; @@ -335,25 +400,25 @@ CodeMirror.defineMode("julia", function( state.lastToken = current; } - // Handle '.' 
connected identifiers - if (current === '.') { - style = stream.match(identifiers, false) || stream.match(macro, false) || - stream.match(/\(/, false) ? 'operator' : ERRORCLASS; - } return style; }, indent: function(state, textAfter) { var delta = 0; - if (textAfter == "end" || textAfter == "]" || textAfter == "}" || textAfter == "else" || textAfter == "elseif" || textAfter == "catch" || textAfter == "finally") { + if ( textAfter === ']' || textAfter === ')' || textAfter === "end" || + textAfter === "else" || textAfter === "catch" || textAfter === "elseif" || + textAfter === "finally" ) { delta = -1; } - return (state.scopes.length + delta) * _conf.indentUnit; + return (state.scopes.length + delta) * config.indentUnit; }, + electricInput: /\b(end|else|catch|finally)\b/, + blockCommentStart: "#=", + blockCommentEnd: "=#", lineComment: "#", - fold: "indent", - electricChars: "edlsifyh]}" + closeBrackets: "()[]{}\"\"", + fold: "indent" }; return external; }); diff --git a/rhodecode/public/js/mode/livescript/livescript.js b/rhodecode/public/js/mode/livescript/livescript.js --- a/rhodecode/public/js/mode/livescript/livescript.js +++ b/rhodecode/public/js/mode/livescript/livescript.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE /** * Link to the project's GitHub page: @@ -50,7 +50,7 @@ startState: function(){ return { next: 'start', - lastToken: null + lastToken: {style: null, indent: 0, content: ""} }; }, token: function(stream, state){ diff --git a/rhodecode/public/js/mode/lua/lua.js b/rhodecode/public/js/mode/lua/lua.js --- a/rhodecode/public/js/mode/lua/lua.js +++ b/rhodecode/public/js/mode/lua/lua.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE // LUA mode. Ported to CodeMirror 2 from Franciszek Wawrzak's // CodeMirror 1 mode. diff --git a/rhodecode/public/js/mode/markdown/markdown.js b/rhodecode/public/js/mode/markdown/markdown.js --- a/rhodecode/public/js/mode/markdown/markdown.js +++ b/rhodecode/public/js/mode/markdown/markdown.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS @@ -13,8 +13,8 @@ CodeMirror.defineMode("markdown", function(cmCfg, modeCfg) { - var htmlFound = CodeMirror.modes.hasOwnProperty("xml"); - var htmlMode = CodeMirror.getMode(cmCfg, htmlFound ? {name: "xml", htmlMode: true} : "text/plain"); + var htmlMode = CodeMirror.getMode(cmCfg, "text/html"); + var htmlModeMissing = htmlMode.name == "null" function getMode(name) { if (CodeMirror.findModeByName) { @@ -35,15 +35,6 @@ CodeMirror.defineMode("markdown", functi if (modeCfg.maxBlockquoteDepth === undefined) modeCfg.maxBlockquoteDepth = 0; - // Should underscores in words open/close em/strong? - if (modeCfg.underscoresBreakWords === undefined) - modeCfg.underscoresBreakWords = true; - - // Use `fencedCodeBlocks` to configure fenced code blocks. false to - // disable, string to specify a precise regexp that the fence should - // match, and true to allow three or more backticks or tildes (as - // per CommonMark). - // Turn on task lists? 
("- [ ] " and "- [x] ") if (modeCfg.taskLists === undefined) modeCfg.taskLists = false; @@ -51,12 +42,19 @@ CodeMirror.defineMode("markdown", functi if (modeCfg.strikethrough === undefined) modeCfg.strikethrough = false; + if (modeCfg.emoji === undefined) + modeCfg.emoji = false; + + if (modeCfg.fencedCodeBlockHighlighting === undefined) + modeCfg.fencedCodeBlockHighlighting = true; + + if (modeCfg.xml === undefined) + modeCfg.xml = true; + // Allow token types to be overridden by user-provided token types. if (modeCfg.tokenTypeOverrides === undefined) modeCfg.tokenTypeOverrides = {}; - var codeDepth = 0; - var tokenTypes = { header: "header", code: "comment", @@ -65,7 +63,9 @@ CodeMirror.defineMode("markdown", functi list2: "variable-3", list3: "keyword", hr: "hr", - image: "tag", + image: "image", + imageAltText: "image-alt-text", + imageMarker: "image-marker", formatting: "formatting", linkInline: "link", linkEmail: "link", @@ -73,7 +73,8 @@ CodeMirror.defineMode("markdown", functi linkHref: "string", em: "em", strong: "strong", - strikethrough: "strikethrough" + strikethrough: "strikethrough", + emoji: "builtin" }; for (var tokenType in tokenTypes) { @@ -83,14 +84,15 @@ CodeMirror.defineMode("markdown", functi } var hrRE = /^([*\-_])(?:\s*\1){2,}\s*$/ - , ulRE = /^[*\-+]\s+/ - , olRE = /^[0-9]+([.)])\s+/ - , taskListRE = /^\[(x| )\](?=\s)/ // Must follow ulRE or olRE + , listRE = /^(?:[*\-+]|^[0-9]+([.)]))\s+/ + , taskListRE = /^\[(x| )\](?=\s)/i // Must follow listRE , atxHeaderRE = modeCfg.allowAtxHeaderWithoutSpace ? /^(#+)/ : /^(#+)(?: |$)/ , setextHeaderRE = /^ *(?:\={1,}|-{1,})\s*$/ - , textRE = /^[^#!\[\]*_\\<>` "'(~]+/ - , fencedCodeRE = new RegExp("^(" + (modeCfg.fencedCodeBlocks === true ? "~~~+|```+" : modeCfg.fencedCodeBlocks) + - ")[ \\t]*([\\w+#]*)"); + , textRE = /^[^#!\[\]*_\\<>` "'(~:]+/ + , fencedCodeRE = /^(~~~+|```+)[ \t]*([\w+#-]*)[^\n`]*$/ + , linkDefRE = /^\s*\[[^\]]+?\]:.*$/ // naive link-definition + , punctuation = 
/[!"#$%&'()*+,\-.\/:;<=>?@\[\\\]^_`{|}~\xA1\xA7\xAB\xB6\xB7\xBB\xBF\u037E\u0387\u055A-\u055F\u0589\u058A\u05BE\u05C0\u05C3\u05C6\u05F3\u05F4\u0609\u060A\u060C\u060D\u061B\u061E\u061F\u066A-\u066D\u06D4\u0700-\u070D\u07F7-\u07F9\u0830-\u083E\u085E\u0964\u0965\u0970\u0AF0\u0DF4\u0E4F\u0E5A\u0E5B\u0F04-\u0F12\u0F14\u0F3A-\u0F3D\u0F85\u0FD0-\u0FD4\u0FD9\u0FDA\u104A-\u104F\u10FB\u1360-\u1368\u1400\u166D\u166E\u169B\u169C\u16EB-\u16ED\u1735\u1736\u17D4-\u17D6\u17D8-\u17DA\u1800-\u180A\u1944\u1945\u1A1E\u1A1F\u1AA0-\u1AA6\u1AA8-\u1AAD\u1B5A-\u1B60\u1BFC-\u1BFF\u1C3B-\u1C3F\u1C7E\u1C7F\u1CC0-\u1CC7\u1CD3\u2010-\u2027\u2030-\u2043\u2045-\u2051\u2053-\u205E\u207D\u207E\u208D\u208E\u2308-\u230B\u2329\u232A\u2768-\u2775\u27C5\u27C6\u27E6-\u27EF\u2983-\u2998\u29D8-\u29DB\u29FC\u29FD\u2CF9-\u2CFC\u2CFE\u2CFF\u2D70\u2E00-\u2E2E\u2E30-\u2E42\u3001-\u3003\u3008-\u3011\u3014-\u301F\u3030\u303D\u30A0\u30FB\uA4FE\uA4FF\uA60D-\uA60F\uA673\uA67E\uA6F2-\uA6F7\uA874-\uA877\uA8CE\uA8CF\uA8F8-\uA8FA\uA8FC\uA92E\uA92F\uA95F\uA9C1-\uA9CD\uA9DE\uA9DF\uAA5C-\uAA5F\uAADE\uAADF\uAAF0\uAAF1\uABEB\uFD3E\uFD3F\uFE10-\uFE19\uFE30-\uFE52\uFE54-\uFE61\uFE63\uFE68\uFE6A\uFE6B\uFF01-\uFF03\uFF05-\uFF0A\uFF0C-\uFF0F\uFF1A\uFF1B\uFF1F\uFF20\uFF3B-\uFF3D\uFF3F\uFF5B\uFF5D\uFF5F-\uFF65]|\uD800[\uDD00-\uDD02\uDF9F\uDFD0]|\uD801\uDD6F|\uD802[\uDC57\uDD1F\uDD3F\uDE50-\uDE58\uDE7F\uDEF0-\uDEF6\uDF39-\uDF3F\uDF99-\uDF9C]|\uD804[\uDC47-\uDC4D\uDCBB\uDCBC\uDCBE-\uDCC1\uDD40-\uDD43\uDD74\uDD75\uDDC5-\uDDC9\uDDCD\uDDDB\uDDDD-\uDDDF\uDE38-\uDE3D\uDEA9]|\uD805[\uDCC6\uDDC1-\uDDD7\uDE41-\uDE43\uDF3C-\uDF3E]|\uD809[\uDC70-\uDC74]|\uD81A[\uDE6E\uDE6F\uDEF5\uDF37-\uDF3B\uDF44]|\uD82F\uDC9F|\uD836[\uDE87-\uDE8B]/ + , expandedTab = " " // CommonMark specifies tab as 4 spaces function switchInline(stream, state, f) { state.f = state.inline = f; @@ -111,6 +113,8 @@ CodeMirror.defineMode("markdown", functi function blankLine(state) { // Reset linkTitle state state.linkTitle = false; + state.linkHref = false; + state.linkText = false; // Reset EM state state.em = false; // Reset STRONG state @@ -121,102 +125,154 @@ CodeMirror.defineMode("markdown", functi state.quote = 0; // Reset state.indentedCode state.indentedCode = false; - if (!htmlFound && state.f == htmlBlock) { - state.f = inlineNormal; - state.block = blockNormal; + if (state.f == htmlBlock) { + var exit = htmlModeMissing + if (!exit) { + var inner = CodeMirror.innerMode(htmlMode, state.htmlState) + exit = inner.mode.name == "xml" && inner.state.tagStart === null && + (!inner.state.context && inner.state.tokenize.isInText) + } + if (exit) { + state.f = inlineNormal; + state.block = blockNormal; + state.htmlState = null; + } } // Reset state.trailingSpace state.trailingSpace = 0; state.trailingSpaceNewLine = false; // Mark this line as blank state.prevLine = state.thisLine - state.thisLine = null + state.thisLine = {stream: null} return null; } function blockNormal(stream, state) { - - var sol = stream.sol(); - - var prevLineIsList = state.list !== false, - prevLineIsIndentedCode = state.indentedCode; + var firstTokenOnLine = stream.column() === state.indentation; + var prevLineLineIsEmpty = lineIsEmpty(state.prevLine.stream); + var prevLineIsIndentedCode = state.indentedCode; + var prevLineIsHr = state.prevLine.hr; + var prevLineIsList = state.list !== false; + var maxNonCodeIndentation = (state.listStack[state.listStack.length - 1] || 0) + 3; state.indentedCode = false; - if (prevLineIsList) { - if (state.indentationDiff >= 0) { // Continued list - if (state.indentationDiff < 4) { // Only 
adjust indentation if *not* a code block - state.indentation -= state.indentationDiff; - } + var lineIndentation = state.indentation; + // compute once per line (on first token) + if (state.indentationDiff === null) { + state.indentationDiff = state.indentation; + if (prevLineIsList) { state.list = null; - } else if (state.indentation > 0) { - state.list = null; - state.listDepth = Math.floor(state.indentation / 4); - } else { // No longer a list - state.list = false; - state.listDepth = 0; + // While this list item's marker's indentation is less than the deepest + // list item's content's indentation,pop the deepest list item + // indentation off the stack, and update block indentation state + while (lineIndentation < state.listStack[state.listStack.length - 1]) { + state.listStack.pop(); + if (state.listStack.length) { + state.indentation = state.listStack[state.listStack.length - 1]; + // less than the first list's indent -> the line is no longer a list + } else { + state.list = false; + } + } + if (state.list !== false) { + state.indentationDiff = lineIndentation - state.listStack[state.listStack.length - 1] + } } } + // not comprehensive (currently only for setext detection purposes) + var allowsInlineContinuation = ( + !prevLineLineIsEmpty && !prevLineIsHr && !state.prevLine.header && + (!prevLineIsList || !prevLineIsIndentedCode) && + !state.prevLine.fencedCodeEnd + ); + + var isHr = (state.list === false || prevLineIsHr || prevLineLineIsEmpty) && + state.indentation <= maxNonCodeIndentation && stream.match(hrRE); + var match = null; - if (state.indentationDiff >= 4) { + if (state.indentationDiff >= 4 && (prevLineIsIndentedCode || state.prevLine.fencedCodeEnd || + state.prevLine.header || prevLineLineIsEmpty)) { stream.skipToEnd(); - if (prevLineIsIndentedCode || lineIsEmpty(state.prevLine)) { - state.indentation -= 4; - state.indentedCode = true; - return tokenTypes.code; - } else { - return null; - } + state.indentedCode = true; + return tokenTypes.code; } else if (stream.eatSpace()) { return null; - } else if ((match = stream.match(atxHeaderRE)) && match[1].length <= 6) { + } else if (firstTokenOnLine && state.indentation <= maxNonCodeIndentation && (match = stream.match(atxHeaderRE)) && match[1].length <= 6) { + state.quote = 0; state.header = match[1].length; + state.thisLine.header = true; if (modeCfg.highlightFormatting) state.formatting = "header"; state.f = state.inline; return getType(state); - } else if (!lineIsEmpty(state.prevLine) && !state.quote && !prevLineIsList && - !prevLineIsIndentedCode && (match = stream.match(setextHeaderRE))) { - state.header = match[0].charAt(0) == '=' ? 1 : 2; - if (modeCfg.highlightFormatting) state.formatting = "header"; - state.f = state.inline; - return getType(state); - } else if (stream.eat('>')) { - state.quote = sol ? 1 : state.quote + 1; + } else if (state.indentation <= maxNonCodeIndentation && stream.eat('>')) { + state.quote = firstTokenOnLine ? 
1 : state.quote + 1; if (modeCfg.highlightFormatting) state.formatting = "quote"; stream.eatSpace(); return getType(state); - } else if (stream.peek() === '[') { - return switchInline(stream, state, footnoteLink); - } else if (stream.match(hrRE, true)) { - state.hr = true; - return tokenTypes.hr; - } else if ((lineIsEmpty(state.prevLine) || prevLineIsList) && (stream.match(ulRE, false) || stream.match(olRE, false))) { - var listType = null; - if (stream.match(ulRE, true)) { - listType = 'ul'; - } else { - stream.match(olRE, true); - listType = 'ol'; - } - state.indentation = stream.column() + stream.current().length; + } else if (!isHr && !state.setext && firstTokenOnLine && state.indentation <= maxNonCodeIndentation && (match = stream.match(listRE))) { + var listType = match[1] ? "ol" : "ul"; + + state.indentation = lineIndentation + stream.current().length; state.list = true; - state.listDepth++; + state.quote = 0; + + // Add this list item's content's indentation to the stack + state.listStack.push(state.indentation); + // Reset inline styles which shouldn't propagate aross list items + state.em = false; + state.strong = false; + state.code = false; + state.strikethrough = false; + if (modeCfg.taskLists && stream.match(taskListRE, false)) { state.taskList = true; } state.f = state.inline; if (modeCfg.highlightFormatting) state.formatting = ["list", "list-" + listType]; return getType(state); - } else if (modeCfg.fencedCodeBlocks && (match = stream.match(fencedCodeRE, true))) { - state.fencedChars = match[1] + } else if (firstTokenOnLine && state.indentation <= maxNonCodeIndentation && (match = stream.match(fencedCodeRE, true))) { + state.quote = 0; + state.fencedEndRE = new RegExp(match[1] + "+ *$"); // try switching mode - state.localMode = getMode(match[2]); - if (state.localMode) state.localState = state.localMode.startState(); + state.localMode = modeCfg.fencedCodeBlockHighlighting && getMode(match[2]); + if (state.localMode) state.localState = CodeMirror.startState(state.localMode); state.f = state.block = local; if (modeCfg.highlightFormatting) state.formatting = "code-block"; - state.code = true; + state.code = -1 return getType(state); + // SETEXT has lowest block-scope precedence after HR, so check it after + // the others (code, blockquote, list...) + } else if ( + // if setext set, indicates line after ---/=== + state.setext || ( + // line before ---/=== + (!allowsInlineContinuation || !prevLineIsList) && !state.quote && state.list === false && + !state.code && !isHr && !linkDefRE.test(stream.string) && + (match = stream.lookAhead(1)) && (match = match.match(setextHeaderRE)) + ) + ) { + if ( !state.setext ) { + state.header = match[0].charAt(0) == '=' ? 
1 : 2; + state.setext = state.header; + } else { + state.header = state.setext; + // has no effect on type so we can reset it now + state.setext = 0; + stream.skipToEnd(); + if (modeCfg.highlightFormatting) state.formatting = "header"; + } + state.thisLine.header = true; + state.f = state.inline; + return getType(state); + } else if (isHr) { + stream.skipToEnd(); + state.hr = true; + state.thisLine.hr = true; + return tokenTypes.hr; + } else if (stream.peek() === '[') { + return switchInline(stream, state, footnoteLink); } return switchInline(stream, state, state.inline); @@ -224,21 +280,35 @@ CodeMirror.defineMode("markdown", functi function htmlBlock(stream, state) { var style = htmlMode.token(stream, state.htmlState); - if ((htmlFound && state.htmlState.tagStart === null && - (!state.htmlState.context && state.htmlState.tokenize.isInText)) || - (state.md_inside && stream.current().indexOf(">") > -1)) { - state.f = inlineNormal; - state.block = blockNormal; - state.htmlState = null; + if (!htmlModeMissing) { + var inner = CodeMirror.innerMode(htmlMode, state.htmlState) + if ((inner.mode.name == "xml" && inner.state.tagStart === null && + (!inner.state.context && inner.state.tokenize.isInText)) || + (state.md_inside && stream.current().indexOf(">") > -1)) { + state.f = inlineNormal; + state.block = blockNormal; + state.htmlState = null; + } } return style; } function local(stream, state) { - if (state.fencedChars && stream.match(state.fencedChars, false)) { + var currListInd = state.listStack[state.listStack.length - 1] || 0; + var hasExitedList = state.indentation < currListInd; + var maxFencedEndInd = currListInd + 3; + if (state.fencedEndRE && state.indentation <= maxFencedEndInd && (hasExitedList || stream.match(state.fencedEndRE))) { + if (modeCfg.highlightFormatting) state.formatting = "code-block"; + var returnType; + if (!hasExitedList) returnType = getType(state) state.localMode = state.localState = null; - state.f = state.block = leavingLocal; - return null; + state.block = blockNormal; + state.f = inlineNormal; + state.fencedEndRE = null; + state.code = 0 + state.thisLine.fencedCodeEnd = true; + if (hasExitedList) return switchBlock(stream, state, state.block); + return returnType; } else if (state.localMode) { return state.localMode.token(stream, state.localState); } else { @@ -247,18 +317,6 @@ CodeMirror.defineMode("markdown", functi } } - function leavingLocal(stream, state) { - stream.match(state.fencedChars); - state.block = blockNormal; - state.f = inlineNormal; - state.fencedChars = null; - if (modeCfg.highlightFormatting) state.formatting = "code-block"; - state.code = true; - var returnType = getType(state); - state.code = false; - return returnType; - } - // Inline function getType(state) { var styles = []; @@ -302,8 +360,12 @@ CodeMirror.defineMode("markdown", functi if (state.strong) { styles.push(tokenTypes.strong); } if (state.em) { styles.push(tokenTypes.em); } if (state.strikethrough) { styles.push(tokenTypes.strikethrough); } + if (state.emoji) { styles.push(tokenTypes.emoji); } if (state.linkText) { styles.push(tokenTypes.linkText); } if (state.code) { styles.push(tokenTypes.code); } + if (state.image) { styles.push(tokenTypes.image); } + if (state.imageAltText) { styles.push(tokenTypes.imageAltText, "link"); } + if (state.imageMarker) { styles.push(tokenTypes.imageMarker); } } if (state.header) { styles.push(tokenTypes.header, tokenTypes.header + "-" + state.header); } @@ -320,7 +382,7 @@ CodeMirror.defineMode("markdown", functi } if (state.list !== false) { 
- var listMod = (state.listDepth - 1) % 3; + var listMod = (state.listStack.length - 1) % 3; if (!listMod) { styles.push(tokenTypes.list1); } else if (listMod === 1) { @@ -357,7 +419,7 @@ CodeMirror.defineMode("markdown", functi } if (state.taskList) { - var taskOpen = stream.match(taskListRE, true)[1] !== "x"; + var taskOpen = stream.match(taskListRE, true)[1] === " "; if (taskOpen) state.taskOpen = true; else state.taskClosed = true; if (modeCfg.highlightFormatting) state.formatting = "task"; @@ -373,20 +435,8 @@ CodeMirror.defineMode("markdown", functi return getType(state); } - // Get sol() value now, before character is consumed - var sol = stream.sol(); - var ch = stream.next(); - if (ch === '\\') { - stream.next(); - if (modeCfg.highlightFormatting) { - var type = getType(state); - var formattingEscape = tokenTypes.formatting + "-escape"; - return type ? type + " " + formattingEscape : formattingEscape; - } - } - // Matches link titles present on next line if (state.linkTitle) { state.linkTitle = false; @@ -394,7 +444,7 @@ CodeMirror.defineMode("markdown", functi if (ch === '(') { matchCh = ')'; } - matchCh = (matchCh+'').replace(/([.?*+^$[\]\\(){}|-])/g, "\\$1"); + matchCh = (matchCh+'').replace(/([.?*+^\[\]\\(){}|-])/g, "\\$1"); var regex = '^\\s*(?:[^' + matchCh + '\\\\]+|\\\\\\\\|\\\\.)' + matchCh; if (stream.match(new RegExp(regex), true)) { return tokenTypes.linkHref; @@ -405,43 +455,67 @@ CodeMirror.defineMode("markdown", functi if (ch === '`') { var previousFormatting = state.formatting; if (modeCfg.highlightFormatting) state.formatting = "code"; - var t = getType(state); - var before = stream.pos; stream.eatWhile('`'); - var difference = 1 + stream.pos - before; - if (!state.code) { - codeDepth = difference; - state.code = true; - return getType(state); + var count = stream.current().length + if (state.code == 0 && (!state.quote || count == 1)) { + state.code = count + return getType(state) + } else if (count == state.code) { // Must be exact + var t = getType(state) + state.code = 0 + return t } else { - if (difference === codeDepth) { // Must be exact - state.code = false; - return t; - } - state.formatting = previousFormatting; - return getType(state); + state.formatting = previousFormatting + return getType(state) } } else if (state.code) { return getType(state); } + if (ch === '\\') { + stream.next(); + if (modeCfg.highlightFormatting) { + var type = getType(state); + var formattingEscape = tokenTypes.formatting + "-escape"; + return type ? type + " " + formattingEscape : formattingEscape; + } + } + if (ch === '!' 
&& stream.match(/\[[^\]]*\] ?(?:\(|\[)/, false)) { - stream.match(/\[[^\]]*\]/); - state.inline = state.f = linkHref; - return tokenTypes.image; + state.imageMarker = true; + state.image = true; + if (modeCfg.highlightFormatting) state.formatting = "image"; + return getType(state); } - if (ch === '[' && stream.match(/.*\](\(.*\)| ?\[.*\])/, false)) { + if (ch === '[' && state.imageMarker && stream.match(/[^\]]*\](\(.*?\)| ?\[.*?\])/, false)) { + state.imageMarker = false; + state.imageAltText = true + if (modeCfg.highlightFormatting) state.formatting = "image"; + return getType(state); + } + + if (ch === ']' && state.imageAltText) { + if (modeCfg.highlightFormatting) state.formatting = "image"; + var type = getType(state); + state.imageAltText = false; + state.image = false; + state.inline = state.f = linkHref; + return type; + } + + if (ch === '[' && !state.image) { + if (state.linkText && stream.match(/^.*?\]/)) return getType(state) state.linkText = true; if (modeCfg.highlightFormatting) state.formatting = "link"; return getType(state); } - if (ch === ']' && state.linkText && stream.match(/\(.*\)| ?\[.*\]/, false)) { + if (ch === ']' && state.linkText) { if (modeCfg.highlightFormatting) state.formatting = "link"; var type = getType(state); state.linkText = false; - state.inline = state.f = linkHref; + state.inline = state.f = stream.match(/\(.*?\)| ?\[.*?\]/, false) ? linkHref : inlineNormal return type; } @@ -469,7 +543,7 @@ CodeMirror.defineMode("markdown", functi return type + tokenTypes.linkEmail; } - if (ch === '<' && stream.match(/^(!--|\w)/, false)) { + if (modeCfg.xml && ch === '<' && stream.match(/^(!--|\?|!\[CDATA\[|[a-z][a-z0-9-]*(?:\s+[a-z_:.\-]+(?:\s*=\s*[^>]+)?)*\s*(?:>|$))/i, false)) { var end = stream.string.indexOf(">", stream.pos); if (end != -1) { var atts = stream.string.substring(stream.start, end); @@ -480,44 +554,37 @@ CodeMirror.defineMode("markdown", functi return switchBlock(stream, state, htmlBlock); } - if (ch === '<' && stream.match(/^\/\w*?>/)) { + if (modeCfg.xml && ch === '<' && stream.match(/^\/\w*?>/)) { state.md_inside = false; return "tag"; - } - - var ignoreUnderscore = false; - if (!modeCfg.underscoresBreakWords) { - if (ch === '_' && stream.peek() !== '_' && stream.match(/(\w)/, false)) { - var prevPos = stream.pos - 2; - if (prevPos >= 0) { - var prevCh = stream.string.charAt(prevPos); - if (prevCh !== '_' && prevCh.match(/(\w)/, false)) { - ignoreUnderscore = true; - } - } + } else if (ch === "*" || ch === "_") { + var len = 1, before = stream.pos == 1 ? 
" " : stream.string.charAt(stream.pos - 2) + while (len < 3 && stream.eat(ch)) len++ + var after = stream.peek() || " " + // See http://spec.commonmark.org/0.27/#emphasis-and-strong-emphasis + var leftFlanking = !/\s/.test(after) && (!punctuation.test(after) || /\s/.test(before) || punctuation.test(before)) + var rightFlanking = !/\s/.test(before) && (!punctuation.test(before) || /\s/.test(after) || punctuation.test(after)) + var setEm = null, setStrong = null + if (len % 2) { // Em + if (!state.em && leftFlanking && (ch === "*" || !rightFlanking || punctuation.test(before))) + setEm = true + else if (state.em == ch && rightFlanking && (ch === "*" || !leftFlanking || punctuation.test(after))) + setEm = false } - } - if (ch === '*' || (ch === '_' && !ignoreUnderscore)) { - if (sol && stream.peek() === ' ') { - // Do nothing, surrounded by newline and space - } else if (state.strong === ch && stream.eat(ch)) { // Remove STRONG - if (modeCfg.highlightFormatting) state.formatting = "strong"; - var t = getType(state); - state.strong = false; - return t; - } else if (!state.strong && stream.eat(ch)) { // Add STRONG - state.strong = ch; - if (modeCfg.highlightFormatting) state.formatting = "strong"; - return getType(state); - } else if (state.em === ch) { // Remove EM - if (modeCfg.highlightFormatting) state.formatting = "em"; - var t = getType(state); - state.em = false; - return t; - } else if (!state.em) { // Add EM - state.em = ch; - if (modeCfg.highlightFormatting) state.formatting = "em"; - return getType(state); + if (len > 1) { // Strong + if (!state.strong && leftFlanking && (ch === "*" || !rightFlanking || punctuation.test(before))) + setStrong = true + else if (state.strong == ch && rightFlanking && (ch === "*" || !leftFlanking || punctuation.test(after))) + setStrong = false + } + if (setStrong != null || setEm != null) { + if (modeCfg.highlightFormatting) state.formatting = setEm == null ? "strong" : setStrong == null ? 
"em" : "strong em" + if (setEm === true) state.em = ch + if (setStrong === true) state.strong = ch + var t = getType(state) + if (setEm === false) state.em = false + if (setStrong === false) state.strong = false + return t } } else if (ch === ' ') { if (stream.eat('*') || stream.eat('_')) { // Probably surrounded by spaces @@ -552,8 +619,16 @@ CodeMirror.defineMode("markdown", functi } } + if (modeCfg.emoji && ch === ":" && stream.match(/^(?:[a-z_\d+][a-z_\d+-]*|\-[a-z_\d+][a-z_\d+-]*):/)) { + state.emoji = true; + if (modeCfg.highlightFormatting) state.formatting = "emoji"; + var retType = getType(state); + state.emoji = false; + return retType; + } + if (ch === ' ') { - if (stream.match(/ +$/, false)) { + if (stream.match(/^ +$/, false)) { state.trailingSpace++; } else if (state.trailingSpace) { state.trailingSpaceNewLine = true; @@ -598,6 +673,11 @@ CodeMirror.defineMode("markdown", functi return 'error'; } + var linkRE = { + ")": /^(?:[^\\\(\)]|\\.|\((?:[^\\\(\)]|\\.)*\))*?(?=\))/, + "]": /^(?:[^\\\[\]]|\\.|\[(?:[^\\\[\]]|\\.)*\])*?(?=\])/ + } + function getLinkHrefInside(endChar) { return function(stream, state) { var ch = stream.next(); @@ -610,10 +690,7 @@ CodeMirror.defineMode("markdown", functi return returnState; } - if (stream.match(inlineRE(endChar), true)) { - stream.backUp(1); - } - + stream.match(linkRE[endChar]) state.linkHref = true; return getType(state); }; @@ -661,25 +738,13 @@ CodeMirror.defineMode("markdown", functi return tokenTypes.linkHref + " url"; } - var savedInlineRE = []; - function inlineRE(endChar) { - if (!savedInlineRE[endChar]) { - // Escape endChar for RegExp (taken from http://stackoverflow.com/a/494122/526741) - endChar = (endChar+'').replace(/([.?*+^$[\]\\(){}|-])/g, "\\$1"); - // Match any non-endChar, escaped character, as well as the closing - // endChar. 
- savedInlineRE[endChar] = new RegExp('^(?:[^\\\\]|\\\\.)*?(' + endChar + ')'); - } - return savedInlineRE[endChar]; - } - var mode = { startState: function() { return { f: blockNormal, - prevLine: null, - thisLine: null, + prevLine: {stream: null}, + thisLine: {stream: null}, block: blockNormal, htmlState: null, @@ -692,18 +757,21 @@ CodeMirror.defineMode("markdown", functi linkText: false, linkHref: false, linkTitle: false, + code: 0, em: false, strong: false, header: 0, + setext: 0, hr: false, taskList: false, list: false, - listDepth: 0, + listStack: [], quote: 0, trailingSpace: 0, trailingSpaceNewLine: false, strikethrough: false, - fencedChars: null + emoji: false, + fencedEndRE: null }; }, @@ -724,22 +792,26 @@ CodeMirror.defineMode("markdown", functi inline: s.inline, text: s.text, formatting: false, + linkText: s.linkText, linkTitle: s.linkTitle, + linkHref: s.linkHref, code: s.code, em: s.em, strong: s.strong, strikethrough: s.strikethrough, + emoji: s.emoji, header: s.header, + setext: s.setext, hr: s.hr, taskList: s.taskList, list: s.list, - listDepth: s.listDepth, + listStack: s.listStack.slice(0), quote: s.quote, indentedCode: s.indentedCode, trailingSpace: s.trailingSpace, trailingSpaceNewLine: s.trailingSpaceNewLine, md_inside: s.md_inside, - fencedChars: s.fencedChars + fencedEndRE: s.fencedEndRE }; }, @@ -748,21 +820,17 @@ CodeMirror.defineMode("markdown", functi // Reset state.formatting state.formatting = false; - if (stream != state.thisLine) { - var forceBlankLine = state.header || state.hr; - - // Reset state.header and state.hr + if (stream != state.thisLine.stream) { state.header = 0; state.hr = false; - if (stream.match(/^\s*$/, true) || forceBlankLine) { + if (stream.match(/^\s*$/, true)) { blankLine(state); - if (!forceBlankLine) return null - state.prevLine = null + return null; } state.prevLine = state.thisLine - state.thisLine = stream + state.thisLine = {stream: stream} // Reset state.taskList state.taskList = false; @@ -771,14 +839,15 @@ CodeMirror.defineMode("markdown", functi state.trailingSpace = 0; state.trailingSpaceNewLine = false; - state.f = state.block; - var indentation = stream.match(/^\s*/, true)[0].replace(/\t/g, ' ').length; - var difference = Math.floor((indentation - state.indentation) / 4) * 4; - if (difference > 4) difference = 4; - var adjustedIndentation = state.indentation + difference; - state.indentationDiff = adjustedIndentation - state.indentation; - state.indentation = adjustedIndentation; - if (indentation > 0) return null; + if (!state.localState) { + state.f = state.block; + if (state.f != htmlBlock) { + var indentation = stream.match(/^\s*/, true)[0].replace(/\t/g, expandedTab).length; + state.indentation = indentation; + state.indentationDiff = null; + if (indentation > 0) return null; + } + } } return state.f(stream, state); }, @@ -789,15 +858,26 @@ CodeMirror.defineMode("markdown", functi return {state: state, mode: mode}; }, + indent: function(state, textAfter, line) { + if (state.block == htmlBlock && htmlMode.indent) return htmlMode.indent(state.htmlState, textAfter, line) + if (state.localState && state.localMode.indent) return state.localMode.indent(state.localState, textAfter, line) + return CodeMirror.Pass + }, + blankLine: blankLine, getType: getType, + blockCommentStart: "", + closeBrackets: "()[]{}''\"\"``", fold: "markdown" }; return mode; }, "xml"); +CodeMirror.defineMIME("text/markdown", "markdown"); + CodeMirror.defineMIME("text/x-markdown", "markdown"); }); diff --git 
a/rhodecode/public/js/mode/mathematica/mathematica.js b/rhodecode/public/js/mode/mathematica/mathematica.js --- a/rhodecode/public/js/mode/mathematica/mathematica.js +++ b/rhodecode/public/js/mode/mathematica/mathematica.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE // Mathematica mode copyright (c) 2015 by Calin Barbat // Based on code by Patrick Scheibe (halirutan) @@ -71,12 +71,12 @@ CodeMirror.defineMode('mathematica', fun } // usage - if (stream.match(/([a-zA-Z\$]+(?:`?[a-zA-Z0-9\$])*::usage)/, true, false)) { + if (stream.match(/([a-zA-Z\$][a-zA-Z0-9\$]*(?:`[a-zA-Z0-9\$]+)*::usage)/, true, false)) { return 'meta'; } // message - if (stream.match(/([a-zA-Z\$]+(?:`?[a-zA-Z0-9\$])*::[a-zA-Z\$][a-zA-Z0-9\$]*):?/, true, false)) { + if (stream.match(/([a-zA-Z\$][a-zA-Z0-9\$]*(?:`[a-zA-Z0-9\$]+)*::[a-zA-Z\$][a-zA-Z0-9\$]*):?/, true, false)) { return 'string-2'; } @@ -126,6 +126,7 @@ CodeMirror.defineMode('mathematica', fun } // everything else is an error + stream.next(); // advance the stream. return 'error'; } diff --git a/rhodecode/public/js/mode/mbox/mbox.js b/rhodecode/public/js/mode/mbox/mbox.js new file mode 100644 --- /dev/null +++ b/rhodecode/public/js/mode/mbox/mbox.js @@ -0,0 +1,129 @@ +// CodeMirror, copyright (c) by Marijn Haverbeke and others +// Distributed under an MIT license: https://codemirror.net/LICENSE + +(function(mod) { + if (typeof exports == "object" && typeof module == "object") // CommonJS + mod(require("../../lib/codemirror")); + else if (typeof define == "function" && define.amd) // AMD + define(["../../lib/codemirror"], mod); + else // Plain browser env + mod(CodeMirror); +})(function(CodeMirror) { +"use strict"; + +var rfc2822 = [ + "From", "Sender", "Reply-To", "To", "Cc", "Bcc", "Message-ID", + "In-Reply-To", "References", "Resent-From", "Resent-Sender", "Resent-To", + "Resent-Cc", "Resent-Bcc", "Resent-Message-ID", "Return-Path", "Received" +]; +var rfc2822NoEmail = [ + "Date", "Subject", "Comments", "Keywords", "Resent-Date" +]; + +CodeMirror.registerHelper("hintWords", "mbox", rfc2822.concat(rfc2822NoEmail)); + +var whitespace = /^[ \t]/; +var separator = /^From /; // See RFC 4155 +var rfc2822Header = new RegExp("^(" + rfc2822.join("|") + "): "); +var rfc2822HeaderNoEmail = new RegExp("^(" + rfc2822NoEmail.join("|") + "): "); +var header = /^[^:]+:/; // Optional fields defined in RFC 2822 +var email = /^[^ ]+@[^ ]+/; +var untilEmail = /^.*?(?=[^ ]+?@[^ ]+)/; +var bracketedEmail = /^<.*?>/; +var untilBracketedEmail = /^.*?(?=<.*>)/; + +function styleForHeader(header) { + if (header === "Subject") return "header"; + return "string"; +} + +function readToken(stream, state) { + if (stream.sol()) { + // From last line + state.inSeparator = false; + if (state.inHeader && stream.match(whitespace)) { + // Header folding + return null; + } else { + state.inHeader = false; + state.header = null; + } + + if (stream.match(separator)) { + state.inHeaders = true; + state.inSeparator = true; + return "atom"; + } + + var match; + var emailPermitted = false; + if ((match = stream.match(rfc2822HeaderNoEmail)) || + (emailPermitted = true) && (match = stream.match(rfc2822Header))) { + state.inHeaders = true; + state.inHeader = true; + state.emailPermitted = emailPermitted; + state.header = match[1]; + return "atom"; + } + + // Use vim's heuristics: recognize custom headers only if the line is in a + // 
block of legitimate headers. + if (state.inHeaders && (match = stream.match(header))) { + state.inHeader = true; + state.emailPermitted = true; + state.header = match[1]; + return "atom"; + } + + state.inHeaders = false; + stream.skipToEnd(); + return null; + } + + if (state.inSeparator) { + if (stream.match(email)) return "link"; + if (stream.match(untilEmail)) return "atom"; + stream.skipToEnd(); + return "atom"; + } + + if (state.inHeader) { + var style = styleForHeader(state.header); + + if (state.emailPermitted) { + if (stream.match(bracketedEmail)) return style + " link"; + if (stream.match(untilBracketedEmail)) return style; + } + stream.skipToEnd(); + return style; + } + + stream.skipToEnd(); + return null; +}; + +CodeMirror.defineMode("mbox", function() { + return { + startState: function() { + return { + // Is in a mbox separator + inSeparator: false, + // Is in a mail header + inHeader: false, + // If bracketed email is permitted. Only applicable when inHeader + emailPermitted: false, + // Name of current header + header: null, + // Is in a region of mail headers + inHeaders: false + }; + }, + token: readToken, + blankLine: function(state) { + state.inHeaders = state.inSeparator = state.inHeader = false; + } + }; +}); + +CodeMirror.defineMIME("application/mbox", "mbox"); +}); diff --git a/rhodecode/public/js/mode/meta.js b/rhodecode/public/js/mode/meta.js --- a/rhodecode/public/js/mode/meta.js +++ b/rhodecode/public/js/mode/meta.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS @@ -13,19 +13,19 @@ CodeMirror.modeInfo = [ {name: "APL", mime: "text/apl", mode: "apl", ext: ["dyalog", "apl"]}, - {name: "PGP", mimes: ["application/pgp", "application/pgp-keys", "application/pgp-signature"], mode: "asciiarmor", ext: ["pgp"]}, + {name: "PGP", mimes: ["application/pgp", "application/pgp-encrypted", "application/pgp-keys", "application/pgp-signature"], mode: "asciiarmor", ext: ["asc", "pgp", "sig"]}, {name: "ASN.1", mime: "text/x-ttcn-asn", mode: "asn.1", ext: ["asn", "asn1"]}, {name: "Asterisk", mime: "text/x-asterisk", mode: "asterisk", file: /^extensions\.conf$/i}, {name: "Brainfuck", mime: "text/x-brainfuck", mode: "brainfuck", ext: ["b", "bf"]}, - {name: "C", mime: "text/x-csrc", mode: "clike", ext: ["c", "h"]}, + {name: "C", mime: "text/x-csrc", mode: "clike", ext: ["c", "h", "ino"]}, {name: "C++", mime: "text/x-c++src", mode: "clike", ext: ["cpp", "c++", "cc", "cxx", "hpp", "h++", "hh", "hxx"], alias: ["cpp"]}, {name: "Cobol", mime: "text/x-cobol", mode: "cobol", ext: ["cob", "cpy"]}, - {name: "C#", mime: "text/x-csharp", mode: "clike", ext: ["cs"], alias: ["csharp"]}, - {name: "Clojure", mime: "text/x-clojure", mode: "clojure", ext: ["clj"]}, + {name: "C#", mime: "text/x-csharp", mode: "clike", ext: ["cs"], alias: ["csharp", "cs"]}, + {name: "Clojure", mime: "text/x-clojure", mode: "clojure", ext: ["clj", "cljc", "cljx"]}, {name: "ClojureScript", mime: "text/x-clojurescript", mode: "clojure", ext: ["cljs"]}, {name: "Closure Stylesheets (GSS)", mime: "text/x-gss", mode: "css", ext: ["gss"]}, {name: "CMake", mime: "text/x-cmake", mode: "cmake", ext: ["cmake", "cmake.in"], file: /^CMakeLists.txt$/}, - {name: "CoffeeScript", mime: "text/x-coffeescript", mode: "coffeescript", ext: ["coffee"], alias: ["coffee", "coffee-script"]}, + 
{name: "CoffeeScript", mimes: ["application/vnd.coffeescript", "text/coffeescript", "text/x-coffeescript"], mode: "coffeescript", ext: ["coffee"], alias: ["coffee", "coffee-script"]}, {name: "Common Lisp", mime: "text/x-common-lisp", mode: "commonlisp", ext: ["cl", "lisp", "el"], alias: ["lisp"]}, {name: "Cypher", mime: "application/x-cypher-query", mode: "cypher", ext: ["cyp", "cypher"]}, {name: "Cython", mime: "text/x-cython", mode: "python", ext: ["pyx", "pxd", "pxi"]}, @@ -41,30 +41,33 @@ {name: "Dylan", mime: "text/x-dylan", mode: "dylan", ext: ["dylan", "dyl", "intr"]}, {name: "EBNF", mime: "text/x-ebnf", mode: "ebnf"}, {name: "ECL", mime: "text/x-ecl", mode: "ecl", ext: ["ecl"]}, + {name: "edn", mime: "application/edn", mode: "clojure", ext: ["edn"]}, {name: "Eiffel", mime: "text/x-eiffel", mode: "eiffel", ext: ["e"]}, {name: "Elm", mime: "text/x-elm", mode: "elm", ext: ["elm"]}, {name: "Embedded Javascript", mime: "application/x-ejs", mode: "htmlembedded", ext: ["ejs"]}, {name: "Embedded Ruby", mime: "application/x-erb", mode: "htmlembedded", ext: ["erb"]}, {name: "Erlang", mime: "text/x-erlang", mode: "erlang", ext: ["erl"]}, + {name: "Esper", mime: "text/x-esper", mode: "sql"}, {name: "Factor", mime: "text/x-factor", mode: "factor", ext: ["factor"]}, + {name: "FCL", mime: "text/x-fcl", mode: "fcl"}, {name: "Forth", mime: "text/x-forth", mode: "forth", ext: ["forth", "fth", "4th"]}, - {name: "Fortran", mime: "text/x-fortran", mode: "fortran", ext: ["f", "for", "f77", "f90"]}, + {name: "Fortran", mime: "text/x-fortran", mode: "fortran", ext: ["f", "for", "f77", "f90", "f95"]}, {name: "F#", mime: "text/x-fsharp", mode: "mllike", ext: ["fs"], alias: ["fsharp"]}, {name: "Gas", mime: "text/x-gas", mode: "gas", ext: ["s"]}, {name: "Gherkin", mime: "text/x-feature", mode: "gherkin", ext: ["feature"]}, {name: "GitHub Flavored Markdown", mime: "text/x-gfm", mode: "gfm", file: /^(readme|contributing|history).md$/i}, {name: "Go", mime: "text/x-go", mode: "go", ext: ["go"]}, - {name: "Groovy", mime: "text/x-groovy", mode: "groovy", ext: ["groovy"]}, + {name: "Groovy", mime: "text/x-groovy", mode: "groovy", ext: ["groovy", "gradle"], file: /^Jenkinsfile$/}, {name: "HAML", mime: "text/x-haml", mode: "haml", ext: ["haml"]}, {name: "Haskell", mime: "text/x-haskell", mode: "haskell", ext: ["hs"]}, {name: "Haskell (Literate)", mime: "text/x-literate-haskell", mode: "haskell-literate", ext: ["lhs"]}, {name: "Haxe", mime: "text/x-haxe", mode: "haxe", ext: ["hx"]}, {name: "HXML", mime: "text/x-hxml", mode: "haxe", ext: ["hxml"]}, {name: "ASP.NET", mime: "application/x-aspx", mode: "htmlembedded", ext: ["aspx"], alias: ["asp", "aspx"]}, - {name: "HTML", mime: "text/html", mode: "htmlmixed", ext: ["html", "htm"], alias: ["xhtml"]}, + {name: "HTML", mime: "text/html", mode: "htmlmixed", ext: ["html", "htm", "handlebars", "hbs"], alias: ["xhtml"]}, {name: "HTTP", mime: "message/http", mode: "http"}, {name: "IDL", mime: "text/x-idl", mode: "idl", ext: ["pro"]}, - {name: "Jade", mime: "text/x-jade", mode: "jade", ext: ["jade"]}, + {name: "Pug", mime: "text/x-pug", mode: "pug", ext: ["jade", "pug"], alias: ["jade"]}, {name: "Java", mime: "text/x-java", mode: "clike", ext: ["java"]}, {name: "Java Server Pages", mime: "application/x-jsp", mode: "htmlembedded", ext: ["jsp"], alias: ["jsp"]}, {name: "JavaScript", mimes: ["text/javascript", "text/ecmascript", "application/javascript", "application/x-javascript", "application/ecmascript"], @@ -72,7 +75,7 @@ {name: "JSON", mimes: ["application/json", 
"application/x-json"], mode: "javascript", ext: ["json", "map"], alias: ["json5"]}, {name: "JSON-LD", mime: "application/ld+json", mode: "javascript", ext: ["jsonld"], alias: ["jsonld"]}, {name: "JSX", mime: "text/jsx", mode: "jsx", ext: ["jsx"]}, - {name: "Jinja2", mime: "null", mode: "jinja2"}, + {name: "Jinja2", mime: "text/jinja2", mode: "jinja2", ext: ["j2", "jinja", "jinja2"]}, {name: "Julia", mime: "text/x-julia", mode: "julia", ext: ["jl"]}, {name: "Kotlin", mime: "text/x-kotlin", mode: "clike", ext: ["kt"]}, {name: "LESS", mime: "text/x-less", mode: "css", ext: ["less"]}, @@ -81,75 +84,88 @@ {name: "Markdown", mime: "text/x-markdown", mode: "markdown", ext: ["markdown", "md", "mkd"]}, {name: "mIRC", mime: "text/mirc", mode: "mirc"}, {name: "MariaDB SQL", mime: "text/x-mariadb", mode: "sql"}, - {name: "Mathematica", mime: "text/x-mathematica", mode: "mathematica", ext: ["m", "nb"]}, + {name: "Mathematica", mime: "text/x-mathematica", mode: "mathematica", ext: ["m", "nb", "wl", "wls"]}, {name: "Modelica", mime: "text/x-modelica", mode: "modelica", ext: ["mo"]}, - {name: "MUMPS", mime: "text/x-mumps", mode: "mumps"}, + {name: "MUMPS", mime: "text/x-mumps", mode: "mumps", ext: ["mps"]}, {name: "MS SQL", mime: "text/x-mssql", mode: "sql"}, + {name: "mbox", mime: "application/mbox", mode: "mbox", ext: ["mbox"]}, {name: "MySQL", mime: "text/x-mysql", mode: "sql"}, {name: "Nginx", mime: "text/x-nginx-conf", mode: "nginx", file: /nginx.*\.conf$/i}, {name: "NSIS", mime: "text/x-nsis", mode: "nsis", ext: ["nsh", "nsi"]}, - {name: "NTriples", mime: "text/n-triples", mode: "ntriples", ext: ["nt"]}, - {name: "Objective C", mime: "text/x-objectivec", mode: "clike", ext: ["m", "mm"]}, + {name: "NTriples", mimes: ["application/n-triples", "application/n-quads", "text/n-triples"], + mode: "ntriples", ext: ["nt", "nq"]}, + {name: "Objective-C", mime: "text/x-objectivec", mode: "clike", ext: ["m"], alias: ["objective-c", "objc"]}, + {name: "Objective-C++", mime: "text/x-objectivec++", mode: "clike", ext: ["mm"], alias: ["objective-c++", "objc++"]}, {name: "OCaml", mime: "text/x-ocaml", mode: "mllike", ext: ["ml", "mli", "mll", "mly"]}, {name: "Octave", mime: "text/x-octave", mode: "octave", ext: ["m"]}, {name: "Oz", mime: "text/x-oz", mode: "oz", ext: ["oz"]}, {name: "Pascal", mime: "text/x-pascal", mode: "pascal", ext: ["p", "pas"]}, {name: "PEG.js", mime: "null", mode: "pegjs", ext: ["jsonld"]}, {name: "Perl", mime: "text/x-perl", mode: "perl", ext: ["pl", "pm"]}, - {name: "PHP", mime: "application/x-httpd-php", mode: "php", ext: ["php", "php3", "php4", "php5", "phtml"]}, + {name: "PHP", mimes: ["text/x-php", "application/x-httpd-php", "application/x-httpd-php-open"], mode: "php", ext: ["php", "php3", "php4", "php5", "php7", "phtml"]}, {name: "Pig", mime: "text/x-pig", mode: "pig", ext: ["pig"]}, {name: "Plain Text", mime: "text/plain", mode: "null", ext: ["txt", "text", "conf", "def", "list", "log"]}, {name: "PLSQL", mime: "text/x-plsql", mode: "sql", ext: ["pls"]}, + {name: "PostgreSQL", mime: "text/x-pgsql", mode: "sql"}, + {name: "PowerShell", mime: "application/x-powershell", mode: "powershell", ext: ["ps1", "psd1", "psm1"]}, {name: "Properties files", mime: "text/x-properties", mode: "properties", ext: ["properties", "ini", "in"], alias: ["ini", "properties"]}, - {name: "Python", mime: "text/x-python", mode: "python", ext: ["py", "pyw"]}, + {name: "ProtoBuf", mime: "text/x-protobuf", mode: "protobuf", ext: ["proto"]}, + {name: "Python", mime: "text/x-python", mode: "python", ext: ["BUILD", 
"bzl", "py", "pyw"], file: /^(BUCK|BUILD)$/}, {name: "Puppet", mime: "text/x-puppet", mode: "puppet", ext: ["pp"]}, {name: "Q", mime: "text/x-q", mode: "q", ext: ["q"]}, - {name: "R", mime: "text/x-rsrc", mode: "r", ext: ["r"], alias: ["rscript"]}, + {name: "R", mime: "text/x-rsrc", mode: "r", ext: ["r", "R"], alias: ["rscript"]}, {name: "reStructuredText", mime: "text/x-rst", mode: "rst", ext: ["rst"], alias: ["rst"]}, {name: "RPM Changes", mime: "text/x-rpm-changes", mode: "rpm"}, {name: "RPM Spec", mime: "text/x-rpm-spec", mode: "rpm", ext: ["spec"]}, {name: "Ruby", mime: "text/x-ruby", mode: "ruby", ext: ["rb"], alias: ["jruby", "macruby", "rake", "rb", "rbx"]}, {name: "Rust", mime: "text/x-rustsrc", mode: "rust", ext: ["rs"]}, + {name: "SAS", mime: "text/x-sas", mode: "sas", ext: ["sas"]}, {name: "Sass", mime: "text/x-sass", mode: "sass", ext: ["sass"]}, {name: "Scala", mime: "text/x-scala", mode: "clike", ext: ["scala"]}, {name: "Scheme", mime: "text/x-scheme", mode: "scheme", ext: ["scm", "ss"]}, {name: "SCSS", mime: "text/x-scss", mode: "css", ext: ["scss"]}, - {name: "Shell", mime: "text/x-sh", mode: "shell", ext: ["sh", "ksh", "bash"], alias: ["bash", "sh", "zsh"], file: /^PKGBUILD$/}, + {name: "Shell", mimes: ["text/x-sh", "application/x-sh"], mode: "shell", ext: ["sh", "ksh", "bash"], alias: ["bash", "sh", "zsh"], file: /^PKGBUILD$/}, {name: "Sieve", mime: "application/sieve", mode: "sieve", ext: ["siv", "sieve"]}, {name: "Slim", mimes: ["text/x-slim", "application/x-slim"], mode: "slim", ext: ["slim"]}, {name: "Smalltalk", mime: "text/x-stsrc", mode: "smalltalk", ext: ["st"]}, {name: "Smarty", mime: "text/x-smarty", mode: "smarty", ext: ["tpl"]}, {name: "Solr", mime: "text/x-solr", mode: "solr"}, + {name: "SML", mime: "text/x-sml", mode: "mllike", ext: ["sml", "sig", "fun", "smackspec"]}, {name: "Soy", mime: "text/x-soy", mode: "soy", ext: ["soy"], alias: ["closure template"]}, {name: "SPARQL", mime: "application/sparql-query", mode: "sparql", ext: ["rq", "sparql"], alias: ["sparul"]}, {name: "Spreadsheet", mime: "text/x-spreadsheet", mode: "spreadsheet", alias: ["excel", "formula"]}, {name: "SQL", mime: "text/x-sql", mode: "sql", ext: ["sql"]}, + {name: "SQLite", mime: "text/x-sqlite", mode: "sql"}, {name: "Squirrel", mime: "text/x-squirrel", mode: "clike", ext: ["nut"]}, + {name: "Stylus", mime: "text/x-styl", mode: "stylus", ext: ["styl"]}, {name: "Swift", mime: "text/x-swift", mode: "swift", ext: ["swift"]}, - {name: "MariaDB", mime: "text/x-mariadb", mode: "sql"}, {name: "sTeX", mime: "text/x-stex", mode: "stex"}, - {name: "LaTeX", mime: "text/x-latex", mode: "stex", ext: ["text", "ltx"], alias: ["tex"]}, - {name: "SystemVerilog", mime: "text/x-systemverilog", mode: "verilog", ext: ["v"]}, + {name: "LaTeX", mime: "text/x-latex", mode: "stex", ext: ["text", "ltx", "tex"], alias: ["tex"]}, + {name: "SystemVerilog", mime: "text/x-systemverilog", mode: "verilog", ext: ["v", "sv", "svh"]}, {name: "Tcl", mime: "text/x-tcl", mode: "tcl", ext: ["tcl"]}, {name: "Textile", mime: "text/x-textile", mode: "textile", ext: ["textile"]}, {name: "TiddlyWiki ", mime: "text/x-tiddlywiki", mode: "tiddlywiki"}, {name: "Tiki wiki", mime: "text/tiki", mode: "tiki"}, {name: "TOML", mime: "text/x-toml", mode: "toml", ext: ["toml"]}, {name: "Tornado", mime: "text/x-tornado", mode: "tornado"}, - {name: "troff", mime: "troff", mode: "troff", ext: ["1", "2", "3", "4", "5", "6", "7", "8", "9"]}, + {name: "troff", mime: "text/troff", mode: "troff", ext: ["1", "2", "3", "4", "5", "6", "7", "8", "9"]}, 
{name: "TTCN", mime: "text/x-ttcn", mode: "ttcn", ext: ["ttcn", "ttcn3", "ttcnpp"]}, {name: "TTCN_CFG", mime: "text/x-ttcn-cfg", mode: "ttcn-cfg", ext: ["cfg"]}, {name: "Turtle", mime: "text/turtle", mode: "turtle", ext: ["ttl"]}, {name: "TypeScript", mime: "application/typescript", mode: "javascript", ext: ["ts"], alias: ["ts"]}, + {name: "TypeScript-JSX", mime: "text/typescript-jsx", mode: "jsx", ext: ["tsx"], alias: ["tsx"]}, {name: "Twig", mime: "text/x-twig", mode: "twig"}, + {name: "Web IDL", mime: "text/x-webidl", mode: "webidl", ext: ["webidl"]}, {name: "VB.NET", mime: "text/x-vb", mode: "vb", ext: ["vb"]}, {name: "VBScript", mime: "text/vbscript", mode: "vbscript", ext: ["vbs"]}, {name: "Velocity", mime: "text/velocity", mode: "velocity", ext: ["vtl"]}, {name: "Verilog", mime: "text/x-verilog", mode: "verilog", ext: ["v"]}, {name: "VHDL", mime: "text/x-vhdl", mode: "vhdl", ext: ["vhd", "vhdl"]}, - {name: "XML", mimes: ["application/xml", "text/xml"], mode: "xml", ext: ["xml", "xsl", "xsd"], alias: ["rss", "wsdl", "xsd"]}, + {name: "Vue.js Component", mimes: ["script/x-vue", "text/x-vue"], mode: "vue", ext: ["vue"]}, + {name: "XML", mimes: ["application/xml", "text/xml"], mode: "xml", ext: ["xml", "xsl", "xsd", "svg"], alias: ["rss", "wsdl", "xsd"]}, {name: "XQuery", mime: "application/xquery", mode: "xquery", ext: ["xy", "xquery"]}, - {name: "YAML", mime: "text/x-yaml", mode: "yaml", ext: ["yaml", "yml"], alias: ["yml"]}, + {name: "Yacas", mime: "text/x-yacas", mode: "yacas", ext: ["ys"]}, + {name: "YAML", mimes: ["text/x-yaml", "text/yaml"], mode: "yaml", ext: ["yaml", "yml"], alias: ["yml"]}, {name: "Z80", mime: "text/x-z80", mode: "z80", ext: ["z80"]}, {name: "mscgen", mime: "text/x-mscgen", mode: "mscgen", ext: ["mscgen", "mscin", "msc"]}, {name: "xu", mime: "text/x-xu", mode: "mscgen", ext: ["xu"]}, @@ -169,6 +185,8 @@ if (info.mimes) for (var j = 0; j < info.mimes.length; j++) if (info.mimes[j] == mime) return info; } + if (/\+xml$/.test(mime)) return CodeMirror.findModeByMIME("application/xml") + if (/\+json$/.test(mime)) return CodeMirror.findModeByMIME("application/json") }; CodeMirror.findModeByExtension = function(ext) { diff --git a/rhodecode/public/js/mode/meta_ext.js b/rhodecode/public/js/mode/meta_ext.js --- a/rhodecode/public/js/mode/meta_ext.js +++ b/rhodecode/public/js/mode/meta_ext.js @@ -36,7 +36,7 @@ MIME_TO_EXT = { "application/x-ssp": {"exts": ["*.ssp"], "mode": ""}, "application/x-troff": {"exts": ["*.[1234567]","*.man"], "mode": ""}, "application/x-urbiscript": {"exts": ["*.u"], "mode": ""}, -"application/xml": {"exts": ["*.xml","*.xsl","*.rss","*.xslt","*.xsd","*.wsdl"], "mode": "xml"}, +"application/xml": {"exts": ["*.xml","*.xsl","*.rss","*.xslt","*.xsd","*.wsdl","*.svg"], "mode": "xml"}, "application/xml+evoque": {"exts": ["*.xml"], "mode": ""}, "application/xml-dtd": {"exts": ["*.dtd"], "mode": "dtd"}, "application/xquery": {"exts": ["*.xqy","*.xquery","*.xq","*.xql","*.xqm","*.xy"], "mode": "xquery"}, @@ -48,7 +48,7 @@ MIME_TO_EXT = { "text/coffeescript": {"exts": ["*.coffee"], "mode": ""}, "text/css": {"exts": ["*.css"], "mode": "css"}, "text/haxe": {"exts": ["*.hx"], "mode": ""}, -"text/html": {"exts": ["*.html","*.htm","*.xhtml","*.xslt"], "mode": "htmlmixed"}, +"text/html": {"exts": ["*.html","*.htm","*.xhtml","*.xslt","*.handlebars","*.hbs"], "mode": "htmlmixed"}, "text/html+evoque": {"exts": ["*.html"], "mode": ""}, "text/html+ruby": {"exts": ["*.rhtml"], "mode": ""}, "text/idl": {"exts": ["*.pro"], "mode": ""}, @@ -80,7 +80,7 @@ MIME_TO_EXT = 
{ "text/x-c-objdump": {"exts": ["*.c-objdump"], "mode": ""}, "text/x-ceylon": {"exts": ["*.ceylon"], "mode": ""}, "text/x-chdr": {"exts": ["*.c","*.h","*.idc"], "mode": "clike"}, -"text/x-clojure": {"exts": ["*.clj"], "mode": "clojure"}, +"text/x-clojure": {"exts": ["*.clj","*.cljc","*.cljx"], "mode": "clojure"}, "text/x-cmake": {"exts": ["*.cmake","CMakeLists.txt","*.cmake.in"], "mode": "cmake"}, "text/x-cobol": {"exts": ["*.cob","*.COB","*.cpy","*.CPY"], "mode": "cobol"}, "text/x-coffeescript": {"exts": ["*.coffee"], "mode": "coffeescript"}, @@ -89,7 +89,7 @@ MIME_TO_EXT = { "text/x-cpp-objdump": {"exts": ["*.cpp-objdump","*.c++-objdump","*.cxx-objdump"], "mode": ""}, "text/x-crocsrc": {"exts": ["*.croc"], "mode": ""}, "text/x-csharp": {"exts": ["*.cs"], "mode": "clike"}, -"text/x-csrc": {"exts": ["*.c","*.h"], "mode": "clike"}, +"text/x-csrc": {"exts": ["*.c","*.h","*.ino"], "mode": "clike"}, "text/x-cuda": {"exts": ["*.cu","*.cuh"], "mode": ""}, "text/x-cython": {"exts": ["*.pyx","*.pxd","*.pxi"], "mode": "python"}, "text/x-d": {"exts": ["*.d"], "mode": "d"}, @@ -110,7 +110,7 @@ MIME_TO_EXT = { "text/x-factor": {"exts": ["*.factor"], "mode": "factor"}, "text/x-fancysrc": {"exts": ["*.fy","*.fancypack"], "mode": ""}, "text/x-felix": {"exts": ["*.flx","*.flxh"], "mode": ""}, -"text/x-fortran": {"exts": ["*.f","*.f90","*.F","*.F90","*.for","*.f77"], "mode": "fortran"}, +"text/x-fortran": {"exts": ["*.f","*.f90","*.F","*.F90","*.for","*.f77","*.f95"], "mode": "fortran"}, "text/x-fsharp": {"exts": ["*.fs","*.fsi"], "mode": "mllike"}, "text/x-gas": {"exts": ["*.s","*.S"], "mode": "gas"}, "text/x-gfm": {"exts": ["*.md","*.MD"], "mode": "gfm"}, @@ -123,7 +123,7 @@ MIME_TO_EXT = { "text/x-gosrc": {"exts": ["*.go"], "mode": ""}, "text/x-gosu": {"exts": ["*.gs","*.gsx","*.gsp","*.vark"], "mode": ""}, "text/x-gosu-template": {"exts": ["*.gst"], "mode": ""}, -"text/x-groovy": {"exts": ["*.groovy"], "mode": "groovy"}, +"text/x-groovy": {"exts": ["*.groovy","*.gradle"], "mode": "groovy"}, "text/x-haml": {"exts": ["*.haml"], "mode": "haml"}, "text/x-haskell": {"exts": ["*.hs"], "mode": "haskell"}, "text/x-haxe": {"exts": ["*.hx"], "mode": "haxe"}, @@ -139,7 +139,7 @@ MIME_TO_EXT = { "text/x-koka": {"exts": ["*.kk","*.kki"], "mode": ""}, "text/x-kotlin": {"exts": ["*.kt"], "mode": "clike"}, "text/x-lasso": {"exts": ["*.lasso","*.lasso[89]"], "mode": ""}, -"text/x-latex": {"exts": ["*.ltx","*.text"], "mode": "stex"}, +"text/x-latex": {"exts": ["*.ltx","*.text","*.tex"], "mode": "stex"}, "text/x-less": {"exts": ["*.less"], "mode": "css"}, "text/x-literate-haskell": {"exts": ["*.lhs"], "mode": "haskell-literate"}, "text/x-livescript": {"exts": ["*.ls"], "mode": "livescript"}, @@ -173,13 +173,13 @@ MIME_TO_EXT = { "text/x-openedge": {"exts": ["*.p","*.cls"], "mode": ""}, "text/x-pascal": {"exts": ["*.pas","*.p"], "mode": "pascal"}, "text/x-perl": {"exts": ["*.pl","*.pm"], "mode": "perl"}, -"text/x-php": {"exts": ["*.php","*.php[345]","*.inc"], "mode": "php"}, +"text/x-php": {"exts": ["*.php","*.php[345]","*.inc","*.php3","*.php4","*.php5","*.php7","*.phtml"], "mode": "php"}, "text/x-pig": {"exts": ["*.pig"], "mode": "pig"}, "text/x-povray": {"exts": ["*.pov","*.inc"], "mode": ""}, "text/x-powershell": {"exts": ["*.ps1"], "mode": ""}, "text/x-prolog": {"exts": ["*.prolog","*.pro","*.pl"], "mode": ""}, "text/x-properties": {"exts": ["*.properties","*.ini","*.in"], "mode": "properties"}, -"text/x-python": {"exts": ["*.py","*.pyw","*.sc","SConstruct","SConscript","*.tac","*.sage"], "mode": "python"}, 
+"text/x-python": {"exts": ["*.py","*.pyw","*.sc","SConstruct","SConscript","*.tac","*.sage","*.BUILD","*.bzl"], "mode": "python"}, "text/x-python-traceback": {"exts": ["*.pytb"], "mode": ""}, "text/x-python3-traceback": {"exts": ["*.py3tb"], "mode": ""}, "text/x-r-doc": {"exts": ["*.Rd"], "mode": ""}, @@ -187,7 +187,7 @@ MIME_TO_EXT = { "text/x-rebol": {"exts": ["*.r","*.r3"], "mode": ""}, "text/x-robotframework": {"exts": ["*.txt","*.robot"], "mode": ""}, "text/x-rpm-spec": {"exts": ["*.spec"], "mode": "rpm"}, -"text/x-rsrc": {"exts": ["*.r"], "mode": "r"}, +"text/x-rsrc": {"exts": ["*.r","*.R"], "mode": "r"}, "text/x-rst": {"exts": ["*.rst","*.rest"], "mode": "rst"}, "text/x-ruby": {"exts": ["*.rb","*.rbw","Rakefile","*.rake","*.gemspec","*.rbx","*.duby"], "mode": "ruby"}, "text/x-rustsrc": {"exts": ["*.rs","*.rc"], "mode": "rust"}, @@ -224,7 +224,7 @@ MIME_TO_EXT = { "text/x-yaml": {"exts": ["*.yaml","*.yml"], "mode": "yaml"}, "text/x-z80": {"exts": ["*.z80"], "mode": "z80"}, "text/xml": {"exts": ["*.xml","*.xsl","*.rss","*.xslt","*.xsd","*.wsdl"], "mode": ""}, -"text/xquery": {"exts": ["*.xqy","*.xquery","*.xq","*.xql","*.xqm"], "mode": ""} +"text/xquery": {"exts": ["*.xqy","*.xquery","*.xq","*.xql","*.xqm"], "mode": ""}, }; /* Special case for overriding mode by file extensions diff --git a/rhodecode/public/js/mode/mirc/mirc.js b/rhodecode/public/js/mode/mirc/mirc.js --- a/rhodecode/public/js/mode/mirc/mirc.js +++ b/rhodecode/public/js/mode/mirc/mirc.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE //mIRC mode by Ford_Lawnmower :: Based on Velocity mode by Steve O'Hara @@ -130,7 +130,7 @@ CodeMirror.defineMode("mirc", function() } } else if (ch == "%") { - stream.eatWhile(/[^,^\s^\(^\)]/); + stream.eatWhile(/[^,\s()]/); state.beforeParams = true; return "string"; } diff --git a/rhodecode/public/js/mode/mllike/mllike.js b/rhodecode/public/js/mode/mllike/mllike.js --- a/rhodecode/public/js/mode/mllike/mllike.js +++ b/rhodecode/public/js/mode/mllike/mllike.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS @@ -13,31 +13,26 @@ CodeMirror.defineMode('mllike', function(_config, parserConfig) { var words = { - 'let': 'keyword', - 'rec': 'keyword', - 'in': 'keyword', - 'of': 'keyword', - 'and': 'keyword', - 'if': 'keyword', - 'then': 'keyword', - 'else': 'keyword', - 'for': 'keyword', - 'to': 'keyword', - 'while': 'keyword', + 'as': 'keyword', 'do': 'keyword', - 'done': 'keyword', + 'else': 'keyword', + 'end': 'keyword', + 'exception': 'keyword', 'fun': 'keyword', - 'function': 'keyword', - 'val': 'keyword', + 'functor': 'keyword', + 'if': 'keyword', + 'in': 'keyword', + 'include': 'keyword', + 'let': 'keyword', + 'of': 'keyword', + 'open': 'keyword', + 'rec': 'keyword', + 'struct': 'keyword', + 'then': 'keyword', 'type': 'keyword', - 'mutable': 'keyword', - 'match': 'keyword', - 'with': 'keyword', - 'try': 'keyword', - 'open': 'builtin', - 'ignore': 'builtin', - 'begin': 'keyword', - 'end': 'keyword' + 'val': 'keyword', + 'while': 'keyword', + 'with': 'keyword' }; var extraWords = parserConfig.extraWords || {}; @@ -46,6 +41,9 @@ CodeMirror.defineMode('mllike', function 
words[prop] = parserConfig.extraWords[prop]; } } + var hintWords = []; + for (var k in words) { hintWords.push(k); } + CodeMirror.registerHelper("hintWords", "mllike", hintWords); function tokenBase(stream, state) { var ch = stream.next(); @@ -54,6 +52,13 @@ CodeMirror.defineMode('mllike', function state.tokenize = tokenString; return state.tokenize(stream, state); } + if (ch === '{') { + if (stream.eat('|')) { + state.longString = true; + state.tokenize = tokenLongString; + return state.tokenize(stream, state); + } + } if (ch === '(') { if (stream.eat('*')) { state.commentLevel++; @@ -61,7 +66,7 @@ CodeMirror.defineMode('mllike', function return state.tokenize(stream, state); } } - if (ch === '~') { + if (ch === '~' || ch === '?') { stream.eatWhile(/\w/); return 'variable-2'; } @@ -74,18 +79,32 @@ CodeMirror.defineMode('mllike', function return 'comment'; } if (/\d/.test(ch)) { - stream.eatWhile(/[\d]/); - if (stream.eat('.')) { - stream.eatWhile(/[\d]/); + if (ch === '0' && stream.eat(/[bB]/)) { + stream.eatWhile(/[01]/); + } if (ch === '0' && stream.eat(/[xX]/)) { + stream.eatWhile(/[0-9a-fA-F]/) + } if (ch === '0' && stream.eat(/[oO]/)) { + stream.eatWhile(/[0-7]/); + } else { + stream.eatWhile(/[\d_]/); + if (stream.eat('.')) { + stream.eatWhile(/[\d]/); + } + if (stream.eat(/[eE]/)) { + stream.eatWhile(/[\d\-+]/); + } } return 'number'; } - if ( /[+\-*&%=<>!?|]/.test(ch)) { + if ( /[+\-*&%=<>!?|@\.~:]/.test(ch)) { return 'operator'; } - stream.eatWhile(/\w/); - var cur = stream.current(); - return words.hasOwnProperty(cur) ? words[cur] : 'variable'; + if (/[\w\xa1-\uffff]/.test(ch)) { + stream.eatWhile(/[\w\xa1-\uffff]/); + var cur = stream.current(); + return words.hasOwnProperty(cur) ? words[cur] : 'variable'; + } + return null } function tokenString(stream, state) { @@ -116,8 +135,20 @@ CodeMirror.defineMode('mllike', function return 'comment'; } + function tokenLongString(stream, state) { + var prev, next; + while (state.longString && (next = stream.next()) != null) { + if (prev === '|' && next === '}') state.longString = false; + prev = next; + } + if (!state.longString) { + state.tokenize = tokenBase; + } + return 'string'; + } + return { - startState: function() {return {tokenize: tokenBase, commentLevel: 0};}, + startState: function() {return {tokenize: tokenBase, commentLevel: 0, longString: false};}, token: function(stream, state) { if (stream.eatSpace()) return null; return state.tokenize(stream, state); @@ -132,14 +163,64 @@ CodeMirror.defineMode('mllike', function CodeMirror.defineMIME('text/x-ocaml', { name: 'mllike', extraWords: { - 'succ': 'keyword', + 'and': 'keyword', + 'assert': 'keyword', + 'begin': 'keyword', + 'class': 'keyword', + 'constraint': 'keyword', + 'done': 'keyword', + 'downto': 'keyword', + 'external': 'keyword', + 'function': 'keyword', + 'initializer': 'keyword', + 'lazy': 'keyword', + 'match': 'keyword', + 'method': 'keyword', + 'module': 'keyword', + 'mutable': 'keyword', + 'new': 'keyword', + 'nonrec': 'keyword', + 'object': 'keyword', + 'private': 'keyword', + 'sig': 'keyword', + 'to': 'keyword', + 'try': 'keyword', + 'value': 'keyword', + 'virtual': 'keyword', + 'when': 'keyword', + + // builtins + 'raise': 'builtin', + 'failwith': 'builtin', + 'true': 'builtin', + 'false': 'builtin', + + // Pervasives builtins + 'asr': 'builtin', + 'land': 'builtin', + 'lor': 'builtin', + 'lsl': 'builtin', + 'lsr': 'builtin', + 'lxor': 'builtin', + 'mod': 'builtin', + 'or': 'builtin', + + // More Pervasives + 'raise_notrace': 'builtin', 'trace': 'builtin', 
'exit': 'builtin', 'print_string': 'builtin', 'print_endline': 'builtin', - 'true': 'atom', - 'false': 'atom', - 'raise': 'keyword' + + 'int': 'type', + 'float': 'type', + 'bool': 'type', + 'char': 'type', + 'string': 'type', + 'unit': 'type', + + // Modules + 'List': 'builtin' } }); @@ -147,18 +228,21 @@ CodeMirror.defineMIME('text/x-fsharp', { name: 'mllike', extraWords: { 'abstract': 'keyword', - 'as': 'keyword', 'assert': 'keyword', 'base': 'keyword', + 'begin': 'keyword', 'class': 'keyword', 'default': 'keyword', 'delegate': 'keyword', + 'do!': 'keyword', + 'done': 'keyword', 'downcast': 'keyword', 'downto': 'keyword', 'elif': 'keyword', - 'exception': 'keyword', 'extern': 'keyword', 'finally': 'keyword', + 'for': 'keyword', + 'function': 'keyword', 'global': 'keyword', 'inherit': 'keyword', 'inline': 'keyword', @@ -166,38 +250,108 @@ CodeMirror.defineMIME('text/x-fsharp', { 'internal': 'keyword', 'lazy': 'keyword', 'let!': 'keyword', - 'member' : 'keyword', + 'match': 'keyword', + 'member': 'keyword', 'module': 'keyword', + 'mutable': 'keyword', 'namespace': 'keyword', 'new': 'keyword', 'null': 'keyword', 'override': 'keyword', 'private': 'keyword', 'public': 'keyword', + 'return!': 'keyword', 'return': 'keyword', - 'return!': 'keyword', 'select': 'keyword', 'static': 'keyword', - 'struct': 'keyword', + 'to': 'keyword', + 'try': 'keyword', 'upcast': 'keyword', - 'use': 'keyword', 'use!': 'keyword', - 'val': 'keyword', + 'use': 'keyword', + 'void': 'keyword', 'when': 'keyword', + 'yield!': 'keyword', 'yield': 'keyword', - 'yield!': 'keyword', + // Reserved words + 'atomic': 'keyword', + 'break': 'keyword', + 'checked': 'keyword', + 'component': 'keyword', + 'const': 'keyword', + 'constraint': 'keyword', + 'constructor': 'keyword', + 'continue': 'keyword', + 'eager': 'keyword', + 'event': 'keyword', + 'external': 'keyword', + 'fixed': 'keyword', + 'method': 'keyword', + 'mixin': 'keyword', + 'object': 'keyword', + 'parallel': 'keyword', + 'process': 'keyword', + 'protected': 'keyword', + 'pure': 'keyword', + 'sealed': 'keyword', + 'tailcall': 'keyword', + 'trait': 'keyword', + 'virtual': 'keyword', + 'volatile': 'keyword', + + // builtins 'List': 'builtin', 'Seq': 'builtin', 'Map': 'builtin', 'Set': 'builtin', + 'Option': 'builtin', 'int': 'builtin', 'string': 'builtin', - 'raise': 'builtin', - 'failwith': 'builtin', 'not': 'builtin', 'true': 'builtin', - 'false': 'builtin' + 'false': 'builtin', + + 'raise': 'builtin', + 'failwith': 'builtin' + }, + slashComments: true +}); + + +CodeMirror.defineMIME('text/x-sml', { + name: 'mllike', + extraWords: { + 'abstype': 'keyword', + 'and': 'keyword', + 'andalso': 'keyword', + 'case': 'keyword', + 'datatype': 'keyword', + 'fn': 'keyword', + 'handle': 'keyword', + 'infix': 'keyword', + 'infixr': 'keyword', + 'local': 'keyword', + 'nonfix': 'keyword', + 'op': 'keyword', + 'orelse': 'keyword', + 'raise': 'keyword', + 'withtype': 'keyword', + 'eqtype': 'keyword', + 'sharing': 'keyword', + 'sig': 'keyword', + 'signature': 'keyword', + 'structure': 'keyword', + 'where': 'keyword', + 'true': 'keyword', + 'false': 'keyword', + + // types + 'int': 'builtin', + 'real': 'builtin', + 'string': 'builtin', + 'char': 'builtin', + 'bool': 'builtin' }, slashComments: true }); diff --git a/rhodecode/public/js/mode/modelica/modelica.js b/rhodecode/public/js/mode/modelica/modelica.js --- a/rhodecode/public/js/mode/modelica/modelica.js +++ b/rhodecode/public/js/mode/modelica/modelica.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// 
Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE // Modelica support for CodeMirror, copyright (c) by Lennart Ochel diff --git a/rhodecode/public/js/mode/mscgen/mscgen.js b/rhodecode/public/js/mode/mscgen/mscgen.js --- a/rhodecode/public/js/mode/mscgen/mscgen.js +++ b/rhodecode/public/js/mode/mscgen/mscgen.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE // mode(s) for the sequence chart dsl's mscgen, xù and msgenny // For more information on mscgen, see the site of the original author: @@ -23,6 +23,7 @@ mscgen: { "keywords" : ["msc"], "options" : ["hscale", "width", "arcgradient", "wordwraparcs"], + "constants" : ["true", "false", "on", "off"], "attributes" : ["label", "idurl", "id", "url", "linecolor", "linecolour", "textcolor", "textcolour", "textbgcolor", "textbgcolour", "arclinecolor", "arclinecolour", "arctextcolor", "arctextcolour", "arctextbgcolor", "arctextbgcolour", "arcskip"], "brackets" : ["\\{", "\\}"], // [ and ] are brackets too, but these get handled in with lists "arcsWords" : ["note", "abox", "rbox", "box"], @@ -31,9 +32,10 @@ "operators" : ["="] }, xu: { - "keywords" : ["msc"], - "options" : ["hscale", "width", "arcgradient", "wordwraparcs", "watermark"], - "attributes" : ["label", "idurl", "id", "url", "linecolor", "linecolour", "textcolor", "textcolour", "textbgcolor", "textbgcolour", "arclinecolor", "arclinecolour", "arctextcolor", "arctextcolour", "arctextbgcolor", "arctextbgcolour", "arcskip"], + "keywords" : ["msc", "xu"], + "options" : ["hscale", "width", "arcgradient", "wordwraparcs", "wordwrapentities", "watermark"], + "constants" : ["true", "false", "on", "off", "auto"], + "attributes" : ["label", "idurl", "id", "url", "linecolor", "linecolour", "textcolor", "textcolour", "textbgcolor", "textbgcolour", "arclinecolor", "arclinecolour", "arctextcolor", "arctextcolour", "arctextbgcolor", "arctextbgcolour", "arcskip", "title", "deactivate", "activate", "activation"], "brackets" : ["\\{", "\\}"], // [ and ] are brackets too, but these get handled in with lists "arcsWords" : ["note", "abox", "rbox", "box", "alt", "else", "opt", "break", "par", "seq", "strict", "neg", "critical", "ignore", "consider", "assert", "loop", "ref", "exc"], "arcsOthers" : ["\\|\\|\\|", "\\.\\.\\.", "---", "--", "<->", "==", "<<=>>", "<=>", "\\.\\.", "<<>>", "::", "<:>", "->", "=>>", "=>", ">>", ":>", "<-", "<<=", "<=", "<<", "<:", "x-", "-x"], @@ -42,7 +44,8 @@ }, msgenny: { "keywords" : null, - "options" : ["hscale", "width", "arcgradient", "wordwraparcs", "watermark"], + "options" : ["hscale", "width", "arcgradient", "wordwraparcs", "wordwrapentities", "watermark"], + "constants" : ["true", "false", "on", "off", "auto"], "attributes" : null, "brackets" : ["\\{", "\\}"], "arcsWords" : ["note", "abox", "rbox", "box", "alt", "else", "opt", "break", "par", "seq", "strict", "neg", "critical", "ignore", "consider", "assert", "loop", "ref", "exc"], @@ -146,6 +149,9 @@ if (!!pConfig.operators && pStream.match(wordRegexp(pConfig.operators), true, true)) return "operator"; + if (!!pConfig.constants && pStream.match(wordRegexp(pConfig.constants), true, true)) + return "variable"; + /* attribute lists */ if (!pConfig.inAttributeList && !!pConfig.attributes && pStream.match(/\[/, true, true)) { pConfig.inAttributeList = true; diff --git 
a/rhodecode/public/js/mode/mscgen/mscgen_test.js b/rhodecode/public/js/mode/mscgen/mscgen_test.js --- a/rhodecode/public/js/mode/mscgen/mscgen_test.js +++ b/rhodecode/public/js/mode/mscgen/mscgen_test.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function() { var mode = CodeMirror.getMode({indentUnit: 2}, "mscgen"); @@ -26,9 +26,18 @@ MT("xù/ msgenny keywords classify as 'base'", "[base watermark]", + "[base wordwrapentities]", "[base alt loop opt ref else break par seq assert]" ); + MT("xù/ msgenny constants classify as 'base'", + "[base auto]" + ); + + MT("mscgen constants classify as 'variable'", + "[variable true]", "[variable false]", "[variable on]", "[variable off]" + ); + MT("mscgen options classify as keyword", "[keyword hscale]", "[keyword width]", "[keyword arcgradient]", "[keyword wordwraparcs]" ); @@ -63,7 +72,7 @@ MT("a typical program", "[comment # typical mscgen program]", "[keyword msc][base ][bracket {]", - "[keyword wordwraparcs][operator =][string \"true\"][base , ][keyword hscale][operator =][string \"0.8\"][keyword arcgradient][operator =][base 30;]", + "[keyword wordwraparcs][operator =][variable true][base , ][keyword hscale][operator =][string \"0.8\"][base , ][keyword arcgradient][operator =][base 30;]", "[base a][bracket [[][attribute label][operator =][string \"Entity A\"][bracket ]]][base ,]", "[base b][bracket [[][attribute label][operator =][string \"Entity B\"][bracket ]]][base ,]", "[base c][bracket [[][attribute label][operator =][string \"Entity C\"][bracket ]]][base ;]", diff --git a/rhodecode/public/js/mode/mscgen/msgenny_test.js b/rhodecode/public/js/mode/mscgen/msgenny_test.js --- a/rhodecode/public/js/mode/mscgen/msgenny_test.js +++ b/rhodecode/public/js/mode/mscgen/msgenny_test.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function() { var mode = CodeMirror.getMode({indentUnit: 2}, "text/x-msgenny"); @@ -20,9 +20,15 @@ MT("xù/ msgenny keywords classify as 'keyword'", "[keyword watermark]", + "[keyword wordwrapentities]", "[keyword alt]","[keyword loop]","[keyword opt]","[keyword ref]","[keyword else]","[keyword break]","[keyword par]","[keyword seq]","[keyword assert]" ); + MT("xù/ msgenny constants classify as 'variable'", + "[variable auto]", + "[variable true]", "[variable false]", "[variable on]", "[variable off]" + ); + MT("mscgen options classify as keyword", "[keyword hscale]", "[keyword width]", "[keyword arcgradient]", "[keyword wordwraparcs]" ); @@ -56,7 +62,7 @@ MT("a typical program", "[comment # typical msgenny program]", - "[keyword wordwraparcs][operator =][string \"true\"][base , ][keyword hscale][operator =][string \"0.8\"][base , ][keyword arcgradient][operator =][base 30;]", + "[keyword wordwraparcs][operator =][variable true][base , ][keyword hscale][operator =][string \"0.8\"][base , ][keyword arcgradient][operator =][base 30;]", "[base a : ][string \"Entity A\"][base ,]", "[base b : Entity B,]", "[base c : Entity C;]", diff --git a/rhodecode/public/js/mode/mscgen/xu_test.js b/rhodecode/public/js/mode/mscgen/xu_test.js --- a/rhodecode/public/js/mode/mscgen/xu_test.js +++ b/rhodecode/public/js/mode/mscgen/xu_test.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// 
Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function() { var mode = CodeMirror.getMode({indentUnit: 2}, "text/x-xu"); @@ -9,7 +9,13 @@ "[keyword msc][bracket {]", "[base ]", "[bracket }]" - ); + ); + + MT("empty chart", + "[keyword xu][bracket {]", + "[base ]", + "[bracket }]" + ); MT("comments", "[comment // a single line comment]", @@ -29,6 +35,11 @@ "[keyword alt]","[keyword loop]","[keyword opt]","[keyword ref]","[keyword else]","[keyword break]","[keyword par]","[keyword seq]","[keyword assert]" ); + MT("xù/ msgenny constants classify as 'variable'", + "[variable auto]", + "[variable true]", "[variable false]", "[variable on]", "[variable off]" + ); + MT("mscgen options classify as keyword", "[keyword hscale]", "[keyword width]", "[keyword arcgradient]", "[keyword wordwraparcs]" ); @@ -49,7 +60,8 @@ "[attribute id]","[attribute url]","[attribute idurl]", "[attribute linecolor]","[attribute linecolour]","[attribute textcolor]","[attribute textcolour]","[attribute textbgcolor]","[attribute textbgcolour]", "[attribute arclinecolor]","[attribute arclinecolour]","[attribute arctextcolor]","[attribute arctextcolour]","[attribute arctextbgcolor]","[attribute arctextbgcolour]", - "[attribute arcskip][bracket ]]]" + "[attribute arcskip]","[attribute title]", + "[attribute activate]","[attribute deactivate]","[attribute activation][bracket ]]]" ); MT("outside an attribute list, attributes classify as base", @@ -57,18 +69,18 @@ "[base id]","[base url]","[base idurl]", "[base linecolor]","[base linecolour]","[base textcolor]","[base textcolour]","[base textbgcolor]","[base textbgcolour]", "[base arclinecolor]","[base arclinecolour]","[base arctextcolor]","[base arctextcolour]","[base arctextbgcolor]","[base arctextbgcolour]", - "[base arcskip]" + "[base arcskip]", "[base title]" ); MT("a typical program", - "[comment # typical mscgen program]", - "[keyword msc][base ][bracket {]", - "[keyword wordwraparcs][operator =][string \"true\"][keyword hscale][operator =][string \"0.8\"][keyword arcgradient][operator =][base 30;]", + "[comment # typical xu program]", + "[keyword xu][base ][bracket {]", + "[keyword wordwraparcs][operator =][string \"true\"][base , ][keyword hscale][operator =][string \"0.8\"][base , ][keyword arcgradient][operator =][base 30, ][keyword width][operator =][variable auto][base ;]", "[base a][bracket [[][attribute label][operator =][string \"Entity A\"][bracket ]]][base ,]", "[base b][bracket [[][attribute label][operator =][string \"Entity B\"][bracket ]]][base ,]", "[base c][bracket [[][attribute label][operator =][string \"Entity C\"][bracket ]]][base ;]", "[base a ][keyword =>>][base b][bracket [[][attribute label][operator =][string \"Hello entity B\"][bracket ]]][base ;]", - "[base a ][keyword <<][base b][bracket [[][attribute label][operator =][string \"Here's an answer dude!\"][bracket ]]][base ;]", + "[base a ][keyword <<][base b][bracket [[][attribute label][operator =][string \"Here's an answer dude!\"][base , ][attribute title][operator =][string \"This is a title for this message\"][bracket ]]][base ;]", "[base c ][keyword :>][base *][bracket [[][attribute label][operator =][string \"What about me?\"][base , ][attribute textcolor][operator =][base red][bracket ]]][base ;]", "[bracket }]" ); diff --git a/rhodecode/public/js/mode/mumps/mumps.js b/rhodecode/public/js/mode/mumps/mumps.js --- a/rhodecode/public/js/mode/mumps/mumps.js +++ b/rhodecode/public/js/mode/mumps/mumps.js @@ 
-1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE /* This MUMPS Language script was constructed using vbscript.js as a template. diff --git a/rhodecode/public/js/mode/nginx/nginx.js b/rhodecode/public/js/mode/nginx/nginx.js --- a/rhodecode/public/js/mode/nginx/nginx.js +++ b/rhodecode/public/js/mode/nginx/nginx.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS diff --git a/rhodecode/public/js/mode/nsis/nsis.js b/rhodecode/public/js/mode/nsis/nsis.js --- a/rhodecode/public/js/mode/nsis/nsis.js +++ b/rhodecode/public/js/mode/nsis/nsis.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE // Author: Jan T. Sott (http://github.com/idleberg) @@ -24,20 +24,20 @@ CodeMirror.defineSimpleMode("nsis",{ { regex: /`(?:[^\\`]|\\.)*`?/, token: "string" }, // Compile Time Commands - {regex: /(?:\!(include|addincludedir|addplugindir|appendfile|cd|delfile|echo|error|execute|packhdr|finalize|getdllversion|system|tempfile|warning|verbose|define|undef|insertmacro|makensis|searchparse|searchreplace))\b/, token: "keyword"}, + {regex: /^\s*(?:\!(include|addincludedir|addplugindir|appendfile|cd|delfile|echo|error|execute|packhdr|pragma|finalize|getdllversion|gettlbversion|system|tempfile|warning|verbose|define|undef|insertmacro|macro|macroend|makensis|searchparse|searchreplace))\b/, token: "keyword"}, // Conditional Compilation - {regex: /(?:\!(if(?:n?def)?|ifmacron?def|macro))\b/, token: "keyword", indent: true}, - {regex: /(?:\!(else|endif|macroend))\b/, token: "keyword", dedent: true}, + {regex: /^\s*(?:\!(if(?:n?def)?|ifmacron?def|macro))\b/, token: "keyword", indent: true}, + {regex: /^\s*(?:\!(else|endif|macroend))\b/, token: "keyword", dedent: true}, // Runtime Commands - {regex: 
/\b(?:Abort|AddBrandingImage|AddSize|AllowRootDirInstall|AllowSkipFiles|AutoCloseWindow|BGFont|BGGradient|BrandingText|BringToFront|Call|CallInstDLL|Caption|ChangeUI|CheckBitmap|ClearErrors|CompletedText|ComponentText|CopyFiles|CRCCheck|CreateDirectory|CreateFont|CreateShortCut|Delete|DeleteINISec|DeleteINIStr|DeleteRegKey|DeleteRegValue|DetailPrint|DetailsButtonText|DirText|DirVar|DirVerify|EnableWindow|EnumRegKey|EnumRegValue|Exch|Exec|ExecShell|ExecWait|ExpandEnvStrings|File|FileBufSize|FileClose|FileErrorText|FileOpen|FileRead|FileReadByte|FileReadUTF16LE|FileReadWord|FileWriteUTF16LE|FileSeek|FileWrite|FileWriteByte|FileWriteWord|FindClose|FindFirst|FindNext|FindWindow|FlushINI|GetCurInstType|GetCurrentAddress|GetDlgItem|GetDLLVersion|GetDLLVersionLocal|GetErrorLevel|GetFileTime|GetFileTimeLocal|GetFullPathName|GetFunctionAddress|GetInstDirError|GetLabelAddress|GetTempFileName|Goto|HideWindow|Icon|IfAbort|IfErrors|IfFileExists|IfRebootFlag|IfSilent|InitPluginsDir|InstallButtonText|InstallColors|InstallDir|InstallDirRegKey|InstProgressFlags|InstType|InstTypeGetText|InstTypeSetText|IntCmp|IntCmpU|IntFmt|IntOp|IsWindow|LangString|LicenseBkColor|LicenseData|LicenseForceSelection|LicenseLangString|LicenseText|LoadLanguageFile|LockWindow|LogSet|LogText|ManifestDPIAware|ManifestSupportedOS|MessageBox|MiscButtonText|Name|Nop|OutFile|Page|PageCallbacks|Pop|Push|Quit|ReadEnvStr|ReadINIStr|ReadRegDWORD|ReadRegStr|Reboot|RegDLL|Rename|RequestExecutionLevel|ReserveFile|Return|RMDir|SearchPath|SectionGetFlags|SectionGetInstTypes|SectionGetSize|SectionGetText|SectionIn|SectionSetFlags|SectionSetInstTypes|SectionSetSize|SectionSetText|SendMessage|SetAutoClose|SetBrandingImage|SetCompress|SetCompressor|SetCompressorDictSize|SetCtlColors|SetCurInstType|SetDatablockOptimize|SetDateSave|SetDetailsPrint|SetDetailsView|SetErrorLevel|SetErrors|SetFileAttributes|SetFont|SetOutPath|SetOverwrite|SetPluginUnload|SetRebootFlag|SetRegView|SetShellVarContext|SetSilent|ShowInstDetails|ShowUninstDetails|ShowWindow|SilentInstall|SilentUnInstall|Sleep|SpaceTexts|StrCmp|StrCmpS|StrCpy|StrLen|SubCaption|Unicode|UninstallButtonText|UninstallCaption|UninstallIcon|UninstallSubCaption|UninstallText|UninstPage|UnRegDLL|Var|VIAddVersionKey|VIFileVersion|VIProductVersion|WindowIcon|WriteINIStr|WriteRegBin|WriteRegDWORD|WriteRegExpandStr|WriteRegStr|WriteUninstaller|XPStyle)\b/, token: "keyword"}, - {regex: /\b(?:Function|PageEx|Section(?:Group)?)\b/, token: "keyword", indent: true}, - {regex: /\b(?:(Function|PageEx|Section(?:Group)?)End)\b/, token: "keyword", dedent: true}, + {regex: 
/^\s*(?:Abort|AddBrandingImage|AddSize|AllowRootDirInstall|AllowSkipFiles|AutoCloseWindow|BGFont|BGGradient|BrandingText|BringToFront|Call|CallInstDLL|Caption|ChangeUI|CheckBitmap|ClearErrors|CompletedText|ComponentText|CopyFiles|CRCCheck|CreateDirectory|CreateFont|CreateShortCut|Delete|DeleteINISec|DeleteINIStr|DeleteRegKey|DeleteRegValue|DetailPrint|DetailsButtonText|DirText|DirVar|DirVerify|EnableWindow|EnumRegKey|EnumRegValue|Exch|Exec|ExecShell|ExecShellWait|ExecWait|ExpandEnvStrings|File|FileBufSize|FileClose|FileErrorText|FileOpen|FileRead|FileReadByte|FileReadUTF16LE|FileReadWord|FileWriteUTF16LE|FileSeek|FileWrite|FileWriteByte|FileWriteWord|FindClose|FindFirst|FindNext|FindWindow|FlushINI|GetCurInstType|GetCurrentAddress|GetDlgItem|GetDLLVersion|GetDLLVersionLocal|GetErrorLevel|GetFileTime|GetFileTimeLocal|GetFullPathName|GetFunctionAddress|GetInstDirError|GetLabelAddress|GetTempFileName|Goto|HideWindow|Icon|IfAbort|IfErrors|IfFileExists|IfRebootFlag|IfSilent|InitPluginsDir|InstallButtonText|InstallColors|InstallDir|InstallDirRegKey|InstProgressFlags|InstType|InstTypeGetText|InstTypeSetText|Int64Cmp|Int64CmpU|Int64Fmt|IntCmp|IntCmpU|IntFmt|IntOp|IntPtrCmp|IntPtrCmpU|IntPtrOp|IsWindow|LangString|LicenseBkColor|LicenseData|LicenseForceSelection|LicenseLangString|LicenseText|LoadLanguageFile|LockWindow|LogSet|LogText|ManifestDPIAware|ManifestSupportedOS|MessageBox|MiscButtonText|Name|Nop|OutFile|Page|PageCallbacks|PEDllCharacteristics|PESubsysVer|Pop|Push|Quit|ReadEnvStr|ReadINIStr|ReadRegDWORD|ReadRegStr|Reboot|RegDLL|Rename|RequestExecutionLevel|ReserveFile|Return|RMDir|SearchPath|SectionGetFlags|SectionGetInstTypes|SectionGetSize|SectionGetText|SectionIn|SectionSetFlags|SectionSetInstTypes|SectionSetSize|SectionSetText|SendMessage|SetAutoClose|SetBrandingImage|SetCompress|SetCompressor|SetCompressorDictSize|SetCtlColors|SetCurInstType|SetDatablockOptimize|SetDateSave|SetDetailsPrint|SetDetailsView|SetErrorLevel|SetErrors|SetFileAttributes|SetFont|SetOutPath|SetOverwrite|SetRebootFlag|SetRegView|SetShellVarContext|SetSilent|ShowInstDetails|ShowUninstDetails|ShowWindow|SilentInstall|SilentUnInstall|Sleep|SpaceTexts|StrCmp|StrCmpS|StrCpy|StrLen|SubCaption|Unicode|UninstallButtonText|UninstallCaption|UninstallIcon|UninstallSubCaption|UninstallText|UninstPage|UnRegDLL|Var|VIAddVersionKey|VIFileVersion|VIProductVersion|WindowIcon|WriteINIStr|WriteRegBin|WriteRegDWORD|WriteRegExpandStr|WriteRegMultiStr|WriteRegNone|WriteRegStr|WriteUninstaller|XPStyle)\b/, token: "keyword"}, + {regex: /^\s*(?:Function|PageEx|Section(?:Group)?)\b/, token: "keyword", indent: true}, + {regex: /^\s*(?:(Function|PageEx|Section(?:Group)?)End)\b/, token: "keyword", dedent: true}, // Command Options - {regex: 
/\b(?:ARCHIVE|FILE_ATTRIBUTE_ARCHIVE|FILE_ATTRIBUTE_HIDDEN|FILE_ATTRIBUTE_NORMAL|FILE_ATTRIBUTE_OFFLINE|FILE_ATTRIBUTE_READONLY|FILE_ATTRIBUTE_SYSTEM|FILE_ATTRIBUTE_TEMPORARY|HIDDEN|HKCC|HKCR|HKCU|HKDD|HKEY_CLASSES_ROOT|HKEY_CURRENT_CONFIG|HKEY_CURRENT_USER|HKEY_DYN_DATA|HKEY_LOCAL_MACHINE|HKEY_PERFORMANCE_DATA|HKEY_USERS|HKLM|HKPD|HKU|IDABORT|IDCANCEL|IDD_DIR|IDD_INST|IDD_INSTFILES|IDD_LICENSE|IDD_SELCOM|IDD_UNINST|IDD_VERIFY|IDIGNORE|IDNO|IDOK|IDRETRY|IDYES|MB_ABORTRETRYIGNORE|MB_DEFBUTTON1|MB_DEFBUTTON2|MB_DEFBUTTON3|MB_DEFBUTTON4|MB_ICONEXCLAMATION|MB_ICONINFORMATION|MB_ICONQUESTION|MB_ICONSTOP|MB_OK|MB_OKCANCEL|MB_RETRYCANCEL|MB_RIGHT|MB_RTLREADING|MB_SETFOREGROUND|MB_TOPMOST|MB_USERICON|MB_YESNO|MB_YESNOCANCEL|NORMAL|OFFLINE|READONLY|SHCTX|SHELL_CONTEXT|SW_HIDE|SW_SHOWDEFAULT|SW_SHOWMAXIMIZED|SW_SHOWMINIMIZED|SW_SHOWNORMAL|SYSTEM|TEMPORARY)\b/, token: "atom"}, - {regex: /\b(?:admin|all|auto|both|bottom|bzip2|components|current|custom|directory|force|hide|highest|ifdiff|ifnewer|instfiles|lastused|leave|left|license|listonly|lzma|nevershow|none|normal|notset|right|show|silent|silentlog|textonly|top|try|un\.components|un\.custom|un\.directory|un\.instfiles|un\.license|uninstConfirm|user|Win10|Win7|Win8|WinVista|zlib)\b/, token: "builtin"}, + {regex: /\b(?:ARCHIVE|FILE_ATTRIBUTE_ARCHIVE|FILE_ATTRIBUTE_HIDDEN|FILE_ATTRIBUTE_NORMAL|FILE_ATTRIBUTE_OFFLINE|FILE_ATTRIBUTE_READONLY|FILE_ATTRIBUTE_SYSTEM|FILE_ATTRIBUTE_TEMPORARY|HIDDEN|HKCC|HKCR(32|64)?|HKCU(32|64)?|HKDD|HKEY_CLASSES_ROOT|HKEY_CURRENT_CONFIG|HKEY_CURRENT_USER|HKEY_DYN_DATA|HKEY_LOCAL_MACHINE|HKEY_PERFORMANCE_DATA|HKEY_USERS|HKLM(32|64)?|HKPD|HKU|IDABORT|IDCANCEL|IDD_DIR|IDD_INST|IDD_INSTFILES|IDD_LICENSE|IDD_SELCOM|IDD_UNINST|IDD_VERIFY|IDIGNORE|IDNO|IDOK|IDRETRY|IDYES|MB_ABORTRETRYIGNORE|MB_DEFBUTTON1|MB_DEFBUTTON2|MB_DEFBUTTON3|MB_DEFBUTTON4|MB_ICONEXCLAMATION|MB_ICONINFORMATION|MB_ICONQUESTION|MB_ICONSTOP|MB_OK|MB_OKCANCEL|MB_RETRYCANCEL|MB_RIGHT|MB_RTLREADING|MB_SETFOREGROUND|MB_TOPMOST|MB_USERICON|MB_YESNO|MB_YESNOCANCEL|NORMAL|OFFLINE|READONLY|SHCTX|SHELL_CONTEXT|SW_HIDE|SW_SHOWDEFAULT|SW_SHOWMAXIMIZED|SW_SHOWMINIMIZED|SW_SHOWNORMAL|SYSTEM|TEMPORARY)\b/, token: "atom"}, + {regex: /\b(?:admin|all|auto|both|bottom|bzip2|components|current|custom|directory|false|force|hide|highest|ifdiff|ifnewer|instfiles|lastused|leave|left|license|listonly|lzma|nevershow|none|normal|notset|off|on|right|show|silent|silentlog|textonly|top|true|try|un\.components|un\.custom|un\.directory|un\.instfiles|un\.license|uninstConfirm|user|Win10|Win7|Win8|WinVista|zlib)\b/, token: "builtin"}, // LogicLib.nsh {regex: /\$\{(?:And(?:If(?:Not)?|Unless)|Break|Case(?:Else)?|Continue|Default|Do(?:Until|While)?|Else(?:If(?:Not)?|Unless)?|End(?:If|Select|Switch)|Exit(?:Do|For|While)|For(?:Each)?|If(?:Cmd|Not(?:Then)?|Then)?|Loop(?:Until|While)?|Or(?:If(?:Not)?|Unless)|Select|Switch|Unless|While)\}/, token: "variable-2", indent: true}, @@ -71,13 +71,13 @@ CodeMirror.defineSimpleMode("nsis",{ {regex: /[-+\/*=<>!]+/, token: "operator"}, // Variable - {regex: /\$[\w]+/, token: "variable"}, + {regex: /\$\w+/, token: "variable"}, // Constant - {regex: /\${[\w]+}/,token: "variable-2"}, + {regex: /\${[\w\.:-]+}/, token: "variable-2"}, // Language String - {regex: /\$\([\w]+\)/,token: "variable-3"} + {regex: /\$\([\w\.:-]+\)/, token: "variable-3"} ], comment: [ {regex: /.*?\*\//, token: "comment", next: "start"}, diff --git a/rhodecode/public/js/mode/ntriples/ntriples.js b/rhodecode/public/js/mode/ntriples/ntriples.js --- 
a/rhodecode/public/js/mode/ntriples/ntriples.js +++ b/rhodecode/public/js/mode/ntriples/ntriples.js @@ -1,11 +1,11 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE /********************************************************** * This script provides syntax highlighting support for -* the Ntriples format. -* Ntriples format specification: -* http://www.w3.org/TR/rdf-testcases/#ntriples +* the N-Triples format. +* N-Triples format specification: +* https://www.w3.org/TR/n-triples/ ***********************************************************/ /* @@ -181,6 +181,15 @@ CodeMirror.defineMode("ntriples", functi }; }); +// define the registered Media Type for n-triples: +// https://www.w3.org/TR/n-triples/#n-triples-mediatype +CodeMirror.defineMIME("application/n-triples", "ntriples"); + +// N-Quads is based on the N-Triples format (so same highlighting works) +// https://www.w3.org/TR/n-quads/ +CodeMirror.defineMIME("application/n-quads", "ntriples"); + +// previously used, though technically incorrect media type for n-triples CodeMirror.defineMIME("text/n-triples", "ntriples"); }); diff --git a/rhodecode/public/js/mode/octave/octave.js b/rhodecode/public/js/mode/octave/octave.js --- a/rhodecode/public/js/mode/octave/octave.js +++ b/rhodecode/public/js/mode/octave/octave.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS @@ -17,7 +17,7 @@ CodeMirror.defineMode("octave", function } var singleOperators = new RegExp("^[\\+\\-\\*/&|\\^~<>!@'\\\\]"); - var singleDelimiters = new RegExp('^[\\(\\[\\{\\},:=;]'); + var singleDelimiters = new RegExp('^[\\(\\[\\{\\},:=;\\.]'); var doubleOperators = new RegExp("^((==)|(~=)|(<=)|(>=)|(<<)|(>>)|(\\.[\\+\\-\\*/\\^\\\\]))"); var doubleDelimiters = new RegExp("^((!=)|(\\+=)|(\\-=)|(\\*=)|(/=)|(&=)|(\\|=)|(\\^=))"); var tripleDelimiters = new RegExp("^((>>=)|(<<=))"); @@ -90,8 +90,8 @@ CodeMirror.defineMode("octave", function if (stream.match(wordRegexp(['nan','NaN','inf','Inf']))) { return 'number'; }; // Handle Strings - if (stream.match(/^"([^"]|(""))*"/)) { return 'string'; } ; - if (stream.match(/^'([^']|(''))*'/)) { return 'string'; } ; + var m = stream.match(/^"(?:[^"]|"")*("|$)/) || stream.match(/^'(?:[^']|'')*('|$)/) + if (m) { return m[1] ? 
'string' : "string error"; } // Handle words if (stream.match(keywords)) { return 'keyword'; } ; @@ -126,7 +126,11 @@ CodeMirror.defineMode("octave", function state.tokenize = tokenTranspose; } return style; - } + }, + + lineComment: '%', + + fold: 'indent' }; }); diff --git a/rhodecode/public/js/mode/oz/oz.js b/rhodecode/public/js/mode/oz/oz.js --- a/rhodecode/public/js/mode/oz/oz.js +++ b/rhodecode/public/js/mode/oz/oz.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS @@ -27,7 +27,7 @@ CodeMirror.defineMode("oz", function (co var atoms = wordRegexp(["true", "false", "nil", "unit"]); var commonKeywords = wordRegexp(["andthen", "at", "attr", "declare", "feat", "from", "lex", - "mod", "mode", "orelse", "parser", "prod", "prop", "scanner", "self", "syn", "token"]); + "mod", "div", "mode", "orelse", "parser", "prod", "prop", "scanner", "self", "syn", "token"]); var openingKeywords = wordRegexp(["local", "proc", "fun", "case", "class", "if", "cond", "or", "dis", "choice", "not", "thread", "try", "raise", "lock", "for", "suchthat", "meth", "functor"]); var middleKeywords = wordRegexp(middle); diff --git a/rhodecode/public/js/mode/pascal/pascal.js b/rhodecode/public/js/mode/pascal/pascal.js --- a/rhodecode/public/js/mode/pascal/pascal.js +++ b/rhodecode/public/js/mode/pascal/pascal.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS @@ -17,9 +17,21 @@ CodeMirror.defineMode("pascal", function for (var i = 0; i < words.length; ++i) obj[words[i]] = true; return obj; } - var keywords = words("and array begin case const div do downto else end file for forward integer " + - "boolean char function goto if in label mod nil not of or packed procedure " + - "program record repeat set string then to type until var while with"); + var keywords = words( + "absolute and array asm begin case const constructor destructor div do " + + "downto else end file for function goto if implementation in inherited " + + "inline interface label mod nil not object of operator or packed procedure " + + "program record reintroduce repeat self set shl shr string then to type " + + "unit until uses var while with xor as class dispinterface except exports " + + "finalization finally initialization inline is library on out packed " + + "property raise resourcestring threadvar try absolute abstract alias " + + "assembler bitpacked break cdecl continue cppdecl cvar default deprecated " + + "dynamic enumerator experimental export external far far16 forward generic " + + "helper implements index interrupt iocheck local message name near " + + "nodefault noreturn nostackframe oldfpccall otherwise overload override " + + "pascal platform private protected public published read register " + + "reintroduce result safecall saveregisters softfloat specialize static " + + "stdcall stored strict unaligned unimplemented varargs virtual write"); var atoms = {"null": true}; var isOperatorChar = /[+\-*&%=<>!?|\/]/; diff --git a/rhodecode/public/js/mode/pegjs/pegjs.js b/rhodecode/public/js/mode/pegjs/pegjs.js --- 
a/rhodecode/public/js/mode/pegjs/pegjs.js +++ b/rhodecode/public/js/mode/pegjs/pegjs.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS @@ -24,7 +24,7 @@ CodeMirror.defineMode("pegjs", function inString: false, stringType: null, inComment: false, - inChracterClass: false, + inCharacterClass: false, braced: 0, lhs: true, localState: null @@ -66,22 +66,22 @@ CodeMirror.defineMode("pegjs", function } } return "comment"; - } else if (state.inChracterClass) { - while (state.inChracterClass && !stream.eol()) { + } else if (state.inCharacterClass) { + while (state.inCharacterClass && !stream.eol()) { if (!(stream.match(/^[^\]\\]+/) || stream.match(/^\\./))) { - state.inChracterClass = false; + state.inCharacterClass = false; } } } else if (stream.peek() === '[') { stream.next(); - state.inChracterClass = true; + state.inCharacterClass = true; return 'bracket'; } else if (stream.match(/^\/\//)) { stream.skipToEnd(); return "comment"; } else if (state.braced || stream.peek() === '{') { if (state.localState === null) { - state.localState = jsMode.startState(); + state.localState = CodeMirror.startState(jsMode); } var token = jsMode.token(stream, state.localState); var text = stream.current(); diff --git a/rhodecode/public/js/mode/perl/perl.js b/rhodecode/public/js/mode/perl/perl.js --- a/rhodecode/public/js/mode/perl/perl.js +++ b/rhodecode/public/js/mode/perl/perl.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE // CodeMirror2 mode/perl/perl.js (text/x-perl) beta 0.10 (2011-11-08) // This is a part of CodeMirror from https://github.com/sabaca/CodeMirror_mode_perl (mail@sabaca.com) @@ -268,7 +268,7 @@ CodeMirror.defineMode("perl",function(){ chmod :1, // - changes the permissions on a list of files chomp :1, // - remove a trailing record separator from a string chop :1, // - remove the last character from a string - chown :1, // - change the owership on a list of files + chown :1, // - change the ownership on a list of files chr :1, // - get character this number represents chroot :1, // - make directory new root for path lookups close :1, // - close file (or pipe or socket) handle diff --git a/rhodecode/public/js/mode/php/php.js b/rhodecode/public/js/mode/php/php.js --- a/rhodecode/public/js/mode/php/php.js +++ b/rhodecode/public/js/mode/php/php.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS @@ -86,7 +86,7 @@ "die echo empty exit eval include include_once isset list require require_once return " + "print unset __halt_compiler self static parent yield insteadof finally"; var phpAtoms = "true false null TRUE FALSE NULL __CLASS__ __DIR__ __FILE__ __LINE__ __METHOD__ __FUNCTION__ __NAMESPACE__ __TRAIT__"; - var phpBuiltin = "func_num_args func_get_arg func_get_args strlen strcmp strncmp strcasecmp strncasecmp each error_reporting define defined trigger_error user_error set_error_handler restore_error_handler get_declared_classes 
get_loaded_extensions extension_loaded get_extension_funcs debug_backtrace constant bin2hex hex2bin sleep usleep time mktime gmmktime strftime gmstrftime strtotime date gmdate getdate localtime checkdate flush wordwrap htmlspecialchars htmlentities html_entity_decode md5 md5_file crc32 getimagesize image_type_to_mime_type phpinfo phpversion phpcredits strnatcmp strnatcasecmp substr_count strspn strcspn strtok strtoupper strtolower strpos strrpos strrev hebrev hebrevc nl2br basename dirname pathinfo stripslashes stripcslashes strstr stristr strrchr str_shuffle str_word_count strcoll substr substr_replace quotemeta ucfirst ucwords strtr addslashes addcslashes rtrim str_replace str_repeat count_chars chunk_split trim ltrim strip_tags similar_text explode implode setlocale localeconv parse_str str_pad chop strchr sprintf printf vprintf vsprintf sscanf fscanf parse_url urlencode urldecode rawurlencode rawurldecode readlink linkinfo link unlink exec system escapeshellcmd escapeshellarg passthru shell_exec proc_open proc_close rand srand getrandmax mt_rand mt_srand mt_getrandmax base64_decode base64_encode abs ceil floor round is_finite is_nan is_infinite bindec hexdec octdec decbin decoct dechex base_convert number_format fmod ip2long long2ip getenv putenv getopt microtime gettimeofday getrusage uniqid quoted_printable_decode set_time_limit get_cfg_var magic_quotes_runtime set_magic_quotes_runtime get_magic_quotes_gpc get_magic_quotes_runtime import_request_variables error_log serialize unserialize memory_get_usage var_dump var_export debug_zval_dump print_r highlight_file show_source highlight_string ini_get ini_get_all ini_set ini_alter ini_restore get_include_path set_include_path restore_include_path setcookie header headers_sent connection_aborted connection_status ignore_user_abort parse_ini_file is_uploaded_file move_uploaded_file intval floatval doubleval strval gettype settype is_null is_resource is_bool is_long is_float is_int is_integer is_double is_real is_numeric is_string is_array is_object is_scalar ereg ereg_replace eregi eregi_replace split spliti join sql_regcase dl pclose popen readfile rewind rmdir umask fclose feof fgetc fgets fgetss fread fopen fpassthru ftruncate fstat fseek ftell fflush fwrite fputs mkdir rename copy tempnam tmpfile file file_get_contents file_put_contents stream_select stream_context_create stream_context_set_params stream_context_set_option stream_context_get_options stream_filter_prepend stream_filter_append fgetcsv flock get_meta_tags stream_set_write_buffer set_file_buffer set_socket_blocking stream_set_blocking socket_set_blocking stream_get_meta_data stream_register_wrapper stream_wrapper_register stream_set_timeout socket_set_timeout socket_get_status realpath fnmatch fsockopen pfsockopen pack unpack get_browser crypt opendir closedir chdir getcwd rewinddir readdir dir glob fileatime filectime filegroup fileinode filemtime fileowner fileperms filesize filetype file_exists is_writable is_writeable is_readable is_executable is_file is_dir is_link stat lstat chown touch clearstatcache mail ob_start ob_flush ob_clean ob_end_flush ob_end_clean ob_get_flush ob_get_clean ob_get_length ob_get_level ob_get_status ob_get_contents ob_implicit_flush ob_list_handlers ksort krsort natsort natcasesort asort arsort sort rsort usort uasort uksort shuffle array_walk count end prev next reset current key min max in_array array_search extract compact array_fill range array_multisort array_push array_pop array_shift array_unshift array_splice array_slice array_merge 
array_merge_recursive array_keys array_values array_count_values array_reverse array_reduce array_pad array_flip array_change_key_case array_rand array_unique array_intersect array_intersect_assoc array_diff array_diff_assoc array_sum array_filter array_map array_chunk array_key_exists pos sizeof key_exists assert assert_options version_compare ftok str_rot13 aggregate session_name session_module_name session_save_path session_id session_regenerate_id session_decode session_register session_unregister session_is_registered session_encode session_start session_destroy session_unset session_set_save_handler session_cache_limiter session_cache_expire session_set_cookie_params session_get_cookie_params session_write_close preg_match preg_match_all preg_replace preg_replace_callback preg_split preg_quote preg_grep overload ctype_alnum ctype_alpha ctype_cntrl ctype_digit ctype_lower ctype_graph ctype_print ctype_punct ctype_space ctype_upper ctype_xdigit virtual apache_request_headers apache_note apache_lookup_uri apache_child_terminate apache_setenv apache_response_headers apache_get_version getallheaders mysql_connect mysql_pconnect mysql_close mysql_select_db mysql_create_db mysql_drop_db mysql_query mysql_unbuffered_query mysql_db_query mysql_list_dbs mysql_list_tables mysql_list_fields mysql_list_processes mysql_error mysql_errno mysql_affected_rows mysql_insert_id mysql_result mysql_num_rows mysql_num_fields mysql_fetch_row mysql_fetch_array mysql_fetch_assoc mysql_fetch_object mysql_data_seek mysql_fetch_lengths mysql_fetch_field mysql_field_seek mysql_free_result mysql_field_name mysql_field_table mysql_field_len mysql_field_type mysql_field_flags mysql_escape_string mysql_real_escape_string mysql_stat mysql_thread_id mysql_client_encoding mysql_get_client_info mysql_get_host_info mysql_get_proto_info mysql_get_server_info mysql_info mysql mysql_fieldname mysql_fieldtable mysql_fieldlen mysql_fieldtype mysql_fieldflags mysql_selectdb mysql_createdb mysql_dropdb mysql_freeresult mysql_numfields mysql_numrows mysql_listdbs mysql_listtables mysql_listfields mysql_db_name mysql_dbname mysql_tablename mysql_table_name pg_connect pg_pconnect pg_close pg_connection_status pg_connection_busy pg_connection_reset pg_host pg_dbname pg_port pg_tty pg_options pg_ping pg_query pg_send_query pg_cancel_query pg_fetch_result pg_fetch_row pg_fetch_assoc pg_fetch_array pg_fetch_object pg_fetch_all pg_affected_rows pg_get_result pg_result_seek pg_result_status pg_free_result pg_last_oid pg_num_rows pg_num_fields pg_field_name pg_field_num pg_field_size pg_field_type pg_field_prtlen pg_field_is_null pg_get_notify pg_get_pid pg_result_error pg_last_error pg_last_notice pg_put_line pg_end_copy pg_copy_to pg_copy_from pg_trace pg_untrace pg_lo_create pg_lo_unlink pg_lo_open pg_lo_close pg_lo_read pg_lo_write pg_lo_read_all pg_lo_import pg_lo_export pg_lo_seek pg_lo_tell pg_escape_string pg_escape_bytea pg_unescape_bytea pg_client_encoding pg_set_client_encoding pg_meta_data pg_convert pg_insert pg_update pg_delete pg_select pg_exec pg_getlastoid pg_cmdtuples pg_errormessage pg_numrows pg_numfields pg_fieldname pg_fieldsize pg_fieldtype pg_fieldnum pg_fieldprtlen pg_fieldisnull pg_freeresult pg_result pg_loreadall pg_locreate pg_lounlink pg_loopen pg_loclose pg_loread pg_lowrite pg_loimport pg_loexport http_response_code get_declared_traits getimagesizefromstring socket_import_stream stream_set_chunk_size trait_exists header_register_callback class_uses session_status session_register_shutdown echo print global 
static exit array empty eval isset unset die include require include_once require_once json_decode json_encode json_last_error json_last_error_msg curl_close curl_copy_handle curl_errno curl_error curl_escape curl_exec curl_file_create curl_getinfo curl_init curl_multi_add_handle curl_multi_close curl_multi_exec curl_multi_getcontent curl_multi_info_read curl_multi_init curl_multi_remove_handle curl_multi_select curl_multi_setopt curl_multi_strerror curl_pause curl_reset curl_setopt_array curl_setopt curl_share_close curl_share_init curl_share_setopt curl_strerror curl_unescape curl_version mysqli_affected_rows mysqli_autocommit mysqli_change_user mysqli_character_set_name mysqli_close mysqli_commit mysqli_connect_errno mysqli_connect_error mysqli_connect mysqli_data_seek mysqli_debug mysqli_dump_debug_info mysqli_errno mysqli_error_list mysqli_error mysqli_fetch_all mysqli_fetch_array mysqli_fetch_assoc mysqli_fetch_field_direct mysqli_fetch_field mysqli_fetch_fields mysqli_fetch_lengths mysqli_fetch_object mysqli_fetch_row mysqli_field_count mysqli_field_seek mysqli_field_tell mysqli_free_result mysqli_get_charset mysqli_get_client_info mysqli_get_client_stats mysqli_get_client_version mysqli_get_connection_stats mysqli_get_host_info mysqli_get_proto_info mysqli_get_server_info mysqli_get_server_version mysqli_info mysqli_init mysqli_insert_id mysqli_kill mysqli_more_results mysqli_multi_query mysqli_next_result mysqli_num_fields mysqli_num_rows mysqli_options mysqli_ping mysqli_prepare mysqli_query mysqli_real_connect mysqli_real_escape_string mysqli_real_query mysqli_reap_async_query mysqli_refresh mysqli_rollback mysqli_select_db mysqli_set_charset mysqli_set_local_infile_default mysqli_set_local_infile_handler mysqli_sqlstate mysqli_ssl_set mysqli_stat mysqli_stmt_init mysqli_store_result mysqli_thread_id mysqli_thread_safe mysqli_use_result mysqli_warning_count"; + var phpBuiltin = "func_num_args func_get_arg func_get_args strlen strcmp strncmp strcasecmp strncasecmp each error_reporting define defined trigger_error user_error set_error_handler restore_error_handler get_declared_classes get_loaded_extensions extension_loaded get_extension_funcs debug_backtrace constant bin2hex hex2bin sleep usleep time mktime gmmktime strftime gmstrftime strtotime date gmdate getdate localtime checkdate flush wordwrap htmlspecialchars htmlentities html_entity_decode md5 md5_file crc32 getimagesize image_type_to_mime_type phpinfo phpversion phpcredits strnatcmp strnatcasecmp substr_count strspn strcspn strtok strtoupper strtolower strpos strrpos strrev hebrev hebrevc nl2br basename dirname pathinfo stripslashes stripcslashes strstr stristr strrchr str_shuffle str_word_count strcoll substr substr_replace quotemeta ucfirst ucwords strtr addslashes addcslashes rtrim str_replace str_repeat count_chars chunk_split trim ltrim strip_tags similar_text explode implode setlocale localeconv parse_str str_pad chop strchr sprintf printf vprintf vsprintf sscanf fscanf parse_url urlencode urldecode rawurlencode rawurldecode readlink linkinfo link unlink exec system escapeshellcmd escapeshellarg passthru shell_exec proc_open proc_close rand srand getrandmax mt_rand mt_srand mt_getrandmax base64_decode base64_encode abs ceil floor round is_finite is_nan is_infinite bindec hexdec octdec decbin decoct dechex base_convert number_format fmod ip2long long2ip getenv putenv getopt microtime gettimeofday getrusage uniqid quoted_printable_decode set_time_limit get_cfg_var magic_quotes_runtime set_magic_quotes_runtime 
get_magic_quotes_gpc get_magic_quotes_runtime import_request_variables error_log serialize unserialize memory_get_usage var_dump var_export debug_zval_dump print_r highlight_file show_source highlight_string ini_get ini_get_all ini_set ini_alter ini_restore get_include_path set_include_path restore_include_path setcookie header headers_sent connection_aborted connection_status ignore_user_abort parse_ini_file is_uploaded_file move_uploaded_file intval floatval doubleval strval gettype settype is_null is_resource is_bool is_long is_float is_int is_integer is_double is_real is_numeric is_string is_array is_object is_scalar ereg ereg_replace eregi eregi_replace split spliti join sql_regcase dl pclose popen readfile rewind rmdir umask fclose feof fgetc fgets fgetss fread fopen fpassthru ftruncate fstat fseek ftell fflush fwrite fputs mkdir rename copy tempnam tmpfile file file_get_contents file_put_contents stream_select stream_context_create stream_context_set_params stream_context_set_option stream_context_get_options stream_filter_prepend stream_filter_append fgetcsv flock get_meta_tags stream_set_write_buffer set_file_buffer set_socket_blocking stream_set_blocking socket_set_blocking stream_get_meta_data stream_register_wrapper stream_wrapper_register stream_set_timeout socket_set_timeout socket_get_status realpath fnmatch fsockopen pfsockopen pack unpack get_browser crypt opendir closedir chdir getcwd rewinddir readdir dir glob fileatime filectime filegroup fileinode filemtime fileowner fileperms filesize filetype file_exists is_writable is_writeable is_readable is_executable is_file is_dir is_link stat lstat chown touch clearstatcache mail ob_start ob_flush ob_clean ob_end_flush ob_end_clean ob_get_flush ob_get_clean ob_get_length ob_get_level ob_get_status ob_get_contents ob_implicit_flush ob_list_handlers ksort krsort natsort natcasesort asort arsort sort rsort usort uasort uksort shuffle array_walk count end prev next reset current key min max in_array array_search extract compact array_fill range array_multisort array_push array_pop array_shift array_unshift array_splice array_slice array_merge array_merge_recursive array_keys array_values array_count_values array_reverse array_reduce array_pad array_flip array_change_key_case array_rand array_unique array_intersect array_intersect_assoc array_diff array_diff_assoc array_sum array_filter array_map array_chunk array_key_exists array_intersect_key array_combine array_column pos sizeof key_exists assert assert_options version_compare ftok str_rot13 aggregate session_name session_module_name session_save_path session_id session_regenerate_id session_decode session_register session_unregister session_is_registered session_encode session_start session_destroy session_unset session_set_save_handler session_cache_limiter session_cache_expire session_set_cookie_params session_get_cookie_params session_write_close preg_match preg_match_all preg_replace preg_replace_callback preg_split preg_quote preg_grep overload ctype_alnum ctype_alpha ctype_cntrl ctype_digit ctype_lower ctype_graph ctype_print ctype_punct ctype_space ctype_upper ctype_xdigit virtual apache_request_headers apache_note apache_lookup_uri apache_child_terminate apache_setenv apache_response_headers apache_get_version getallheaders mysql_connect mysql_pconnect mysql_close mysql_select_db mysql_create_db mysql_drop_db mysql_query mysql_unbuffered_query mysql_db_query mysql_list_dbs mysql_list_tables mysql_list_fields mysql_list_processes mysql_error mysql_errno 
mysql_affected_rows mysql_insert_id mysql_result mysql_num_rows mysql_num_fields mysql_fetch_row mysql_fetch_array mysql_fetch_assoc mysql_fetch_object mysql_data_seek mysql_fetch_lengths mysql_fetch_field mysql_field_seek mysql_free_result mysql_field_name mysql_field_table mysql_field_len mysql_field_type mysql_field_flags mysql_escape_string mysql_real_escape_string mysql_stat mysql_thread_id mysql_client_encoding mysql_get_client_info mysql_get_host_info mysql_get_proto_info mysql_get_server_info mysql_info mysql mysql_fieldname mysql_fieldtable mysql_fieldlen mysql_fieldtype mysql_fieldflags mysql_selectdb mysql_createdb mysql_dropdb mysql_freeresult mysql_numfields mysql_numrows mysql_listdbs mysql_listtables mysql_listfields mysql_db_name mysql_dbname mysql_tablename mysql_table_name pg_connect pg_pconnect pg_close pg_connection_status pg_connection_busy pg_connection_reset pg_host pg_dbname pg_port pg_tty pg_options pg_ping pg_query pg_send_query pg_cancel_query pg_fetch_result pg_fetch_row pg_fetch_assoc pg_fetch_array pg_fetch_object pg_fetch_all pg_affected_rows pg_get_result pg_result_seek pg_result_status pg_free_result pg_last_oid pg_num_rows pg_num_fields pg_field_name pg_field_num pg_field_size pg_field_type pg_field_prtlen pg_field_is_null pg_get_notify pg_get_pid pg_result_error pg_last_error pg_last_notice pg_put_line pg_end_copy pg_copy_to pg_copy_from pg_trace pg_untrace pg_lo_create pg_lo_unlink pg_lo_open pg_lo_close pg_lo_read pg_lo_write pg_lo_read_all pg_lo_import pg_lo_export pg_lo_seek pg_lo_tell pg_escape_string pg_escape_bytea pg_unescape_bytea pg_client_encoding pg_set_client_encoding pg_meta_data pg_convert pg_insert pg_update pg_delete pg_select pg_exec pg_getlastoid pg_cmdtuples pg_errormessage pg_numrows pg_numfields pg_fieldname pg_fieldsize pg_fieldtype pg_fieldnum pg_fieldprtlen pg_fieldisnull pg_freeresult pg_result pg_loreadall pg_locreate pg_lounlink pg_loopen pg_loclose pg_loread pg_lowrite pg_loimport pg_loexport http_response_code get_declared_traits getimagesizefromstring socket_import_stream stream_set_chunk_size trait_exists header_register_callback class_uses session_status session_register_shutdown echo print global static exit array empty eval isset unset die include require include_once require_once json_decode json_encode json_last_error json_last_error_msg curl_close curl_copy_handle curl_errno curl_error curl_escape curl_exec curl_file_create curl_getinfo curl_init curl_multi_add_handle curl_multi_close curl_multi_exec curl_multi_getcontent curl_multi_info_read curl_multi_init curl_multi_remove_handle curl_multi_select curl_multi_setopt curl_multi_strerror curl_pause curl_reset curl_setopt_array curl_setopt curl_share_close curl_share_init curl_share_setopt curl_strerror curl_unescape curl_version mysqli_affected_rows mysqli_autocommit mysqli_change_user mysqli_character_set_name mysqli_close mysqli_commit mysqli_connect_errno mysqli_connect_error mysqli_connect mysqli_data_seek mysqli_debug mysqli_dump_debug_info mysqli_errno mysqli_error_list mysqli_error mysqli_fetch_all mysqli_fetch_array mysqli_fetch_assoc mysqli_fetch_field_direct mysqli_fetch_field mysqli_fetch_fields mysqli_fetch_lengths mysqli_fetch_object mysqli_fetch_row mysqli_field_count mysqli_field_seek mysqli_field_tell mysqli_free_result mysqli_get_charset mysqli_get_client_info mysqli_get_client_stats mysqli_get_client_version mysqli_get_connection_stats mysqli_get_host_info mysqli_get_proto_info mysqli_get_server_info mysqli_get_server_version mysqli_info mysqli_init 
mysqli_insert_id mysqli_kill mysqli_more_results mysqli_multi_query mysqli_next_result mysqli_num_fields mysqli_num_rows mysqli_options mysqli_ping mysqli_prepare mysqli_query mysqli_real_connect mysqli_real_escape_string mysqli_real_query mysqli_reap_async_query mysqli_refresh mysqli_rollback mysqli_select_db mysqli_set_charset mysqli_set_local_infile_default mysqli_set_local_infile_handler mysqli_sqlstate mysqli_ssl_set mysqli_stat mysqli_stmt_init mysqli_store_result mysqli_thread_id mysqli_thread_safe mysqli_use_result mysqli_warning_count"; CodeMirror.registerHelper("hintWords", "php", [phpKeywords, phpAtoms, phpBuiltin].join(" ").split(" ")); CodeMirror.registerHelper("wordChars", "php", /[\w$]/); @@ -151,7 +151,7 @@ }; CodeMirror.defineMode("php", function(config, parserConfig) { - var htmlMode = CodeMirror.getMode(config, "text/html"); + var htmlMode = CodeMirror.getMode(config, (parserConfig && parserConfig.htmlMode) || "text/html"); var phpMode = CodeMirror.getMode(config, phpConfig); function dispatch(stream, state) { @@ -160,7 +160,7 @@ if (!isPHP) { if (stream.match(/^<\?\w*/)) { state.curMode = phpMode; - if (!state.php) state.php = CodeMirror.startState(phpMode, htmlMode.indent(state.html, "")) + if (!state.php) state.php = CodeMirror.startState(phpMode, htmlMode.indent(state.html, "", "")) state.curState = state.php; return "meta"; } @@ -213,11 +213,11 @@ token: dispatch, - indent: function(state, textAfter) { + indent: function(state, textAfter, line) { if ((state.curMode != phpMode && /^\s*<\//.test(textAfter)) || (state.curMode == phpMode && /^\?>/.test(textAfter))) - return htmlMode.indent(state.html, textAfter); - return state.curMode.indent(state.curState, textAfter); + return htmlMode.indent(state.html, textAfter, line); + return state.curMode.indent(state.curState, textAfter, line); }, blockCommentStart: "/*", diff --git a/rhodecode/public/js/mode/pig/pig.js b/rhodecode/public/js/mode/pig/pig.js --- a/rhodecode/public/js/mode/pig/pig.js +++ b/rhodecode/public/js/mode/pig/pig.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE /* * Pig Latin Mode for CodeMirror 2 diff --git a/rhodecode/public/js/mode/powershell/powershell.js b/rhodecode/public/js/mode/powershell/powershell.js new file mode 100644 --- /dev/null +++ b/rhodecode/public/js/mode/powershell/powershell.js @@ -0,0 +1,398 @@ +// CodeMirror, copyright (c) by Marijn Haverbeke and others +// Distributed under an MIT license: https://codemirror.net/LICENSE + +(function(mod) { + 'use strict'; + if (typeof exports == 'object' && typeof module == 'object') // CommonJS + mod(require('../../lib/codemirror')); + else if (typeof define == 'function' && define.amd) // AMD + define(['../../lib/codemirror'], mod); + else // Plain browser env + mod(window.CodeMirror); +})(function(CodeMirror) { +'use strict'; + +CodeMirror.defineMode('powershell', function() { + function buildRegexp(patterns, options) { + options = options || {}; + var prefix = options.prefix !== undefined ? options.prefix : '^'; + var suffix = options.suffix !== undefined ? 
options.suffix : '\\b'; + + for (var i = 0; i < patterns.length; i++) { + if (patterns[i] instanceof RegExp) { + patterns[i] = patterns[i].source; + } + else { + patterns[i] = patterns[i].replace(/[-\/\\^$*+?.()|[\]{}]/g, '\\$&'); + } + } + + return new RegExp(prefix + '(' + patterns.join('|') + ')' + suffix, 'i'); + } + + var notCharacterOrDash = '(?=[^A-Za-z\\d\\-_]|$)'; + var varNames = /[\w\-:]/ + var keywords = buildRegexp([ + /begin|break|catch|continue|data|default|do|dynamicparam/, + /else|elseif|end|exit|filter|finally|for|foreach|from|function|if|in/, + /param|process|return|switch|throw|trap|try|until|where|while/ + ], { suffix: notCharacterOrDash }); + + var punctuation = /[\[\]{},;`\.]|@[({]/; + var wordOperators = buildRegexp([ + 'f', + /b?not/, + /[ic]?split/, 'join', + /is(not)?/, 'as', + /[ic]?(eq|ne|[gl][te])/, + /[ic]?(not)?(like|match|contains)/, + /[ic]?replace/, + /b?(and|or|xor)/ + ], { prefix: '-' }); + var symbolOperators = /[+\-*\/%]=|\+\+|--|\.\.|[+\-*&^%:=!|\/]|<(?!#)|(?!#)>/; + var operators = buildRegexp([wordOperators, symbolOperators], { suffix: '' }); + + var numbers = /^((0x[\da-f]+)|((\d+\.\d+|\d\.|\.\d+|\d+)(e[\+\-]?\d+)?))[ld]?([kmgtp]b)?/i; + + var identifiers = /^[A-Za-z\_][A-Za-z\-\_\d]*\b/; + + var symbolBuiltins = /[A-Z]:|%|\?/i; + var namedBuiltins = buildRegexp([ + /Add-(Computer|Content|History|Member|PSSnapin|Type)/, + /Checkpoint-Computer/, + /Clear-(Content|EventLog|History|Host|Item(Property)?|Variable)/, + /Compare-Object/, + /Complete-Transaction/, + /Connect-PSSession/, + /ConvertFrom-(Csv|Json|SecureString|StringData)/, + /Convert-Path/, + /ConvertTo-(Csv|Html|Json|SecureString|Xml)/, + /Copy-Item(Property)?/, + /Debug-Process/, + /Disable-(ComputerRestore|PSBreakpoint|PSRemoting|PSSessionConfiguration)/, + /Disconnect-PSSession/, + /Enable-(ComputerRestore|PSBreakpoint|PSRemoting|PSSessionConfiguration)/, + /(Enter|Exit)-PSSession/, + /Export-(Alias|Clixml|Console|Counter|Csv|FormatData|ModuleMember|PSSession)/, + /ForEach-Object/, + /Format-(Custom|List|Table|Wide)/, + new RegExp('Get-(Acl|Alias|AuthenticodeSignature|ChildItem|Command|ComputerRestorePoint|Content|ControlPanelItem|Counter|Credential' + + '|Culture|Date|Event|EventLog|EventSubscriber|ExecutionPolicy|FormatData|Help|History|Host|HotFix|Item|ItemProperty|Job' + + '|Location|Member|Module|PfxCertificate|Process|PSBreakpoint|PSCallStack|PSDrive|PSProvider|PSSession|PSSessionConfiguration' + + '|PSSnapin|Random|Service|TraceSource|Transaction|TypeData|UICulture|Unique|Variable|Verb|WinEvent|WmiObject)'), + /Group-Object/, + /Import-(Alias|Clixml|Counter|Csv|LocalizedData|Module|PSSession)/, + /ImportSystemModules/, + /Invoke-(Command|Expression|History|Item|RestMethod|WebRequest|WmiMethod)/, + /Join-Path/, + /Limit-EventLog/, + /Measure-(Command|Object)/, + /Move-Item(Property)?/, + new RegExp('New-(Alias|Event|EventLog|Item(Property)?|Module|ModuleManifest|Object|PSDrive|PSSession|PSSessionConfigurationFile' + + '|PSSessionOption|PSTransportOption|Service|TimeSpan|Variable|WebServiceProxy|WinEvent)'), + /Out-(Default|File|GridView|Host|Null|Printer|String)/, + /Pause/, + /(Pop|Push)-Location/, + /Read-Host/, + /Receive-(Job|PSSession)/, + /Register-(EngineEvent|ObjectEvent|PSSessionConfiguration|WmiEvent)/, + /Remove-(Computer|Event|EventLog|Item(Property)?|Job|Module|PSBreakpoint|PSDrive|PSSession|PSSnapin|TypeData|Variable|WmiObject)/, + /Rename-(Computer|Item(Property)?)/, + /Reset-ComputerMachinePassword/, + /Resolve-Path/, + /Restart-(Computer|Service)/, + 
/Restore-Computer/, + /Resume-(Job|Service)/, + /Save-Help/, + /Select-(Object|String|Xml)/, + /Send-MailMessage/, + new RegExp('Set-(Acl|Alias|AuthenticodeSignature|Content|Date|ExecutionPolicy|Item(Property)?|Location|PSBreakpoint|PSDebug' + + '|PSSessionConfiguration|Service|StrictMode|TraceSource|Variable|WmiInstance)'), + /Show-(Command|ControlPanelItem|EventLog)/, + /Sort-Object/, + /Split-Path/, + /Start-(Job|Process|Service|Sleep|Transaction|Transcript)/, + /Stop-(Computer|Job|Process|Service|Transcript)/, + /Suspend-(Job|Service)/, + /TabExpansion2/, + /Tee-Object/, + /Test-(ComputerSecureChannel|Connection|ModuleManifest|Path|PSSessionConfigurationFile)/, + /Trace-Command/, + /Unblock-File/, + /Undo-Transaction/, + /Unregister-(Event|PSSessionConfiguration)/, + /Update-(FormatData|Help|List|TypeData)/, + /Use-Transaction/, + /Wait-(Event|Job|Process)/, + /Where-Object/, + /Write-(Debug|Error|EventLog|Host|Output|Progress|Verbose|Warning)/, + /cd|help|mkdir|more|oss|prompt/, + /ac|asnp|cat|cd|chdir|clc|clear|clhy|cli|clp|cls|clv|cnsn|compare|copy|cp|cpi|cpp|cvpa|dbp|del|diff|dir|dnsn|ebp/, + /echo|epal|epcsv|epsn|erase|etsn|exsn|fc|fl|foreach|ft|fw|gal|gbp|gc|gci|gcm|gcs|gdr|ghy|gi|gjb|gl|gm|gmo|gp|gps/, + /group|gsn|gsnp|gsv|gu|gv|gwmi|h|history|icm|iex|ihy|ii|ipal|ipcsv|ipmo|ipsn|irm|ise|iwmi|iwr|kill|lp|ls|man|md/, + /measure|mi|mount|move|mp|mv|nal|ndr|ni|nmo|npssc|nsn|nv|ogv|oh|popd|ps|pushd|pwd|r|rbp|rcjb|rcsn|rd|rdr|ren|ri/, + /rjb|rm|rmdir|rmo|rni|rnp|rp|rsn|rsnp|rujb|rv|rvpa|rwmi|sajb|sal|saps|sasv|sbp|sc|select|set|shcm|si|sl|sleep|sls/, + /sort|sp|spjb|spps|spsv|start|sujb|sv|swmi|tee|trcm|type|where|wjb|write/ + ], { prefix: '', suffix: '' }); + var variableBuiltins = buildRegexp([ + /[$?^_]|Args|ConfirmPreference|ConsoleFileName|DebugPreference|Error|ErrorActionPreference|ErrorView|ExecutionContext/, + /FormatEnumerationLimit|Home|Host|Input|MaximumAliasCount|MaximumDriveCount|MaximumErrorCount|MaximumFunctionCount/, + /MaximumHistoryCount|MaximumVariableCount|MyInvocation|NestedPromptLevel|OutputEncoding|Pid|Profile|ProgressPreference/, + /PSBoundParameters|PSCommandPath|PSCulture|PSDefaultParameterValues|PSEmailServer|PSHome|PSScriptRoot|PSSessionApplicationName/, + /PSSessionConfigurationName|PSSessionOption|PSUICulture|PSVersionTable|Pwd|ShellId|StackTrace|VerbosePreference/, + /WarningPreference|WhatIfPreference/, + + /Event|EventArgs|EventSubscriber|Sender/, + /Matches|Ofs|ForEach|LastExitCode|PSCmdlet|PSItem|PSSenderInfo|This/, + /true|false|null/ + ], { prefix: '\\$', suffix: '' }); + + var builtins = buildRegexp([symbolBuiltins, namedBuiltins, variableBuiltins], { suffix: notCharacterOrDash }); + + var grammar = { + keyword: keywords, + number: numbers, + operator: operators, + builtin: builtins, + punctuation: punctuation, + identifier: identifiers + }; + + // tokenizers + function tokenBase(stream, state) { + // Handle Comments + //var ch = stream.peek(); + + var parent = state.returnStack[state.returnStack.length - 1]; + if (parent && parent.shouldReturnFrom(state)) { + state.tokenize = parent.tokenize; + state.returnStack.pop(); + return state.tokenize(stream, state); + } + + if (stream.eatSpace()) { + return null; + } + + if (stream.eat('(')) { + state.bracketNesting += 1; + return 'punctuation'; + } + + if (stream.eat(')')) { + state.bracketNesting -= 1; + return 'punctuation'; + } + + for (var key in grammar) { + if (stream.match(grammar[key])) { + return key; + } + } + + var ch = stream.next(); + + // single-quote string + if (ch === "'") { + return 
tokenSingleQuoteString(stream, state); + } + + if (ch === '$') { + return tokenVariable(stream, state); + } + + // double-quote string + if (ch === '"') { + return tokenDoubleQuoteString(stream, state); + } + + if (ch === '<' && stream.eat('#')) { + state.tokenize = tokenComment; + return tokenComment(stream, state); + } + + if (ch === '#') { + stream.skipToEnd(); + return 'comment'; + } + + if (ch === '@') { + var quoteMatch = stream.eat(/["']/); + if (quoteMatch && stream.eol()) { + state.tokenize = tokenMultiString; + state.startQuote = quoteMatch[0]; + return tokenMultiString(stream, state); + } else if (stream.eol()) { + return 'error'; + } else if (stream.peek().match(/[({]/)) { + return 'punctuation'; + } else if (stream.peek().match(varNames)) { + // splatted variable + return tokenVariable(stream, state); + } + } + return 'error'; + } + + function tokenSingleQuoteString(stream, state) { + var ch; + while ((ch = stream.peek()) != null) { + stream.next(); + + if (ch === "'" && !stream.eat("'")) { + state.tokenize = tokenBase; + return 'string'; + } + } + + return 'error'; + } + + function tokenDoubleQuoteString(stream, state) { + var ch; + while ((ch = stream.peek()) != null) { + if (ch === '$') { + state.tokenize = tokenStringInterpolation; + return 'string'; + } + + stream.next(); + if (ch === '`') { + stream.next(); + continue; + } + + if (ch === '"' && !stream.eat('"')) { + state.tokenize = tokenBase; + return 'string'; + } + } + + return 'error'; + } + + function tokenStringInterpolation(stream, state) { + return tokenInterpolation(stream, state, tokenDoubleQuoteString); + } + + function tokenMultiStringReturn(stream, state) { + state.tokenize = tokenMultiString; + state.startQuote = '"' + return tokenMultiString(stream, state); + } + + function tokenHereStringInterpolation(stream, state) { + return tokenInterpolation(stream, state, tokenMultiStringReturn); + } + + function tokenInterpolation(stream, state, parentTokenize) { + if (stream.match('$(')) { + var savedBracketNesting = state.bracketNesting; + state.returnStack.push({ + /*jshint loopfunc:true */ + shouldReturnFrom: function(state) { + return state.bracketNesting === savedBracketNesting; + }, + tokenize: parentTokenize + }); + state.tokenize = tokenBase; + state.bracketNesting += 1; + return 'punctuation'; + } else { + stream.next(); + state.returnStack.push({ + shouldReturnFrom: function() { return true; }, + tokenize: parentTokenize + }); + state.tokenize = tokenVariable; + return state.tokenize(stream, state); + } + } + + function tokenComment(stream, state) { + var maybeEnd = false, ch; + while ((ch = stream.next()) != null) { + if (maybeEnd && ch == '>') { + state.tokenize = tokenBase; + break; + } + maybeEnd = (ch === '#'); + } + return 'comment'; + } + + function tokenVariable(stream, state) { + var ch = stream.peek(); + if (stream.eat('{')) { + state.tokenize = tokenVariableWithBraces; + return tokenVariableWithBraces(stream, state); + } else if (ch != undefined && ch.match(varNames)) { + stream.eatWhile(varNames); + state.tokenize = tokenBase; + return 'variable-2'; + } else { + state.tokenize = tokenBase; + return 'error'; + } + } + + function tokenVariableWithBraces(stream, state) { + var ch; + while ((ch = stream.next()) != null) { + if (ch === '}') { + state.tokenize = tokenBase; + break; + } + } + return 'variable-2'; + } + + function tokenMultiString(stream, state) { + var quote = state.startQuote; + if (stream.sol() && stream.match(new RegExp(quote + '@'))) { + state.tokenize = tokenBase; + } + else if 
(quote === '"') { + while (!stream.eol()) { + var ch = stream.peek(); + if (ch === '$') { + state.tokenize = tokenHereStringInterpolation; + return 'string'; + } + + stream.next(); + if (ch === '`') { + stream.next(); + } + } + } + else { + stream.skipToEnd(); + } + + return 'string'; + } + + var external = { + startState: function() { + return { + returnStack: [], + bracketNesting: 0, + tokenize: tokenBase + }; + }, + + token: function(stream, state) { + return state.tokenize(stream, state); + }, + + blockCommentStart: '<#', + blockCommentEnd: '#>', + lineComment: '#', + fold: 'brace' + }; + return external; +}); + +CodeMirror.defineMIME('application/x-powershell', 'powershell'); +}); diff --git a/rhodecode/public/js/mode/properties/properties.js b/rhodecode/public/js/mode/properties/properties.js --- a/rhodecode/public/js/mode/properties/properties.js +++ b/rhodecode/public/js/mode/properties/properties.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS @@ -34,7 +34,7 @@ CodeMirror.defineMode("properties", func } if (sol) { - while(stream.eatSpace()); + while(stream.eatSpace()) {} } var ch = stream.next(); diff --git a/rhodecode/public/js/mode/protobuf/protobuf.js b/rhodecode/public/js/mode/protobuf/protobuf.js new file mode 100644 --- /dev/null +++ b/rhodecode/public/js/mode/protobuf/protobuf.js @@ -0,0 +1,69 @@ +// CodeMirror, copyright (c) by Marijn Haverbeke and others +// Distributed under an MIT license: https://codemirror.net/LICENSE + +(function(mod) { + if (typeof exports == "object" && typeof module == "object") // CommonJS + mod(require("../../lib/codemirror")); + else if (typeof define == "function" && define.amd) // AMD + define(["../../lib/codemirror"], mod); + else // Plain browser env + mod(CodeMirror); +})(function(CodeMirror) { + "use strict"; + + function wordRegexp(words) { + return new RegExp("^((" + words.join(")|(") + "))\\b", "i"); + }; + + var keywordArray = [ + "package", "message", "import", "syntax", + "required", "optional", "repeated", "reserved", "default", "extensions", "packed", + "bool", "bytes", "double", "enum", "float", "string", + "int32", "int64", "uint32", "uint64", "sint32", "sint64", "fixed32", "fixed64", "sfixed32", "sfixed64", + "option", "service", "rpc", "returns" + ]; + var keywords = wordRegexp(keywordArray); + + CodeMirror.registerHelper("hintWords", "protobuf", keywordArray); + + var identifiers = new RegExp("^[_A-Za-z\xa1-\uffff][_A-Za-z0-9\xa1-\uffff]*"); + + function tokenBase(stream) { + // whitespaces + if (stream.eatSpace()) return null; + + // Handle one line Comments + if (stream.match("//")) { + stream.skipToEnd(); + return "comment"; + } + + // Handle Number Literals + if (stream.match(/^[0-9\.+-]/, false)) { + if (stream.match(/^[+-]?0x[0-9a-fA-F]+/)) + return "number"; + if (stream.match(/^[+-]?\d*\.\d+([EeDd][+-]?\d+)?/)) + return "number"; + if (stream.match(/^[+-]?\d+([EeDd][+-]?\d+)?/)) + return "number"; + } + + // Handle Strings + if (stream.match(/^"([^"]|(""))*"/)) { return "string"; } + if (stream.match(/^'([^']|(''))*'/)) { return "string"; } + + // Handle words + if (stream.match(keywords)) { return "keyword"; } + if (stream.match(identifiers)) { return "variable"; } ; + + // Handle non-detected items + stream.next(); + return null; + }; + + 
CodeMirror.defineMode("protobuf", function() { + return {token: tokenBase}; + }); + + CodeMirror.defineMIME("text/x-protobuf", "protobuf"); +}); diff --git a/rhodecode/public/js/mode/pug/pug.js b/rhodecode/public/js/mode/pug/pug.js new file mode 100644 --- /dev/null +++ b/rhodecode/public/js/mode/pug/pug.js @@ -0,0 +1,591 @@ +// CodeMirror, copyright (c) by Marijn Haverbeke and others +// Distributed under an MIT license: https://codemirror.net/LICENSE + +(function(mod) { + if (typeof exports == "object" && typeof module == "object") // CommonJS + mod(require("../../lib/codemirror"), require("../javascript/javascript"), require("../css/css"), require("../htmlmixed/htmlmixed")); + else if (typeof define == "function" && define.amd) // AMD + define(["../../lib/codemirror", "../javascript/javascript", "../css/css", "../htmlmixed/htmlmixed"], mod); + else // Plain browser env + mod(CodeMirror); +})(function(CodeMirror) { +"use strict"; + +CodeMirror.defineMode("pug", function (config) { + // token types + var KEYWORD = 'keyword'; + var DOCTYPE = 'meta'; + var ID = 'builtin'; + var CLASS = 'qualifier'; + + var ATTRS_NEST = { + '{': '}', + '(': ')', + '[': ']' + }; + + var jsMode = CodeMirror.getMode(config, 'javascript'); + + function State() { + this.javaScriptLine = false; + this.javaScriptLineExcludesColon = false; + + this.javaScriptArguments = false; + this.javaScriptArgumentsDepth = 0; + + this.isInterpolating = false; + this.interpolationNesting = 0; + + this.jsState = CodeMirror.startState(jsMode); + + this.restOfLine = ''; + + this.isIncludeFiltered = false; + this.isEach = false; + + this.lastTag = ''; + this.scriptType = ''; + + // Attributes Mode + this.isAttrs = false; + this.attrsNest = []; + this.inAttributeName = true; + this.attributeIsType = false; + this.attrValue = ''; + + // Indented Mode + this.indentOf = Infinity; + this.indentToken = ''; + + this.innerMode = null; + this.innerState = null; + + this.innerModeForLine = false; + } + /** + * Safely copy a state + * + * @return {State} + */ + State.prototype.copy = function () { + var res = new State(); + res.javaScriptLine = this.javaScriptLine; + res.javaScriptLineExcludesColon = this.javaScriptLineExcludesColon; + res.javaScriptArguments = this.javaScriptArguments; + res.javaScriptArgumentsDepth = this.javaScriptArgumentsDepth; + res.isInterpolating = this.isInterpolating; + res.interpolationNesting = this.interpolationNesting; + + res.jsState = CodeMirror.copyState(jsMode, this.jsState); + + res.innerMode = this.innerMode; + if (this.innerMode && this.innerState) { + res.innerState = CodeMirror.copyState(this.innerMode, this.innerState); + } + + res.restOfLine = this.restOfLine; + + res.isIncludeFiltered = this.isIncludeFiltered; + res.isEach = this.isEach; + res.lastTag = this.lastTag; + res.scriptType = this.scriptType; + res.isAttrs = this.isAttrs; + res.attrsNest = this.attrsNest.slice(); + res.inAttributeName = this.inAttributeName; + res.attributeIsType = this.attributeIsType; + res.attrValue = this.attrValue; + res.indentOf = this.indentOf; + res.indentToken = this.indentToken; + + res.innerModeForLine = this.innerModeForLine; + + return res; + }; + + function javaScript(stream, state) { + if (stream.sol()) { + // if javaScriptLine was set at end of line, ignore it + state.javaScriptLine = false; + state.javaScriptLineExcludesColon = false; + } + if (state.javaScriptLine) { + if (state.javaScriptLineExcludesColon && stream.peek() === ':') { + state.javaScriptLine = false; + state.javaScriptLineExcludesColon = 
false; + return; + } + var tok = jsMode.token(stream, state.jsState); + if (stream.eol()) state.javaScriptLine = false; + return tok || true; + } + } + function javaScriptArguments(stream, state) { + if (state.javaScriptArguments) { + if (state.javaScriptArgumentsDepth === 0 && stream.peek() !== '(') { + state.javaScriptArguments = false; + return; + } + if (stream.peek() === '(') { + state.javaScriptArgumentsDepth++; + } else if (stream.peek() === ')') { + state.javaScriptArgumentsDepth--; + } + if (state.javaScriptArgumentsDepth === 0) { + state.javaScriptArguments = false; + return; + } + + var tok = jsMode.token(stream, state.jsState); + return tok || true; + } + } + + function yieldStatement(stream) { + if (stream.match(/^yield\b/)) { + return 'keyword'; + } + } + + function doctype(stream) { + if (stream.match(/^(?:doctype) *([^\n]+)?/)) { + return DOCTYPE; + } + } + + function interpolation(stream, state) { + if (stream.match('#{')) { + state.isInterpolating = true; + state.interpolationNesting = 0; + return 'punctuation'; + } + } + + function interpolationContinued(stream, state) { + if (state.isInterpolating) { + if (stream.peek() === '}') { + state.interpolationNesting--; + if (state.interpolationNesting < 0) { + stream.next(); + state.isInterpolating = false; + return 'punctuation'; + } + } else if (stream.peek() === '{') { + state.interpolationNesting++; + } + return jsMode.token(stream, state.jsState) || true; + } + } + + function caseStatement(stream, state) { + if (stream.match(/^case\b/)) { + state.javaScriptLine = true; + return KEYWORD; + } + } + + function when(stream, state) { + if (stream.match(/^when\b/)) { + state.javaScriptLine = true; + state.javaScriptLineExcludesColon = true; + return KEYWORD; + } + } + + function defaultStatement(stream) { + if (stream.match(/^default\b/)) { + return KEYWORD; + } + } + + function extendsStatement(stream, state) { + if (stream.match(/^extends?\b/)) { + state.restOfLine = 'string'; + return KEYWORD; + } + } + + function append(stream, state) { + if (stream.match(/^append\b/)) { + state.restOfLine = 'variable'; + return KEYWORD; + } + } + function prepend(stream, state) { + if (stream.match(/^prepend\b/)) { + state.restOfLine = 'variable'; + return KEYWORD; + } + } + function block(stream, state) { + if (stream.match(/^block\b *(?:(prepend|append)\b)?/)) { + state.restOfLine = 'variable'; + return KEYWORD; + } + } + + function include(stream, state) { + if (stream.match(/^include\b/)) { + state.restOfLine = 'string'; + return KEYWORD; + } + } + + function includeFiltered(stream, state) { + if (stream.match(/^include:([a-zA-Z0-9\-]+)/, false) && stream.match('include')) { + state.isIncludeFiltered = true; + return KEYWORD; + } + } + + function includeFilteredContinued(stream, state) { + if (state.isIncludeFiltered) { + var tok = filter(stream, state); + state.isIncludeFiltered = false; + state.restOfLine = 'string'; + return tok; + } + } + + function mixin(stream, state) { + if (stream.match(/^mixin\b/)) { + state.javaScriptLine = true; + return KEYWORD; + } + } + + function call(stream, state) { + if (stream.match(/^\+([-\w]+)/)) { + if (!stream.match(/^\( *[-\w]+ *=/, false)) { + state.javaScriptArguments = true; + state.javaScriptArgumentsDepth = 0; + } + return 'variable'; + } + if (stream.match(/^\+#{/, false)) { + stream.next(); + state.mixinCallAfter = true; + return interpolation(stream, state); + } + } + function callArguments(stream, state) { + if (state.mixinCallAfter) { + state.mixinCallAfter = false; + if 
(!stream.match(/^\( *[-\w]+ *=/, false)) { + state.javaScriptArguments = true; + state.javaScriptArgumentsDepth = 0; + } + return true; + } + } + + function conditional(stream, state) { + if (stream.match(/^(if|unless|else if|else)\b/)) { + state.javaScriptLine = true; + return KEYWORD; + } + } + + function each(stream, state) { + if (stream.match(/^(- *)?(each|for)\b/)) { + state.isEach = true; + return KEYWORD; + } + } + function eachContinued(stream, state) { + if (state.isEach) { + if (stream.match(/^ in\b/)) { + state.javaScriptLine = true; + state.isEach = false; + return KEYWORD; + } else if (stream.sol() || stream.eol()) { + state.isEach = false; + } else if (stream.next()) { + while (!stream.match(/^ in\b/, false) && stream.next()); + return 'variable'; + } + } + } + + function whileStatement(stream, state) { + if (stream.match(/^while\b/)) { + state.javaScriptLine = true; + return KEYWORD; + } + } + + function tag(stream, state) { + var captures; + if (captures = stream.match(/^(\w(?:[-:\w]*\w)?)\/?/)) { + state.lastTag = captures[1].toLowerCase(); + if (state.lastTag === 'script') { + state.scriptType = 'application/javascript'; + } + return 'tag'; + } + } + + function filter(stream, state) { + if (stream.match(/^:([\w\-]+)/)) { + var innerMode; + if (config && config.innerModes) { + innerMode = config.innerModes(stream.current().substring(1)); + } + if (!innerMode) { + innerMode = stream.current().substring(1); + } + if (typeof innerMode === 'string') { + innerMode = CodeMirror.getMode(config, innerMode); + } + setInnerMode(stream, state, innerMode); + return 'atom'; + } + } + + function code(stream, state) { + if (stream.match(/^(!?=|-)/)) { + state.javaScriptLine = true; + return 'punctuation'; + } + } + + function id(stream) { + if (stream.match(/^#([\w-]+)/)) { + return ID; + } + } + + function className(stream) { + if (stream.match(/^\.([\w-]+)/)) { + return CLASS; + } + } + + function attrs(stream, state) { + if (stream.peek() == '(') { + stream.next(); + state.isAttrs = true; + state.attrsNest = []; + state.inAttributeName = true; + state.attrValue = ''; + state.attributeIsType = false; + return 'punctuation'; + } + } + + function attrsContinued(stream, state) { + if (state.isAttrs) { + if (ATTRS_NEST[stream.peek()]) { + state.attrsNest.push(ATTRS_NEST[stream.peek()]); + } + if (state.attrsNest[state.attrsNest.length - 1] === stream.peek()) { + state.attrsNest.pop(); + } else if (stream.eat(')')) { + state.isAttrs = false; + return 'punctuation'; + } + if (state.inAttributeName && stream.match(/^[^=,\)!]+/)) { + if (stream.peek() === '=' || stream.peek() === '!') { + state.inAttributeName = false; + state.jsState = CodeMirror.startState(jsMode); + if (state.lastTag === 'script' && stream.current().trim().toLowerCase() === 'type') { + state.attributeIsType = true; + } else { + state.attributeIsType = false; + } + } + return 'attribute'; + } + + var tok = jsMode.token(stream, state.jsState); + if (state.attributeIsType && tok === 'string') { + state.scriptType = stream.current().toString(); + } + if (state.attrsNest.length === 0 && (tok === 'string' || tok === 'variable' || tok === 'keyword')) { + try { + Function('', 'var x ' + state.attrValue.replace(/,\s*$/, '').replace(/^!/, '')); + state.inAttributeName = true; + state.attrValue = ''; + stream.backUp(stream.current().length); + return attrsContinued(stream, state); + } catch (ex) { + //not the end of an attribute + } + } + state.attrValue += stream.current(); + return tok || true; + } + } + + function 
attributesBlock(stream, state) { + if (stream.match(/^&attributes\b/)) { + state.javaScriptArguments = true; + state.javaScriptArgumentsDepth = 0; + return 'keyword'; + } + } + + function indent(stream) { + if (stream.sol() && stream.eatSpace()) { + return 'indent'; + } + } + + function comment(stream, state) { + if (stream.match(/^ *\/\/(-)?([^\n]*)/)) { + state.indentOf = stream.indentation(); + state.indentToken = 'comment'; + return 'comment'; + } + } + + function colon(stream) { + if (stream.match(/^: */)) { + return 'colon'; + } + } + + function text(stream, state) { + if (stream.match(/^(?:\| ?| )([^\n]+)/)) { + return 'string'; + } + if (stream.match(/^(<[^\n]*)/, false)) { + // html string + setInnerMode(stream, state, 'htmlmixed'); + state.innerModeForLine = true; + return innerMode(stream, state, true); + } + } + + function dot(stream, state) { + if (stream.eat('.')) { + var innerMode = null; + if (state.lastTag === 'script' && state.scriptType.toLowerCase().indexOf('javascript') != -1) { + innerMode = state.scriptType.toLowerCase().replace(/"|'/g, ''); + } else if (state.lastTag === 'style') { + innerMode = 'css'; + } + setInnerMode(stream, state, innerMode); + return 'dot'; + } + } + + function fail(stream) { + stream.next(); + return null; + } + + + function setInnerMode(stream, state, mode) { + mode = CodeMirror.mimeModes[mode] || mode; + mode = config.innerModes ? config.innerModes(mode) || mode : mode; + mode = CodeMirror.mimeModes[mode] || mode; + mode = CodeMirror.getMode(config, mode); + state.indentOf = stream.indentation(); + + if (mode && mode.name !== 'null') { + state.innerMode = mode; + } else { + state.indentToken = 'string'; + } + } + function innerMode(stream, state, force) { + if (stream.indentation() > state.indentOf || (state.innerModeForLine && !stream.sol()) || force) { + if (state.innerMode) { + if (!state.innerState) { + state.innerState = state.innerMode.startState ? 
CodeMirror.startState(state.innerMode, stream.indentation()) : {}; + } + return stream.hideFirstChars(state.indentOf + 2, function () { + return state.innerMode.token(stream, state.innerState) || true; + }); + } else { + stream.skipToEnd(); + return state.indentToken; + } + } else if (stream.sol()) { + state.indentOf = Infinity; + state.indentToken = null; + state.innerMode = null; + state.innerState = null; + } + } + function restOfLine(stream, state) { + if (stream.sol()) { + // if restOfLine was set at end of line, ignore it + state.restOfLine = ''; + } + if (state.restOfLine) { + stream.skipToEnd(); + var tok = state.restOfLine; + state.restOfLine = ''; + return tok; + } + } + + + function startState() { + return new State(); + } + function copyState(state) { + return state.copy(); + } + /** + * Get the next token in the stream + * + * @param {Stream} stream + * @param {State} state + */ + function nextToken(stream, state) { + var tok = innerMode(stream, state) + || restOfLine(stream, state) + || interpolationContinued(stream, state) + || includeFilteredContinued(stream, state) + || eachContinued(stream, state) + || attrsContinued(stream, state) + || javaScript(stream, state) + || javaScriptArguments(stream, state) + || callArguments(stream, state) + + || yieldStatement(stream) + || doctype(stream) + || interpolation(stream, state) + || caseStatement(stream, state) + || when(stream, state) + || defaultStatement(stream) + || extendsStatement(stream, state) + || append(stream, state) + || prepend(stream, state) + || block(stream, state) + || include(stream, state) + || includeFiltered(stream, state) + || mixin(stream, state) + || call(stream, state) + || conditional(stream, state) + || each(stream, state) + || whileStatement(stream, state) + || tag(stream, state) + || filter(stream, state) + || code(stream, state) + || id(stream) + || className(stream) + || attrs(stream, state) + || attributesBlock(stream, state) + || indent(stream) + || text(stream, state) + || comment(stream, state) + || colon(stream) + || dot(stream, state) + || fail(stream); + + return tok === true ? 
null : tok; + } + return { + startState: startState, + copyState: copyState, + token: nextToken + }; +}, 'javascript', 'css', 'htmlmixed'); + +CodeMirror.defineMIME('text/x-pug', 'pug'); +CodeMirror.defineMIME('text/x-jade', 'pug'); + +}); diff --git a/rhodecode/public/js/mode/puppet/puppet.js b/rhodecode/public/js/mode/puppet/puppet.js --- a/rhodecode/public/js/mode/puppet/puppet.js +++ b/rhodecode/public/js/mode/puppet/puppet.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS @@ -126,7 +126,7 @@ CodeMirror.defineMode("puppet", function if (word && words.hasOwnProperty(word)) { // Negates the initial next() stream.backUp(1); - // Acutally move the stream + // rs move the stream stream.match(/[\w]+/); // We want to process these words differently // do to the importance they have in Puppet diff --git a/rhodecode/public/js/mode/python/python.js b/rhodecode/public/js/mode/python/python.js --- a/rhodecode/public/js/mode/python/python.js +++ b/rhodecode/public/js/mode/python/python.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS @@ -32,13 +32,6 @@ "sorted", "staticmethod", "str", "sum", "super", "tuple", "type", "vars", "zip", "__import__", "NotImplemented", "Ellipsis", "__debug__"]; - var py2 = {builtins: ["apply", "basestring", "buffer", "cmp", "coerce", "execfile", - "file", "intern", "long", "raw_input", "reduce", "reload", - "unichr", "unicode", "xrange", "False", "True", "None"], - keywords: ["exec", "print"]}; - var py3 = {builtins: ["ascii", "bytes", "exec", "print"], - keywords: ["nonlocal", "False", "True", "None", "async", "await"]}; - CodeMirror.registerHelper("hintWords", "python", commonKeywords.concat(commonBuiltins)); function top(state) { @@ -48,51 +41,51 @@ CodeMirror.defineMode("python", function(conf, parserConf) { var ERRORCLASS = "error"; - var singleDelimiters = parserConf.singleDelimiters || /^[\(\)\[\]\{\}@,:`=;\.]/; - var doubleOperators = parserConf.doubleOperators || /^([!<>]==|<>|<<|>>|\/\/|\*\*)/; - var doubleDelimiters = parserConf.doubleDelimiters || /^(\+=|\-=|\*=|%=|\/=|&=|\|=|\^=)/; - var tripleDelimiters = parserConf.tripleDelimiters || /^(\/\/=|>>=|<<=|\*\*=)/; - - if (parserConf.version && parseInt(parserConf.version, 10) == 3){ - // since http://legacy.python.org/dev/peps/pep-0465/ @ is also an operator - var singleOperators = parserConf.singleOperators || /^[\+\-\*\/%&|\^~<>!@]/; - var identifiers = parserConf.identifiers|| /^[_A-Za-z\u00A1-\uFFFF][_A-Za-z0-9\u00A1-\uFFFF]*/; - } else { - var singleOperators = parserConf.singleOperators || /^[\+\-\*\/%&|\^~<>!]/; - var identifiers = parserConf.identifiers|| /^[_A-Za-z][_A-Za-z0-9]*/; - } + var delimiters = parserConf.delimiters || parserConf.singleDelimiters || /^[\(\)\[\]\{\}@,:`=;\.\\]/; + // (Backwards-compatiblity with old, cumbersome config system) + var operators = [parserConf.singleOperators, parserConf.doubleOperators, parserConf.doubleDelimiters, parserConf.tripleDelimiters, + parserConf.operators || /^([-+*/%\/&|^]=?|[<>=]+|\/\/=?|\*\*=?|!=|[~!@]|\.\.\.)/] + for (var i = 0; i < 
operators.length; i++) if (!operators[i]) operators.splice(i--, 1) var hangingIndent = parserConf.hangingIndent || conf.indentUnit; var myKeywords = commonKeywords, myBuiltins = commonBuiltins; - if(parserConf.extra_keywords != undefined){ + if (parserConf.extra_keywords != undefined) myKeywords = myKeywords.concat(parserConf.extra_keywords); - } - if(parserConf.extra_builtins != undefined){ + + if (parserConf.extra_builtins != undefined) myBuiltins = myBuiltins.concat(parserConf.extra_builtins); - } - if (parserConf.version && parseInt(parserConf.version, 10) == 3) { - myKeywords = myKeywords.concat(py3.keywords); - myBuiltins = myBuiltins.concat(py3.builtins); - var stringPrefixes = new RegExp("^(([rb]|(br))?('{3}|\"{3}|['\"]))", "i"); + + var py3 = !(parserConf.version && Number(parserConf.version) < 3) + if (py3) { + // since http://legacy.python.org/dev/peps/pep-0465/ @ is also an operator + var identifiers = parserConf.identifiers|| /^[_A-Za-z\u00A1-\uFFFF][_A-Za-z0-9\u00A1-\uFFFF]*/; + myKeywords = myKeywords.concat(["nonlocal", "False", "True", "None", "async", "await"]); + myBuiltins = myBuiltins.concat(["ascii", "bytes", "exec", "print"]); + var stringPrefixes = new RegExp("^(([rbuf]|(br)|(fr))?('{3}|\"{3}|['\"]))", "i"); } else { - myKeywords = myKeywords.concat(py2.keywords); - myBuiltins = myBuiltins.concat(py2.builtins); - var stringPrefixes = new RegExp("^(([rub]|(ur)|(br))?('{3}|\"{3}|['\"]))", "i"); + var identifiers = parserConf.identifiers|| /^[_A-Za-z][_A-Za-z0-9]*/; + myKeywords = myKeywords.concat(["exec", "print"]); + myBuiltins = myBuiltins.concat(["apply", "basestring", "buffer", "cmp", "coerce", "execfile", + "file", "intern", "long", "raw_input", "reduce", "reload", + "unichr", "unicode", "xrange", "False", "True", "None"]); + var stringPrefixes = new RegExp("^(([rubf]|(ur)|(br))?('{3}|\"{3}|['\"]))", "i"); } var keywords = wordRegexp(myKeywords); var builtins = wordRegexp(myBuiltins); // tokenizers function tokenBase(stream, state) { + var sol = stream.sol() && state.lastToken != "\\" + if (sol) state.indent = stream.indentation() // Handle scope changes - if (stream.sol() && top(state).type == "py") { + if (sol && top(state).type == "py") { var scopeOffset = top(state).offset; if (stream.eatSpace()) { var lineOffset = stream.indentation(); if (lineOffset > scopeOffset) - pushScope(stream, state, "py"); - else if (lineOffset < scopeOffset && dedent(stream, state)) + pushPyScope(state); + else if (lineOffset < scopeOffset && dedent(stream, state) && stream.peek() != "#") state.errorToken = true; return null; } else { @@ -108,20 +101,15 @@ function tokenBaseInner(stream, state) { if (stream.eatSpace()) return null; - var ch = stream.peek(); - // Handle Comments - if (ch == "#") { - stream.skipToEnd(); - return "comment"; - } + if (stream.match(/^#.*/)) return "comment"; // Handle Number Literals if (stream.match(/^[0-9\.]/, false)) { var floatLiteral = false; // Floats - if (stream.match(/^\d*\.\d+(e[\+\-]?\d+)?/i)) { floatLiteral = true; } - if (stream.match(/^\d+\.\d*/)) { floatLiteral = true; } + if (stream.match(/^[\d_]*\.\d+(e[\+\-]?\d+)?/i)) { floatLiteral = true; } + if (stream.match(/^[\d_]+\.\d*/)) { floatLiteral = true; } if (stream.match(/^\.\d+/)) { floatLiteral = true; } if (floatLiteral) { // Float literals may be "imaginary" @@ -131,13 +119,13 @@ // Integers var intLiteral = false; // Hex - if (stream.match(/^0x[0-9a-f]+/i)) intLiteral = true; + if (stream.match(/^0x[0-9a-f_]+/i)) intLiteral = true; // Binary - if (stream.match(/^0b[01]+/i)) 
intLiteral = true; + if (stream.match(/^0b[01_]+/i)) intLiteral = true; // Octal - if (stream.match(/^0o[0-7]+/i)) intLiteral = true; + if (stream.match(/^0o[0-7_]+/i)) intLiteral = true; // Decimal - if (stream.match(/^[1-9]\d*(e[\+\-]?\d+)?/)) { + if (stream.match(/^[1-9][\d_]*(e[\+\-]?[\d_]+)?/)) { // Decimal literals may be "imaginary" stream.eat(/J/i); // TODO - Can you have imaginary longs? @@ -154,19 +142,20 @@ // Handle Strings if (stream.match(stringPrefixes)) { - state.tokenize = tokenStringFactory(stream.current()); - return state.tokenize(stream, state); + var isFmtString = stream.current().toLowerCase().indexOf('f') !== -1; + if (!isFmtString) { + state.tokenize = tokenStringFactory(stream.current(), state.tokenize); + return state.tokenize(stream, state); + } else { + state.tokenize = formatStringFactory(stream.current(), state.tokenize); + return state.tokenize(stream, state); + } } - // Handle operators and Delimiters - if (stream.match(tripleDelimiters) || stream.match(doubleDelimiters)) - return "punctuation"; + for (var i = 0; i < operators.length; i++) + if (stream.match(operators[i])) return "operator" - if (stream.match(doubleOperators) || stream.match(singleOperators)) - return "operator"; - - if (stream.match(singleDelimiters)) - return "punctuation"; + if (stream.match(delimiters)) return "punctuation"; if (state.lastToken == "." && stream.match(identifiers)) return "property"; @@ -191,8 +180,69 @@ return ERRORCLASS; } - function tokenStringFactory(delimiter) { - while ("rub".indexOf(delimiter.charAt(0).toLowerCase()) >= 0) + function formatStringFactory(delimiter, tokenOuter) { + while ("rubf".indexOf(delimiter.charAt(0).toLowerCase()) >= 0) + delimiter = delimiter.substr(1); + + var singleline = delimiter.length == 1; + var OUTCLASS = "string"; + + function tokenNestedExpr(depth) { + return function(stream, state) { + var inner = tokenBaseInner(stream, state) + if (inner == "punctuation") { + if (stream.current() == "{") { + state.tokenize = tokenNestedExpr(depth + 1) + } else if (stream.current() == "}") { + if (depth > 1) state.tokenize = tokenNestedExpr(depth - 1) + else state.tokenize = tokenString + } + } + return inner + } + } + + function tokenString(stream, state) { + while (!stream.eol()) { + stream.eatWhile(/[^'"\{\}\\]/); + if (stream.eat("\\")) { + stream.next(); + if (singleline && stream.eol()) + return OUTCLASS; + } else if (stream.match(delimiter)) { + state.tokenize = tokenOuter; + return OUTCLASS; + } else if (stream.match('{{')) { + // ignore {{ in f-str + return OUTCLASS; + } else if (stream.match('{', false)) { + // switch to nested mode + state.tokenize = tokenNestedExpr(0) + if (stream.current()) return OUTCLASS; + else return state.tokenize(stream, state) + } else if (stream.match('}}')) { + return OUTCLASS; + } else if (stream.match('}')) { + // single } in f-string is an error + return ERRORCLASS; + } else { + stream.eat(/['"]/); + } + } + if (singleline) { + if (parserConf.singleLineStringErrors) + return ERRORCLASS; + else + state.tokenize = tokenOuter; + } + return OUTCLASS; + } + tokenString.isString = true; + return tokenString; + } + + function tokenStringFactory(delimiter, tokenOuter) { + while ("rubf".indexOf(delimiter.charAt(0).toLowerCase()) >= 0) delimiter = delimiter.substr(1); var singleline = delimiter.length == 1; @@ -206,7 +256,7 @@ if (singleline && stream.eol()) return OUTCLASS; } else if (stream.match(delimiter)) { - state.tokenize = tokenBase; + state.tokenize = tokenOuter; return OUTCLASS; } else { 
stream.eat(/['"]/); @@ -216,7 +266,7 @@ if (parserConf.singleLineStringErrors) return ERRORCLASS; else - state.tokenize = tokenBase; + state.tokenize = tokenOuter; } return OUTCLASS; } @@ -224,21 +274,23 @@ return tokenString; } - function pushScope(stream, state, type) { - var offset = 0, align = null; - if (type == "py") { - while (top(state).type != "py") - state.scopes.pop(); - } - offset = top(state).offset + (type == "py" ? conf.indentUnit : hangingIndent); - if (type != "py" && !stream.match(/^(\s|#.*)*$/, false)) - align = stream.column() + 1; - state.scopes.push({offset: offset, type: type, align: align}); + function pushPyScope(state) { + while (top(state).type != "py") state.scopes.pop() + state.scopes.push({offset: top(state).offset + conf.indentUnit, + type: "py", + align: null}) + } + + function pushBracketScope(stream, state, type) { + var align = stream.match(/^([\s\[\{\(]|#.*)*$/, false) ? null : stream.column() + 1 + state.scopes.push({offset: state.indent + hangingIndent, + type: type, + align: align}) } function dedent(stream, state) { var indented = stream.indentation(); - while (top(state).offset > indented) { + while (state.scopes.length > 1 && top(state).offset > indented) { if (top(state).type != "py") return true; state.scopes.pop(); } @@ -246,17 +298,16 @@ } function tokenLexer(stream, state) { + if (stream.sol()) state.beginningOfLine = true; + var style = state.tokenize(stream, state); var current = stream.current(); // Handle decorators - if (current == "@"){ - if(parserConf.version && parseInt(parserConf.version, 10) == 3){ - return stream.match(identifiers, false) ? "meta" : "operator"; - } else { - return stream.match(identifiers, false) ? "meta" : ERRORCLASS; - } - } + if (state.beginningOfLine && current == "@") + return stream.match(identifiers, false) ? "meta" : py3 ? "operator" : ERRORCLASS; + + if (/\S/.test(current)) state.beginningOfLine = false; if ((style == "variable" || style == "builtin") && state.lastToken == "meta") @@ -268,16 +319,18 @@ if (current == "lambda") state.lambda = true; if (current == ":" && !state.lambda && top(state).type == "py") - pushScope(stream, state, "py"); + pushPyScope(state); - var delimiter_index = current.length == 1 ? "[({".indexOf(current) : -1; - if (delimiter_index != -1) - pushScope(stream, state, "])}".slice(delimiter_index, delimiter_index+1)); + if (current.length == 1 && !/string|comment/.test(style)) { + var delimiter_index = "[({".indexOf(current); + if (delimiter_index != -1) + pushBracketScope(stream, state, "])}".slice(delimiter_index, delimiter_index+1)); - delimiter_index = "])}".indexOf(current); - if (delimiter_index != -1) { - if (top(state).type == current) state.scopes.pop(); - else return ERRORCLASS; + delimiter_index = "])}".indexOf(current); + if (delimiter_index != -1) { + if (top(state).type == current) state.indent = state.scopes.pop().offset - hangingIndent + else return ERRORCLASS; + } } if (state.dedent > 0 && stream.eol() && top(state).type == "py") { if (state.scopes.length > 1) state.scopes.pop(); @@ -292,6 +345,7 @@ return { tokenize: tokenBase, scopes: [{offset: basecolumn || 0, type: "py", align: null}], + indent: basecolumn || 0, lastToken: null, lambda: false, dedent: 0 @@ -316,16 +370,14 @@ if (state.tokenize != tokenBase) return state.tokenize.isString ? 
CodeMirror.Pass : 0; - var scope = top(state); - var closing = textAfter && textAfter.charAt(0) == scope.type; + var scope = top(state), closing = scope.type == textAfter.charAt(0) if (scope.align != null) - return scope.align - (closing ? 1 : 0); - else if (closing && state.scopes.length > 1) - return state.scopes[state.scopes.length - 2].offset; + return scope.align - (closing ? 1 : 0) else - return scope.offset; + return scope.offset - (closing ? hangingIndent : 0) }, + electricInput: /^\s*[\}\]\)]$/, closeBrackets: {triples: "'\""}, lineComment: "#", fold: "indent" @@ -339,8 +391,8 @@ CodeMirror.defineMIME("text/x-cython", { name: "python", - extra_keywords: words("by cdef cimport cpdef ctypedef enum except"+ - "extern gil include nogil property public"+ + extra_keywords: words("by cdef cimport cpdef ctypedef enum except "+ + "extern gil include nogil property public "+ "readonly struct union DEF IF ELIF ELSE") }); diff --git a/rhodecode/public/js/mode/q/q.js b/rhodecode/public/js/mode/q/q.js --- a/rhodecode/public/js/mode/q/q.js +++ b/rhodecode/public/js/mode/q/q.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS @@ -25,7 +25,7 @@ CodeMirror.defineMode("q",function(confi return(state.tokenize=tokenLineComment)(stream,state); else if(c=="\\"){ if(stream.eol()||/\s/.test(stream.peek())) - return stream.skipToEnd(),/^\\\s*$/.test(stream.current())?(state.tokenize=tokenCommentToEOF)(stream, state):state.tokenize=tokenBase,"comment"; + return stream.skipToEnd(),/^\\\s*$/.test(stream.current())?(state.tokenize=tokenCommentToEOF)(stream):state.tokenize=tokenBase,"comment"; else return state.tokenize=tokenBase,"builtin"; } @@ -34,25 +34,25 @@ CodeMirror.defineMode("q",function(confi if(c=='"') return(state.tokenize=tokenString)(stream,state); if(c=='`') - return stream.eatWhile(/[A-Z|a-z|\d|_|:|\/|\.]/),"symbol"; + return stream.eatWhile(/[A-Za-z\d_:\/.]/),"symbol"; if(("."==c&&/\d/.test(stream.peek()))||/\d/.test(c)){ var t=null; stream.backUp(1); - if(stream.match(/^\d{4}\.\d{2}(m|\.\d{2}([D|T](\d{2}(:\d{2}(:\d{2}(\.\d{1,9})?)?)?)?)?)/) + if(stream.match(/^\d{4}\.\d{2}(m|\.\d{2}([DT](\d{2}(:\d{2}(:\d{2}(\.\d{1,9})?)?)?)?)?)/) || stream.match(/^\d+D(\d{2}(:\d{2}(:\d{2}(\.\d{1,9})?)?)?)/) || stream.match(/^\d{2}:\d{2}(:\d{2}(\.\d{1,9})?)?/) || stream.match(/^\d+[ptuv]{1}/)) t="temporal"; else if(stream.match(/^0[NwW]{1}/) - || stream.match(/^0x[\d|a-f|A-F]*/) - || stream.match(/^[0|1]+[b]{1}/) + || stream.match(/^0x[\da-fA-F]*/) + || stream.match(/^[01]+[b]{1}/) || stream.match(/^\d+[chijn]{1}/) || stream.match(/-?\d*(\.\d*)?(e[+\-]?\d+)?(e|f)?/)) t="number"; return(t&&(!(c=stream.peek())||E.test(c)))?t:(stream.next(),"error"); } - if(/[A-Z|a-z]|\./.test(c)) - return stream.eatWhile(/[A-Z|a-z|\.|_|\d]/),keywords.test(stream.current())?"keyword":"variable"; + if(/[A-Za-z]|\./.test(c)) + return stream.eatWhile(/[A-Za-z._\d]/),keywords.test(stream.current())?"keyword":"variable"; if(/[|/&^!+:\\\-*%$=~#;@><\.,?_\']/.test(c)) return null; if(/[{}\(\[\]\)]/.test(c)) diff --git a/rhodecode/public/js/mode/r/r.js b/rhodecode/public/js/mode/r/r.js --- a/rhodecode/public/js/mode/r/r.js +++ b/rhodecode/public/js/mode/r/r.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: 
http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS @@ -11,16 +11,25 @@ })(function(CodeMirror) { "use strict"; +CodeMirror.registerHelper("wordChars", "r", /[\w.]/); + CodeMirror.defineMode("r", function(config) { - function wordObj(str) { - var words = str.split(" "), res = {}; + function wordObj(words) { + var res = {}; for (var i = 0; i < words.length; ++i) res[words[i]] = true; return res; } - var atoms = wordObj("NULL NA Inf NaN NA_integer_ NA_real_ NA_complex_ NA_character_"); - var builtins = wordObj("list quote bquote eval return call parse deparse"); - var keywords = wordObj("if else repeat while function for in next break"); - var blockkeywords = wordObj("if else repeat while function for"); + var commonAtoms = ["NULL", "NA", "Inf", "NaN", "NA_integer_", "NA_real_", "NA_complex_", "NA_character_", "TRUE", "FALSE"]; + var commonBuiltins = ["list", "quote", "bquote", "eval", "return", "call", "parse", "deparse"]; + var commonKeywords = ["if", "else", "repeat", "while", "function", "for", "in", "next", "break"]; + var commonBlockKeywords = ["if", "else", "repeat", "while", "function", "for"]; + + CodeMirror.registerHelper("hintWords", "r", commonAtoms.concat(commonBuiltins, commonKeywords)); + + var atoms = wordObj(commonAtoms); + var builtins = wordObj(commonBuiltins); + var keywords = wordObj(commonKeywords); + var blockkeywords = wordObj(commonBlockKeywords); var opChars = /[+\-*\/^<>=!&|~$:]/; var curPunc; @@ -42,6 +51,9 @@ CodeMirror.defineMode("r", function(conf } else if (ch == "'" || ch == '"') { state.tokenize = tokenString(ch); return "string"; + } else if (ch == "`") { + stream.match(/[^`]+`/); + return "variable-3"; } else if (ch == "." 
&& stream.match(/.[.\d]+/)) { return "keyword"; } else if (/[\w\.]/.test(ch) && ch != "_") { @@ -60,13 +72,17 @@ CodeMirror.defineMode("r", function(conf return "variable"; } else if (ch == "%") { if (stream.skipTo("%")) stream.next(); - return "variable-2"; - } else if (ch == "<" && stream.eat("-")) { - return "arrow"; + return "operator variable-2"; + } else if ( + (ch == "<" && stream.eat("-")) || + (ch == "<" && stream.match("<-")) || + (ch == "-" && stream.match(/>>?/)) + ) { + return "operator arrow"; } else if (ch == "=" && state.ctx.argList) { return "arg-is"; } else if (opChars.test(ch)) { - if (ch == "$") return "dollar"; + if (ch == "$") return "operator dollar"; stream.eatWhile(opChars); return "operator"; } else if (/[\(\){}\[\];]/.test(ch)) { @@ -99,13 +115,23 @@ CodeMirror.defineMode("r", function(conf }; } + var ALIGN_YES = 1, ALIGN_NO = 2, BRACELESS = 4 + function push(state, type, stream) { state.ctx = {type: type, indent: state.indent, - align: null, + flags: 0, column: stream.column(), prev: state.ctx}; } + function setFlag(state, flag) { + var ctx = state.ctx + state.ctx = {type: ctx.type, + indent: ctx.indent, + flags: ctx.flags | flag, + column: ctx.column, + prev: ctx.prev} + } function pop(state) { state.indent = state.ctx.indent; state.ctx = state.ctx.prev; @@ -116,22 +142,22 @@ CodeMirror.defineMode("r", function(conf return {tokenize: tokenBase, ctx: {type: "top", indent: -config.indentUnit, - align: false}, + flags: ALIGN_NO}, indent: 0, afterIdent: false}; }, token: function(stream, state) { if (stream.sol()) { - if (state.ctx.align == null) state.ctx.align = false; + if ((state.ctx.flags & 3) == 0) state.ctx.flags |= ALIGN_NO + if (state.ctx.flags & BRACELESS) pop(state) state.indent = stream.indentation(); } if (stream.eatSpace()) return null; var style = state.tokenize(stream, state); - if (style != "comment" && state.ctx.align == null) state.ctx.align = true; + if (style != "comment" && (state.ctx.flags & ALIGN_NO) == 0) setFlag(state, ALIGN_YES) - var ctype = state.ctx.type; - if ((curPunc == ";" || curPunc == "{" || curPunc == "}") && ctype == "block") pop(state); + if ((curPunc == ";" || curPunc == "{" || curPunc == "}") && state.ctx.type == "block") pop(state); if (curPunc == "{") push(state, "}", stream); else if (curPunc == "(") { push(state, ")", stream); @@ -139,7 +165,8 @@ CodeMirror.defineMode("r", function(conf } else if (curPunc == "[") push(state, "]", stream); else if (curPunc == "block") push(state, "block", stream); - else if (curPunc == ctype) pop(state); + else if (curPunc == state.ctx.type) pop(state); + else if (state.ctx.type == "block" && style != "comment") setFlag(state, BRACELESS) state.afterIdent = style == "variable" || style == "keyword"; return style; }, @@ -148,8 +175,9 @@ CodeMirror.defineMode("r", function(conf if (state.tokenize != tokenBase) return 0; var firstChar = textAfter && textAfter.charAt(0), ctx = state.ctx, closing = firstChar == ctx.type; + if (ctx.flags & BRACELESS) ctx = ctx.prev if (ctx.type == "block") return ctx.indent + (firstChar == "{" ? 0 : config.indentUnit); - else if (ctx.align) return ctx.column + (closing ? 0 : 1); + else if (ctx.flags & ALIGN_YES) return ctx.column + (closing ? 0 : 1); else return ctx.indent + (closing ? 
0 : config.indentUnit); }, diff --git a/rhodecode/public/js/mode/rpm/rpm.js b/rhodecode/public/js/mode/rpm/rpm.js --- a/rhodecode/public/js/mode/rpm/rpm.js +++ b/rhodecode/public/js/mode/rpm/rpm.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS diff --git a/rhodecode/public/js/mode/rst/rst.js b/rhodecode/public/js/mode/rst/rst.js --- a/rhodecode/public/js/mode/rst/rst.js +++ b/rhodecode/public/js/mode/rst/rst.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS diff --git a/rhodecode/public/js/mode/ruby/ruby.js b/rhodecode/public/js/mode/ruby/ruby.js --- a/rhodecode/public/js/mode/ruby/ruby.js +++ b/rhodecode/public/js/mode/ruby/ruby.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS @@ -28,7 +28,8 @@ CodeMirror.defineMode("ruby", function(c var indentWords = wordObj(["def", "class", "case", "for", "while", "until", "module", "then", "catch", "loop", "proc", "begin"]); var dedentWords = wordObj(["end", "until"]); - var matching = {"[": "]", "{": "}", "(": ")"}; + var opening = {"[": "]", "{": "}", "(": ")"}; + var closing = {"]": "[", "}": "{", ")": "("}; var curPunc; function chain(newtok, stream, state) { @@ -46,22 +47,10 @@ CodeMirror.defineMode("ruby", function(c if (ch == "`" || ch == "'" || ch == '"') { return chain(readQuoted(ch, "string", ch == '"' || ch == "`"), stream, state); } else if (ch == "/") { - var currentIndex = stream.current().length; - if (stream.skipTo("/")) { - var search_till = stream.current().length; - stream.backUp(stream.current().length - currentIndex); - var balance = 0; // balance brackets - while (stream.current().length < search_till) { - var chchr = stream.next(); - if (chchr == "(") balance += 1; - else if (chchr == ")") balance -= 1; - if (balance < 0) break; - } - stream.backUp(stream.current().length - currentIndex); - if (balance == 0) - return chain(readQuoted(ch, "string-2", true), stream, state); - } - return "operator"; + if (regexpAhead(stream)) + return chain(readQuoted(ch, "string-2", true), stream, state); + else + return "operator"; } else if (ch == "%") { var style = "string", embed = true; if (stream.eat("s")) style = "atom"; @@ -70,13 +59,13 @@ CodeMirror.defineMode("ruby", function(c else if (stream.eat(/[wxq]/)) { style = "string"; embed = false; } var delim = stream.eat(/[^\w\s=]/); if (!delim) return "operator"; - if (matching.propertyIsEnumerable(delim)) delim = matching[delim]; + if (opening.propertyIsEnumerable(delim)) delim = opening[delim]; return chain(readQuoted(delim, style, embed, true), stream, state); } else if (ch == "#") { stream.skipToEnd(); return "comment"; - } else if (ch == "<" && (m = stream.match(/^<-?[\`\"\']?([a-zA-Z_?]\w*)[\`\"\']?(?:;|$)/))) { - return chain(readHereDoc(m[1]), stream, state); + } else if (ch == "<" && (m = 
stream.match(/^<([-~])[\`\"\']?([a-zA-Z_?]\w*)[\`\"\']?(?:;|$)/))) { + return chain(readHereDoc(m[2], m[1]), stream, state); } else if (ch == "0") { if (stream.eat("x")) stream.eatWhile(/[\da-fA-F]/); else if (stream.eat("b")) stream.eatWhile(/[01]/); @@ -148,6 +137,28 @@ CodeMirror.defineMode("ruby", function(c } } + function regexpAhead(stream) { + var start = stream.pos, depth = 0, next, found = false, escaped = false + while ((next = stream.next()) != null) { + if (!escaped) { + if ("[{(".indexOf(next) > -1) { + depth++ + } else if ("]})".indexOf(next) > -1) { + depth-- + if (depth < 0) break + } else if (next == "/" && depth == 0) { + found = true + break + } + escaped = next == "\\" + } else { + escaped = false + } + } + stream.backUp(stream.pos - start) + return found + } + function tokenBaseUntilBrace(depth) { if (!depth) depth = 1; return function(stream, state) { @@ -206,8 +217,9 @@ CodeMirror.defineMode("ruby", function(c return style; }; } - function readHereDoc(phrase) { + function readHereDoc(phrase, mayIndent) { return function(stream, state) { + if (mayIndent) stream.eatSpace() if (stream.match(phrase)) state.tokenize.pop(); else stream.skipToEnd(); return "string"; @@ -266,17 +278,18 @@ CodeMirror.defineMode("ruby", function(c }, indent: function(state, textAfter) { - if (state.tokenize[state.tokenize.length-1] != tokenBase) return 0; + if (state.tokenize[state.tokenize.length-1] != tokenBase) return CodeMirror.Pass; var firstChar = textAfter && textAfter.charAt(0); var ct = state.context; - var closing = ct.type == matching[firstChar] || + var closed = ct.type == closing[firstChar] || ct.type == "keyword" && /^(?:end|until|else|elsif|when|rescue)\b/.test(textAfter); - return ct.indented + (closing ? 0 : config.indentUnit) + + return ct.indented + (closed ? 0 : config.indentUnit) + (state.continuedLine ? config.indentUnit : 0); }, - electricInput: /^\s*(?:end|rescue|\})$/, - lineComment: "#" + electricInput: /^\s*(?:end|rescue|elsif|else|\})$/, + lineComment: "#", + fold: "indent" }; }); diff --git a/rhodecode/public/js/mode/rust/rust.js b/rhodecode/public/js/mode/rust/rust.js --- a/rhodecode/public/js/mode/rust/rust.js +++ b/rhodecode/public/js/mode/rust/rust.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS @@ -68,4 +68,5 @@ CodeMirror.defineSimpleMode("rust",{ CodeMirror.defineMIME("text/x-rustsrc", "rust"); +CodeMirror.defineMIME("text/rust", "rust"); }); diff --git a/rhodecode/public/js/mode/sas/sas.js b/rhodecode/public/js/mode/sas/sas.js new file mode 100755 --- /dev/null +++ b/rhodecode/public/js/mode/sas/sas.js @@ -0,0 +1,303 @@ +// CodeMirror, copyright (c) by Marijn Haverbeke and others +// Distributed under an MIT license: https://codemirror.net/LICENSE + + +// SAS mode copyright (c) 2016 Jared Dean, SAS Institute +// Created by Jared Dean + +// TODO +// indent and de-indent +// identify macro variables + + +//Definitions +// comment -- text within * ; or /* */ +// keyword -- SAS language variable +// variable -- macro variables starts with '&' or variable formats +// variable-2 -- DATA Step, proc, or macro names +// string -- text within ' ' or " " +// operator -- numeric operator + / - * ** le eq ge ... 
and so on +// builtin -- proc %macro data run mend +// atom +// def + +(function(mod) { + if (typeof exports == "object" && typeof module == "object") // CommonJS + mod(require("../../lib/codemirror")); + else if (typeof define == "function" && define.amd) // AMD + define(["../../lib/codemirror"], mod); + else // Plain browser env + mod(CodeMirror); +})(function(CodeMirror) { + "use strict"; + + CodeMirror.defineMode("sas", function () { + var words = {}; + var isDoubleOperatorSym = { + eq: 'operator', + lt: 'operator', + le: 'operator', + gt: 'operator', + ge: 'operator', + "in": 'operator', + ne: 'operator', + or: 'operator' + }; + var isDoubleOperatorChar = /(<=|>=|!=|<>)/; + var isSingleOperatorChar = /[=\(:\),{}.*<>+\-\/^\[\]]/; + + // Takes a string of words separated by spaces and adds them as + // keys with the value of the first argument 'style' + function define(style, string, context) { + if (context) { + var split = string.split(' '); + for (var i = 0; i < split.length; i++) { + words[split[i]] = {style: style, state: context}; + } + } + } + //datastep + define('def', 'stack pgm view source debug nesting nolist', ['inDataStep']); + define('def', 'if while until for do do; end end; then else cancel', ['inDataStep']); + define('def', 'label format _n_ _error_', ['inDataStep']); + define('def', 'ALTER BUFNO BUFSIZE CNTLLEV COMPRESS DLDMGACTION ENCRYPT ENCRYPTKEY EXTENDOBSCOUNTER GENMAX GENNUM INDEX LABEL OBSBUF OUTREP PW PWREQ READ REPEMPTY REPLACE REUSE ROLE SORTEDBY SPILL TOBSNO TYPE WRITE FILECLOSE FIRSTOBS IN OBS POINTOBS WHERE WHEREUP IDXNAME IDXWHERE DROP KEEP RENAME', ['inDataStep']); + define('def', 'filevar finfo finv fipname fipnamel fipstate first firstobs floor', ['inDataStep']); + define('def', 'varfmt varinfmt varlabel varlen varname varnum varray varrayx vartype verify vformat vformatd vformatdx vformatn vformatnx vformatw vformatwx vformatx vinarray vinarrayx vinformat vinformatd vinformatdx vinformatn vinformatnx vinformatw vinformatwx vinformatx vlabel vlabelx vlength vlengthx vname vnamex vnferr vtype vtypex weekday', ['inDataStep']); + define('def', 'zipfips zipname zipnamel zipstate', ['inDataStep']); + define('def', 'put putc putn', ['inDataStep']); + define('builtin', 'data run', ['inDataStep']); + + + //proc + define('def', 'data', ['inProc']); + + // flow control for macros + define('def', '%if %end %end; %else %else; %do %do; %then', ['inMacro']); + + //everywhere + define('builtin', 'proc run; quit; libname filename %macro %mend option options', ['ALL']); + + define('def', 'footnote title libname ods', ['ALL']); + define('def', '%let %put %global %sysfunc %eval ', ['ALL']); + // automatic macro variables http://support.sas.com/documentation/cdl/en/mcrolref/61885/HTML/default/viewer.htm#a003167023.htm + define('variable', '&sysbuffr &syscc &syscharwidth &syscmd &sysdate &sysdate9 &sysday &sysdevic &sysdmg &sysdsn &sysencoding &sysenv &syserr &syserrortext &sysfilrc &syshostname &sysindex &sysinfo &sysjobid &syslast &syslckrc &syslibrc &syslogapplname &sysmacroname &sysmenv &sysmsg &sysncpu &sysodspath &sysparm &syspbuff &sysprocessid &sysprocessname &sysprocname &sysrc &sysscp &sysscpl &sysscpl &syssite &sysstartid &sysstartname &systcpiphostname &systime &sysuserid &sysver &sysvlong &sysvlong4 &syswarningtext', ['ALL']); + + //footnote[1-9]? title[1-9]? 
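// Illustrative sketch, not part of the patched file: the define() helper above
// only fills the shared `words` lookup, so a call such as
//
//   define('builtin', 'proc run', ['ALL']);
//
// would leave entries of the form
//
//   words['proc'] = {style: 'builtin', state: ['ALL']};
//   words['run']  = {style: 'builtin', state: ['ALL']};
//
// which tokenize() below consults via words[word].style, gated on whether
// words[word].state contains the current context ('inDataStep', 'inProc',
// 'inMacro') or 'ALL'.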
+ + //options statement + define('def', 'source2 nosource2 page pageno pagesize', ['ALL']); + + //proc and datastep + define('def', '_all_ _character_ _cmd_ _freq_ _i_ _infile_ _last_ _msg_ _null_ _numeric_ _temporary_ _type_ abort abs addr adjrsq airy alpha alter altlog altprint and arcos array arsin as atan attrc attrib attrn authserver autoexec awscontrol awsdef awsmenu awsmenumerge awstitle backward band base betainv between blocksize blshift bnot bor brshift bufno bufsize bxor by byerr byline byte calculated call cards cards4 catcache cbufno cdf ceil center cexist change chisq cinv class cleanup close cnonct cntllev coalesce codegen col collate collin column comamid comaux1 comaux2 comdef compbl compound compress config continue convert cos cosh cpuid create cross crosstab css curobs cv daccdb daccdbsl daccsl daccsyd dacctab dairy datalines datalines4 datejul datepart datetime day dbcslang dbcstype dclose ddm delete delimiter depdb depdbsl depsl depsyd deptab dequote descending descript design= device dflang dhms dif digamma dim dinfo display distinct dkricond dkrocond dlm dnum do dopen doptname doptnum dread drop dropnote dsname dsnferr echo else emaildlg emailid emailpw emailserver emailsys encrypt end endsas engine eof eov erf erfc error errorcheck errors exist exp fappend fclose fcol fdelete feedback fetch fetchobs fexist fget file fileclose fileexist filefmt filename fileref fmterr fmtsearch fnonct fnote font fontalias fopen foptname foptnum force formatted formchar formdelim formdlim forward fpoint fpos fput fread frewind frlen from fsep fuzz fwrite gaminv gamma getoption getvarc getvarn go goto group gwindow hbar hbound helpenv helploc hms honorappearance hosthelp hostprint hour hpct html hvar ibessel ibr id if index indexc indexw initcmd initstmt inner input inputc inputn inr insert int intck intnx into intrr invaliddata irr is jbessel join juldate keep kentb kurtosis label lag last lbound leave left length levels lgamma lib library libref line linesize link list log log10 log2 logpdf logpmf logsdf lostcard lowcase lrecl ls macro macrogen maps mautosource max maxdec maxr mdy mean measures median memtype merge merror min minute missing missover mlogic mod mode model modify month mopen mort mprint mrecall msglevel msymtabmax mvarsize myy n nest netpv new news nmiss no nobatch nobs nocaps nocardimage nocenter nocharcode nocmdmac nocol nocum nodate nodbcs nodetails nodmr nodms nodmsbatch nodup nodupkey noduplicates noechoauto noequals noerrorabend noexitwindows nofullstimer noicon noimplmac noint nolist noloadlist nomiss nomlogic nomprint nomrecall nomsgcase nomstored nomultenvappl nonotes nonumber noobs noovp nopad nopercent noprint noprintinit normal norow norsasuser nosetinit nosplash nosymbolgen note notes notitle notitles notsorted noverbose noxsync noxwait npv null number numkeys nummousekeys nway obs on open order ordinal otherwise out outer outp= output over ovp p(1 5 10 25 50 75 90 95 99) pad pad2 paired parm parmcards path pathdll pathname pdf peek peekc pfkey pmf point poisson poke position printer probbeta probbnml probchi probf probgam probhypr probit probnegb probnorm probsig probt procleave prt ps pw pwreq qtr quote r ranbin rancau ranexp rangam range ranks rannor ranpoi rantbl rantri ranuni read recfm register regr remote remove rename repeat replace resolve retain return reuse reverse rewind right round rsquare rtf rtrace rtraceloc s s2 samploc sasautos sascontrol sasfrscr sasmsg sasmstore sasscript sasuser saving scan sdf second select selection separated seq 
serror set setcomm setot sign simple sin sinh siteinfo skewness skip sle sls sortedby sortpgm sortseq sortsize soundex spedis splashlocation split spool sqrt start std stderr stdin stfips stimer stname stnamel stop stopover subgroup subpopn substr sum sumwgt symbol symbolgen symget symput sysget sysin sysleave sysmsg sysparm sysprint sysprintfont sysprod sysrc system t table tables tan tanh tapeclose tbufsize terminal test then timepart tinv tnonct to today tol tooldef totper transformout translate trantab tranwrd trigamma trim trimn trunc truncover type unformatted uniform union until upcase update user usericon uss validate value var weight when where while wincharset window work workinit workterm write wsum xsync xwait yearcutoff yes yyq min max', ['inDataStep', 'inProc']); + define('operator', 'and not ', ['inDataStep', 'inProc']); + + // Main function + function tokenize(stream, state) { + // Finally advance the stream + var ch = stream.next(); + + // BLOCKCOMMENT + if (ch === '/' && stream.eat('*')) { + state.continueComment = true; + return "comment"; + } else if (state.continueComment === true) { // in comment block + //comment ends at the beginning of the line + if (ch === '*' && stream.peek() === '/') { + stream.next(); + state.continueComment = false; + } else if (stream.skipTo('*')) { //comment is potentially later in line + stream.skipTo('*'); + stream.next(); + if (stream.eat('/')) + state.continueComment = false; + } else { + stream.skipToEnd(); + } + return "comment"; + } + + if (ch == "*" && stream.column() == stream.indentation()) { + stream.skipToEnd() + return "comment" + } + + // DoubleOperator match + var doubleOperator = ch + stream.peek(); + + if ((ch === '"' || ch === "'") && !state.continueString) { + state.continueString = ch + return "string" + } else if (state.continueString) { + if (state.continueString == ch) { + state.continueString = null; + } else if (stream.skipTo(state.continueString)) { + // quote found on this line + stream.next(); + state.continueString = null; + } else { + stream.skipToEnd(); + } + return "string"; + } else if (state.continueString !== null && stream.eol()) { + stream.skipTo(state.continueString) || stream.skipToEnd(); + return "string"; + } else if (/[\d\.]/.test(ch)) { //find numbers + if (ch === ".") + stream.match(/^[0-9]+([eE][\-+]?[0-9]+)?/); + else if (ch === "0") + stream.match(/^[xX][0-9a-fA-F]+/) || stream.match(/^0[0-7]+/); + else + stream.match(/^[0-9]*\.?[0-9]*([eE][\-+]?[0-9]+)?/); + return "number"; + } else if (isDoubleOperatorChar.test(ch + stream.peek())) { // TWO SYMBOL TOKENS + stream.next(); + return "operator"; + } else if (isDoubleOperatorSym.hasOwnProperty(doubleOperator)) { + stream.next(); + if (stream.peek() === ' ') + return isDoubleOperatorSym[doubleOperator.toLowerCase()]; + } else if (isSingleOperatorChar.test(ch)) { // SINGLE SYMBOL TOKENS + return "operator"; + } + + // Matches one whole word -- even if the word is a character + var word; + if (stream.match(/[%&;\w]+/, false) != null) { + word = ch + stream.match(/[%&;\w]+/, true); + if (/&/.test(word)) return 'variable' + } else { + word = ch; + } + // the word after DATA PROC or MACRO + if (state.nextword) { + stream.match(/[\w]+/); + // match memname.libname + if (stream.peek() === '.') stream.skipTo(' '); + state.nextword = false; + return 'variable-2'; + } + + word = word.toLowerCase() + // Are we in a DATA Step? 
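// Illustrative walk-through, not part of the patched file: given a program like
//
//   data work.example;
//     set sashelp.class;
//   run;
//
// the 'data' handling further below sets state.inDataStep and state.nextword,
// so the word after 'data' (the data set name) is styled 'variable-2'; keywords
// such as 'set' are then looked up in the inDataStep/ALL entries of `words`,
// and the branch directly below turns state.inDataStep off again at 'run;'.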
+ if (state.inDataStep) { + if (word === 'run;' || stream.match(/run\s;/)) { + state.inDataStep = false; + return 'builtin'; + } + // variable formats + if ((word) && stream.next() === '.') { + //either a format or libname.memname + if (/\w/.test(stream.peek())) return 'variable-2'; + else return 'variable'; + } + // do we have a DATA Step keyword + if (word && words.hasOwnProperty(word) && + (words[word].state.indexOf("inDataStep") !== -1 || + words[word].state.indexOf("ALL") !== -1)) { + //backup to the start of the word + if (stream.start < stream.pos) + stream.backUp(stream.pos - stream.start); + //advance the length of the word and return + for (var i = 0; i < word.length; ++i) stream.next(); + return words[word].style; + } + } + // Are we in an Proc statement? + if (state.inProc) { + if (word === 'run;' || word === 'quit;') { + state.inProc = false; + return 'builtin'; + } + // do we have a proc keyword + if (word && words.hasOwnProperty(word) && + (words[word].state.indexOf("inProc") !== -1 || + words[word].state.indexOf("ALL") !== -1)) { + stream.match(/[\w]+/); + return words[word].style; + } + } + // Are we in a Macro statement? + if (state.inMacro) { + if (word === '%mend') { + if (stream.peek() === ';') stream.next(); + state.inMacro = false; + return 'builtin'; + } + if (word && words.hasOwnProperty(word) && + (words[word].state.indexOf("inMacro") !== -1 || + words[word].state.indexOf("ALL") !== -1)) { + stream.match(/[\w]+/); + return words[word].style; + } + + return 'atom'; + } + // Do we have Keywords specific words? + if (word && words.hasOwnProperty(word)) { + // Negates the initial next() + stream.backUp(1); + // Actually move the stream + stream.match(/[\w]+/); + if (word === 'data' && /=/.test(stream.peek()) === false) { + state.inDataStep = true; + state.nextword = true; + return 'builtin'; + } + if (word === 'proc') { + state.inProc = true; + state.nextword = true; + return 'builtin'; + } + if (word === '%macro') { + state.inMacro = true; + state.nextword = true; + return 'builtin'; + } + if (/title[1-9]/.test(word)) return 'def'; + + if (word === 'footnote') { + stream.eat(/[1-9]/); + return 'def'; + } + + // Returns their value as state in the prior define methods + if (state.inDataStep === true && words[word].state.indexOf("inDataStep") !== -1) + return words[word].style; + if (state.inProc === true && words[word].state.indexOf("inProc") !== -1) + return words[word].style; + if (state.inMacro === true && words[word].state.indexOf("inMacro") !== -1) + return words[word].style; + if (words[word].state.indexOf("ALL") !== -1) + return words[word].style; + return null; + } + // Unrecognized syntax + return null; + } + + return { + startState: function () { + return { + inDataStep: false, + inProc: false, + inMacro: false, + nextword: false, + continueString: null, + continueComment: false + }; + }, + token: function (stream, state) { + // Strip the spaces, but regex will account for them either way + if (stream.eatSpace()) return null; + // Go through the main process + return tokenize(stream, state); + }, + + blockCommentStart: "/*", + blockCommentEnd: "*/" + }; + + }); + + CodeMirror.defineMIME("text/x-sas", "sas"); +}); diff --git a/rhodecode/public/js/mode/sass/sass.js b/rhodecode/public/js/mode/sass/sass.js --- a/rhodecode/public/js/mode/sass/sass.js +++ b/rhodecode/public/js/mode/sass/sass.js @@ -1,17 +1,23 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an 
MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS - mod(require("../../lib/codemirror")); + mod(require("../../lib/codemirror"), require("../css/css")); else if (typeof define == "function" && define.amd) // AMD - define(["../../lib/codemirror"], mod); + define(["../../lib/codemirror", "../css/css"], mod); else // Plain browser env mod(CodeMirror); })(function(CodeMirror) { "use strict"; CodeMirror.defineMode("sass", function(config) { + var cssMode = CodeMirror.mimeModes["text/css"]; + var propertyKeywords = cssMode.propertyKeywords || {}, + colorKeywords = cssMode.colorKeywords || {}, + valueKeywords = cssMode.valueKeywords || {}, + fontProperties = cssMode.fontProperties || {}; + function tokenRegexp(words) { return new RegExp("^" + words.join("|")); } @@ -25,6 +31,12 @@ CodeMirror.defineMode("sass", function(c var pseudoElementsRegexp = /^::?[a-zA-Z_][\w\-]*/; + var word; + + function isEndLine(stream) { + return !stream.peek() || stream.match(/\s+$/, false); + } + function urlTokens(stream, state) { var ch = stream.peek(); @@ -76,6 +88,9 @@ CodeMirror.defineMode("sass", function(c if (endingString) { if (nextChar !== quote && greedy) { stream.next(); } + if (isEndLine(stream)) { + state.cursorHalf = 0; + } state.tokenizer = tokenBase; return "string"; } else if (nextChar === "#" && peekChar === "{") { @@ -147,14 +162,20 @@ CodeMirror.defineMode("sass", function(c // first half i.e. before : for key-value pairs // including selectors + if (ch === "-") { + if (stream.match(/^-\w+-/)) { + return "meta"; + } + } + if (ch === ".") { stream.next(); if (stream.match(/^[\w-]+/)) { indent(state); - return "atom"; + return "qualifier"; } else if (stream.peek() === "#") { indent(state); - return "atom"; + return "tag"; } } @@ -163,11 +184,11 @@ CodeMirror.defineMode("sass", function(c // ID selectors if (stream.match(/^[\w-]+/)) { indent(state); - return "atom"; + return "builtin"; } if (stream.peek() === "#") { indent(state); - return "atom"; + return "tag"; } } @@ -220,37 +241,48 @@ CodeMirror.defineMode("sass", function(c // Indent Directives if (stream.match(/^@(else if|if|media|else|for|each|while|mixin|function)/)) { indent(state); - return "meta"; + return "def"; } // Other Directives if (ch === "@") { stream.next(); stream.eatWhile(/[\w-]/); - return "meta"; + return "def"; } if (stream.eatWhile(/[\w-]/)){ if(stream.match(/ *: *[\w-\+\$#!\("']/,false)){ - return "property"; + word = stream.current().toLowerCase(); + var prop = state.prevProp + "-" + word; + if (propertyKeywords.hasOwnProperty(prop)) { + return "property"; + } else if (propertyKeywords.hasOwnProperty(word)) { + state.prevProp = word; + return "property"; + } else if (fontProperties.hasOwnProperty(word)) { + return "property"; + } + return "tag"; } else if(stream.match(/ *:/,false)){ indent(state); state.cursorHalf = 1; - return "atom"; + state.prevProp = stream.current().toLowerCase(); + return "property"; } else if(stream.match(/ *,/,false)){ - return "atom"; + return "tag"; } else{ indent(state); - return "atom"; + return "tag"; } } if(ch === ":"){ if (stream.match(pseudoElementsRegexp)){ // could be a pseudo-element - return "keyword"; + return "variable-3"; } stream.next(); state.cursorHalf=1; @@ -264,7 +296,7 @@ CodeMirror.defineMode("sass", function(c stream.next(); // Hex numbers if (stream.match(/[0-9a-fA-F]{6}|[0-9a-fA-F]{3}/)){ - if(!stream.peek()){ + if (isEndLine(stream)) { state.cursorHalf = 0; } return "number"; @@ -273,7 
+305,7 @@ CodeMirror.defineMode("sass", function(c // Numbers if (stream.match(/^-?[0-9\.]+/)){ - if(!stream.peek()){ + if (isEndLine(stream)) { state.cursorHalf = 0; } return "number"; @@ -281,14 +313,14 @@ CodeMirror.defineMode("sass", function(c // Units if (stream.match(/^(px|em|in)\b/)){ - if(!stream.peek()){ + if (isEndLine(stream)) { state.cursorHalf = 0; } return "unit"; } if (stream.match(keywordsRegexp)){ - if(!stream.peek()){ + if (isEndLine(stream)) { state.cursorHalf = 0; } return "keyword"; @@ -296,7 +328,7 @@ CodeMirror.defineMode("sass", function(c if (stream.match(/^url/) && stream.peek() === "(") { state.tokenizer = urlTokens; - if(!stream.peek()){ + if (isEndLine(stream)) { state.cursorHalf = 0; } return "atom"; @@ -306,23 +338,21 @@ CodeMirror.defineMode("sass", function(c if (ch === "$") { stream.next(); stream.eatWhile(/[\w-]/); - if(!stream.peek()){ + if (isEndLine(stream)) { state.cursorHalf = 0; } - return "variable-3"; + return "variable-2"; } // bang character for !important, !default, etc. if (ch === "!") { stream.next(); - if(!stream.peek()){ - state.cursorHalf = 0; - } + state.cursorHalf = 0; return stream.match(/^[\w]+/) ? "keyword": "operator"; } if (stream.match(opRegexp)){ - if(!stream.peek()){ + if (isEndLine(stream)) { state.cursorHalf = 0; } return "operator"; @@ -330,14 +360,24 @@ CodeMirror.defineMode("sass", function(c // attributes if (stream.eatWhile(/[\w-]/)) { - if(!stream.peek()){ + if (isEndLine(stream)) { state.cursorHalf = 0; } - return "attribute"; + word = stream.current().toLowerCase(); + if (valueKeywords.hasOwnProperty(word)) { + return "atom"; + } else if (colorKeywords.hasOwnProperty(word)) { + return "keyword"; + } else if (propertyKeywords.hasOwnProperty(word)) { + state.prevProp = stream.current().toLowerCase(); + return "property"; + } else { + return "tag"; + } } //stream.eatSpace(); - if(!stream.peek()){ + if (isEndLine(stream)) { state.cursorHalf = 0; return null; } @@ -407,7 +447,7 @@ CodeMirror.defineMode("sass", function(c return state.scopes[0].offset; } }; -}); +}, "css"); CodeMirror.defineMIME("text/x-sass", "sass"); diff --git a/rhodecode/public/js/mode/scheme/scheme.js b/rhodecode/public/js/mode/scheme/scheme.js --- a/rhodecode/public/js/mode/scheme/scheme.js +++ b/rhodecode/public/js/mode/scheme/scheme.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE /** * Author: Koh Zi Han, based on implementation by Koh Zi Chun @@ -73,7 +73,8 @@ CodeMirror.defineMode("scheme", function indentStack: null, indentation: 0, mode: false, - sExprComment: false + sExprComment: false, + sExprQuote: false }; }, @@ -121,7 +122,7 @@ CodeMirror.defineMode("scheme", function state.sExprComment = 0; }else{ // if not we just comment the entire of the next token - stream.eatWhile(/[^/s]/); // eat non spaces + stream.eatWhile(/[^\s\(\)\[\]]/); // eat symbol atom returnType = COMMENT; break; } @@ -133,7 +134,15 @@ CodeMirror.defineMode("scheme", function returnType = STRING; } else if (ch == "'") { - returnType = ATOM; + if (stream.peek() == "(" || stream.peek() == "["){ + if (typeof state.sExprQuote != "number") { + state.sExprQuote = 0; + } // else already in a quoted expression + returnType = ATOM; + } else { + stream.eatWhile(/[\w_\-!$%&*+\.\/:<=>?@\^~]/); + returnType = ATOM; + } } else if (ch == '#') { if (stream.eat("|")) { // Multi-line comment state.mode = "comment"; // toggle 
to comment mode @@ -209,6 +218,7 @@ CodeMirror.defineMode("scheme", function stream.backUp(stream.current().length - 1); // undo all the eating if(typeof state.sExprComment == "number") state.sExprComment++; + if(typeof state.sExprQuote == "number") state.sExprQuote++; returnType = BRACKET; } else if (ch == ")" || ch == "]") { @@ -222,16 +232,22 @@ CodeMirror.defineMode("scheme", function state.sExprComment = false; // turn off s-expr commenting mode } } + if(typeof state.sExprQuote == "number"){ + if(--state.sExprQuote == 0){ + returnType = ATOM; // final closing bracket + state.sExprQuote = false; // turn off s-expr quote mode + } + } } } else { - stream.eatWhile(/[\w\$_\-!$%&*+\.\/:<=>?@\^~]/); + stream.eatWhile(/[\w_\-!$%&*+\.\/:<=>?@\^~]/); if (keywords && keywords.propertyIsEnumerable(stream.current())) { returnType = BUILTIN; } else returnType = "variable"; } } - return (typeof state.sExprComment == "number") ? COMMENT : returnType; + return (typeof state.sExprComment == "number") ? COMMENT : ((typeof state.sExprQuote == "number") ? ATOM : returnType); }, indent: function (state) { diff --git a/rhodecode/public/js/mode/shell/shell.js b/rhodecode/public/js/mode/shell/shell.js --- a/rhodecode/public/js/mode/shell/shell.js +++ b/rhodecode/public/js/mode/shell/shell.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS @@ -14,26 +14,27 @@ CodeMirror.defineMode('shell', function() { var words = {}; - function define(style, string) { - var split = string.split(' '); - for(var i = 0; i < split.length; i++) { - words[split[i]] = style; + function define(style, dict) { + for(var i = 0; i < dict.length; i++) { + words[dict[i]] = style; } }; - // Atoms - define('atom', 'true false'); - - // Keywords - define('keyword', 'if then do else elif while until for in esac fi fin ' + - 'fil done exit set unset export function'); + var commonAtoms = ["true", "false"]; + var commonKeywords = ["if", "then", "do", "else", "elif", "while", "until", "for", "in", "esac", "fi", + "fin", "fil", "done", "exit", "set", "unset", "export", "function"]; + var commonCommands = ["ab", "awk", "bash", "beep", "cat", "cc", "cd", "chown", "chmod", "chroot", "clear", + "cp", "curl", "cut", "diff", "echo", "find", "gawk", "gcc", "get", "git", "grep", "hg", "kill", "killall", + "ln", "ls", "make", "mkdir", "openssl", "mv", "nc", "nl", "node", "npm", "ping", "ps", "restart", "rm", + "rmdir", "sed", "service", "sh", "shopt", "shred", "source", "sort", "sleep", "ssh", "start", "stop", + "su", "sudo", "svn", "tee", "telnet", "top", "touch", "vi", "vim", "wall", "wc", "wget", "who", "write", + "yes", "zsh"]; - // Commands - define('builtin', 'ab awk bash beep cat cc cd chown chmod chroot clear cp ' + - 'curl cut diff echo find gawk gcc get git grep kill killall ln ls make ' + - 'mkdir openssl mv nc node npm ping ps restart rm rmdir sed service sh ' + - 'shopt shred source sort sleep ssh start stop su sudo tee telnet top ' + - 'touch vi vim wall wc wget who write yes zsh'); + CodeMirror.registerHelper("hintWords", "shell", commonAtoms.concat(commonKeywords, commonCommands)); + + define('atom', commonAtoms); + define('keyword', commonKeywords); + define('builtin', commonCommands); function tokenBase(stream, state) { if (stream.eatSpace()) return null; @@ -46,7 +47,7 @@ 
CodeMirror.defineMode('shell', function( return null; } if (ch === '\'' || ch === '"' || ch === '`') { - state.tokens.unshift(tokenString(ch)); + state.tokens.unshift(tokenString(ch, ch === "`" ? "quote" : "string")); return tokenize(stream, state); } if (ch === '#') { @@ -81,41 +82,49 @@ CodeMirror.defineMode('shell', function( return words.hasOwnProperty(cur) ? words[cur] : null; } - function tokenString(quote) { + function tokenString(quote, style) { + var close = quote == "(" ? ")" : quote == "{" ? "}" : quote return function(stream, state) { - var next, end = false, escaped = false; + var next, escaped = false; while ((next = stream.next()) != null) { - if (next === quote && !escaped) { - end = true; + if (next === close && !escaped) { + state.tokens.shift(); break; - } - if (next === '$' && !escaped && quote !== '\'') { + } else if (next === '$' && !escaped && quote !== "'" && stream.peek() != close) { escaped = true; stream.backUp(1); state.tokens.unshift(tokenDollar); break; + } else if (!escaped && quote !== close && next === quote) { + state.tokens.unshift(tokenString(quote, style)) + return tokenize(stream, state) + } else if (!escaped && /['"]/.test(next) && !/['"]/.test(quote)) { + state.tokens.unshift(tokenStringStart(next, "string")); + stream.backUp(1); + break; } escaped = !escaped && next === '\\'; } - if (end || !escaped) { - state.tokens.shift(); - } - return (quote === '`' || quote === ')' ? 'quote' : 'string'); + return style; }; }; + function tokenStringStart(quote, style) { + return function(stream, state) { + state.tokens[0] = tokenString(quote, style) + stream.next() + return tokenize(stream, state) + } + } + var tokenDollar = function(stream, state) { if (state.tokens.length > 1) stream.eat('$'); - var ch = stream.next(), hungry = /\w/; - if (ch === '{') hungry = /[^}]/; - if (ch === '(') { - state.tokens[0] = tokenString(')'); + var ch = stream.next() + if (/['"({]/.test(ch)) { + state.tokens[0] = tokenString(ch, ch == "(" ? "quote" : ch == "{" ? 
"def" : "string"); return tokenize(stream, state); } - if (!/\d/.test(ch)) { - stream.eatWhile(hungry); - stream.eat('}'); - } + if (!/\d/.test(ch)) stream.eatWhile(/\w/); state.tokens.shift(); return 'def'; }; @@ -129,11 +138,15 @@ CodeMirror.defineMode('shell', function( token: function(stream, state) { return tokenize(stream, state); }, + closeBrackets: "()[]{}''\"\"``", lineComment: '#', fold: "brace" }; }); CodeMirror.defineMIME('text/x-sh', 'shell'); +// Apache uses a slightly different Media Type for Shell scripts +// http://svn.apache.org/repos/asf/httpd/httpd/trunk/docs/conf/mime.types +CodeMirror.defineMIME('application/x-sh', 'shell'); }); diff --git a/rhodecode/public/js/mode/sieve/sieve.js b/rhodecode/public/js/mode/sieve/sieve.js --- a/rhodecode/public/js/mode/sieve/sieve.js +++ b/rhodecode/public/js/mode/sieve/sieve.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS @@ -170,7 +170,7 @@ CodeMirror.defineMode("sieve", function( if (stream.eatSpace()) return null; - return (state.tokenize || tokenBase)(stream, state);; + return (state.tokenize || tokenBase)(stream, state); }, indent: function(state, _textAfter) { diff --git a/rhodecode/public/js/mode/slim/slim.js b/rhodecode/public/js/mode/slim/slim.js --- a/rhodecode/public/js/mode/slim/slim.js +++ b/rhodecode/public/js/mode/slim/slim.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE // Slim Highlighting for CodeMirror copyright (c) HicknHack Software Gmbh @@ -165,7 +165,7 @@ }; return function(stream, state) { rubyState = state.rubyState; - state.rubyState = rubyMode.startState(); + state.rubyState = CodeMirror.startState(rubyMode); state.tokenize = runSplat; return ruby(stream, state); }; @@ -317,7 +317,7 @@ function startSubMode(mode, state) { var subMode = getMode(mode); - var subState = subMode.startState && subMode.startState(); + var subState = CodeMirror.startState(subMode); state.subMode = subMode; state.subState = subState; @@ -507,8 +507,8 @@ var mode = { // default to html mode startState: function() { - var htmlState = htmlMode.startState(); - var rubyState = rubyMode.startState(); + var htmlState = CodeMirror.startState(htmlMode); + var rubyState = CodeMirror.startState(rubyMode); return { htmlState: htmlState, rubyState: rubyState, diff --git a/rhodecode/public/js/mode/smalltalk/smalltalk.js b/rhodecode/public/js/mode/smalltalk/smalltalk.js --- a/rhodecode/public/js/mode/smalltalk/smalltalk.js +++ b/rhodecode/public/js/mode/smalltalk/smalltalk.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS diff --git a/rhodecode/public/js/mode/smarty/smarty.js b/rhodecode/public/js/mode/smarty/smarty.js --- a/rhodecode/public/js/mode/smarty/smarty.js +++ b/rhodecode/public/js/mode/smarty/smarty.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under 
an MIT license: https://codemirror.net/LICENSE /** * Smarty 2 and 3 mode. @@ -210,9 +210,9 @@ state.last = last; return style; }, - indent: function(state, text) { + indent: function(state, text, line) { if (state.tokenize == tokenTop && baseMode.indent) - return baseMode.indent(state.base, text); + return baseMode.indent(state.base, text, line); else return CodeMirror.Pass; }, diff --git a/rhodecode/public/js/mode/solr/solr.js b/rhodecode/public/js/mode/solr/solr.js --- a/rhodecode/public/js/mode/solr/solr.js +++ b/rhodecode/public/js/mode/solr/solr.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS @@ -14,12 +14,12 @@ CodeMirror.defineMode("solr", function() { "use strict"; - var isStringChar = /[^\s\|\!\+\-\*\?\~\^\&\:\(\)\[\]\{\}\^\"\\]/; + var isStringChar = /[^\s\|\!\+\-\*\?\~\^\&\:\(\)\[\]\{\}\"\\]/; var isOperatorChar = /[\|\!\+\-\*\?\~\^\&]/; var isOperatorString = /^(OR|AND|NOT|TO)$/i; function isNumber(word) { - return parseFloat(word, 10).toString() === word; + return parseFloat(word).toString() === word; } function tokenString(quote) { diff --git a/rhodecode/public/js/mode/soy/soy.js b/rhodecode/public/js/mode/soy/soy.js --- a/rhodecode/public/js/mode/soy/soy.js +++ b/rhodecode/public/js/mode/soy/soy.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS @@ -11,9 +11,45 @@ })(function(CodeMirror) { "use strict"; - var indentingTags = ["template", "literal", "msg", "fallbackmsg", "let", "if", "elseif", - "else", "switch", "case", "default", "foreach", "ifempty", "for", - "call", "param", "deltemplate", "delcall", "log"]; + var paramData = { noEndTag: true, soyState: "param-def" }; + var tags = { + "alias": { noEndTag: true }, + "delpackage": { noEndTag: true }, + "namespace": { noEndTag: true, soyState: "namespace-def" }, + "@param": paramData, + "@param?": paramData, + "@inject": paramData, + "@inject?": paramData, + "@state": paramData, + "@state?": paramData, + "template": { soyState: "templ-def", variableScope: true}, + "literal": { }, + "msg": {}, + "fallbackmsg": { noEndTag: true, reduceIndent: true}, + "select": {}, + "plural": {}, + "let": { soyState: "var-def" }, + "if": {}, + "elseif": { noEndTag: true, reduceIndent: true}, + "else": { noEndTag: true, reduceIndent: true}, + "switch": {}, + "case": { noEndTag: true, reduceIndent: true}, + "default": { noEndTag: true, reduceIndent: true}, + "foreach": { variableScope: true, soyState: "var-def" }, + "ifempty": { noEndTag: true, reduceIndent: true}, + "for": { variableScope: true, soyState: "var-def" }, + "call": { soyState: "templ-ref" }, + "param": { soyState: "param-ref"}, + "print": { noEndTag: true }, + "deltemplate": { soyState: "templ-def", variableScope: true}, + "delcall": { soyState: "templ-ref" }, + "log": {}, + "element": { variableScope: true }, + }; + + var indentingTags = Object.keys(tags).filter(function(tag) { + return !tags[tag].noEndTag || tags[tag].reduceIndent; + }); CodeMirror.defineMode("soy", function(config) { var textMode = CodeMirror.getMode(config, "text/plain"); @@ -22,6 +58,7 @@ attributes: 
textMode, text: textMode, uri: textMode, + trusted_resource_uri: textMode, css: CodeMirror.getMode(config, "text/css"), js: CodeMirror.getMode(config, {name: "text/javascript", statementIndent: 2 * config.indentUnit}) }; @@ -31,6 +68,12 @@ } function tokenUntil(stream, state, untilRegExp) { + if (stream.sol()) { + for (var indent = 0; indent < state.indent; indent++) { + if (!stream.eat(/\s/)) break; + } + if (indent) return null; + } var oldString = stream.string; var match = untilRegExp.exec(oldString.substr(stream.pos)); if (match) { @@ -39,33 +82,82 @@ stream.string = oldString.substr(0, stream.pos + match.index); } var result = stream.hideFirstChars(state.indent, function() { - return state.localMode.token(stream, state.localState); + var localState = last(state.localStates); + return localState.mode.token(stream, localState.state); }); stream.string = oldString; return result; } + function contains(list, element) { + while (list) { + if (list.element === element) return true; + list = list.next; + } + return false; + } + + function prepend(list, element) { + return { + element: element, + next: list + }; + } + + function popcontext(state) { + if (!state.context) return; + if (state.context.scope) { + state.variables = state.context.scope; + } + state.context = state.context.previousContext; + } + + // Reference a variable `name` in `list`. + // Let `loose` be truthy to ignore missing identifiers. + function ref(list, name, loose) { + return contains(list, name) ? "variable-2" : (loose ? "variable" : "variable-2 error"); + } + + // Data for an open soy tag. + function Context(previousContext, tag, scope) { + this.previousContext = previousContext; + this.tag = tag; + this.kind = null; + this.scope = scope; + } + return { startState: function() { return { - kind: [], - kindTag: [], soyState: [], + templates: null, + variables: prepend(null, 'ij'), + scopes: null, indent: 0, - localMode: modes.html, - localState: CodeMirror.startState(modes.html) + quoteKind: null, + context: null, + localStates: [{ + mode: modes.html, + state: CodeMirror.startState(modes.html) + }] }; }, copyState: function(state) { return { tag: state.tag, // Last seen Soy tag. - kind: state.kind.concat([]), // Values of kind="" attributes. - kindTag: state.kindTag.concat([]), // Opened tags with kind="" attributes. soyState: state.soyState.concat([]), + templates: state.templates, + variables: state.variables, + context: state.context, indent: state.indent, // Indentation of the following line. 
- localMode: state.localMode, - localState: CodeMirror.copyState(state.localMode, state.localState) + quoteKind: state.quoteKind, + localStates: state.localStates.map(function(localState) { + return { + mode: localState.mode, + state: CodeMirror.copyState(localState.mode, localState.state) + }; + }) }; }, @@ -79,36 +171,159 @@ } else { stream.skipToEnd(); } + if (!state.context || !state.context.scope) { + var paramRe = /@param\??\s+(\S+)/g; + var current = stream.current(); + for (var match; (match = paramRe.exec(current)); ) { + state.variables = prepend(state.variables, match[1]); + } + } return "comment"; - case "variable": - if (stream.match(/^}/)) { - state.indent -= 2 * config.indentUnit; + case "string": + var match = stream.match(/^.*?(["']|\\[\s\S])/); + if (!match) { + stream.skipToEnd(); + } else if (match[1] == state.quoteKind) { + state.quoteKind = null; + state.soyState.pop(); + } + return "string"; + } + + if (!state.soyState.length || last(state.soyState) != "literal") { + if (stream.match(/^\/\*/)) { + state.soyState.push("comment"); + return "comment"; + } else if (stream.match(stream.sol() ? /^\s*\/\/.*/ : /^\s+\/\/.*/)) { + return "comment"; + } + } + + switch (last(state.soyState)) { + case "templ-def": + if (match = stream.match(/^\.?([\w]+(?!\.[\w]+)*)/)) { + state.templates = prepend(state.templates, match[1]); + state.soyState.pop(); + return "def"; + } + stream.next(); + return null; + + case "templ-ref": + if (match = stream.match(/(\.?[a-zA-Z_][a-zA-Z_0-9]+)+/)) { state.soyState.pop(); - return "variable-2"; + // If the first character is '.', it can only be a local template. + if (match[0][0] == '.') { + return "variable-2" + } + // Otherwise + return "variable"; + } + stream.next(); + return null; + + case "namespace-def": + if (match = stream.match(/^\.?([\w\.]+)/)) { + state.soyState.pop(); + return "variable"; + } + stream.next(); + return null; + + case "param-def": + if (match = stream.match(/^\w+/)) { + state.variables = prepend(state.variables, match[0]); + state.soyState.pop(); + state.soyState.push("param-type"); + return "def"; + } + stream.next(); + return null; + + case "param-ref": + if (match = stream.match(/^\w+/)) { + state.soyState.pop(); + return "property"; + } + stream.next(); + return null; + + case "param-type": + if (stream.peek() == "}") { + state.soyState.pop(); + return null; + } + if (stream.eatWhile(/^([\w]+|[?])/)) { + return "type"; + } + stream.next(); + return null; + + case "var-def": + if (match = stream.match(/^\$([\w]+)/)) { + state.variables = prepend(state.variables, match[1]); + state.soyState.pop(); + return "def"; } stream.next(); return null; case "tag": + var endTag = state.tag[0] == "/"; + var tagName = endTag ? state.tag.substring(1) : state.tag; + var tag = tags[tagName]; if (stream.match(/^\/?}/)) { - if (state.tag == "/template" || state.tag == "/deltemplate") state.indent = 0; - else state.indent -= (stream.current() == "/}" || indentingTags.indexOf(state.tag) == -1 ? 2 : 1) * config.indentUnit; + var selfClosed = stream.current() == "/}"; + if (selfClosed && !endTag) { + popcontext(state); + } + if (state.tag == "/template" || state.tag == "/deltemplate") { + state.variables = prepend(null, 'ij'); + state.indent = 0; + } else { + state.indent -= config.indentUnit * + (selfClosed || indentingTags.indexOf(state.tag) == -1 ? 
2 : 1); + } state.soyState.pop(); return "keyword"; } else if (stream.match(/^([\w?]+)(?==)/)) { - if (stream.current() == "kind" && (match = stream.match(/^="([^"]+)/, false))) { + if (state.context && state.context.tag == tagName && stream.current() == "kind" && (match = stream.match(/^="([^"]+)/, false))) { var kind = match[1]; - state.kind.push(kind); - state.kindTag.push(state.tag); - state.localMode = modes[kind] || modes.html; - state.localState = CodeMirror.startState(state.localMode); + state.context.kind = kind; + var mode = modes[kind] || modes.html; + var localState = last(state.localStates); + if (localState.mode.indent) { + state.indent += localState.mode.indent(localState.state, "", ""); + } + state.localStates.push({ + mode: mode, + state: CodeMirror.startState(mode) + }); } return "attribute"; - } else if (stream.match(/^"/)) { + } else if (match = stream.match(/([\w]+)(?=\()/)) { + return "variable callee"; + } else if (match = stream.match(/^["']/)) { state.soyState.push("string"); + state.quoteKind = match; return "string"; } + if (stream.match(/(null|true|false)(?!\w)/) || + stream.match(/0x([0-9a-fA-F]{2,})/) || + stream.match(/-?([0-9]*[.])?[0-9]+(e[0-9]*)?/)) { + return "atom"; + } + if (stream.match(/(\||[+\-*\/%]|[=!]=|\?:|[<>]=?)/)) { + // Tokenize filter, binary, null propagator, and equality operators. + return "operator"; + } + if (match = stream.match(/^\$([\w]+)/)) { + return ref(state.variables, match[1]); + } + if (match = stream.match(/^\w+/)) { + return /^(?:as|and|or|not|in)$/.test(match[0]) ? "keyword" : null; + } stream.next(); return null; @@ -119,40 +334,59 @@ return this.token(stream, state); } return tokenUntil(stream, state, /\{\/literal}/); - - case "string": - if (stream.match(/^.*?"/)) { - state.soyState.pop(); - } else { - stream.skipToEnd(); - } - return "string"; } - if (stream.match(/^\/\*/)) { - state.soyState.push("comment"); - return "comment"; - } else if (stream.match(stream.sol() ? /^\s*\/\/.*/ : /^\s+\/\/.*/)) { - return "comment"; - } else if (stream.match(/^\{\$[\w?]*/)) { - state.indent += 2 * config.indentUnit; - state.soyState.push("variable"); - return "variable-2"; - } else if (stream.match(/^\{literal}/)) { + if (stream.match(/^\{literal}/)) { state.indent += config.indentUnit; state.soyState.push("literal"); + state.context = new Context(state.context, "literal", state.variables); return "keyword"; - } else if (match = stream.match(/^\{([\/@\\]?[\w?]*)/)) { - if (match[1] != "/switch") - state.indent += (/^(\/|(else|elseif|case|default)$)/.test(match[1]) && state.tag != "switch" ? 1 : 2) * config.indentUnit; + + // A tag-keyword must be followed by whitespace, comment or a closing tag. + } else if (match = stream.match(/^\{([/@\\]?\w+\??)(?=$|[\s}]|\/[/*])/)) { + var prevTag = state.tag; state.tag = match[1]; - if (state.tag == "/" + last(state.kindTag)) { - // We found the tag that opened the current kind="". - state.kind.pop(); - state.kindTag.pop(); - state.localMode = modes[last(state.kind)] || modes.html; - state.localState = CodeMirror.startState(state.localMode); + var endTag = state.tag[0] == "/"; + var indentingTag = !!tags[state.tag]; + var tagName = endTag ? state.tag.substring(1) : state.tag; + var tag = tags[tagName]; + if (state.tag != "/switch") + state.indent += ((endTag || tag && tag.reduceIndent) && prevTag != "switch" ? 
1 : 2) * config.indentUnit; + + state.soyState.push("tag"); + var tagError = false; + if (tag) { + if (!endTag) { + if (tag.soyState) state.soyState.push(tag.soyState); + } + // If a new tag, open a new context. + if (!tag.noEndTag && (indentingTag || !endTag)) { + state.context = new Context(state.context, state.tag, tag.variableScope ? state.variables : null); + // Otherwise close the current context. + } else if (endTag) { + if (!state.context || state.context.tag != tagName) { + tagError = true; + } else if (state.context) { + if (state.context.kind) { + state.localStates.pop(); + var localState = last(state.localStates); + if (localState.mode.indent) { + state.indent -= localState.mode.indent(localState.state, "", ""); + } + } + popcontext(state); + } + } + } else if (endTag) { + // Assume all tags with a closing tag are defined in the config. + tagError = true; } + return (tagError ? "error " : "") + "keyword"; + + // Not a tag-keyword; it's an implicit print tag. + } else if (stream.eat('{')) { + state.tag = "print"; + state.indent += 2 * config.indentUnit; state.soyState.push("tag"); return "keyword"; } @@ -160,7 +394,7 @@ return tokenUntil(stream, state, /\{|\s+\/\/|\/\*/); }, - indent: function(state, textAfter) { + indent: function(state, textAfter, line) { var indent = state.indent, top = last(state.soyState); if (top == "comment") return CodeMirror.Pass; @@ -172,14 +406,16 @@ if (state.tag != "switch" && /^\{(case|default)\b/.test(textAfter)) indent -= config.indentUnit; if (/^\{\/switch\b/.test(textAfter)) indent -= config.indentUnit; } - if (indent && state.localMode.indent) - indent += state.localMode.indent(state.localState, textAfter); + var localState = last(state.localStates); + if (indent && localState.mode.indent) { + indent += localState.mode.indent(localState.state, textAfter, line); + } return indent; }, innerMode: function(state) { if (state.soyState.length && last(state.soyState) != "literal") return null; - else return {state: state.localState, mode: state.localMode}; + else return last(state.localStates); }, electricInput: /^\s*\{(\/|\/template|\/deltemplate|\/switch|fallbackmsg|elseif|else|case|default|ifempty|\/literal\})$/, @@ -187,12 +423,15 @@ blockCommentStart: "/*", blockCommentEnd: "*/", blockCommentContinue: " * ", + useInnerComments: false, fold: "indent" }; }, "htmlmixed"); - CodeMirror.registerHelper("hintWords", "soy", indentingTags.concat( - ["delpackage", "namespace", "alias", "print", "css", "debugger"])); + CodeMirror.registerHelper("wordChars", "soy", /[\w$]/); + + CodeMirror.registerHelper("hintWords", "soy", Object.keys(tags).concat( + ["css", "debugger"])); CodeMirror.defineMIME("text/x-soy", "soy"); }); diff --git a/rhodecode/public/js/mode/sparql/sparql.js b/rhodecode/public/js/mode/sparql/sparql.js --- a/rhodecode/public/js/mode/sparql/sparql.js +++ b/rhodecode/public/js/mode/sparql/sparql.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS @@ -25,7 +25,7 @@ CodeMirror.defineMode("sparql", function "strbefore", "strafter", "year", "month", "day", "hours", "minutes", "seconds", "timezone", "tz", "now", "uuid", "struuid", "md5", "sha1", "sha256", "sha384", "sha512", "coalesce", "if", "strlang", "strdt", "isnumeric", "regex", "exists", - "isblank", "isliteral", "a"]); + "isblank", 
"isliteral", "a", "bind"]); var keywords = wordRegexp(["base", "prefix", "select", "distinct", "reduced", "construct", "describe", "ask", "from", "named", "where", "order", "limit", "offset", "filter", "optional", "graph", "by", "asc", "desc", "as", "having", "undef", "values", "group", @@ -41,7 +41,7 @@ CodeMirror.defineMode("sparql", function if(ch == "?" && stream.match(/\s/, false)){ return "operator"; } - stream.match(/^[\w\d]*/); + stream.match(/^[A-Za-z0-9_\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD][A-Za-z0-9_\u00B7\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u037D\u037F-\u1FFF\u200C-\u200D\u203F-\u2040\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD]*/); return "variable-2"; } else if (ch == "<" && !stream.match(/^[\s\u00a0=]/, false)) { @@ -135,7 +135,11 @@ CodeMirror.defineMode("sparql", function else if (curPunc == "{") pushContext(state, "}", stream.column()); else if (/[\]\}\)]/.test(curPunc)) { while (state.context && state.context.type == "pattern") popContext(state); - if (state.context && curPunc == state.context.type) popContext(state); + if (state.context && curPunc == state.context.type) { + popContext(state); + if (curPunc == "}" && state.context && state.context.type == "pattern") + popContext(state); + } } else if (curPunc == "." && state.context && state.context.type == "pattern") popContext(state); else if (/atom|string|variable/.test(style) && state.context) { diff --git a/rhodecode/public/js/mode/spreadsheet/spreadsheet.js b/rhodecode/public/js/mode/spreadsheet/spreadsheet.js --- a/rhodecode/public/js/mode/spreadsheet/spreadsheet.js +++ b/rhodecode/public/js/mode/spreadsheet/spreadsheet.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS @@ -70,7 +70,10 @@ return "operator"; case "\\": if (stream.match(/\\[a-z]+/)) return "string-2"; - else return null; + else { + stream.next(); + return "atom"; + } case ".": case ",": case ";": diff --git a/rhodecode/public/js/mode/sql/sql.js b/rhodecode/public/js/mode/sql/sql.js --- a/rhodecode/public/js/mode/sql/sql.js +++ b/rhodecode/public/js/mode/sql/sql.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS @@ -12,16 +12,17 @@ "use strict"; CodeMirror.defineMode("sql", function(config, parserConfig) { - "use strict"; - var client = parserConfig.client || {}, atoms = parserConfig.atoms || {"false": true, "true": true, "null": true}, - builtin = parserConfig.builtin || {}, - keywords = parserConfig.keywords || {}, - operatorChars = parserConfig.operatorChars || /^[*+\-%<>!=&|~^]/, + builtin = parserConfig.builtin || set(defaultBuiltin), + keywords = parserConfig.keywords || set(sqlKeywords), + operatorChars = parserConfig.operatorChars || /^[*+\-%<>!=&|~^\/]/, support = parserConfig.support || {}, hooks = parserConfig.hooks || {}, - dateSQL = parserConfig.dateSQL || {"date" : true, "time" : true, "timestamp" : true}; + dateSQL = parserConfig.dateSQL || {"date" : true, "time" : true, "timestamp" : true}, + 
backslashStringEscapes = parserConfig.backslashStringEscapes !== false, + brackets = parserConfig.brackets || /^[\{}\(\)\[\]]/, + punctuation = parserConfig.punctuation || /^[;.,:]/ function tokenBase(stream, state) { var ch = stream.next(); @@ -32,13 +33,13 @@ CodeMirror.defineMode("sql", function(co if (result !== false) return result; } - if (support.hexNumber == true && + if (support.hexNumber && ((ch == "0" && stream.match(/^[xX][0-9a-fA-F]+/)) || (ch == "x" || ch == "X") && stream.match(/^'[0-9a-fA-F]+'/))) { // hex // ref: http://dev.mysql.com/doc/refman/5.5/en/hexadecimal-literals.html return "number"; - } else if (support.binaryNumber == true && + } else if (support.binaryNumber && (((ch == "b" || ch == "B") && stream.match(/^'[01]+'/)) || (ch == "0" && stream.match(/^b[01]+/)))) { // bitstring @@ -47,8 +48,8 @@ CodeMirror.defineMode("sql", function(co } else if (ch.charCodeAt(0) > 47 && ch.charCodeAt(0) < 58) { // numbers // ref: http://dev.mysql.com/doc/refman/5.5/en/number-literals.html - stream.match(/^[0-9]*\.?[0-9]+([eE][-+]?[0-9]+)?/); - support.decimallessFloat == true && stream.eat('.'); + stream.match(/^[0-9]*(\.[0-9]+)?([eE][-+]?[0-9]+)?/); + support.decimallessFloat && stream.match(/^\.(?!\.)/); return "number"; } else if (ch == "?" && (stream.eatSpace() || stream.eol() || stream.eat(";"))) { // placeholders @@ -58,15 +59,12 @@ CodeMirror.defineMode("sql", function(co // ref: http://dev.mysql.com/doc/refman/5.5/en/string-literals.html state.tokenize = tokenLiteral(ch); return state.tokenize(stream, state); - } else if ((((support.nCharCast == true && (ch == "n" || ch == "N")) - || (support.charsetCast == true && ch == "_" && stream.match(/[a-z][a-z0-9]*/i))) + } else if ((((support.nCharCast && (ch == "n" || ch == "N")) + || (support.charsetCast && ch == "_" && stream.match(/[a-z][a-z0-9]*/i))) && (stream.peek() == "'" || stream.peek() == '"'))) { // charset casting: _utf8'str', N'str', n'str' // ref: http://dev.mysql.com/doc/refman/5.5/en/string-literals.html return "keyword"; - } else if (/^[\(\),\;\[\]]/.test(ch)) { - // no highlightning - return null; } else if (support.commentSlashSlash && ch == "/" && stream.eat("/")) { // 1-line comment stream.skipToEnd(); @@ -80,22 +78,29 @@ CodeMirror.defineMode("sql", function(co } else if (ch == "/" && stream.eat("*")) { // multi-line comments // ref: https://kb.askmonty.org/en/comment-syntax/ - state.tokenize = tokenComment; + state.tokenize = tokenComment(1); return state.tokenize(stream, state); } else if (ch == ".") { // .1 for 0.1 - if (support.zerolessFloat == true && stream.match(/^(?:\d+(?:e[+-]?\d+)?)/i)) { + if (support.zerolessFloat && stream.match(/^(?:\d+(?:e[+-]?\d+)?)/i)) return "number"; - } + if (stream.match(/^\.+/)) + return null // .table_name (ODBC) // // ref: http://dev.mysql.com/doc/refman/5.6/en/identifier-qualifiers.html - if (support.ODBCdotTable == true && stream.match(/^[a-zA-Z_]+/)) { + if (support.ODBCdotTable && stream.match(/^[\w\d_]+/)) return "variable-2"; - } } else if (operatorChars.test(ch)) { // operators stream.eatWhile(operatorChars); - return null; + return "operator"; + } else if (brackets.test(ch)) { + // brackets + return "bracket"; + } else if (punctuation.test(ch)) { + // punctuation + stream.eatWhile(punctuation); + return "punctuation"; } else if (ch == '{' && (stream.match(/^( )*(d|D|t|T|ts|TS)( )*'[^']*'( )*}/) || stream.match(/^( )*(d|D|t|T|ts|TS)( )*"[^"]*"( )*}/))) { // dates (weird ODBC syntax) @@ -125,25 +130,20 @@ CodeMirror.defineMode("sql", function(co state.tokenize 
= tokenBase; break; } - escaped = !escaped && ch == "\\"; + escaped = backslashStringEscapes && !escaped && ch == "\\"; } return "string"; }; } - function tokenComment(stream, state) { - while (true) { - if (stream.skipTo("*")) { - stream.next(); - if (stream.eat("/")) { - state.tokenize = tokenBase; - break; - } - } else { - stream.skipToEnd(); - break; - } + function tokenComment(depth) { + return function(stream, state) { + var m = stream.match(/^.*?(\/\*|\*\/)/) + if (!m) stream.skipToEnd() + else if (m[1] == "/*") state.tokenize = tokenComment(depth + 1) + else if (depth > 1) state.tokenize = tokenComment(depth - 1) + else state.tokenize = tokenBase + return "comment" } - return "comment"; } function pushContext(stream, state, type) { @@ -170,7 +170,7 @@ CodeMirror.defineMode("sql", function(co if (state.context && state.context.align == null) state.context.align = false; } - if (stream.eatSpace()) return null; + if (state.tokenize == tokenBase && stream.eatSpace()) return null; var style = state.tokenize(stream, state); if (style == "comment") return style; @@ -198,13 +198,11 @@ CodeMirror.defineMode("sql", function(co blockCommentStart: "/*", blockCommentEnd: "*/", - lineComment: support.commentSlashSlash ? "//" : support.commentHash ? "#" : null + lineComment: support.commentSlashSlash ? "//" : support.commentHash ? "#" : "--", + closeBrackets: "()[]{}''\"\"``" }; }); -(function() { - "use strict"; - // `identifier` function hookIdentifier(stream) { // MySQL/MariaDB identifiers @@ -217,6 +215,19 @@ CodeMirror.defineMode("sql", function(co return stream.eatWhile(/\w/) ? "variable-2" : null; } + // "identifier" + function hookIdentifierDoublequote(stream) { + // Standard SQL /SQLite identifiers + // ref: http://web.archive.org/web/20160813185132/http://savage.net.au/SQL/sql-99.bnf.html#delimited%20identifier + // ref: http://sqlite.org/lang_keywords.html + var ch; + while ((ch = stream.next()) != null) { + if (ch == "\"" && !stream.eat("\"")) return "variable-2"; + } + stream.backUp(stream.current().length - 1); + return stream.eatWhile(/\w/) ? "variable-2" : null; + } + // variable token function hookVar(stream) { // variables @@ -257,7 +268,7 @@ CodeMirror.defineMode("sql", function(co } // these keywords are used by all SQL dialects (however, a mode can still overwrite it) - var sqlKeywords = "alter and as asc between by count create delete desc distinct drop from group having in insert into is join like not on or order select set table union update values where limit"; + var sqlKeywords = "alter and as asc between by count create delete desc distinct drop from group having in insert into is join like not on or order select set table union update values where limit "; // turn a space-separated list into an array function set(str) { @@ -266,24 +277,28 @@ CodeMirror.defineMode("sql", function(co return obj; } + var defaultBuiltin = "bool boolean bit blob enum long longblob longtext medium mediumblob mediumint mediumtext time timestamp tinyblob tinyint tinytext text bigint int int1 int2 int3 int4 int8 integer float float4 float8 double char varbinary varchar varcharacter precision real date datetime year unsigned signed decimal numeric" + // A generic SQL Mode. 
It's not a standard, it just try to support what is generally supported CodeMirror.defineMIME("text/x-sql", { name: "sql", keywords: set(sqlKeywords + "begin"), - builtin: set("bool boolean bit blob enum long longblob longtext medium mediumblob mediumint mediumtext time timestamp tinyblob tinyint tinytext text bigint int int1 int2 int3 int4 int8 integer float float4 float8 double char varbinary varchar varcharacter precision real date datetime year unsigned signed decimal numeric"), + builtin: set(defaultBuiltin), atoms: set("false true null unknown"), - operatorChars: /^[*+\-%<>!=]/, dateSQL: set("date time timestamp"), support: set("ODBCdotTable doubleQuote binaryNumber hexNumber") }); CodeMirror.defineMIME("text/x-mssql", { name: "sql", - client: set("charset clear connect edit ego exit go help nopager notee nowarning pager print prompt quit rehash source status system tee"), - keywords: set(sqlKeywords + "begin trigger proc view index for add constraint key primary foreign collate clustered nonclustered declare"), + client: set("$partition binary_checksum checksum connectionproperty context_info current_request_id error_line error_message error_number error_procedure error_severity error_state formatmessage get_filestream_transaction_context getansinull host_id host_name isnull isnumeric min_active_rowversion newid newsequentialid rowcount_big xact_state object_id"), + keywords: set(sqlKeywords + "begin trigger proc view index for add constraint key primary foreign collate clustered nonclustered declare exec go if use index holdlock nolock nowait paglock readcommitted readcommittedlock readpast readuncommitted repeatableread rowlock serializable snapshot tablock tablockx updlock with"), builtin: set("bigint numeric bit smallint decimal smallmoney int tinyint money float real char varchar text nchar nvarchar ntext binary varbinary image cursor timestamp hierarchyid uniqueidentifier sql_variant xml table "), - atoms: set("false true null unknown"), - operatorChars: /^[*+\-%<>!=]/, + atoms: set("is not null like and or in left right between inner outer join all any some cross unpivot pivot exists"), + operatorChars: /^[*+\-%<>!=^\&|\/]/, + brackets: /^[\{}\(\)]/, + punctuation: /^[;.,:/]/, + backslashStringEscapes: false, dateSQL: set("date datetimeoffset datetime2 smalldatetime datetime time"), hooks: { "@": hookVar @@ -322,6 +337,36 @@ CodeMirror.defineMode("sql", function(co } }); + // provided by the phpLiteAdmin project - phpliteadmin.org + CodeMirror.defineMIME("text/x-sqlite", { + name: "sql", + // commands of the official SQLite client, ref: https://www.sqlite.org/cli.html#dotcmd + client: set("auth backup bail binary changes check clone databases dbinfo dump echo eqp exit explain fullschema headers help import imposter indexes iotrace limit lint load log mode nullvalue once open output print prompt quit read restore save scanstats schema separator session shell show stats system tables testcase timeout timer trace vfsinfo vfslist vfsname width"), + // ref: http://sqlite.org/lang_keywords.html + keywords: set(sqlKeywords + "abort action add after all analyze attach autoincrement before begin cascade case cast check collate column commit conflict constraint cross current_date current_time current_timestamp database default deferrable deferred detach each else end escape except exclusive exists explain fail for foreign full glob if ignore immediate index indexed initially inner instead intersect isnull key left limit match natural no notnull null of offset outer plan pragma primary 
query raise recursive references regexp reindex release rename replace restrict right rollback row savepoint temp temporary then to transaction trigger unique using vacuum view virtual when with without"), + // SQLite is weakly typed, ref: http://sqlite.org/datatype3.html. This is just a list of some common types. + builtin: set("bool boolean bit blob decimal double float long longblob longtext medium mediumblob mediumint mediumtext time timestamp tinyblob tinyint tinytext text clob bigint int int2 int8 integer float double char varchar date datetime year unsigned signed numeric real"), + // ref: http://sqlite.org/syntax/literal-value.html + atoms: set("null current_date current_time current_timestamp"), + // ref: http://sqlite.org/lang_expr.html#binaryops + operatorChars: /^[*+\-%<>!=&|/~]/, + // SQLite is weakly typed, ref: http://sqlite.org/datatype3.html. This is just a list of some common types. + dateSQL: set("date time timestamp datetime"), + support: set("decimallessFloat zerolessFloat"), + identifierQuote: "\"", //ref: http://sqlite.org/lang_keywords.html + hooks: { + // bind-parameters ref:http://sqlite.org/lang_expr.html#varparam + "@": hookVar, + ":": hookVar, + "?": hookVar, + "$": hookVar, + // The preferred way to escape Identifiers is using double quotes, ref: http://sqlite.org/lang_keywords.html + "\"": hookIdentifierDoublequote, + // there is also support for backtics, ref: http://sqlite.org/lang_keywords.html + "`": hookIdentifier + } + }); + // the query language used by Apache Cassandra is called CQL, but this mime type // is called Cassandra to avoid confusion with Contextual Query Language CodeMirror.defineMIME("text/x-cassandra", { @@ -341,8 +386,8 @@ CodeMirror.defineMode("sql", function(co name: "sql", client: set("appinfo arraysize autocommit autoprint autorecovery autotrace blockterminator break btitle cmdsep colsep compatibility compute concat copycommit copytypecheck define describe echo editfile embedded escape exec execute feedback flagger flush heading headsep instance linesize lno loboffset logsource long longchunksize markup native newpage numformat numwidth pagesize pause pno recsep recsepchar release repfooter repheader serveroutput shiftinout show showmode size spool sqlblanklines sqlcase sqlcode sqlcontinue sqlnumber sqlpluscompatibility sqlprefix sqlprompt sqlterminator suffix tab term termout time timing trimout trimspool ttitle underline verify version wrap"), keywords: set("abort accept access add all alter and any array arraylen as asc assert assign at attributes audit authorization avg base_table begin between binary_integer body boolean by case cast char char_base check close cluster clusters colauth column comment commit compress connect connected constant constraint crash create current currval cursor data_base database date dba deallocate debugoff debugon decimal declare default definition delay delete desc digits dispose distinct do drop else elseif elsif enable end entry escape exception exception_init exchange exclusive exists exit external fast fetch file for force form from function generic goto grant group having identified if immediate in increment index indexes indicator initial initrans insert interface intersect into is key level library like limited local lock log logging long loop master maxextents maxtrans member minextents minus mislabel mode modify multiset new next no noaudit nocompress nologging noparallel not nowait number_base object of off offline on online only open option or order out package parallel partition pctfree 
pctincrease pctused pls_integer positive positiven pragma primary prior private privileges procedure public raise range raw read rebuild record ref references refresh release rename replace resource restrict return returning returns reverse revoke rollback row rowid rowlabel rownum rows run savepoint schema segment select separate session set share snapshot some space split sql start statement storage subtype successful synonym tabauth table tables tablespace task terminate then to trigger truncate type union unique unlimited unrecoverable unusable update use using validate value values variable view views when whenever where while with work"), - builtin: set("abs acos add_months ascii asin atan atan2 average bfile bfilename bigserial bit blob ceil character chartorowid chr clob concat convert cos cosh count dec decode deref dual dump dup_val_on_index empty error exp false float floor found glb greatest hextoraw initcap instr instrb int integer isopen last_day least lenght lenghtb ln lower lpad ltrim lub make_ref max min mlslabel mod months_between natural naturaln nchar nclob new_time next_day nextval nls_charset_decl_len nls_charset_id nls_charset_name nls_initcap nls_lower nls_sort nls_upper nlssort no_data_found notfound null number numeric nvarchar2 nvl others power rawtohex real reftohex round rowcount rowidtochar rowtype rpad rtrim serial sign signtype sin sinh smallint soundex sqlcode sqlerrm sqrt stddev string substr substrb sum sysdate tan tanh to_char text to_date to_label to_multi_byte to_number to_single_byte translate true trunc uid unlogged upper user userenv varchar varchar2 variance varying vsize xml"), - operatorChars: /^[*+\-%<>!=~]/, + builtin: set("abs acos add_months ascii asin atan atan2 average bfile bfilename bigserial bit blob ceil character chartorowid chr clob concat convert cos cosh count dec decode deref dual dump dup_val_on_index empty error exp false float floor found glb greatest hextoraw initcap instr instrb int integer isopen last_day least length lengthb ln lower lpad ltrim lub make_ref max min mlslabel mod months_between natural naturaln nchar nclob new_time next_day nextval nls_charset_decl_len nls_charset_id nls_charset_name nls_initcap nls_lower nls_sort nls_upper nlssort no_data_found notfound null number numeric nvarchar2 nvl others power rawtohex real reftohex round rowcount rowidtochar rowtype rpad rtrim serial sign signtype sin sinh smallint soundex sqlcode sqlerrm sqrt stddev string substr substrb sum sysdate tan tanh to_char text to_date to_label to_multi_byte to_number to_single_byte translate true trunc uid unlogged upper user userenv varchar varchar2 variance varying vsize xml"), + operatorChars: /^[*\/+\-%<>!=~]/, dateSQL: set("date time timestamp"), support: set("doubleQuote nCharCast zerolessFloat binaryNumber hexNumber") }); @@ -350,15 +395,73 @@ CodeMirror.defineMode("sql", function(co // Created to support specific hive keywords CodeMirror.defineMIME("text/x-hive", { name: "sql", - keywords: set("select alter $elem$ $key$ $value$ add after all analyze and archive as asc before between binary both bucket buckets by cascade case cast change cluster clustered clusterstatus collection column columns comment compute concatenate continue create cross cursor data database databases dbproperties deferred delete delimited desc describe directory disable distinct distribute drop else enable end escaped exclusive exists explain export extended external false fetch fields fileformat first format formatted from full function functions grant group 
having hold_ddltime idxproperties if import in index indexes inpath inputdriver inputformat insert intersect into is items join keys lateral left like limit lines load local location lock locks mapjoin materialized minus msck no_drop nocompress not of offline on option or order out outer outputdriver outputformat overwrite partition partitioned partitions percent plus preserve procedure purge range rcfile read readonly reads rebuild recordreader recordwriter recover reduce regexp rename repair replace restrict revoke right rlike row schema schemas semi sequencefile serde serdeproperties set shared show show_database sort sorted ssl statistics stored streamtable table tables tablesample tblproperties temporary terminated textfile then tmp to touch transform trigger true unarchive undo union uniquejoin unlock update use using utc utc_tmestamp view when where while with"), - builtin: set("bool boolean long timestamp tinyint smallint bigint int float double date datetime unsigned string array struct map uniontype"), + keywords: set("select alter $elem$ $key$ $value$ add after all analyze and archive as asc before between binary both bucket buckets by cascade case cast change cluster clustered clusterstatus collection column columns comment compute concatenate continue create cross cursor data database databases dbproperties deferred delete delimited desc describe directory disable distinct distribute drop else enable end escaped exclusive exists explain export extended external fetch fields fileformat first format formatted from full function functions grant group having hold_ddltime idxproperties if import in index indexes inpath inputdriver inputformat insert intersect into is items join keys lateral left like limit lines load local location lock locks mapjoin materialized minus msck no_drop nocompress not of offline on option or order out outer outputdriver outputformat overwrite partition partitioned partitions percent plus preserve procedure purge range rcfile read readonly reads rebuild recordreader recordwriter recover reduce regexp rename repair replace restrict revoke right rlike row schema schemas semi sequencefile serde serdeproperties set shared show show_database sort sorted ssl statistics stored streamtable table tables tablesample tblproperties temporary terminated textfile then tmp to touch transform trigger unarchive undo union uniquejoin unlock update use using utc utc_tmestamp view when where while with admin authorization char compact compactions conf cube current current_date current_timestamp day decimal defined dependency directories elem_type exchange file following for grouping hour ignore inner interval jar less logical macro minute month more none noscan over owner partialscan preceding pretty principals protection reload rewrite role roles rollup rows second server sets skewed transactions truncate unbounded unset uri user values window year"), + builtin: set("bool boolean long timestamp tinyint smallint bigint int float double date datetime unsigned string array struct map uniontype key_type utctimestamp value_type varchar"), atoms: set("false true null unknown"), operatorChars: /^[*+\-%<>!=]/, dateSQL: set("date timestamp"), support: set("ODBCdotTable doubleQuote binaryNumber hexNumber") }); -}()); + + CodeMirror.defineMIME("text/x-pgsql", { + name: "sql", + client: set("source"), + // For PostgreSQL - https://www.postgresql.org/docs/11/sql-keywords-appendix.html + // For pl/pgsql lang - 
https://github.com/postgres/postgres/blob/REL_11_2/src/pl/plpgsql/src/pl_scanner.c + keywords: set(sqlKeywords + "a abort abs absent absolute access according action ada add admin after aggregate alias all allocate also alter always analyse analyze and any are array array_agg array_max_cardinality as asc asensitive assert assertion assignment asymmetric at atomic attach attribute attributes authorization avg backward base64 before begin begin_frame begin_partition bernoulli between bigint binary bit bit_length blob blocked bom boolean both breadth by c cache call called cardinality cascade cascaded case cast catalog catalog_name ceil ceiling chain char char_length character character_length character_set_catalog character_set_name character_set_schema characteristics characters check checkpoint class class_origin clob close cluster coalesce cobol collate collation collation_catalog collation_name collation_schema collect column column_name columns command_function command_function_code comment comments commit committed concurrently condition condition_number configuration conflict connect connection connection_name constant constraint constraint_catalog constraint_name constraint_schema constraints constructor contains content continue control conversion convert copy corr corresponding cost count covar_pop covar_samp create cross csv cube cume_dist current current_catalog current_date current_default_transform_group current_path current_role current_row current_schema current_time current_timestamp current_transform_group_for_type current_user cursor cursor_name cycle data database datalink datatype date datetime_interval_code datetime_interval_precision day db deallocate debug dec decimal declare default defaults deferrable deferred defined definer degree delete delimiter delimiters dense_rank depends depth deref derived desc describe descriptor detach detail deterministic diagnostics dictionary disable discard disconnect dispatch distinct dlnewcopy dlpreviouscopy dlurlcomplete dlurlcompleteonly dlurlcompletewrite dlurlpath dlurlpathonly dlurlpathwrite dlurlscheme dlurlserver dlvalue do document domain double drop dump dynamic dynamic_function dynamic_function_code each element else elseif elsif empty enable encoding encrypted end end_frame end_partition endexec enforced enum equals errcode error escape event every except exception exclude excluding exclusive exec execute exists exit exp explain expression extension external extract false family fetch file filter final first first_value flag float floor following for force foreach foreign fortran forward found frame_row free freeze from fs full function functions fusion g general generated get global go goto grant granted greatest group grouping groups handler having header hex hierarchy hint hold hour id identity if ignore ilike immediate immediately immutable implementation implicit import in include including increment indent index indexes indicator info inherit inherits initially inline inner inout input insensitive insert instance instantiable instead int integer integrity intersect intersection interval into invoker is isnull isolation join k key key_member key_type label lag language large last last_value lateral lead leading leakproof least left length level library like like_regex limit link listen ln load local localtime localtimestamp location locator lock locked log logged loop lower m map mapping match matched materialized max max_cardinality maxvalue member merge message message_length message_octet_length message_text 
method min minute minvalue mod mode modifies module month more move multiset mumps name names namespace national natural nchar nclob nesting new next nfc nfd nfkc nfkd nil no none normalize normalized not nothing notice notify notnull nowait nth_value ntile null nullable nullif nulls number numeric object occurrences_regex octet_length octets of off offset oids old on only open operator option options or order ordering ordinality others out outer output over overlaps overlay overriding owned owner p pad parallel parameter parameter_mode parameter_name parameter_ordinal_position parameter_specific_catalog parameter_specific_name parameter_specific_schema parser partial partition pascal passing passthrough password path percent percent_rank percentile_cont percentile_disc perform period permission pg_context pg_datatype_name pg_exception_context pg_exception_detail pg_exception_hint placing plans pli policy portion position position_regex power precedes preceding precision prepare prepared preserve primary print_strict_params prior privileges procedural procedure procedures program public publication query quote raise range rank read reads real reassign recheck recovery recursive ref references referencing refresh regr_avgx regr_avgy regr_count regr_intercept regr_r2 regr_slope regr_sxx regr_sxy regr_syy reindex relative release rename repeatable replace replica requiring reset respect restart restore restrict result result_oid return returned_cardinality returned_length returned_octet_length returned_sqlstate returning returns reverse revoke right role rollback rollup routine routine_catalog routine_name routine_schema routines row row_count row_number rows rowtype rule savepoint scale schema schema_name schemas scope scope_catalog scope_name scope_schema scroll search second section security select selective self sensitive sequence sequences serializable server server_name session session_user set setof sets share show similar simple size skip slice smallint snapshot some source space specific specific_name specifictype sql sqlcode sqlerror sqlexception sqlstate sqlwarning sqrt stable stacked standalone start state statement static statistics stddev_pop stddev_samp stdin stdout storage strict strip structure style subclass_origin submultiset subscription substring substring_regex succeeds sum symmetric sysid system system_time system_user t table table_name tables tablesample tablespace temp template temporary text then ties time timestamp timezone_hour timezone_minute to token top_level_count trailing transaction transaction_active transactions_committed transactions_rolled_back transform transforms translate translate_regex translation treat trigger trigger_catalog trigger_name trigger_schema trim trim_array true truncate trusted type types uescape unbounded uncommitted under unencrypted union unique unknown unlink unlisten unlogged unnamed unnest until untyped update upper uri usage use_column use_variable user user_defined_type_catalog user_defined_type_code user_defined_type_name user_defined_type_schema using vacuum valid validate validator value value_of values var_pop var_samp varbinary varchar variable_conflict variadic varying verbose version versioning view views volatile warning when whenever where while whitespace width_bucket window with within without work wrapper write xml xmlagg xmlattributes xmlbinary xmlcast xmlcomment xmlconcat xmldeclaration xmldocument xmlelement xmlexists xmlforest xmliterate xmlnamespaces xmlparse xmlpi xmlquery xmlroot xmlschema xmlserialize 
xmltable xmltext xmlvalidate year yes zone"), + // https://www.postgresql.org/docs/11/datatype.html + builtin: set("bigint int8 bigserial serial8 bit varying varbit boolean bool box bytea character char varchar cidr circle date double precision float8 inet integer int int4 interval json jsonb line lseg macaddr macaddr8 money numeric decimal path pg_lsn point polygon real float4 smallint int2 smallserial serial2 serial serial4 text time without zone with timetz timestamp timestamptz tsquery tsvector txid_snapshot uuid xml"), + atoms: set("false true null unknown"), + operatorChars: /^[*\/+\-%<>!=&|^\/#@?~]/, + dateSQL: set("date time timestamp"), + support: set("ODBCdotTable decimallessFloat zerolessFloat binaryNumber hexNumber nCharCast charsetCast") + }); + + // Google's SQL-like query language, GQL + CodeMirror.defineMIME("text/x-gql", { + name: "sql", + keywords: set("ancestor and asc by contains desc descendant distinct from group has in is limit offset on order select superset where"), + atoms: set("false true"), + builtin: set("blob datetime first key __key__ string integer double boolean null"), + operatorChars: /^[*+\-%<>!=]/ + }); + // Greenplum + CodeMirror.defineMIME("text/x-gpsql", { + name: "sql", + client: set("source"), + //https://github.com/greenplum-db/gpdb/blob/master/src/include/parser/kwlist.h + keywords: set("abort absolute access action active add admin after aggregate all also alter always analyse analyze and any array as asc assertion assignment asymmetric at authorization backward before begin between bigint binary bit boolean both by cache called cascade cascaded case cast chain char character characteristics check checkpoint class close cluster coalesce codegen collate column comment commit committed concurrency concurrently configuration connection constraint constraints contains content continue conversion copy cost cpu_rate_limit create createdb createexttable createrole createuser cross csv cube current current_catalog current_date current_role current_schema current_time current_timestamp current_user cursor cycle data database day deallocate dec decimal declare decode default defaults deferrable deferred definer delete delimiter delimiters deny desc dictionary disable discard distinct distributed do document domain double drop dxl each else enable encoding encrypted end enum errors escape every except exchange exclude excluding exclusive execute exists explain extension external extract false family fetch fields filespace fill filter first float following for force foreign format forward freeze from full function global grant granted greatest group group_id grouping handler hash having header hold host hour identity if ignore ilike immediate immutable implicit in including inclusive increment index indexes inherit inherits initially inline inner inout input insensitive insert instead int integer intersect interval into invoker is isnull isolation join key language large last leading least left level like limit list listen load local localtime localtimestamp location lock log login mapping master match maxvalue median merge minute minvalue missing mode modifies modify month move name names national natural nchar new newline next no nocreatedb nocreateexttable nocreaterole nocreateuser noinherit nologin none noovercommit nosuperuser not nothing notify notnull nowait null nullif nulls numeric object of off offset oids old on only operator option options or order ordered others out outer over overcommit overlaps overlay owned owner parser partial partition 
partitions passing password percent percentile_cont percentile_disc placing plans position preceding precision prepare prepared preserve primary prior privileges procedural procedure protocol queue quote randomly range read readable reads real reassign recheck recursive ref references reindex reject relative release rename repeatable replace replica reset resource restart restrict returning returns revoke right role rollback rollup rootpartition row rows rule savepoint scatter schema scroll search second security segment select sequence serializable session session_user set setof sets share show similar simple smallint some split sql stable standalone start statement statistics stdin stdout storage strict strip subpartition subpartitions substring superuser symmetric sysid system table tablespace temp template temporary text then threshold ties time timestamp to trailing transaction treat trigger trim true truncate trusted type unbounded uncommitted unencrypted union unique unknown unlisten until update user using vacuum valid validation validator value values varchar variadic varying verbose version view volatile web when where whitespace window with within without work writable write xml xmlattributes xmlconcat xmlelement xmlexists xmlforest xmlparse xmlpi xmlroot xmlserialize year yes zone"), + builtin: set("bigint int8 bigserial serial8 bit varying varbit boolean bool box bytea character char varchar cidr circle date double precision float float8 inet integer int int4 interval json jsonb line lseg macaddr macaddr8 money numeric decimal path pg_lsn point polygon real float4 smallint int2 smallserial serial2 serial serial4 text time without zone with timetz timestamp timestamptz tsquery tsvector txid_snapshot uuid xml"), + atoms: set("false true null unknown"), + operatorChars: /^[*+\-%<>!=&|^\/#@?~]/, + dateSQL: set("date time timestamp"), + support: set("ODBCdotTable decimallessFloat zerolessFloat binaryNumber hexNumber nCharCast charsetCast") + }); + + // Spark SQL + CodeMirror.defineMIME("text/x-sparksql", { + name: "sql", + keywords: set("add after all alter analyze and anti archive array as asc at between bucket buckets by cache cascade case cast change clear cluster clustered codegen collection column columns comment commit compact compactions compute concatenate cost create cross cube current current_date current_timestamp database databases datata dbproperties defined delete delimited deny desc describe dfs directories distinct distribute drop else end escaped except exchange exists explain export extended external false fields fileformat first following for format formatted from full function functions global grant group grouping having if ignore import in index indexes inner inpath inputformat insert intersect interval into is items join keys last lateral lazy left like limit lines list load local location lock locks logical macro map minus msck natural no not null nulls of on optimize option options or order out outer outputformat over overwrite partition partitioned partitions percent preceding principals purge range recordreader recordwriter recover reduce refresh regexp rename repair replace reset restrict revoke right rlike role roles rollback rollup row rows schema schemas select semi separated serde serdeproperties set sets show skewed sort sorted start statistics stored stratify struct table tables tablesample tblproperties temp temporary terminated then to touch transaction transactions transform true truncate unarchive unbounded uncache union unlock unset use using 
values view when where window with"), + builtin: set("tinyint smallint int bigint boolean float double string binary timestamp decimal array map struct uniontype delimited serde sequencefile textfile rcfile inputformat outputformat"), + atoms: set("false true null"), + operatorChars: /^[*\/+\-%<>!=~&|^]/, + dateSQL: set("date time timestamp"), + support: set("ODBCdotTable doubleQuote zerolessFloat") + }); + + // Esper + CodeMirror.defineMIME("text/x-esper", { + name: "sql", + client: set("source"), + // http://www.espertech.com/esper/release-5.5.0/esper-reference/html/appendix_keywords.html + keywords: set("alter and as asc between by count create delete desc distinct drop from group having in insert into is join like not on or order select set table union update values where limit after all and as at asc avedev avg between by case cast coalesce count create current_timestamp day days delete define desc distinct else end escape events every exists false first from full group having hour hours in inner insert instanceof into irstream is istream join last lastweekday left limit like max match_recognize matches median measures metadatasql min minute minutes msec millisecond milliseconds not null offset on or order outer output partition pattern prev prior regexp retain-union retain-intersection right rstream sec second seconds select set some snapshot sql stddev sum then true unidirectional until update variable weekday when where window"), + builtin: {}, + atoms: set("false true null"), + operatorChars: /^[*+\-%<>!=&|^\/#@?~]/, + dateSQL: set("time"), + support: set("decimallessFloat zerolessFloat binaryNumber hexNumber") + }); }); /* diff --git a/rhodecode/public/js/mode/stex/stex.js b/rhodecode/public/js/mode/stex/stex.js --- a/rhodecode/public/js/mode/stex/stex.js +++ b/rhodecode/public/js/mode/stex/stex.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE /* * Author: Constantin Jucovschi (c.jucovschi@jacobs-university.de) @@ -16,7 +16,7 @@ })(function(CodeMirror) { "use strict"; - CodeMirror.defineMode("stex", function() { + CodeMirror.defineMode("stex", function(_config, parserConfig) { "use strict"; function pushCommand(state, command) { @@ -78,6 +78,14 @@ plugins["begin"] = addPluginPattern("begin", "tag", ["atom"]); plugins["end"] = addPluginPattern("end", "tag", ["atom"]); + plugins["label" ] = addPluginPattern("label" , "tag", ["atom"]); + plugins["ref" ] = addPluginPattern("ref" , "tag", ["atom"]); + plugins["eqref" ] = addPluginPattern("eqref" , "tag", ["atom"]); + plugins["cite" ] = addPluginPattern("cite" , "tag", ["atom"]); + plugins["bibitem" ] = addPluginPattern("bibitem" , "tag", ["atom"]); + plugins["Bibitem" ] = addPluginPattern("Bibitem" , "tag", ["atom"]); + plugins["RBibitem" ] = addPluginPattern("RBibitem" , "tag", ["atom"]); + plugins["DEFAULT"] = function () { this.name = "DEFAULT"; this.style = "tag"; @@ -117,6 +125,10 @@ setState(state, function(source, state){ return inMathMode(source, state, "\\]"); }); return "keyword"; } + if (source.match("\\(")) { + setState(state, function(source, state){ return inMathMode(source, state, "\\)"); }); + return "keyword"; + } if (source.match("$$")) { setState(state, function(source, state){ return inMathMode(source, state, "$$"); }); return "keyword"; @@ -161,7 +173,7 @@ if (source.eatSpace()) { return null; } - if (source.match(endModeSeq)) { + if (endModeSeq && 
source.match(endModeSeq)) { setState(state, normal); return "keyword"; } @@ -223,9 +235,10 @@ return { startState: function() { + var f = parserConfig.inMathMode ? function(source, state){ return inMathMode(source, state); } : normal; return { cmdState: [], - f: normal + f: f }; }, copyState: function(s) { diff --git a/rhodecode/public/js/mode/stylus/stylus.js b/rhodecode/public/js/mode/stylus/stylus.js --- a/rhodecode/public/js/mode/stylus/stylus.js +++ b/rhodecode/public/js/mode/stylus/stylus.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE // Stylus mode created by Dmitry Kiselyov http://git.io/AaRB @@ -15,6 +15,7 @@ CodeMirror.defineMode("stylus", function(config) { var indentUnit = config.indentUnit, + indentUnitString = '', tagKeywords = keySet(tagKeywords_), tagVariablesRegexp = /^(a|b|i|s|col|em)$/i, propertyKeywords = keySet(propertyKeywords_), @@ -38,6 +39,8 @@ type, override; + while (indentUnitString.length < indentUnit) indentUnitString += ' '; + /** * Tokenizers */ @@ -73,7 +76,7 @@ if (ch == "#") { stream.next(); // Hex color - if (stream.match(/^[0-9a-f]{6}|[0-9a-f]{3}/i)) { + if (stream.match(/^[0-9a-f]{3}([0-9a-f]([0-9a-f]{2}){0,2})?\b/i)) { return ["atom", "atom"]; } // ID selector @@ -313,7 +316,7 @@ return pushContext(state, stream, "block", 0); } } - if (typeIsBlock(type, stream, state)) { + if (typeIsBlock(type, stream)) { return pushContext(state, stream, "block"); } if (type == "}" && endOfLine(stream)) { @@ -513,7 +516,7 @@ */ states.atBlock = function(type, stream, state) { if (type == "(") return pushContext(state, stream, "atBlock_parens"); - if (typeIsBlock(type, stream, state)) { + if (typeIsBlock(type, stream)) { return pushContext(state, stream, "block"); } if (typeIsInterpolation(type, stream)) { @@ -672,7 +675,7 @@ ch = textAfter && textAfter.charAt(0), indent = cx.indent, lineFirstWord = firstWordOfLine(textAfter), - lineIndent = line.length - line.replace(/^\s*/, "").length, + lineIndent = line.match(/^\s*/)[0].replace(/\t/g, indentUnitString).length, prevLineFirstWord = state.context.prev ? state.context.prev.line.firstWord : "", prevLineIndent = state.context.prev ? 
state.context.prev.line.indent : lineIndent; @@ -681,7 +684,6 @@ ch == ")" && (cx.type == "parens" || cx.type == "atBlock_parens") || ch == "{" && (cx.type == "at"))) { indent = cx.indent - indentUnit; - cx = cx.prev; } else if (!(/(\})/.test(ch))) { if (/@|\$|\d/.test(ch) || /^\{/.test(textAfter) || @@ -732,11 +734,11 @@ var documentTypes_ = ["domain", "regexp", "url", "url-prefix"]; var mediaTypes_ = ["all","aural","braille","handheld","print","projection","screen","tty","tv","embossed"]; var mediaFeatures_ = ["width","min-width","max-width","height","min-height","max-height","device-width","min-device-width","max-device-width","device-height","min-device-height","max-device-height","aspect-ratio","min-aspect-ratio","max-aspect-ratio","device-aspect-ratio","min-device-aspect-ratio","max-device-aspect-ratio","color","min-color","max-color","color-index","min-color-index","max-color-index","monochrome","min-monochrome","max-monochrome","resolution","min-resolution","max-resolution","scan","grid"]; - var propertyKeywords_ = ["align-content","align-items","align-self","alignment-adjust","alignment-baseline","anchor-point","animation","animation-delay","animation-direction","animation-duration","animation-fill-mode","animation-iteration-count","animation-name","animation-play-state","animation-timing-function","appearance","azimuth","backface-visibility","background","background-attachment","background-clip","background-color","background-image","background-origin","background-position","background-repeat","background-size","baseline-shift","binding","bleed","bookmark-label","bookmark-level","bookmark-state","bookmark-target","border","border-bottom","border-bottom-color","border-bottom-left-radius","border-bottom-right-radius","border-bottom-style","border-bottom-width","border-collapse","border-color","border-image","border-image-outset","border-image-repeat","border-image-slice","border-image-source","border-image-width","border-left","border-left-color","border-left-style","border-left-width","border-radius","border-right","border-right-color","border-right-style","border-right-width","border-spacing","border-style","border-top","border-top-color","border-top-left-radius","border-top-right-radius","border-top-style","border-top-width","border-width","bottom","box-decoration-break","box-shadow","box-sizing","break-after","break-before","break-inside","caption-side","clear","clip","color","color-profile","column-count","column-fill","column-gap","column-rule","column-rule-color","column-rule-style","column-rule-width","column-span","column-width","columns","content","counter-increment","counter-reset","crop","cue","cue-after","cue-before","cursor","direction","display","dominant-baseline","drop-initial-after-adjust","drop-initial-after-align","drop-initial-before-adjust","drop-initial-before-align","drop-initial-size","drop-initial-value","elevation","empty-cells","fit","fit-position","flex","flex-basis","flex-direction","flex-flow","flex-grow","flex-shrink","flex-wrap","float","float-offset","flow-from","flow-into","font","font-feature-settings","font-family","font-kerning","font-language-override","font-size","font-size-adjust","font-stretch","font-style","font-synthesis","font-variant","font-variant-alternates","font-variant-caps","font-variant-east-asian","font-variant-ligatures","font-variant-numeric","font-variant-position","font-weight","grid","grid-area","grid-auto-columns","grid-auto-flow","grid-auto-position","grid-auto-rows","grid-column","grid-column-end","grid-column-start","gri
d-row","grid-row-end","grid-row-start","grid-template","grid-template-areas","grid-template-columns","grid-template-rows","hanging-punctuation","height","hyphens","icon","image-orientation","image-rendering","image-resolution","inline-box-align","justify-content","left","letter-spacing","line-break","line-height","line-stacking","line-stacking-ruby","line-stacking-shift","line-stacking-strategy","list-style","list-style-image","list-style-position","list-style-type","margin","margin-bottom","margin-left","margin-right","margin-top","marker-offset","marks","marquee-direction","marquee-loop","marquee-play-count","marquee-speed","marquee-style","max-height","max-width","min-height","min-width","move-to","nav-down","nav-index","nav-left","nav-right","nav-up","object-fit","object-position","opacity","order","orphans","outline","outline-color","outline-offset","outline-style","outline-width","overflow","overflow-style","overflow-wrap","overflow-x","overflow-y","padding","padding-bottom","padding-left","padding-right","padding-top","page","page-break-after","page-break-before","page-break-inside","page-policy","pause","pause-after","pause-before","perspective","perspective-origin","pitch","pitch-range","play-during","position","presentation-level","punctuation-trim","quotes","region-break-after","region-break-before","region-break-inside","region-fragment","rendering-intent","resize","rest","rest-after","rest-before","richness","right","rotation","rotation-point","ruby-align","ruby-overhang","ruby-position","ruby-span","shape-image-threshold","shape-inside","shape-margin","shape-outside","size","speak","speak-as","speak-header","speak-numeral","speak-punctuation","speech-rate","stress","string-set","tab-size","table-layout","target","target-name","target-new","target-position","text-align","text-align-last","text-decoration","text-decoration-color","text-decoration-line","text-decoration-skip","text-decoration-style","text-emphasis","text-emphasis-color","text-emphasis-position","text-emphasis-style","text-height","text-indent","text-justify","text-outline","text-overflow","text-shadow","text-size-adjust","text-space-collapse","text-transform","text-underline-position","text-wrap","top","transform","transform-origin","transform-style","transition","transition-delay","transition-duration","transition-property","transition-timing-function","unicode-bidi","vertical-align","visibility","voice-balance","voice-duration","voice-family","voice-pitch","voice-range","voice-rate","voice-stress","voice-volume","volume","white-space","widows","width","word-break","word-spacing","word-wrap","z-index","clip-path","clip-rule","mask","enable-background","filter","flood-color","flood-opacity","lighting-color","stop-color","stop-opacity","pointer-events","color-interpolation","color-interpolation-filters","color-rendering","fill","fill-opacity","fill-rule","image-rendering","marker","marker-end","marker-mid","marker-start","shape-rendering","stroke","stroke-dasharray","stroke-dashoffset","stroke-linecap","stroke-linejoin","stroke-miterlimit","stroke-opacity","stroke-width","text-rendering","baseline-shift","dominant-baseline","glyph-orientation-horizontal","glyph-orientation-vertical","text-anchor","writing-mode","font-smoothing","osx-font-smoothing"]; + var propertyKeywords_ = 
["align-content","align-items","align-self","alignment-adjust","alignment-baseline","anchor-point","animation","animation-delay","animation-direction","animation-duration","animation-fill-mode","animation-iteration-count","animation-name","animation-play-state","animation-timing-function","appearance","azimuth","backface-visibility","background","background-attachment","background-clip","background-color","background-image","background-origin","background-position","background-repeat","background-size","baseline-shift","binding","bleed","bookmark-label","bookmark-level","bookmark-state","bookmark-target","border","border-bottom","border-bottom-color","border-bottom-left-radius","border-bottom-right-radius","border-bottom-style","border-bottom-width","border-collapse","border-color","border-image","border-image-outset","border-image-repeat","border-image-slice","border-image-source","border-image-width","border-left","border-left-color","border-left-style","border-left-width","border-radius","border-right","border-right-color","border-right-style","border-right-width","border-spacing","border-style","border-top","border-top-color","border-top-left-radius","border-top-right-radius","border-top-style","border-top-width","border-width","bottom","box-decoration-break","box-shadow","box-sizing","break-after","break-before","break-inside","caption-side","clear","clip","color","color-profile","column-count","column-fill","column-gap","column-rule","column-rule-color","column-rule-style","column-rule-width","column-span","column-width","columns","content","counter-increment","counter-reset","crop","cue","cue-after","cue-before","cursor","direction","display","dominant-baseline","drop-initial-after-adjust","drop-initial-after-align","drop-initial-before-adjust","drop-initial-before-align","drop-initial-size","drop-initial-value","elevation","empty-cells","fit","fit-position","flex","flex-basis","flex-direction","flex-flow","flex-grow","flex-shrink","flex-wrap","float","float-offset","flow-from","flow-into","font","font-feature-settings","font-family","font-kerning","font-language-override","font-size","font-size-adjust","font-stretch","font-style","font-synthesis","font-variant","font-variant-alternates","font-variant-caps","font-variant-east-asian","font-variant-ligatures","font-variant-numeric","font-variant-position","font-weight","grid","grid-area","grid-auto-columns","grid-auto-flow","grid-auto-position","grid-auto-rows","grid-column","grid-column-end","grid-column-start","grid-row","grid-row-end","grid-row-start","grid-template","grid-template-areas","grid-template-columns","grid-template-rows","hanging-punctuation","height","hyphens","icon","image-orientation","image-rendering","image-resolution","inline-box-align","justify-content","left","letter-spacing","line-break","line-height","line-stacking","line-stacking-ruby","line-stacking-shift","line-stacking-strategy","list-style","list-style-image","list-style-position","list-style-type","margin","margin-bottom","margin-left","margin-right","margin-top","marker-offset","marks","marquee-direction","marquee-loop","marquee-play-count","marquee-speed","marquee-style","max-height","max-width","min-height","min-width","move-to","nav-down","nav-index","nav-left","nav-right","nav-up","object-fit","object-position","opacity","order","orphans","outline","outline-color","outline-offset","outline-style","outline-width","overflow","overflow-style","overflow-wrap","overflow-x","overflow-y","padding","padding-bottom","padding-left","padding-right","padding-top
","page","page-break-after","page-break-before","page-break-inside","page-policy","pause","pause-after","pause-before","perspective","perspective-origin","pitch","pitch-range","play-during","position","presentation-level","punctuation-trim","quotes","region-break-after","region-break-before","region-break-inside","region-fragment","rendering-intent","resize","rest","rest-after","rest-before","richness","right","rotation","rotation-point","ruby-align","ruby-overhang","ruby-position","ruby-span","shape-image-threshold","shape-inside","shape-margin","shape-outside","size","speak","speak-as","speak-header","speak-numeral","speak-punctuation","speech-rate","stress","string-set","tab-size","table-layout","target","target-name","target-new","target-position","text-align","text-align-last","text-decoration","text-decoration-color","text-decoration-line","text-decoration-skip","text-decoration-style","text-emphasis","text-emphasis-color","text-emphasis-position","text-emphasis-style","text-height","text-indent","text-justify","text-outline","text-overflow","text-shadow","text-size-adjust","text-space-collapse","text-transform","text-underline-position","text-wrap","top","transform","transform-origin","transform-style","transition","transition-delay","transition-duration","transition-property","transition-timing-function","unicode-bidi","vertical-align","visibility","voice-balance","voice-duration","voice-family","voice-pitch","voice-range","voice-rate","voice-stress","voice-volume","volume","white-space","widows","width","will-change","word-break","word-spacing","word-wrap","z-index","clip-path","clip-rule","mask","enable-background","filter","flood-color","flood-opacity","lighting-color","stop-color","stop-opacity","pointer-events","color-interpolation","color-interpolation-filters","color-rendering","fill","fill-opacity","fill-rule","image-rendering","marker","marker-end","marker-mid","marker-start","shape-rendering","stroke","stroke-dasharray","stroke-dashoffset","stroke-linecap","stroke-linejoin","stroke-miterlimit","stroke-opacity","stroke-width","text-rendering","baseline-shift","dominant-baseline","glyph-orientation-horizontal","glyph-orientation-vertical","text-anchor","writing-mode","font-smoothing","osx-font-smoothing"]; var nonStandardPropertyKeywords_ = ["scrollbar-arrow-color","scrollbar-base-color","scrollbar-dark-shadow-color","scrollbar-face-color","scrollbar-highlight-color","scrollbar-shadow-color","scrollbar-3d-light-color","scrollbar-track-color","shape-inside","searchfield-cancel-button","searchfield-decoration","searchfield-results-button","searchfield-results-decoration","zoom"]; var fontProperties_ = ["font-family","src","unicode-range","font-variant","font-feature-settings","font-stretch","font-weight","font-style"]; var colorKeywords_ = 
["aliceblue","antiquewhite","aqua","aquamarine","azure","beige","bisque","black","blanchedalmond","blue","blueviolet","brown","burlywood","cadetblue","chartreuse","chocolate","coral","cornflowerblue","cornsilk","crimson","cyan","darkblue","darkcyan","darkgoldenrod","darkgray","darkgreen","darkkhaki","darkmagenta","darkolivegreen","darkorange","darkorchid","darkred","darksalmon","darkseagreen","darkslateblue","darkslategray","darkturquoise","darkviolet","deeppink","deepskyblue","dimgray","dodgerblue","firebrick","floralwhite","forestgreen","fuchsia","gainsboro","ghostwhite","gold","goldenrod","gray","grey","green","greenyellow","honeydew","hotpink","indianred","indigo","ivory","khaki","lavender","lavenderblush","lawngreen","lemonchiffon","lightblue","lightcoral","lightcyan","lightgoldenrodyellow","lightgray","lightgreen","lightpink","lightsalmon","lightseagreen","lightskyblue","lightslategray","lightsteelblue","lightyellow","lime","limegreen","linen","magenta","maroon","mediumaquamarine","mediumblue","mediumorchid","mediumpurple","mediumseagreen","mediumslateblue","mediumspringgreen","mediumturquoise","mediumvioletred","midnightblue","mintcream","mistyrose","moccasin","navajowhite","navy","oldlace","olive","olivedrab","orange","orangered","orchid","palegoldenrod","palegreen","paleturquoise","palevioletred","papayawhip","peachpuff","peru","pink","plum","powderblue","purple","rebeccapurple","red","rosybrown","royalblue","saddlebrown","salmon","sandybrown","seagreen","seashell","sienna","silver","skyblue","slateblue","slategray","snow","springgreen","steelblue","tan","teal","thistle","tomato","turquoise","violet","wheat","white","whitesmoke","yellow","yellowgreen"]; - var valueKeywords_ = ["above","absolute","activeborder","additive","activecaption","afar","after-white-space","ahead","alias","all","all-scroll","alphabetic","alternate","always","amharic","amharic-abegede","antialiased","appworkspace","arabic-indic","armenian","asterisks","attr","auto","avoid","avoid-column","avoid-page","avoid-region","background","backwards","baseline","below","bidi-override","binary","bengali","blink","block","block-axis","bold","bolder","border","border-box","both","bottom","break","break-all","break-word","bullets","button","button-bevel","buttonface","buttonhighlight","buttonshadow","buttontext","calc","cambodian","capitalize","caps-lock-indicator","caption","captiontext","caret","cell","center","checkbox","circle","cjk-decimal","cjk-earthly-branch","cjk-heavenly-stem","cjk-ideographic","clear","clip","close-quote","col-resize","collapse","column","compact","condensed","contain","content","content-box","context-menu","continuous","copy","counter","counters","cover","crop","cross","crosshair","currentcolor","cursive","cyclic","dashed","decimal","decimal-leading-zero","default","default-button","destination-atop","destination-in","destination-out","destination-over","devanagari","disc","discard","disclosure-closed","disclosure-open","document","dot-dash","dot-dot-dash","dotted","double","down","e-resize","ease","ease-in","ease-in-out","ease-out","element","ellipse","ellipsis","embed","end","ethiopic","ethiopic-abegede","ethiopic-abegede-am-et","ethiopic-abegede-gez","ethiopic-abegede-ti-er","ethiopic-abegede-ti-et","ethiopic-halehame-aa-er","ethiopic-halehame-aa-et","ethiopic-halehame-am-et","ethiopic-halehame-gez","ethiopic-halehame-om-et","ethiopic-halehame-sid-et","ethiopic-halehame-so-et","ethiopic-halehame-ti-er","ethiopic-halehame-ti-et","ethiopic-halehame-tig","ethiopic-numeric","ew-resize","expanded",
"extends","extra-condensed","extra-expanded","fantasy","fast","fill","fixed","flat","flex","footnotes","forwards","from","geometricPrecision","georgian","graytext","groove","gujarati","gurmukhi","hand","hangul","hangul-consonant","hebrew","help","hidden","hide","higher","highlight","highlighttext","hiragana","hiragana-iroha","horizontal","hsl","hsla","icon","ignore","inactiveborder","inactivecaption","inactivecaptiontext","infinite","infobackground","infotext","inherit","initial","inline","inline-axis","inline-block","inline-flex","inline-table","inset","inside","intrinsic","invert","italic","japanese-formal","japanese-informal","justify","kannada","katakana","katakana-iroha","keep-all","khmer","korean-hangul-formal","korean-hanja-formal","korean-hanja-informal","landscape","lao","large","larger","left","level","lighter","line-through","linear","linear-gradient","lines","list-item","listbox","listitem","local","logical","loud","lower","lower-alpha","lower-armenian","lower-greek","lower-hexadecimal","lower-latin","lower-norwegian","lower-roman","lowercase","ltr","malayalam","match","matrix","matrix3d","media-controls-background","media-current-time-display","media-fullscreen-button","media-mute-button","media-play-button","media-return-to-realtime-button","media-rewind-button","media-seek-back-button","media-seek-forward-button","media-slider","media-sliderthumb","media-time-remaining-display","media-volume-slider","media-volume-slider-container","media-volume-sliderthumb","medium","menu","menulist","menulist-button","menulist-text","menulist-textfield","menutext","message-box","middle","min-intrinsic","mix","mongolian","monospace","move","multiple","myanmar","n-resize","narrower","ne-resize","nesw-resize","no-close-quote","no-drop","no-open-quote","no-repeat","none","normal","not-allowed","nowrap","ns-resize","numbers","numeric","nw-resize","nwse-resize","oblique","octal","open-quote","optimizeLegibility","optimizeSpeed","oriya","oromo","outset","outside","outside-shape","overlay","overline","padding","padding-box","painted","page","paused","persian","perspective","plus-darker","plus-lighter","pointer","polygon","portrait","pre","pre-line","pre-wrap","preserve-3d","progress","push-button","radial-gradient","radio","read-only","read-write","read-write-plaintext-only","rectangle","region","relative","repeat","repeating-linear-gradient","repeating-radial-gradient","repeat-x","repeat-y","reset","reverse","rgb","rgba","ridge","right","rotate","rotate3d","rotateX","rotateY","rotateZ","round","row-resize","rtl","run-in","running","s-resize","sans-serif","scale","scale3d","scaleX","scaleY","scaleZ","scroll","scrollbar","se-resize","searchfield","searchfield-cancel-button","searchfield-decoration","searchfield-results-button","searchfield-results-decoration","semi-condensed","semi-expanded","separate","serif","show","sidama","simp-chinese-formal","simp-chinese-informal","single","skew","skewX","skewY","skip-white-space","slide","slider-horizontal","slider-vertical","sliderthumb-horizontal","sliderthumb-vertical","slow","small","small-caps","small-caption","smaller","solid","somali","source-atop","source-in","source-out","source-over","space","spell-out","square","square-button","start","static","status-bar","stretch","stroke","sub","subpixel-antialiased","super","sw-resize","symbolic","symbols","table","table-caption","table-cell","table-column","table-column-group","table-footer-group","table-header-group","table-row","table-row-group","tamil","telugu","text","text-bottom","text-top","textarea","te
xtfield","thai","thick","thin","threeddarkshadow","threedface","threedhighlight","threedlightshadow","threedshadow","tibetan","tigre","tigrinya-er","tigrinya-er-abegede","tigrinya-et","tigrinya-et-abegede","to","top","trad-chinese-formal","trad-chinese-informal","translate","translate3d","translateX","translateY","translateZ","transparent","ultra-condensed","ultra-expanded","underline","up","upper-alpha","upper-armenian","upper-greek","upper-hexadecimal","upper-latin","upper-norwegian","upper-roman","uppercase","urdu","url","var","vertical","vertical-text","visible","visibleFill","visiblePainted","visibleStroke","visual","w-resize","wait","wave","wider","window","windowframe","windowtext","words","x-large","x-small","xor","xx-large","xx-small","bicubic","optimizespeed","grayscale","row","row-reverse","wrap","wrap-reverse","column-reverse","flex-start","flex-end","space-between","space-around"]; + var valueKeywords_ = ["above","absolute","activeborder","additive","activecaption","afar","after-white-space","ahead","alias","all","all-scroll","alphabetic","alternate","always","amharic","amharic-abegede","antialiased","appworkspace","arabic-indic","armenian","asterisks","attr","auto","avoid","avoid-column","avoid-page","avoid-region","background","backwards","baseline","below","bidi-override","binary","bengali","blink","block","block-axis","bold","bolder","border","border-box","both","bottom","break","break-all","break-word","bullets","button","button-bevel","buttonface","buttonhighlight","buttonshadow","buttontext","calc","cambodian","capitalize","caps-lock-indicator","caption","captiontext","caret","cell","center","checkbox","circle","cjk-decimal","cjk-earthly-branch","cjk-heavenly-stem","cjk-ideographic","clear","clip","close-quote","col-resize","collapse","column","compact","condensed","contain","content","contents","content-box","context-menu","continuous","copy","counter","counters","cover","crop","cross","crosshair","currentcolor","cursive","cyclic","dashed","decimal","decimal-leading-zero","default","default-button","destination-atop","destination-in","destination-out","destination-over","devanagari","disc","discard","disclosure-closed","disclosure-open","document","dot-dash","dot-dot-dash","dotted","double","down","e-resize","ease","ease-in","ease-in-out","ease-out","element","ellipse","ellipsis","embed","end","ethiopic","ethiopic-abegede","ethiopic-abegede-am-et","ethiopic-abegede-gez","ethiopic-abegede-ti-er","ethiopic-abegede-ti-et","ethiopic-halehame-aa-er","ethiopic-halehame-aa-et","ethiopic-halehame-am-et","ethiopic-halehame-gez","ethiopic-halehame-om-et","ethiopic-halehame-sid-et","ethiopic-halehame-so-et","ethiopic-halehame-ti-er","ethiopic-halehame-ti-et","ethiopic-halehame-tig","ethiopic-numeric","ew-resize","expanded","extends","extra-condensed","extra-expanded","fantasy","fast","fill","fixed","flat","flex","footnotes","forwards","from","geometricPrecision","georgian","graytext","groove","gujarati","gurmukhi","hand","hangul","hangul-consonant","hebrew","help","hidden","hide","higher","highlight","highlighttext","hiragana","hiragana-iroha","horizontal","hsl","hsla","icon","ignore","inactiveborder","inactivecaption","inactivecaptiontext","infinite","infobackground","infotext","inherit","initial","inline","inline-axis","inline-block","inline-flex","inline-table","inset","inside","intrinsic","invert","italic","japanese-formal","japanese-informal","justify","kannada","katakana","katakana-iroha","keep-all","khmer","korean-hangul-formal","korean-hanja-formal","korean-hanja-informal"
,"landscape","lao","large","larger","left","level","lighter","line-through","linear","linear-gradient","lines","list-item","listbox","listitem","local","logical","loud","lower","lower-alpha","lower-armenian","lower-greek","lower-hexadecimal","lower-latin","lower-norwegian","lower-roman","lowercase","ltr","malayalam","match","matrix","matrix3d","media-controls-background","media-current-time-display","media-fullscreen-button","media-mute-button","media-play-button","media-return-to-realtime-button","media-rewind-button","media-seek-back-button","media-seek-forward-button","media-slider","media-sliderthumb","media-time-remaining-display","media-volume-slider","media-volume-slider-container","media-volume-sliderthumb","medium","menu","menulist","menulist-button","menulist-text","menulist-textfield","menutext","message-box","middle","min-intrinsic","mix","mongolian","monospace","move","multiple","myanmar","n-resize","narrower","ne-resize","nesw-resize","no-close-quote","no-drop","no-open-quote","no-repeat","none","normal","not-allowed","nowrap","ns-resize","numbers","numeric","nw-resize","nwse-resize","oblique","octal","open-quote","optimizeLegibility","optimizeSpeed","oriya","oromo","outset","outside","outside-shape","overlay","overline","padding","padding-box","painted","page","paused","persian","perspective","plus-darker","plus-lighter","pointer","polygon","portrait","pre","pre-line","pre-wrap","preserve-3d","progress","push-button","radial-gradient","radio","read-only","read-write","read-write-plaintext-only","rectangle","region","relative","repeat","repeating-linear-gradient","repeating-radial-gradient","repeat-x","repeat-y","reset","reverse","rgb","rgba","ridge","right","rotate","rotate3d","rotateX","rotateY","rotateZ","round","row-resize","rtl","run-in","running","s-resize","sans-serif","scale","scale3d","scaleX","scaleY","scaleZ","scroll","scrollbar","scroll-position","se-resize","searchfield","searchfield-cancel-button","searchfield-decoration","searchfield-results-button","searchfield-results-decoration","semi-condensed","semi-expanded","separate","serif","show","sidama","simp-chinese-formal","simp-chinese-informal","single","skew","skewX","skewY","skip-white-space","slide","slider-horizontal","slider-vertical","sliderthumb-horizontal","sliderthumb-vertical","slow","small","small-caps","small-caption","smaller","solid","somali","source-atop","source-in","source-out","source-over","space","spell-out","square","square-button","start","static","status-bar","stretch","stroke","sub","subpixel-antialiased","super","sw-resize","symbolic","symbols","table","table-caption","table-cell","table-column","table-column-group","table-footer-group","table-header-group","table-row","table-row-group","tamil","telugu","text","text-bottom","text-top","textarea","textfield","thai","thick","thin","threeddarkshadow","threedface","threedhighlight","threedlightshadow","threedshadow","tibetan","tigre","tigrinya-er","tigrinya-er-abegede","tigrinya-et","tigrinya-et-abegede","to","top","trad-chinese-formal","trad-chinese-informal","translate","translate3d","translateX","translateY","translateZ","transparent","ultra-condensed","ultra-expanded","underline","up","upper-alpha","upper-armenian","upper-greek","upper-hexadecimal","upper-latin","upper-norwegian","upper-roman","uppercase","urdu","url","var","vertical","vertical-text","visible","visibleFill","visiblePainted","visibleStroke","visual","w-resize","wait","wave","wider","window","windowframe","windowtext","words","x-large","x-small","xor","xx-large","xx-small",
"bicubic","optimizespeed","grayscale","row","row-reverse","wrap","wrap-reverse","column-reverse","flex-start","flex-end","space-between","space-around", "unset"]; var wordOperatorKeywords_ = ["in","and","or","not","is not","is a","is","isnt","defined","if unless"], blockKeywords_ = ["for","if","else","unless", "from", "to"], diff --git a/rhodecode/public/js/mode/swift/swift.js b/rhodecode/public/js/mode/swift/swift.js --- a/rhodecode/public/js/mode/swift/swift.js +++ b/rhodecode/public/js/mode/swift/swift.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE // Swift mode created by Michael Kaminsky https://github.com/mkaminsky11 @@ -19,25 +19,28 @@ return set } - var keywords = wordSet(["var","let","class","deinit","enum","extension","func","import","init","protocol", - "static","struct","subscript","typealias","as","dynamicType","is","new","super", - "self","Self","Type","__COLUMN__","__FILE__","__FUNCTION__","__LINE__","break","case", - "continue","default","do","else","fallthrough","if","in","for","return","switch", - "where","while","associativity","didSet","get","infix","inout","left","mutating", - "none","nonmutating","operator","override","postfix","precedence","prefix","right", - "set","unowned","weak","willSet"]) - var definingKeywords = wordSet(["var","let","class","enum","extension","func","import","protocol","struct", - "typealias","dynamicType","for"]) - var atoms = wordSet(["Infinity","NaN","undefined","null","true","false","on","off","yes","no","nil","null", - "this","super"]) - var types = wordSet(["String","bool","int","string","double","Double","Int","Float","float","public", - "private","extension"]) - var operators = "+-/*%=|&<>#" - var punc = ";,.(){}[]" - var number = /^-?(?:(?:[\d_]+\.[_\d]*|\.[_\d]+|0o[0-7_\.]+|0b[01_\.]+)(?:e-?[\d_]+)?|0x[\d_a-f\.]+(?:p-?[\d_]+)?)/i - var identifier = /^[_A-Za-z$][_A-Za-z$0-9]*/ - var property = /^[@\.][_A-Za-z$][_A-Za-z$0-9]*/ - var regexp = /^\/(?!\s)(?:\/\/)?(?:\\.|[^\/])+\// + var keywords = wordSet(["_","var","let","class","enum","extension","import","protocol","struct","func","typealias","associatedtype", + "open","public","internal","fileprivate","private","deinit","init","new","override","self","subscript","super", + "convenience","dynamic","final","indirect","lazy","required","static","unowned","unowned(safe)","unowned(unsafe)","weak","as","is", + "break","case","continue","default","else","fallthrough","for","guard","if","in","repeat","switch","where","while", + "defer","return","inout","mutating","nonmutating","catch","do","rethrows","throw","throws","try","didSet","get","set","willSet", + "assignment","associativity","infix","left","none","operator","postfix","precedence","precedencegroup","prefix","right", + "Any","AnyObject","Type","dynamicType","Self","Protocol","__COLUMN__","__FILE__","__FUNCTION__","__LINE__"]) + var definingKeywords = wordSet(["var","let","class","enum","extension","import","protocol","struct","func","typealias","associatedtype","for"]) + var atoms = wordSet(["true","false","nil","self","super","_"]) + var types = wordSet(["Array","Bool","Character","Dictionary","Double","Float","Int","Int8","Int16","Int32","Int64","Never","Optional","Set","String", + "UInt8","UInt16","UInt32","UInt64","Void"]) + var operators = "+-/*%=|&<>~^?!" 
+ var punc = ":;,.(){}[]" + var binary = /^\-?0b[01][01_]*/ + var octal = /^\-?0o[0-7][0-7_]*/ + var hexadecimal = /^\-?0x[\dA-Fa-f][\dA-Fa-f_]*(?:(?:\.[\dA-Fa-f][\dA-Fa-f_]*)?[Pp]\-?\d[\d_]*)?/ + var decimal = /^\-?\d[\d_]*(?:\.\d[\d_]*)?(?:[Ee]\-?\d[\d_]*)?/ + var identifier = /^\$\d+|(`?)[_A-Za-z][_A-Za-z$0-9]*\1/ + var property = /^\.(?:\$\d+|(`?)[_A-Za-z][_A-Za-z$0-9]*\1)/ + var instruction = /^\#[A-Za-z]+/ + var attribute = /^@(?:\$\d+|(`?)[_A-Za-z][_A-Za-z$0-9]*\1)/ + //var regexp = /^\/(?!\s)(?:\/\/)?(?:\\.|[^\/])+\// function tokenBase(stream, state, prev) { if (stream.sol()) state.indented = stream.indentation() @@ -53,8 +56,14 @@ state.tokenize.push(tokenComment) return tokenComment(stream, state) } - if (stream.match(regexp)) return "string-2" } + if (stream.match(instruction)) return "builtin" + if (stream.match(attribute)) return "attribute" + if (stream.match(binary)) return "number" + if (stream.match(octal)) return "number" + if (stream.match(hexadecimal)) return "number" + if (stream.match(decimal)) return "number" + if (stream.match(property)) return "property" if (operators.indexOf(ch) > -1) { stream.next() return "operator" @@ -64,25 +73,22 @@ stream.match("..") return "punctuation" } - if (ch == '"' || ch == "'") { - stream.next() - var tokenize = tokenString(ch) + var stringMatch + if (stringMatch = stream.match(/("""|"|')/)) { + var tokenize = tokenString.bind(null, stringMatch[0]) state.tokenize.push(tokenize) return tokenize(stream, state) } - if (stream.match(number)) return "number" - if (stream.match(property)) return "property" - if (stream.match(identifier)) { var ident = stream.current() + if (types.hasOwnProperty(ident)) return "variable-2" + if (atoms.hasOwnProperty(ident)) return "atom" if (keywords.hasOwnProperty(ident)) { if (definingKeywords.hasOwnProperty(ident)) state.prev = "define" return "keyword" } - if (types.hasOwnProperty(ident)) return "variable-2" - if (atoms.hasOwnProperty(ident)) return "atom" if (prev == "define") return "def" return "variable" } @@ -110,30 +116,43 @@ } } - function tokenString(quote) { - return function(stream, state) { - var ch, escaped = false - while (ch = stream.next()) { - if (escaped) { - if (ch == "(") { - state.tokenize.push(tokenUntilClosingParen()) - return "string" - } - escaped = false - } else if (ch == quote) { - break - } else { - escaped = ch == "\\" + function tokenString(openQuote, stream, state) { + var singleLine = openQuote.length == 1 + var ch, escaped = false + while (ch = stream.peek()) { + if (escaped) { + stream.next() + if (ch == "(") { + state.tokenize.push(tokenUntilClosingParen()) + return "string" } + escaped = false + } else if (stream.match(openQuote)) { + state.tokenize.pop() + return "string" + } else { + stream.next() + escaped = ch == "\\" } + } + if (singleLine) { state.tokenize.pop() - return "string" } + return "string" } function tokenComment(stream, state) { - stream.match(/^(?:[^*]|\*(?!\/))*/) - if (stream.match("*/")) state.tokenize.pop() + var ch + while (true) { + stream.match(/^[^/*]+/, true) + ch = stream.next() + if (!ch) break + if (ch === "/" && stream.eat("*")) { + state.tokenize.push(tokenComment) + } else if (ch === "*" && stream.eat("/")) { + state.tokenize.pop() + } + } return "comment" } @@ -194,9 +213,11 @@ lineComment: "//", blockCommentStart: "/*", - blockCommentEnd: "*/" + blockCommentEnd: "*/", + fold: "brace", + closeBrackets: "()[]{}''\"\"``" } }) CodeMirror.defineMIME("text/x-swift","swift") -}) +}); diff --git a/rhodecode/public/js/mode/tcl/tcl.js 
b/rhodecode/public/js/mode/tcl/tcl.js --- a/rhodecode/public/js/mode/tcl/tcl.js +++ b/rhodecode/public/js/mode/tcl/tcl.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE //tcl mode by Ford_Lawnmower :: Based on Velocity mode by Steve O'Hara @@ -42,42 +42,34 @@ CodeMirror.defineMode("tcl", function() var beforeParams = state.beforeParams; state.beforeParams = false; var ch = stream.next(); - if ((ch == '"' || ch == "'") && state.inParams) + if ((ch == '"' || ch == "'") && state.inParams) { return chain(stream, state, tokenString(ch)); - else if (/[\[\]{}\(\),;\.]/.test(ch)) { + } else if (/[\[\]{}\(\),;\.]/.test(ch)) { if (ch == "(" && beforeParams) state.inParams = true; else if (ch == ")") state.inParams = false; return null; - } - else if (/\d/.test(ch)) { + } else if (/\d/.test(ch)) { stream.eatWhile(/[\w\.]/); return "number"; - } - else if (ch == "#" && stream.eat("*")) { - return chain(stream, state, tokenComment); - } - else if (ch == "#" && stream.match(/ *\[ *\[/)) { - return chain(stream, state, tokenUnparsed); - } - else if (ch == "#" && stream.eat("#")) { + } else if (ch == "#") { + if (stream.eat("*")) + return chain(stream, state, tokenComment); + if (ch == "#" && stream.match(/ *\[ *\[/)) + return chain(stream, state, tokenUnparsed); stream.skipToEnd(); return "comment"; - } - else if (ch == '"') { + } else if (ch == '"') { stream.skipTo(/"/); return "comment"; - } - else if (ch == "$") { + } else if (ch == "$") { stream.eatWhile(/[$_a-z0-9A-Z\.{:]/); stream.eatWhile(/}/); state.beforeParams = true; return "builtin"; - } - else if (isOperatorChar.test(ch)) { + } else if (isOperatorChar.test(ch)) { stream.eatWhile(isOperatorChar); return "comment"; - } - else { + } else { stream.eatWhile(/[\w\$_{}\xa1-\uffff]/); var word = stream.current().toLowerCase(); if (keywords && keywords.propertyIsEnumerable(word)) diff --git a/rhodecode/public/js/mode/textile/textile.js b/rhodecode/public/js/mode/textile/textile.js --- a/rhodecode/public/js/mode/textile/textile.js +++ b/rhodecode/public/js/mode/textile/textile.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") { // CommonJS @@ -203,7 +203,7 @@ single: { bc: "bc", bq: "bq", - definitionList: /- [^(?::=)]+:=+/, + definitionList: /- .*?:=+/, definitionListEnd: /.*=:\s*$/, div: "div", drawTable: /\|.*\|/, diff --git a/rhodecode/public/js/mode/tiddlywiki/tiddlywiki.js b/rhodecode/public/js/mode/tiddlywiki/tiddlywiki.js --- a/rhodecode/public/js/mode/tiddlywiki/tiddlywiki.js +++ b/rhodecode/public/js/mode/tiddlywiki/tiddlywiki.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE /*** |''Name''|tiddlywiki.js| @@ -8,7 +8,7 @@ |''Version''|0.1.7| |''Status''|''stable''| |''Source''|[[GitHub|https://github.com/pmario/CodeMirror2/blob/tw-syntax/mode/tiddlywiki]]| - |''Documentation''|http://codemirror.tiddlyspace.com/| + |''Documentation''|https://codemirror.tiddlyspace.com/| |''License''|[[MIT License|http://www.opensource.org/licenses/mit-license.php]]| |''CoreVersion''|2.5.0| 
|''Requires''|codemirror.js| @@ -16,7 +16,6 @@ ! Info CoreVersion parameter is needed for TiddlyWiki only! ***/ -//{{{ (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS @@ -32,73 +31,60 @@ CodeMirror.defineMode("tiddlywiki", func // Tokenizer var textwords = {}; - var keywords = function () { - function kw(type) { - return { type: type, style: "macro"}; - } - return { - "allTags": kw('allTags'), "closeAll": kw('closeAll'), "list": kw('list'), - "newJournal": kw('newJournal'), "newTiddler": kw('newTiddler'), - "permaview": kw('permaview'), "saveChanges": kw('saveChanges'), - "search": kw('search'), "slider": kw('slider'), "tabs": kw('tabs'), - "tag": kw('tag'), "tagging": kw('tagging'), "tags": kw('tags'), - "tiddler": kw('tiddler'), "timeline": kw('timeline'), - "today": kw('today'), "version": kw('version'), "option": kw('option'), - - "with": kw('with'), - "filter": kw('filter') - }; - }(); + var keywords = { + "allTags": true, "closeAll": true, "list": true, + "newJournal": true, "newTiddler": true, + "permaview": true, "saveChanges": true, + "search": true, "slider": true, "tabs": true, + "tag": true, "tagging": true, "tags": true, + "tiddler": true, "timeline": true, + "today": true, "version": true, "option": true, + "with": true, "filter": true + }; var isSpaceName = /[\w_\-]/i, - reHR = /^\-\-\-\-+$/, //
    - reWikiCommentStart = /^\/\*\*\*$/, // /*** - reWikiCommentStop = /^\*\*\*\/$/, // ***/ - reBlockQuote = /^<<<$/, + reHR = /^\-\-\-\-+$/, // <hr>
    + reWikiCommentStart = /^\/\*\*\*$/, // /*** + reWikiCommentStop = /^\*\*\*\/$/, // ***/ + reBlockQuote = /^<<<$/, - reJsCodeStart = /^\/\/\{\{\{$/, // //{{{ js block start - reJsCodeStop = /^\/\/\}\}\}$/, // //}}} js stop - reXmlCodeStart = /^$/, // xml block start - reXmlCodeStop = /^$/, // xml stop + reJsCodeStart = /^\/\/\{\{\{$/, // //{{{ js block start + reJsCodeStop = /^\/\/\}\}\}$/, // //}}} js stop + reXmlCodeStart = /^$/, // xml block start + reXmlCodeStop = /^$/, // xml stop - reCodeBlockStart = /^\{\{\{$/, // {{{ TW text div block start - reCodeBlockStop = /^\}\}\}$/, // }}} TW text stop + reCodeBlockStart = /^\{\{\{$/, // {{{ TW text div block start + reCodeBlockStop = /^\}\}\}$/, // }}} TW text stop - reUntilCodeStop = /.*?\}\}\}/; + reUntilCodeStop = /.*?\}\}\}/; function chain(stream, state, f) { state.tokenize = f; return f(stream, state); } - function jsTokenBase(stream, state) { - var sol = stream.sol(), ch; + function tokenBase(stream, state) { + var sol = stream.sol(), ch = stream.peek(); state.block = false; // indicates the start of a code block. - ch = stream.peek(); // don't eat, to make matching simpler - // check start of blocks if (sol && /[<\/\*{}\-]/.test(ch)) { if (stream.match(reCodeBlockStart)) { state.block = true; return chain(stream, state, twTokenCode); } - if (stream.match(reBlockQuote)) { + if (stream.match(reBlockQuote)) return 'quote'; - } - if (stream.match(reWikiCommentStart) || stream.match(reWikiCommentStop)) { + if (stream.match(reWikiCommentStart) || stream.match(reWikiCommentStop)) return 'comment'; - } - if (stream.match(reJsCodeStart) || stream.match(reJsCodeStop) || stream.match(reXmlCodeStart) || stream.match(reXmlCodeStop)) { + if (stream.match(reJsCodeStart) || stream.match(reJsCodeStop) || stream.match(reXmlCodeStart) || stream.match(reXmlCodeStop)) return 'comment'; - } - if (stream.match(reHR)) { + if (stream.match(reHR)) return 'hr'; - } - } // sol - ch = stream.next(); + } + stream.next(); if (sol && /[\/\*!#;:>|]/.test(ch)) { if (ch == "!") { // tw header stream.skipToEnd(); @@ -124,95 +110,77 @@ CodeMirror.defineMode("tiddlywiki", func stream.eatWhile(">"); return "quote"; } - if (ch == '|') { + if (ch == '|') return 'header'; - } } - if (ch == '{' && stream.match(/\{\{/)) { + if (ch == '{' && stream.match(/\{\{/)) return chain(stream, state, twTokenCode); - } // rudimentary html:// file:// link matching. TW knows much more ... - if (/[hf]/i.test(ch)) { - if (/[ti]/i.test(stream.peek()) && stream.match(/\b(ttps?|tp|ile):\/\/[\-A-Z0-9+&@#\/%?=~_|$!:,.;]*[A-Z0-9+&@#\/%=~_|$]/i)) { - return "link"; - } - } + if (/[hf]/i.test(ch) && + /[ti]/i.test(stream.peek()) && + stream.match(/\b(ttps?|tp|ile):\/\/[\-A-Z0-9+&@#\/%?=~_|$!:,.;]*[A-Z0-9+&@#\/%=~_|$]/i)) + return "link"; + // just a little string indicator, don't want to have the whole string covered - if (ch == '"') { + if (ch == '"') return 'string'; - } - if (ch == '~') { // _no_ CamelCase indicator should be bold + + if (ch == '~') // _no_ CamelCase indicator should be bold return 'brace'; - } - if (/[\[\]]/.test(ch)) { // check for [[..]] - if (stream.peek() == ch) { - stream.next(); - return 'brace'; - } - } + + if (/[\[\]]/.test(ch) && stream.match(ch)) // check for [[..]] + return 'brace'; + if (ch == "@") { // check for space link. 
TODO fix @@...@@ highlighting stream.eatWhile(isSpaceName); return "link"; } + if (/\d/.test(ch)) { // numbers stream.eatWhile(/\d/); return "number"; } + if (ch == "/") { // tw invisible comment if (stream.eat("%")) { return chain(stream, state, twTokenComment); - } - else if (stream.eat("/")) { // + } else if (stream.eat("/")) { // return chain(stream, state, twTokenEm); } } - if (ch == "_") { // tw underline - if (stream.eat("_")) { + + if (ch == "_" && stream.eat("_")) // tw underline return chain(stream, state, twTokenUnderline); - } - } + // strikethrough and mdash handling - if (ch == "-") { - if (stream.eat("-")) { - // if strikethrough looks ugly, change CSS. - if (stream.peek() != ' ') - return chain(stream, state, twTokenStrike); - // mdash - if (stream.peek() == ' ') - return 'brace'; - } + if (ch == "-" && stream.eat("-")) { + // if strikethrough looks ugly, change CSS. + if (stream.peek() != ' ') + return chain(stream, state, twTokenStrike); + // mdash + if (stream.peek() == ' ') + return 'brace'; } - if (ch == "'") { // tw bold - if (stream.eat("'")) { - return chain(stream, state, twTokenStrong); - } - } - if (ch == "<") { // tw macro - if (stream.eat("<")) { - return chain(stream, state, twTokenMacro); - } - } - else { - return null; - } + + if (ch == "'" && stream.eat("'")) // tw bold + return chain(stream, state, twTokenStrong); + + if (ch == "<" && stream.eat("<")) // tw macro + return chain(stream, state, twTokenMacro); // core macro handling stream.eatWhile(/[\w\$_]/); - var word = stream.current(), - known = textwords.propertyIsEnumerable(word) && textwords[word]; - - return known ? known.style : null; - } // jsTokenBase() + return textwords.propertyIsEnumerable(stream.current()) ? "keyword" : null + } // tw invisible comment function twTokenComment(stream, state) { - var maybeEnd = false, - ch; + var maybeEnd = false, ch; while (ch = stream.next()) { if (ch == "/" && maybeEnd) { - state.tokenize = jsTokenBase; + state.tokenize = tokenBase; break; } maybeEnd = (ch == "%"); @@ -226,7 +194,7 @@ CodeMirror.defineMode("tiddlywiki", func ch; while (ch = stream.next()) { if (ch == "'" && maybeEnd) { - state.tokenize = jsTokenBase; + state.tokenize = tokenBase; break; } maybeEnd = (ch == "'"); @@ -243,12 +211,12 @@ CodeMirror.defineMode("tiddlywiki", func } if (!sb && stream.match(reUntilCodeStop)) { - state.tokenize = jsTokenBase; + state.tokenize = tokenBase; return "comment"; } if (sb && stream.sol() && stream.match(reCodeBlockStop)) { - state.tokenize = jsTokenBase; + state.tokenize = tokenBase; return "comment"; } @@ -262,7 +230,7 @@ CodeMirror.defineMode("tiddlywiki", func ch; while (ch = stream.next()) { if (ch == "/" && maybeEnd) { - state.tokenize = jsTokenBase; + state.tokenize = tokenBase; break; } maybeEnd = (ch == "/"); @@ -276,7 +244,7 @@ CodeMirror.defineMode("tiddlywiki", func ch; while (ch = stream.next()) { if (ch == "_" && maybeEnd) { - state.tokenize = jsTokenBase; + state.tokenize = tokenBase; break; } maybeEnd = (ch == "_"); @@ -291,7 +259,7 @@ CodeMirror.defineMode("tiddlywiki", func while (ch = stream.next()) { if (ch == "-" && maybeEnd) { - state.tokenize = jsTokenBase; + state.tokenize = tokenBase; break; } maybeEnd = (ch == "-"); @@ -301,58 +269,40 @@ CodeMirror.defineMode("tiddlywiki", func // macro function twTokenMacro(stream, state) { - var ch, word, known; - if (stream.current() == '<<') { return 'macro'; } - ch = stream.next(); + var ch = stream.next(); if (!ch) { - state.tokenize = jsTokenBase; + state.tokenize = tokenBase; return null; } 
if (ch == ">") { if (stream.peek() == '>') { stream.next(); - state.tokenize = jsTokenBase; + state.tokenize = tokenBase; return "macro"; } } stream.eatWhile(/[\w\$_]/); - word = stream.current(); - known = keywords.propertyIsEnumerable(word) && keywords[word]; - - if (known) { - return known.style, word; - } - else { - return null, word; - } + return keywords.propertyIsEnumerable(stream.current()) ? "keyword" : null } // Interface return { startState: function () { - return { - tokenize: jsTokenBase, - indented: 0, - level: 0 - }; + return {tokenize: tokenBase}; }, token: function (stream, state) { if (stream.eatSpace()) return null; var style = state.tokenize(stream, state); return style; - }, - - electricChars: "" + } }; }); CodeMirror.defineMIME("text/x-tiddlywiki", "tiddlywiki"); }); - -//}}} diff --git a/rhodecode/public/js/mode/tiki/tiki.js b/rhodecode/public/js/mode/tiki/tiki.js --- a/rhodecode/public/js/mode/tiki/tiki.js +++ b/rhodecode/public/js/mode/tiki/tiki.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS @@ -144,7 +144,7 @@ CodeMirror.defineMode('tiki', function(c type = "equals"; if (peek == ">") { - ch = stream.next(); + stream.next(); peek = stream.peek(); } @@ -298,13 +298,13 @@ return { if (context && context.noIndent) return 0; if (context && /^{\//.test(textAfter)) context = context.prev; - while (context && !context.startOfLine) - context = context.prev; - if (context) return context.indent + indentUnit; - else return 0; - }, - electricChars: "/" - }; + while (context && !context.startOfLine) + context = context.prev; + if (context) return context.indent + indentUnit; + else return 0; + }, + electricChars: "/" +}; }); CodeMirror.defineMIME("text/tiki", "tiki"); diff --git a/rhodecode/public/js/mode/toml/toml.js b/rhodecode/public/js/mode/toml/toml.js --- a/rhodecode/public/js/mode/toml/toml.js +++ b/rhodecode/public/js/mode/toml/toml.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS diff --git a/rhodecode/public/js/mode/tornado/tornado.js b/rhodecode/public/js/mode/tornado/tornado.js --- a/rhodecode/public/js/mode/tornado/tornado.js +++ b/rhodecode/public/js/mode/tornado/tornado.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS diff --git a/rhodecode/public/js/mode/troff/troff.js b/rhodecode/public/js/mode/troff/troff.js --- a/rhodecode/public/js/mode/troff/troff.js +++ b/rhodecode/public/js/mode/troff/troff.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") @@ -77,6 +77,8 @@ CodeMirror.defineMode('troff', function( }; }); -CodeMirror.defineMIME('troff', 'troff'); 
+CodeMirror.defineMIME('text/troff', 'troff'); +CodeMirror.defineMIME('text/x-troff', 'troff'); +CodeMirror.defineMIME('application/x-troff', 'troff'); }); diff --git a/rhodecode/public/js/mode/ttcn-cfg/ttcn-cfg.js b/rhodecode/public/js/mode/ttcn-cfg/ttcn-cfg.js --- a/rhodecode/public/js/mode/ttcn-cfg/ttcn-cfg.js +++ b/rhodecode/public/js/mode/ttcn-cfg/ttcn-cfg.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS diff --git a/rhodecode/public/js/mode/ttcn/ttcn.js b/rhodecode/public/js/mode/ttcn/ttcn.js --- a/rhodecode/public/js/mode/ttcn/ttcn.js +++ b/rhodecode/public/js/mode/ttcn/ttcn.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS diff --git a/rhodecode/public/js/mode/turtle/turtle.js b/rhodecode/public/js/mode/turtle/turtle.js --- a/rhodecode/public/js/mode/turtle/turtle.js +++ b/rhodecode/public/js/mode/turtle/turtle.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS diff --git a/rhodecode/public/js/mode/twig/twig.js b/rhodecode/public/js/mode/twig/twig.js --- a/rhodecode/public/js/mode/twig/twig.js +++ b/rhodecode/public/js/mode/twig/twig.js @@ -1,17 +1,17 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS - mod(require("../../lib/codemirror")); + mod(require("../../lib/codemirror"), require("../../addon/mode/multiplex")); else if (typeof define == "function" && define.amd) // AMD - define(["../../lib/codemirror"], mod); + define(["../../lib/codemirror", "../../addon/mode/multiplex"], mod); else // Plain browser env mod(CodeMirror); })(function(CodeMirror) { "use strict"; - CodeMirror.defineMode("twig", function() { + CodeMirror.defineMode("twig:inner", function() { var keywords = ["and", "as", "autoescape", "endautoescape", "block", "do", "endblock", "else", "elseif", "extends", "for", "endfor", "embed", "endembed", "filter", "endfilter", "flush", "from", "if", "endif", "in", "is", "include", "import", "not", "or", "set", "spaceless", "endspaceless", "with", "endwith", "trans", "endtrans", "blocktrans", "endblocktrans", "macro", "endmacro", "use", "verbatim", "endverbatim"], operator = /^[+\-*&%=<>!?|~^]/, sign = /^[:\[\(\{]/, @@ -95,7 +95,7 @@ } return "variable"; } else if (stream.eat("{")) { - if (ch = stream.eat("#")) { + if (stream.eat("#")) { state.incomment = true; if (!stream.skipTo("#}")) { stream.skipToEnd(); @@ -128,5 +128,14 @@ }; }); + CodeMirror.defineMode("twig", function(config, parserConfig) { + var twigInner = CodeMirror.getMode(config, "twig:inner"); + if (!parserConfig || !parserConfig.base) return twigInner; + return CodeMirror.multiplexingMode( + CodeMirror.getMode(config, 
parserConfig.base), { + open: /\{[{#%]/, close: /[}#%]\}/, mode: twigInner, parseDelimiters: true + } + ); + }); CodeMirror.defineMIME("text/x-twig", "twig"); }); diff --git a/rhodecode/public/js/mode/vb/vb.js b/rhodecode/public/js/mode/vb/vb.js --- a/rhodecode/public/js/mode/vb/vb.js +++ b/rhodecode/public/js/mode/vb/vb.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS @@ -25,16 +25,16 @@ CodeMirror.defineMode("vb", function(con var tripleDelimiters = new RegExp("^((//=)|(>>=)|(<<=)|(\\*\\*=))"); var identifiers = new RegExp("^[_A-Za-z][_A-Za-z0-9]*"); - var openingKeywords = ['class','module', 'sub','enum','select','while','if','function', 'get','set','property', 'try']; - var middleKeywords = ['else','elseif','case', 'catch']; + var openingKeywords = ['class','module', 'sub','enum','select','while','if','function', 'get','set','property', 'try', 'structure', 'synclock', 'using', 'with']; + var middleKeywords = ['else','elseif','case', 'catch', 'finally']; var endKeywords = ['next','loop']; - var operatorKeywords = ['and', 'or', 'not', 'xor', 'in']; + var operatorKeywords = ['and', "andalso", 'or', 'orelse', 'xor', 'in', 'not', 'is', 'isnot', 'like']; var wordOperators = wordRegexp(operatorKeywords); - var commonKeywords = ['as', 'dim', 'break', 'continue','optional', 'then', 'until', - 'goto', 'byval','byref','new','handles','property', 'return', - 'const','private', 'protected', 'friend', 'public', 'shared', 'static', 'true','false']; - var commontypes = ['integer','string','double','decimal','boolean','short','char', 'float','single']; + + var commonKeywords = ["#const", "#else", "#elseif", "#end", "#if", "#region", "addhandler", "addressof", "alias", "as", "byref", "byval", "cbool", "cbyte", "cchar", "cdate", "cdbl", "cdec", "cint", "clng", "cobj", "compare", "const", "continue", "csbyte", "cshort", "csng", "cstr", "cuint", "culng", "cushort", "declare", "default", "delegate", "dim", "directcast", "each", "erase", "error", "event", "exit", "explicit", "false", "for", "friend", "gettype", "goto", "handles", "implements", "imports", "infer", "inherits", "interface", "isfalse", "istrue", "lib", "me", "mod", "mustinherit", "mustoverride", "my", "mybase", "myclass", "namespace", "narrowing", "new", "nothing", "notinheritable", "notoverridable", "of", "off", "on", "operator", "option", "optional", "out", "overloads", "overridable", "overrides", "paramarray", "partial", "private", "protected", "public", "raiseevent", "readonly", "redim", "removehandler", "resume", "return", "shadows", "shared", "static", "step", "stop", "strict", "then", "throw", "to", "true", "trycast", "typeof", "until", "until", "when", "widening", "withevents", "writeonly"]; + + var commontypes = ['object', 'boolean', 'char', 'string', 'byte', 'sbyte', 'short', 'ushort', 'int16', 'uint16', 'integer', 'uinteger', 'int32', 'uint32', 'long', 'ulong', 'int64', 'uint64', 'decimal', 'single', 'double', 'float', 'date', 'datetime', 'intptr', 'uintptr']; var keywords = wordRegexp(commonKeywords); var types = wordRegexp(commontypes); @@ -202,7 +202,6 @@ CodeMirror.defineMode("vb", function(con // Handle '.' 
connected identifiers if (current === '.') { style = state.tokenize(stream, state); - current = stream.current(); if (style === 'variable') { return 'variable'; } else { diff --git a/rhodecode/public/js/mode/vbscript/vbscript.js b/rhodecode/public/js/mode/vbscript/vbscript.js --- a/rhodecode/public/js/mode/vbscript/vbscript.js +++ b/rhodecode/public/js/mode/vbscript/vbscript.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE /* For extra ASP classic objects, initialize CodeMirror instance with this option: diff --git a/rhodecode/public/js/mode/velocity/velocity.js b/rhodecode/public/js/mode/velocity/velocity.js --- a/rhodecode/public/js/mode/velocity/velocity.js +++ b/rhodecode/public/js/mode/velocity/velocity.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS @@ -34,7 +34,7 @@ CodeMirror.defineMode("velocity", functi state.beforeParams = false; var ch = stream.next(); // start of unparsed string? - if ((ch == "'") && state.inParams) { + if ((ch == "'") && !state.inString && state.inParams) { state.lastTokenWasBuiltin = false; return chain(stream, state, tokenString(ch)); } @@ -82,7 +82,7 @@ CodeMirror.defineMode("velocity", functi } // variable? else if (ch == "$") { - stream.eatWhile(/[\w\d\$_\.{}]/); + stream.eatWhile(/[\w\d\$_\.{}-]/); // is it one of the specials? if (specials && specials.propertyIsEnumerable(stream.current())) { return "keyword"; diff --git a/rhodecode/public/js/mode/verilog/verilog.js b/rhodecode/public/js/mode/verilog/verilog.js --- a/rhodecode/public/js/mode/verilog/verilog.js +++ b/rhodecode/public/js/mode/verilog/verilog.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS @@ -81,7 +81,7 @@ CodeMirror.defineMode("verilog", functio // Block openings which are closed by a matching keyword in the form of ("end" + keyword) // E.g. "task" => "endtask" var blockKeywords = words( - "case checker class clocking config function generate interface module package" + + "case checker class clocking config function generate interface module package " + "primitive program property specify sequence table task" ); @@ -250,7 +250,7 @@ CodeMirror.defineMode("verilog", functio if (text == contextClosing) { return true; } else { - // contextClosing may be mulitple keywords separated by ; + // contextClosing may be multiple keywords separated by ; var closingKeywords = contextClosing.split(";"); for (var i in closingKeywords) { if (text == closingKeywords[i]) { @@ -302,7 +302,13 @@ CodeMirror.defineMode("verilog", functio state.indented = stream.indentation(); state.startOfLine = true; } - if (hooks.token) hooks.token(stream, state); + if (hooks.token) { + // Call hook, with an optional return value of a style to override verilog styling. 
+ var style = hooks.token(stream, state); + if (style !== undefined) { + return style; + } + } if (stream.eatSpace()) return null; curPunc = null; curKeyword = null; @@ -375,163 +381,295 @@ CodeMirror.defineMode("verilog", functio name: "verilog" }); - // TLVVerilog mode + + + // TL-Verilog mode. + // See tl-x.org for language spec. + // See the mode in action at makerchip.com. + // Contact: steve.hoover@redwoodeda.com - var tlvchScopePrefixes = { - ">": "property", "->": "property", "-": "hr", "|": "link", "?$": "qualifier", "?*": "qualifier", - "@-": "variable-3", "@": "variable-3", "?": "qualifier" + // TLV Identifier prefixes. + // Note that sign is not treated separately, so "+/-" versions of numeric identifiers + // are included. + var tlvIdentifierStyle = { + "|": "link", + ">": "property", // Should condition this off for > TLV 1c. + "$": "variable", + "$$": "variable", + "?$": "qualifier", + "?*": "qualifier", + "-": "hr", + "/": "property", + "/-": "property", + "@": "variable-3", + "@-": "variable-3", + "@++": "variable-3", + "@+=": "variable-3", + "@+=-": "variable-3", + "@--": "variable-3", + "@-=": "variable-3", + "%+": "tag", + "%-": "tag", + "%": "tag", + ">>": "tag", + "<<": "tag", + "<>": "tag", + "#": "tag", // Need to choose a style for this. + "^": "attribute", + "^^": "attribute", + "^!": "attribute", + "*": "variable-2", + "**": "variable-2", + "\\": "keyword", + "\"": "comment" }; - function tlvGenIndent(stream, state) { - var tlvindentUnit = 2; - var rtnIndent = -1, indentUnitRq = 0, curIndent = stream.indentation(); - switch (state.tlvCurCtlFlowChar) { - case "\\": - curIndent = 0; - break; - case "|": - if (state.tlvPrevPrevCtlFlowChar == "@") { - indentUnitRq = -2; //-2 new pipe rq after cur pipe - break; - } - if (tlvchScopePrefixes[state.tlvPrevCtlFlowChar]) - indentUnitRq = 1; // +1 new scope - break; - case "M": // m4 - if (state.tlvPrevPrevCtlFlowChar == "@") { - indentUnitRq = -2; //-2 new inst rq after pipe - break; - } - if (tlvchScopePrefixes[state.tlvPrevCtlFlowChar]) - indentUnitRq = 1; // +1 new scope - break; - case "@": - if (state.tlvPrevCtlFlowChar == "S") - indentUnitRq = -1; // new pipe stage after stmts - if (state.tlvPrevCtlFlowChar == "|") - indentUnitRq = 1; // 1st pipe stage - break; - case "S": - if (state.tlvPrevCtlFlowChar == "@") - indentUnitRq = 1; // flow in pipe stage - if (tlvchScopePrefixes[state.tlvPrevCtlFlowChar]) - indentUnitRq = 1; // +1 new scope - break; - } - var statementIndentUnit = tlvindentUnit; - rtnIndent = curIndent + (indentUnitRq*statementIndentUnit); - return rtnIndent >= 0 ? rtnIndent : curIndent; + // Lines starting with these characters define scope (result in indentation). + var tlvScopePrefixChars = { + "/": "beh-hier", + ">": "beh-hier", + "-": "phys-hier", + "|": "pipe", + "?": "when", + "@": "stage", + "\\": "keyword" + }; + var tlvIndentUnit = 3; + var tlvTrackStatements = false; + var tlvIdentMatch = /^([~!@#\$%\^&\*-\+=\?\/\\\|'"<>]+)([\d\w_]*)/; // Matches an identifiere. + // Note that ':' is excluded, because of it's use in [:]. + var tlvFirstLevelIndentMatch = /^[! ] /; + var tlvLineIndentationMatch = /^[! ] */; + var tlvCommentMatch = /^\/[\/\*]/; + + + // Returns a style specific to the scope at the given indentation column. + // Type is one of: "indent", "scope-ident", "before-scope-ident". + function tlvScopeStyle(state, indentation, type) { + // Begin scope. + var depth = indentation / tlvIndentUnit; // TODO: Pass this in instead. 
+ return "tlv-" + state.tlvIndentationStyle[depth] + "-" + type; + } + + // Return true if the next thing in the stream is an identifier with a mnemonic. + function tlvIdentNext(stream) { + var match; + return (match = stream.match(tlvIdentMatch, false)) && match[2].length > 0; } CodeMirror.defineMIME("text/x-tlv", { name: "verilog", + hooks: { - "\\": function(stream, state) { - var vxIndent = 0, style = false; - var curPunc = stream.string; - if ((stream.sol()) && ((/\\SV/.test(stream.string)) || (/\\TLV/.test(stream.string)))) { - curPunc = (/\\TLV_version/.test(stream.string)) - ? "\\TLV_version" : stream.string; - stream.skipToEnd(); - if (curPunc == "\\SV" && state.vxCodeActive) {state.vxCodeActive = false;}; - if ((/\\TLV/.test(curPunc) && !state.vxCodeActive) - || (curPunc=="\\TLV_version" && state.vxCodeActive)) {state.vxCodeActive = true;}; - style = "keyword"; - state.tlvCurCtlFlowChar = state.tlvPrevPrevCtlFlowChar - = state.tlvPrevCtlFlowChar = ""; - if (state.vxCodeActive == true) { - state.tlvCurCtlFlowChar = "\\"; - vxIndent = tlvGenIndent(stream, state); + + electricInput: false, + + + // Return undefined for verilog tokenizing, or style for TLV token (null not used). + // Standard CM styles are used for most formatting, but some TL-Verilog-specific highlighting + // can be enabled with the definition of cm-tlv-* styles, including highlighting for: + // - M4 tokens + // - TLV scope indentation + // - Statement delimitation (enabled by tlvTrackStatements) + token: function(stream, state) { + var style = undefined; + var match; // Return value of pattern matches. + + // Set highlighting mode based on code region (TLV or SV). + if (stream.sol() && ! state.tlvInBlockComment) { + // Process region. + if (stream.peek() == '\\') { + style = "def"; + stream.skipToEnd(); + if (stream.string.match(/\\SV/)) { + state.tlvCodeActive = false; + } else if (stream.string.match(/\\TLV/)){ + state.tlvCodeActive = true; + } + } + // Correct indentation in the face of a line prefix char. + if (state.tlvCodeActive && stream.pos == 0 && + (state.indented == 0) && (match = stream.match(tlvLineIndentationMatch, false))) { + state.indented = match[0].length; } - state.vxIndentRq = vxIndent; + + // Compute indentation state: + // o Auto indentation on next line + // o Indentation scope styles + var indented = state.indented; + var depth = indented / tlvIndentUnit; + if (depth <= state.tlvIndentationStyle.length) { + // not deeper than current scope + + var blankline = stream.string.length == indented; + var chPos = depth * tlvIndentUnit; + if (chPos < stream.string.length) { + var bodyString = stream.string.slice(chPos); + var ch = bodyString[0]; + if (tlvScopePrefixChars[ch] && ((match = bodyString.match(tlvIdentMatch)) && + tlvIdentifierStyle[match[1]])) { + // This line begins scope. + // Next line gets indented one level. + indented += tlvIndentUnit; + // Style the next level of indentation (except non-region keyword identifiers, + // which are statements themselves) + if (!(ch == "\\" && chPos > 0)) { + state.tlvIndentationStyle[depth] = tlvScopePrefixChars[ch]; + if (tlvTrackStatements) {state.statementComment = false;} + depth++; + } + } + } + // Clear out deeper indentation levels unless line is blank. + if (!blankline) { + while (state.tlvIndentationStyle.length > depth) { + state.tlvIndentationStyle.pop(); + } + } + } + // Set next level of indentation. 
+ state.tlvNextIndent = indented; } - return style; - }, - tokenBase: function(stream, state) { - var vxIndent = 0, style = false; - var tlvisOperatorChar = /[\[\]=:]/; - var tlvkpScopePrefixs = { - "**":"variable-2", "*":"variable-2", "$$":"variable", "$":"variable", - "^^":"attribute", "^":"attribute"}; - var ch = stream.peek(); - var vxCurCtlFlowCharValueAtStart = state.tlvCurCtlFlowChar; - if (state.vxCodeActive == true) { - if (/[\[\]{}\(\);\:]/.test(ch)) { - // bypass nesting and 1 char punc - style = "meta"; - stream.next(); - } else if (ch == "/") { - stream.next(); - if (stream.eat("/")) { - stream.skipToEnd(); - style = "comment"; - state.tlvCurCtlFlowChar = "S"; + + if (state.tlvCodeActive) { + // Highlight as TLV. + + var beginStatement = false; + if (tlvTrackStatements) { + // This starts a statement if the position is at the scope level + // and we're not within a statement leading comment. + beginStatement = + (stream.peek() != " ") && // not a space + (style === undefined) && // not a region identifier + !state.tlvInBlockComment && // not in block comment + //!stream.match(tlvCommentMatch, false) && // not comment start + (stream.column() == state.tlvIndentationStyle.length * tlvIndentUnit); // at scope level + if (beginStatement) { + if (state.statementComment) { + // statement already started by comment + beginStatement = false; + } + state.statementComment = + stream.match(tlvCommentMatch, false); // comment start + } + } + + var match; + if (style !== undefined) { + // Region line. + style += " " + tlvScopeStyle(state, 0, "scope-ident") + } else if (((stream.pos / tlvIndentUnit) < state.tlvIndentationStyle.length) && + (match = stream.match(stream.sol() ? tlvFirstLevelIndentMatch : /^ /))) { + // Indentation + style = // make this style distinct from the previous one to prevent + // codemirror from combining spans + "tlv-indent-" + (((stream.pos % 2) == 0) ? "even" : "odd") + + // and style it + " " + tlvScopeStyle(state, stream.pos - tlvIndentUnit, "indent"); + // Style the line prefix character. + if (match[0].charAt(0) == "!") { + style += " tlv-alert-line-prefix"; + } + // Place a class before a scope identifier. + if (tlvIdentNext(stream)) { + style += " " + tlvScopeStyle(state, stream.pos, "before-scope-ident"); + } + } else if (state.tlvInBlockComment) { + // In a block comment. + if (stream.match(/^.*?\*\//)) { + // Exit block comment. + state.tlvInBlockComment = false; + if (tlvTrackStatements && !stream.eol()) { + // Anything after comment is assumed to be real statement content. + state.statementComment = false; + } } else { - stream.backUp(1); + stream.skipToEnd(); + } + style = "comment"; + } else if ((match = stream.match(tlvCommentMatch)) && !state.tlvInBlockComment) { + // Start comment. + if (match[0] == "//") { + // Line comment. + stream.skipToEnd(); + } else { + // Block comment. 
+ state.tlvInBlockComment = true; } - } else if (ch == "@") { - // pipeline stage - style = tlvchScopePrefixes[ch]; - state.tlvCurCtlFlowChar = "@"; - stream.next(); - stream.eatWhile(/[\w\$_]/); - } else if (stream.match(/\b[mM]4+/, true)) { // match: function(pattern, consume, caseInsensitive) + style = "comment"; + } else if (match = stream.match(tlvIdentMatch)) { + // looks like an identifier (or identifier prefix) + var prefix = match[1]; + var mnemonic = match[2]; + if (// is identifier prefix + tlvIdentifierStyle.hasOwnProperty(prefix) && + // has mnemonic or we're at the end of the line (maybe it hasn't been typed yet) + (mnemonic.length > 0 || stream.eol())) { + style = tlvIdentifierStyle[prefix]; + if (stream.column() == state.indented) { + // Begin scope. + style += " " + tlvScopeStyle(state, stream.column(), "scope-ident") + } + } else { + // Just swallow one character and try again. + // This enables subsequent identifier match with preceding symbol character, which + // is legal within a statement. (Eg, !$reset). It also enables detection of + // comment start with preceding symbols. + stream.backUp(stream.current().length - 1); + style = "tlv-default"; + } + } else if (stream.match(/^\t+/)) { + // Highlight tabs, which are illegal. + style = "tlv-tab"; + } else if (stream.match(/^[\[\]{}\(\);\:]+/)) { + // [:], (), {}, ;. + style = "meta"; + } else if (match = stream.match(/^[mM]4([\+_])?[\w\d_]*/)) { // m4 pre proc - stream.skipTo("("); - style = "def"; - state.tlvCurCtlFlowChar = "M"; - } else if (ch == "!" && stream.sol()) { - // v stmt in tlv region - // state.tlvCurCtlFlowChar = "S"; - style = "comment"; + style = (match[1] == "+") ? "tlv-m4-plus" : "tlv-m4"; + } else if (stream.match(/^ +/)){ + // Skip over spaces. + if (stream.eol()) { + // Trailing spaces. + style = "error"; + } else { + // Non-trailing spaces. + style = "tlv-default"; + } + } else if (stream.match(/^[\w\d_]+/)) { + // alpha-numeric token. + style = "number"; + } else { + // Eat the next char w/ no formatting. stream.next(); - } else if (tlvisOperatorChar.test(ch)) { - // operators - stream.eatWhile(tlvisOperatorChar); - style = "operator"; - } else if (ch == "#") { - // phy hier - state.tlvCurCtlFlowChar = (state.tlvCurCtlFlowChar == "") - ? ch : state.tlvCurCtlFlowChar; - stream.next(); - stream.eatWhile(/[+-]\d/); - style = "tag"; - } else if (tlvkpScopePrefixs.propertyIsEnumerable(ch)) { - // special TLV operators - style = tlvkpScopePrefixs[ch]; - state.tlvCurCtlFlowChar = state.tlvCurCtlFlowChar == "" ? "S" : state.tlvCurCtlFlowChar; // stmt - stream.next(); - stream.match(/[a-zA-Z_0-9]+/); - } else if (style = tlvchScopePrefixes[ch] || false) { - // special TLV operators - state.tlvCurCtlFlowChar = state.tlvCurCtlFlowChar == "" ? ch : state.tlvCurCtlFlowChar; - stream.next(); - stream.match(/[a-zA-Z_0-9]+/); + style = "tlv-default"; + } + if (beginStatement) { + style += " tlv-statement"; } - if (state.tlvCurCtlFlowChar != vxCurCtlFlowCharValueAtStart) { // flow change - vxIndent = tlvGenIndent(stream, state); - state.vxIndentRq = vxIndent; + } else { + if (stream.match(/^[mM]4([\w\d_]*)/)) { + // m4 pre proc + style = "tlv-m4"; } } return style; }, - token: function(stream, state) { - if (state.vxCodeActive == true && stream.sol() && state.tlvCurCtlFlowChar != "") { - state.tlvPrevPrevCtlFlowChar = state.tlvPrevCtlFlowChar; - state.tlvPrevCtlFlowChar = state.tlvCurCtlFlowChar; - state.tlvCurCtlFlowChar = ""; - } - }, + indent: function(state) { - return (state.vxCodeActive == true) ? 
state.vxIndentRq : -1; + return (state.tlvCodeActive == true) ? state.tlvNextIndent : -1; }, + startState: function(state) { - state.tlvCurCtlFlowChar = ""; - state.tlvPrevCtlFlowChar = ""; - state.tlvPrevPrevCtlFlowChar = ""; - state.vxCodeActive = true; - state.vxIndentRq = 0; + state.tlvIndentationStyle = []; // Styles to use for each level of indentation. + state.tlvCodeActive = true; // True when we're in a TLV region (and at beginning of file). + state.tlvNextIndent = -1; // The number of spaces to autoindent the next line if tlvCodeActive. + state.tlvInBlockComment = false; // True inside /**/ comment. + if (tlvTrackStatements) { + state.statementComment = false; // True inside a statement's header comment. + } } + } }); }); diff --git a/rhodecode/public/js/mode/vhdl/vhdl.js b/rhodecode/public/js/mode/vhdl/vhdl.js --- a/rhodecode/public/js/mode/vhdl/vhdl.js +++ b/rhodecode/public/js/mode/vhdl/vhdl.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE // Originally written by Alf Nielsen, re-written by Michael Zhou (function(mod) { @@ -36,7 +36,7 @@ CodeMirror.defineMode("vhdl", function(c multiLineStrings = parserConfig.multiLineStrings; var keywords = words("abs,access,after,alias,all,and,architecture,array,assert,attribute,begin,block," + - "body,buffer,bus,case,component,configuration,constant,disconnent,downto,else,elsif,end,end block,end case," + + "body,buffer,bus,case,component,configuration,constant,disconnect,downto,else,elsif,end,end block,end case," + "end component,end for,end generate,end if,end loop,end process,end record,end units,entity,exit,file,for," + "function,generate,generic,generic map,group,guarded,if,impure,in,inertial,inout,is,label,library,linkage," + "literal,loop,map,mod,nand,new,next,nor,null,of,on,open,or,others,out,package,package body,port,port map," + diff --git a/rhodecode/public/js/mode/vue/vue.js b/rhodecode/public/js/mode/vue/vue.js --- a/rhodecode/public/js/mode/vue/vue.js +++ b/rhodecode/public/js/mode/vue/vue.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function (mod) { "use strict"; @@ -12,7 +12,7 @@ require("../css/css"), require("../sass/sass"), require("../stylus/stylus"), - require("../jade/jade"), + require("../pug/pug"), require("../handlebars/handlebars")); } else if (typeof define === "function" && define.amd) { // AMD define(["../../lib/codemirror", @@ -23,7 +23,7 @@ "../css/css", "../sass/sass", "../stylus/stylus", - "../jade/jade", + "../pug/pug", "../handlebars/handlebars"], mod); } else { // Plain browser env mod(CodeMirror); @@ -32,19 +32,26 @@ var tagLanguages = { script: [ ["lang", /coffee(script)?/, "coffeescript"], - ["type", /^(?:text|application)\/(?:x-)?coffee(?:script)?$/, "coffeescript"] + ["type", /^(?:text|application)\/(?:x-)?coffee(?:script)?$/, "coffeescript"], + ["lang", /^babel$/, "javascript"], + ["type", /^text\/babel$/, "javascript"], + ["type", /^text\/ecmascript-\d+$/, "javascript"] ], style: [ ["lang", /^stylus$/i, "stylus"], ["lang", /^sass$/i, "sass"], + ["lang", /^less$/i, "text/x-less"], + ["lang", /^scss$/i, "text/x-scss"], ["type", /^(text\/)?(x-)?styl(us)?$/i, "stylus"], - ["type", /^text\/sass/i, "sass"] + ["type", /^text\/sass/i, "sass"], + ["type", 
/^(text\/)?(x-)?scss$/i, "text/x-scss"], + ["type", /^(text\/)?(x-)?less$/i, "text/x-less"] ], template: [ ["lang", /^vue-template$/i, "vue"], - ["lang", /^jade$/i, "jade"], + ["lang", /^pug$/i, "pug"], ["lang", /^handlebars$/i, "handlebars"], - ["type", /^(text\/)?(x-)?jade$/i, "jade"], + ["type", /^(text\/)?(x-)?pug$/i, "pug"], ["type", /^text\/x-handlebars-template$/i, "handlebars"], [null, null, "vue-template"] ] @@ -63,7 +70,8 @@ CodeMirror.defineMode("vue", function (config) { return CodeMirror.getMode(config, {name: "htmlmixed", tags: tagLanguages}); - }, "htmlmixed", "xml", "javascript", "coffeescript", "css", "sass", "stylus", "jade", "handlebars"); + }, "htmlmixed", "xml", "javascript", "coffeescript", "css", "sass", "stylus", "pug", "handlebars"); CodeMirror.defineMIME("script/x-vue", "vue"); + CodeMirror.defineMIME("text/x-vue", "vue"); }); diff --git a/rhodecode/public/js/mode/webidl/webidl.js b/rhodecode/public/js/mode/webidl/webidl.js new file mode 100644 --- /dev/null +++ b/rhodecode/public/js/mode/webidl/webidl.js @@ -0,0 +1,195 @@ +// CodeMirror, copyright (c) by Marijn Haverbeke and others +// Distributed under an MIT license: https://codemirror.net/LICENSE + +(function(mod) { + if (typeof exports == "object" && typeof module == "object") // CommonJS + mod(require("../../lib/codemirror")); + else if (typeof define == "function" && define.amd) // AMD + define(["../../lib/codemirror"], mod); + else // Plain browser env + mod(CodeMirror); +})(function(CodeMirror) { +"use strict"; + +function wordRegexp(words) { + return new RegExp("^((" + words.join(")|(") + "))\\b"); +}; + +var builtinArray = [ + "Clamp", + "Constructor", + "EnforceRange", + "Exposed", + "ImplicitThis", + "Global", "PrimaryGlobal", + "LegacyArrayClass", + "LegacyUnenumerableNamedProperties", + "LenientThis", + "NamedConstructor", + "NewObject", + "NoInterfaceObject", + "OverrideBuiltins", + "PutForwards", + "Replaceable", + "SameObject", + "TreatNonObjectAsNull", + "TreatNullAs", + "EmptyString", + "Unforgeable", + "Unscopeable" +]; +var builtins = wordRegexp(builtinArray); + +var typeArray = [ + "unsigned", "short", "long", // UnsignedIntegerType + "unrestricted", "float", "double", // UnrestrictedFloatType + "boolean", "byte", "octet", // Rest of PrimitiveType + "Promise", // PromiseType + "ArrayBuffer", "DataView", "Int8Array", "Int16Array", "Int32Array", + "Uint8Array", "Uint16Array", "Uint32Array", "Uint8ClampedArray", + "Float32Array", "Float64Array", // BufferRelatedType + "ByteString", "DOMString", "USVString", "sequence", "object", "RegExp", + "Error", "DOMException", "FrozenArray", // Rest of NonAnyType + "any", // Rest of SingleType + "void" // Rest of ReturnType +]; +var types = wordRegexp(typeArray); + +var keywordArray = [ + "attribute", "callback", "const", "deleter", "dictionary", "enum", "getter", + "implements", "inherit", "interface", "iterable", "legacycaller", "maplike", + "partial", "required", "serializer", "setlike", "setter", "static", + "stringifier", "typedef", // ArgumentNameKeyword except + // "unrestricted" + "optional", "readonly", "or" +]; +var keywords = wordRegexp(keywordArray); + +var atomArray = [ + "true", "false", // BooleanLiteral + "Infinity", "NaN", // FloatLiteral + "null" // Rest of ConstValue +]; +var atoms = wordRegexp(atomArray); + +CodeMirror.registerHelper("hintWords", "webidl", + builtinArray.concat(typeArray).concat(keywordArray).concat(atomArray)); + +var startDefArray = ["callback", "dictionary", "enum", "interface"]; +var startDefs = 
wordRegexp(startDefArray); + +var endDefArray = ["typedef"]; +var endDefs = wordRegexp(endDefArray); + +var singleOperators = /^[:<=>?]/; +var integers = /^-?([1-9][0-9]*|0[Xx][0-9A-Fa-f]+|0[0-7]*)/; +var floats = /^-?(([0-9]+\.[0-9]*|[0-9]*\.[0-9]+)([Ee][+-]?[0-9]+)?|[0-9]+[Ee][+-]?[0-9]+)/; +var identifiers = /^_?[A-Za-z][0-9A-Z_a-z-]*/; +var identifiersEnd = /^_?[A-Za-z][0-9A-Z_a-z-]*(?=\s*;)/; +var strings = /^"[^"]*"/; +var multilineComments = /^\/\*.*?\*\//; +var multilineCommentsStart = /^\/\*.*/; +var multilineCommentsEnd = /^.*?\*\//; + +function readToken(stream, state) { + // whitespace + if (stream.eatSpace()) return null; + + // comment + if (state.inComment) { + if (stream.match(multilineCommentsEnd)) { + state.inComment = false; + return "comment"; + } + stream.skipToEnd(); + return "comment"; + } + if (stream.match("//")) { + stream.skipToEnd(); + return "comment"; + } + if (stream.match(multilineComments)) return "comment"; + if (stream.match(multilineCommentsStart)) { + state.inComment = true; + return "comment"; + } + + // integer and float + if (stream.match(/^-?[0-9\.]/, false)) { + if (stream.match(integers) || stream.match(floats)) return "number"; + } + + // string + if (stream.match(strings)) return "string"; + + // identifier + if (state.startDef && stream.match(identifiers)) return "def"; + + if (state.endDef && stream.match(identifiersEnd)) { + state.endDef = false; + return "def"; + } + + if (stream.match(keywords)) return "keyword"; + + if (stream.match(types)) { + var lastToken = state.lastToken; + var nextToken = (stream.match(/^\s*(.+?)\b/, false) || [])[1]; + + if (lastToken === ":" || lastToken === "implements" || + nextToken === "implements" || nextToken === "=") { + // Used as identifier + return "builtin"; + } else { + // Used as type + return "variable-3"; + } + } + + if (stream.match(builtins)) return "builtin"; + if (stream.match(atoms)) return "atom"; + if (stream.match(identifiers)) return "variable"; + + // other + if (stream.match(singleOperators)) return "operator"; + + // unrecognized + stream.next(); + return null; +}; + +CodeMirror.defineMode("webidl", function() { + return { + startState: function() { + return { + // Is in multiline comment + inComment: false, + // Last non-whitespace, matched token + lastToken: "", + // Next token is a definition + startDef: false, + // Last token of the statement is a definition + endDef: false + }; + }, + token: function(stream, state) { + var style = readToken(stream, state); + + if (style) { + var cur = stream.current(); + state.lastToken = cur; + if (style === "keyword") { + state.startDef = startDefs.test(cur); + state.endDef = state.endDef || endDefs.test(cur); + } else { + state.startDef = false; + } + } + + return style; + } + }; +}); + +CodeMirror.defineMIME("text/x-webidl", "webidl"); +}); diff --git a/rhodecode/public/js/mode/xml/xml.js b/rhodecode/public/js/mode/xml/xml.js --- a/rhodecode/public/js/mode/xml/xml.js +++ b/rhodecode/public/js/mode/xml/xml.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS @@ -52,6 +52,7 @@ var xmlConfig = { doNotIndent: {}, allowUnquoted: false, allowMissing: false, + allowMissingTagName: false, caseFold: false } @@ -162,8 +163,9 @@ CodeMirror.defineMode("xml", function(ed stream.next(); } return style; - }; + } 
} + function doctype(depth) { return function(stream, state) { var ch; @@ -226,6 +228,9 @@ CodeMirror.defineMode("xml", function(ed state.tagName = stream.current(); setStyle = "tag"; return attrState; + } else if (config.allowMissingTagName && type == "endTag") { + setStyle = "tag bracket"; + return attrState(type, stream, state); } else { setStyle = "error"; return tagNameState; @@ -237,13 +242,16 @@ CodeMirror.defineMode("xml", function(ed if (state.context && state.context.tagName != tagName && config.implicitlyClosed.hasOwnProperty(state.context.tagName)) popContext(state); - if (state.context && state.context.tagName == tagName) { + if ((state.context && state.context.tagName == tagName) || config.matchClosing === false) { setStyle = "tag"; return closeState; } else { setStyle = "tag error"; return closeStateErr; } + } else if (config.allowMissingTagName && type == "endTag") { + setStyle = "tag bracket"; + return closeState(type, stream, state); } else { setStyle = "error"; return closeStateErr; @@ -382,6 +390,17 @@ CodeMirror.defineMode("xml", function(ed skipAttribute: function(state) { if (state.state == attrValueState) state.state = attrState + }, + + xmlCurrentTag: function(state) { + return state.tagName ? {name: state.tagName, close: state.type == "closeTag"} : null + }, + + xmlCurrentContext: function(state) { + var context = [] + for (var cx = state.context; cx; cx = cx.prev) + if (cx.tagName) context.push(cx.tagName) + return context.reverse() } }; }); diff --git a/rhodecode/public/js/mode/xquery/xquery.js b/rhodecode/public/js/mode/xquery/xquery.js --- a/rhodecode/public/js/mode/xquery/xquery.js +++ b/rhodecode/public/js/mode/xquery/xquery.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS @@ -17,43 +17,54 @@ CodeMirror.defineMode("xquery", function // function. Each keyword is a property of the keywords object whose // value is {type: atype, style: astyle} var keywords = function(){ - // conveinence functions used to build keywords object + // convenience functions used to build keywords object function kw(type) {return {type: type, style: "keyword"};} - var A = kw("keyword a") - , B = kw("keyword b") - , C = kw("keyword c") - , operator = kw("operator") + var operator = kw("operator") , atom = {type: "atom", style: "atom"} , punctuation = {type: "punctuation", style: null} , qualifier = {type: "axis_specifier", style: "qualifier"}; // kwObj is what is return from this function at the end var kwObj = { - 'if': A, 'switch': A, 'while': A, 'for': A, - 'else': B, 'then': B, 'try': B, 'finally': B, 'catch': B, - 'element': C, 'attribute': C, 'let': C, 'implements': C, 'import': C, 'module': C, 'namespace': C, - 'return': C, 'super': C, 'this': C, 'throws': C, 'where': C, 'private': C, - ',': punctuation, - 'null': atom, 'fn:false()': atom, 'fn:true()': atom + ',': punctuation }; // a list of 'basic' keywords. For each add a property to kwObj with the value of // {type: basic[i], style: "keyword"} e.g. 
'after' --> {type: "after", style: "keyword"} - var basic = ['after','ancestor','ancestor-or-self','and','as','ascending','assert','attribute','before', - 'by','case','cast','child','comment','declare','default','define','descendant','descendant-or-self', - 'descending','document','document-node','element','else','eq','every','except','external','following', - 'following-sibling','follows','for','function','if','import','in','instance','intersect','item', - 'let','module','namespace','node','node','of','only','or','order','parent','precedes','preceding', - 'preceding-sibling','processing-instruction','ref','return','returns','satisfies','schema','schema-element', - 'self','some','sortby','stable','text','then','to','treat','typeswitch','union','variable','version','where', - 'xquery', 'empty-sequence']; + var basic = ['after', 'all', 'allowing', 'ancestor', 'ancestor-or-self', 'any', 'array', 'as', + 'ascending', 'at', 'attribute', 'base-uri', 'before', 'boundary-space', 'by', 'case', 'cast', + 'castable', 'catch', 'child', 'collation', 'comment', 'construction', 'contains', 'content', + 'context', 'copy', 'copy-namespaces', 'count', 'decimal-format', 'declare', 'default', 'delete', + 'descendant', 'descendant-or-self', 'descending', 'diacritics', 'different', 'distance', + 'document', 'document-node', 'element', 'else', 'empty', 'empty-sequence', 'encoding', 'end', + 'entire', 'every', 'exactly', 'except', 'external', 'first', 'following', 'following-sibling', + 'for', 'from', 'ftand', 'ftnot', 'ft-option', 'ftor', 'function', 'fuzzy', 'greatest', 'group', + 'if', 'import', 'in', 'inherit', 'insensitive', 'insert', 'instance', 'intersect', 'into', + 'invoke', 'is', 'item', 'language', 'last', 'lax', 'least', 'let', 'levels', 'lowercase', 'map', + 'modify', 'module', 'most', 'namespace', 'next', 'no', 'node', 'nodes', 'no-inherit', + 'no-preserve', 'not', 'occurs', 'of', 'only', 'option', 'order', 'ordered', 'ordering', + 'paragraph', 'paragraphs', 'parent', 'phrase', 'preceding', 'preceding-sibling', 'preserve', + 'previous', 'processing-instruction', 'relationship', 'rename', 'replace', 'return', + 'revalidation', 'same', 'satisfies', 'schema', 'schema-attribute', 'schema-element', 'score', + 'self', 'sensitive', 'sentence', 'sentences', 'sequence', 'skip', 'sliding', 'some', 'stable', + 'start', 'stemming', 'stop', 'strict', 'strip', 'switch', 'text', 'then', 'thesaurus', 'times', + 'to', 'transform', 'treat', 'try', 'tumbling', 'type', 'typeswitch', 'union', 'unordered', + 'update', 'updating', 'uppercase', 'using', 'validate', 'value', 'variable', 'version', + 'weight', 'when', 'where', 'wildcards', 'window', 'with', 'without', 'word', 'words', 'xquery']; for(var i=0, l=basic.length; i < l; i++) { kwObj[basic[i]] = kw(basic[i]);}; // a list of types. 
For each add a property to kwObj with the value of // {type: "atom", style: "atom"} - var types = ['xs:string', 'xs:float', 'xs:decimal', 'xs:double', 'xs:integer', 'xs:boolean', 'xs:date', 'xs:dateTime', - 'xs:time', 'xs:duration', 'xs:dayTimeDuration', 'xs:time', 'xs:yearMonthDuration', 'numeric', 'xs:hexBinary', - 'xs:base64Binary', 'xs:anyURI', 'xs:QName', 'xs:byte','xs:boolean','xs:anyURI','xf:yearMonthDuration']; + var types = ['xs:anyAtomicType', 'xs:anySimpleType', 'xs:anyType', 'xs:anyURI', + 'xs:base64Binary', 'xs:boolean', 'xs:byte', 'xs:date', 'xs:dateTime', 'xs:dateTimeStamp', + 'xs:dayTimeDuration', 'xs:decimal', 'xs:double', 'xs:duration', 'xs:ENTITIES', 'xs:ENTITY', + 'xs:float', 'xs:gDay', 'xs:gMonth', 'xs:gMonthDay', 'xs:gYear', 'xs:gYearMonth', 'xs:hexBinary', + 'xs:ID', 'xs:IDREF', 'xs:IDREFS', 'xs:int', 'xs:integer', 'xs:item', 'xs:java', 'xs:language', + 'xs:long', 'xs:Name', 'xs:NCName', 'xs:negativeInteger', 'xs:NMTOKEN', 'xs:NMTOKENS', + 'xs:nonNegativeInteger', 'xs:nonPositiveInteger', 'xs:normalizedString', 'xs:NOTATION', + 'xs:numeric', 'xs:positiveInteger', 'xs:precisionDecimal', 'xs:QName', 'xs:short', 'xs:string', + 'xs:time', 'xs:token', 'xs:unsignedByte', 'xs:unsignedInt', 'xs:unsignedLong', + 'xs:unsignedShort', 'xs:untyped', 'xs:untypedAtomic', 'xs:yearMonthDuration']; for(var i=0, l=types.length; i < l; i++) { kwObj[types[i]] = atom;}; // each operator will add a property to kwObj with value of {type: "operator", style: "keyword"} @@ -102,7 +113,7 @@ CodeMirror.defineMode("xquery", function } // start code block else if(ch == "{") { - pushStateStack(state,{ type: "codeblock"}); + pushStateStack(state, { type: "codeblock"}); return null; } // end code block @@ -132,7 +143,7 @@ CodeMirror.defineMode("xquery", function return chain(stream, state, tokenComment); } // quoted string - else if ( !isEQName && (ch === '"' || ch === "'")) + else if (!isEQName && (ch === '"' || ch === "'")) return chain(stream, state, tokenString(ch)); // variable else if(ch === "$") { diff --git a/rhodecode/public/js/mode/yacas/yacas.js b/rhodecode/public/js/mode/yacas/yacas.js new file mode 100644 --- /dev/null +++ b/rhodecode/public/js/mode/yacas/yacas.js @@ -0,0 +1,204 @@ +// CodeMirror, copyright (c) by Marijn Haverbeke and others +// Distributed under an MIT license: https://codemirror.net/LICENSE + +// Yacas mode copyright (c) 2015 by Grzegorz Mazur +// Loosely based on mathematica mode by Calin Barbat + +(function(mod) { + if (typeof exports == "object" && typeof module == "object") // CommonJS + mod(require("../../lib/codemirror")); + else if (typeof define == "function" && define.amd) // AMD + define(["../../lib/codemirror"], mod); + else // Plain browser env + mod(CodeMirror); +})(function(CodeMirror) { +"use strict"; + +CodeMirror.defineMode('yacas', function(_config, _parserConfig) { + + function words(str) { + var obj = {}, words = str.split(" "); + for (var i = 0; i < words.length; ++i) obj[words[i]] = true; + return obj; + } + + var bodiedOps = words("Assert BackQuote D Defun Deriv For ForEach FromFile " + + "FromString Function Integrate InverseTaylor Limit " + + "LocalSymbols Macro MacroRule MacroRulePattern " + + "NIntegrate Rule RulePattern Subst TD TExplicitSum " + + "TSum Taylor Taylor1 Taylor2 Taylor3 ToFile " + + "ToStdout ToString TraceRule Until While"); + + // patterns + var pFloatForm = "(?:(?:\\.\\d+|\\d+\\.\\d*|\\d+)(?:[eE][+-]?\\d+)?)"; + var pIdentifier = "(?:[a-zA-Z\\$'][a-zA-Z0-9\\$']*)"; + + // regular expressions + var reFloatForm = new 
RegExp(pFloatForm); + var reIdentifier = new RegExp(pIdentifier); + var rePattern = new RegExp(pIdentifier + "?_" + pIdentifier); + var reFunctionLike = new RegExp(pIdentifier + "\\s*\\("); + + function tokenBase(stream, state) { + var ch; + + // get next character + ch = stream.next(); + + // string + if (ch === '"') { + state.tokenize = tokenString; + return state.tokenize(stream, state); + } + + // comment + if (ch === '/') { + if (stream.eat('*')) { + state.tokenize = tokenComment; + return state.tokenize(stream, state); + } + if (stream.eat("/")) { + stream.skipToEnd(); + return "comment"; + } + } + + // go back one character + stream.backUp(1); + + // update scope info + var m = stream.match(/^(\w+)\s*\(/, false); + if (m !== null && bodiedOps.hasOwnProperty(m[1])) + state.scopes.push('bodied'); + + var scope = currentScope(state); + + if (scope === 'bodied' && ch === '[') + state.scopes.pop(); + + if (ch === '[' || ch === '{' || ch === '(') + state.scopes.push(ch); + + scope = currentScope(state); + + if (scope === '[' && ch === ']' || + scope === '{' && ch === '}' || + scope === '(' && ch === ')') + state.scopes.pop(); + + if (ch === ';') { + while (scope === 'bodied') { + state.scopes.pop(); + scope = currentScope(state); + } + } + + // look for ordered rules + if (stream.match(/\d+ *#/, true, false)) { + return 'qualifier'; + } + + // look for numbers + if (stream.match(reFloatForm, true, false)) { + return 'number'; + } + + // look for placeholders + if (stream.match(rePattern, true, false)) { + return 'variable-3'; + } + + // match all braces separately + if (stream.match(/(?:\[|\]|{|}|\(|\))/, true, false)) { + return 'bracket'; + } + + // literals looking like function calls + if (stream.match(reFunctionLike, true, false)) { + stream.backUp(1); + return 'variable'; + } + + // all other identifiers + if (stream.match(reIdentifier, true, false)) { + return 'variable-2'; + } + + // operators; note that operators like @@ or /; are matched separately for each symbol. 
+ if (stream.match(/(?:\\|\+|\-|\*|\/|,|;|\.|:|@|~|=|>|<|&|\||_|`|'|\^|\?|!|%|#)/, true, false)) { + return 'operator'; + } + + // everything else is an error + return 'error'; + } + + function tokenString(stream, state) { + var next, end = false, escaped = false; + while ((next = stream.next()) != null) { + if (next === '"' && !escaped) { + end = true; + break; + } + escaped = !escaped && next === '\\'; + } + if (end && !escaped) { + state.tokenize = tokenBase; + } + return 'string'; + }; + + function tokenComment(stream, state) { + var prev, next; + while((next = stream.next()) != null) { + if (prev === '*' && next === '/') { + state.tokenize = tokenBase; + break; + } + prev = next; + } + return 'comment'; + } + + function currentScope(state) { + var scope = null; + if (state.scopes.length > 0) + scope = state.scopes[state.scopes.length - 1]; + return scope; + } + + return { + startState: function() { + return { + tokenize: tokenBase, + scopes: [] + }; + }, + token: function(stream, state) { + if (stream.eatSpace()) return null; + return state.tokenize(stream, state); + }, + indent: function(state, textAfter) { + if (state.tokenize !== tokenBase && state.tokenize !== null) + return CodeMirror.Pass; + + var delta = 0; + if (textAfter === ']' || textAfter === '];' || + textAfter === '}' || textAfter === '};' || + textAfter === ');') + delta = -1; + + return (state.scopes.length + delta) * _config.indentUnit; + }, + electricChars: "{}[]();", + blockCommentStart: "/*", + blockCommentEnd: "*/", + lineComment: "//" + }; +}); + +CodeMirror.defineMIME('text/x-yacas', { + name: 'yacas' +}); + +}); diff --git a/rhodecode/public/js/mode/yaml-frontmatter/yaml-frontmatter.js b/rhodecode/public/js/mode/yaml-frontmatter/yaml-frontmatter.js --- a/rhodecode/public/js/mode/yaml-frontmatter/yaml-frontmatter.js +++ b/rhodecode/public/js/mode/yaml-frontmatter/yaml-frontmatter.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function (mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS @@ -45,7 +45,7 @@ return innerMode.token(stream, state.inner) } } else if (state.state == FRONTMATTER) { - var end = stream.sol() && stream.match(/---/, false) + var end = stream.sol() && stream.match(/(---|\.\.\.)/, false) var style = yamlMode.token(stream, state.inner) if (end) { state.state = BODY @@ -65,4 +65,4 @@ } } }) -}) +}); diff --git a/rhodecode/public/js/mode/yaml/yaml.js b/rhodecode/public/js/mode/yaml/yaml.js --- a/rhodecode/public/js/mode/yaml/yaml.js +++ b/rhodecode/public/js/mode/yaml/yaml.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS @@ -108,10 +108,13 @@ CodeMirror.defineMode("yaml", function() literal: false, escaped: false }; - } + }, + lineComment: "#", + fold: "indent" }; }); CodeMirror.defineMIME("text/x-yaml", "yaml"); +CodeMirror.defineMIME("text/yaml", "yaml"); }); diff --git a/rhodecode/public/js/mode/z80/z80.js b/rhodecode/public/js/mode/z80/z80.js --- a/rhodecode/public/js/mode/z80/z80.js +++ b/rhodecode/public/js/mode/z80/z80.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: 
http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS diff --git a/rhodecode/public/js/rhodecode/base/keyboard-bindings.js b/rhodecode/public/js/rhodecode/base/keyboard-bindings.js --- a/rhodecode/public/js/rhodecode/base/keyboard-bindings.js +++ b/rhodecode/public/js/rhodecode/base/keyboard-bindings.js @@ -119,6 +119,10 @@ function setRCMouseBindings(repoName, re 'f_path': '' }); }); + Mousetrap.bind(['g p'], function(e) { + window.location = pyroutes.url( + 'pullrequest_show_all', {'repo_name': repoName}); + }); Mousetrap.bind(['g o'], function(e) { window.location = pyroutes.url( 'edit_repo', {'repo_name': repoName}); diff --git a/rhodecode/public/js/rhodecode/routes.js b/rhodecode/public/js/rhodecode/routes.js --- a/rhodecode/public/js/rhodecode/routes.js +++ b/rhodecode/public/js/rhodecode/routes.js @@ -14,6 +14,7 @@ function registerRCRoutes() { // routes registration pyroutes.register('favicon', '/favicon.ico', []); pyroutes.register('robots', '/robots.txt', []); + pyroutes.register('auth_home', '/_admin/auth*traverse', []); pyroutes.register('global_integrations_new', '/_admin/integrations/new', []); pyroutes.register('global_integrations_home', '/_admin/integrations', []); pyroutes.register('global_integrations_list', '/_admin/integrations/%(integration)s', ['integration']); @@ -29,7 +30,10 @@ function registerRCRoutes() { pyroutes.register('repo_integrations_list', '/%(repo_name)s/settings/integrations/%(integration)s', ['repo_name', 'integration']); pyroutes.register('repo_integrations_create', '/%(repo_name)s/settings/integrations/%(integration)s/new', ['repo_name', 'integration']); pyroutes.register('repo_integrations_edit', '/%(repo_name)s/settings/integrations/%(integration)s/%(integration_id)s', ['repo_name', 'integration', 'integration_id']); - pyroutes.register('auth_home', '/_admin/auth*traverse', []); + pyroutes.register('hovercard_user', '/_hovercard/user/%(user_id)s', ['user_id']); + pyroutes.register('hovercard_user_group', '/_hovercard/user_group/%(user_group_id)s', ['user_group_id']); + pyroutes.register('hovercard_pull_request', '/_hovercard/pull_request/%(pull_request_id)s', ['pull_request_id']); + pyroutes.register('hovercard_repo_commit', '/_hovercard/commit/%(repo_name)s/%(commit_id)s', ['repo_name', 'commit_id']); pyroutes.register('ops_ping', '/_admin/ops/ping', []); pyroutes.register('ops_error_test', '/_admin/ops/error', []); pyroutes.register('ops_redirect_test', '/_admin/ops/redirect', []); @@ -121,6 +125,7 @@ function registerRCRoutes() { pyroutes.register('edit_user_groups_management', '/_admin/users/%(user_id)s/edit/groups_management', ['user_id']); pyroutes.register('edit_user_groups_management_updates', '/_admin/users/%(user_id)s/edit/edit_user_groups_management/updates', ['user_id']); pyroutes.register('edit_user_audit_logs', '/_admin/users/%(user_id)s/edit/audit', ['user_id']); + pyroutes.register('edit_user_audit_logs_download', '/_admin/users/%(user_id)s/edit/audit/download', ['user_id']); pyroutes.register('edit_user_caches', '/_admin/users/%(user_id)s/edit/caches', ['user_id']); pyroutes.register('edit_user_caches_update', '/_admin/users/%(user_id)s/edit/caches/update', ['user_id']); pyroutes.register('user_groups', '/_admin/user_groups', []); @@ -128,6 +133,7 @@ function registerRCRoutes() { pyroutes.register('user_groups_new', '/_admin/user_groups/new', []); pyroutes.register('user_groups_create', 
'/_admin/user_groups/create', []); pyroutes.register('repos', '/_admin/repos', []); + pyroutes.register('repos_data', '/_admin/repos_data', []); pyroutes.register('repo_new', '/_admin/repos/new', []); pyroutes.register('repo_create', '/_admin/repos/create', []); pyroutes.register('repo_groups', '/_admin/repo_groups', []); @@ -139,10 +145,13 @@ function registerRCRoutes() { pyroutes.register('channelstream_proxy', '/_channelstream', []); pyroutes.register('upload_file', '/_file_store/upload', []); pyroutes.register('download_file', '/_file_store/download/%(fid)s', ['fid']); + pyroutes.register('download_file_by_token', '/_file_store/token-download/%(_auth_token)s/%(fid)s', ['_auth_token', 'fid']); pyroutes.register('logout', '/_admin/logout', []); pyroutes.register('reset_password', '/_admin/password_reset', []); pyroutes.register('reset_password_confirmation', '/_admin/password_reset_confirmation', []); pyroutes.register('home', '/', []); + pyroutes.register('main_page_repos_data', '/_home_repos', []); + pyroutes.register('main_page_repo_groups_data', '/_home_repo_groups', []); pyroutes.register('user_autocomplete_data', '/_users', []); pyroutes.register('user_group_autocomplete_data', '/_user_groups', []); pyroutes.register('repo_list_data', '/_repos', []); @@ -173,6 +182,7 @@ function registerRCRoutes() { pyroutes.register('repo_commit_data', '/%(repo_name)s/changeset-data/%(commit_id)s', ['repo_name', 'commit_id']); pyroutes.register('repo_commit_comment_create', '/%(repo_name)s/changeset/%(commit_id)s/comment/create', ['repo_name', 'commit_id']); pyroutes.register('repo_commit_comment_preview', '/%(repo_name)s/changeset/%(commit_id)s/comment/preview', ['repo_name', 'commit_id']); + pyroutes.register('repo_commit_comment_attachment_upload', '/%(repo_name)s/changeset/%(commit_id)s/comment/attachment_upload', ['repo_name', 'commit_id']); pyroutes.register('repo_commit_comment_delete', '/%(repo_name)s/changeset/%(commit_id)s/comment/%(comment_id)s/delete', ['repo_name', 'commit_id', 'comment_id']); pyroutes.register('repo_commit_raw_deprecated', '/%(repo_name)s/raw-changeset/%(commit_id)s', ['repo_name', 'commit_id']); pyroutes.register('repo_archivefile', '/%(repo_name)s/archive/%(fname)s', ['repo_name', 'fname']); @@ -336,6 +346,8 @@ function registerRCRoutes() { pyroutes.register('gist_show_formatted', '/_admin/gists/%(gist_id)s/%(revision)s/%(format)s', ['gist_id', 'revision', 'format']); pyroutes.register('gist_show_formatted_path', '/_admin/gists/%(gist_id)s/%(revision)s/%(format)s/%(f_path)s', ['gist_id', 'revision', 'format', 'f_path']); pyroutes.register('debug_style_home', '/_admin/debug_style', []); + pyroutes.register('debug_style_email', '/_admin/debug_style/email/%(email_id)s', ['email_id']); + pyroutes.register('debug_style_email_plain_rendered', '/_admin/debug_style/email-rendered/%(email_id)s', ['email_id']); pyroutes.register('debug_style_template', '/_admin/debug_style/t/%(t_path)s', ['t_path']); pyroutes.register('apiv2', '/_admin/api', []); pyroutes.register('admin_settings_license', '/_admin/settings/license', []); @@ -369,7 +381,9 @@ function registerRCRoutes() { pyroutes.register('repo_artifacts_new', '/%(repo_name)s/artifacts/new', ['repo_name']); pyroutes.register('repo_artifacts_get', '/%(repo_name)s/artifacts/download/%(uid)s', ['repo_name', 'uid']); pyroutes.register('repo_artifacts_store', '/%(repo_name)s/artifacts/store', ['repo_name']); + pyroutes.register('repo_artifacts_info', '/%(repo_name)s/artifacts/info/%(uid)s', ['repo_name', 'uid']); 
pyroutes.register('repo_artifacts_delete', '/%(repo_name)s/artifacts/delete/%(uid)s', ['repo_name', 'uid']); + pyroutes.register('repo_artifacts_update', '/%(repo_name)s/artifacts/update/%(uid)s', ['repo_name', 'uid']); pyroutes.register('repo_automation', '/%(repo_name)s/settings/automation', ['repo_name']); pyroutes.register('repo_automation_update', '/%(repo_name)s/settings/automation/%(entry_id)s/update', ['repo_name', 'entry_id']); pyroutes.register('edit_repo_remote_push', '/%(repo_name)s/settings/remote/push', ['repo_name']); diff --git a/rhodecode/public/js/src/codemirror/codemirror.js b/rhodecode/public/js/src/codemirror/codemirror.js --- a/rhodecode/public/js/src/codemirror/codemirror.js +++ b/rhodecode/public/js/src/codemirror/codemirror.js @@ -1,23 +1,17 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE - -// This is CodeMirror (http://codemirror.net), a code editor +// Distributed under an MIT license: https://codemirror.net/LICENSE + +// This is CodeMirror (https://codemirror.net), a code editor // implemented in JavaScript on top of the browser's DOM. // // You can find some technical background for some of the code below // at http://marijnhaverbeke.nl/blog/#cm-internals . -(function(mod) { - if (typeof exports == "object" && typeof module == "object") // CommonJS - module.exports = mod(); - else if (typeof define == "function" && define.amd) // AMD - return define([], mod); - else // Plain browser env - (this || window).CodeMirror = mod(); -})(function() { - "use strict"; - - // BROWSER SNIFFING +(function (global, factory) { + typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory() : + typeof define === 'function' && define.amd ? define(factory) : + (global.CodeMirror = factory()); +}(this, (function () { 'use strict'; // Kludges for bugs and behavior differences that can't be feature // detected are enabled based on userAgent etc sniffing. @@ -27,113 +21,4314 @@ var gecko = /gecko\/\d/i.test(userAgent); var ie_upto10 = /MSIE \d/.test(userAgent); var ie_11up = /Trident\/(?:[7-9]|\d{2,})\..*rv:(\d+)/.exec(userAgent); - var ie = ie_upto10 || ie_11up; - var ie_version = ie && (ie_upto10 ? document.documentMode || 6 : ie_11up[1]); - var webkit = /WebKit\//.test(userAgent); + var edge = /Edge\/(\d+)/.exec(userAgent); + var ie = ie_upto10 || ie_11up || edge; + var ie_version = ie && (ie_upto10 ? document.documentMode || 6 : +(edge || ie_11up)[1]); + var webkit = !edge && /WebKit\//.test(userAgent); var qtwebkit = webkit && /Qt\/\d+\.\d+/.test(userAgent); - var chrome = /Chrome\//.test(userAgent); + var chrome = !edge && /Chrome\//.test(userAgent); var presto = /Opera\//.test(userAgent); var safari = /Apple Computer/.test(navigator.vendor); var mac_geMountainLion = /Mac OS X 1\d\D([8-9]|\d\d)\D/.test(userAgent); var phantom = /PhantomJS/.test(userAgent); - var ios = /AppleWebKit/.test(userAgent) && /Mobile\/\w+/.test(userAgent); + var ios = !edge && /AppleWebKit/.test(userAgent) && /Mobile\/\w+/.test(userAgent); + var android = /Android/.test(userAgent); // This is woefully incomplete. Suggestions for alternative methods welcome. 
- var mobile = ios || /Android|webOS|BlackBerry|Opera Mini|Opera Mobi|IEMobile/i.test(userAgent); + var mobile = ios || android || /webOS|BlackBerry|Opera Mini|Opera Mobi|IEMobile/i.test(userAgent); var mac = ios || /Mac/.test(platform); + var chromeOS = /\bCrOS\b/.test(userAgent); var windows = /win/i.test(platform); var presto_version = presto && userAgent.match(/Version\/(\d*\.\d*)/); - if (presto_version) presto_version = Number(presto_version[1]); + if (presto_version) { presto_version = Number(presto_version[1]); } if (presto_version && presto_version >= 15) { presto = false; webkit = true; } // Some browsers use the wrong event properties to signal cmd/ctrl on OS X var flipCtrlCmd = mac && (qtwebkit || presto && (presto_version == null || presto_version < 12.11)); var captureRightClick = gecko || (ie && ie_version >= 9); + function classTest(cls) { return new RegExp("(^|\\s)" + cls + "(?:$|\\s)\\s*") } + + var rmClass = function(node, cls) { + var current = node.className; + var match = classTest(cls).exec(current); + if (match) { + var after = current.slice(match.index + match[0].length); + node.className = current.slice(0, match.index) + (after ? match[1] + after : ""); + } + }; + + function removeChildren(e) { + for (var count = e.childNodes.length; count > 0; --count) + { e.removeChild(e.firstChild); } + return e + } + + function removeChildrenAndAdd(parent, e) { + return removeChildren(parent).appendChild(e) + } + + function elt(tag, content, className, style) { + var e = document.createElement(tag); + if (className) { e.className = className; } + if (style) { e.style.cssText = style; } + if (typeof content == "string") { e.appendChild(document.createTextNode(content)); } + else if (content) { for (var i = 0; i < content.length; ++i) { e.appendChild(content[i]); } } + return e + } + // wrapper for elt, which removes the elt from the accessibility tree + function eltP(tag, content, className, style) { + var e = elt(tag, content, className, style); + e.setAttribute("role", "presentation"); + return e + } + + var range; + if (document.createRange) { range = function(node, start, end, endNode) { + var r = document.createRange(); + r.setEnd(endNode || node, end); + r.setStart(node, start); + return r + }; } + else { range = function(node, start, end) { + var r = document.body.createTextRange(); + try { r.moveToElementText(node.parentNode); } + catch(e) { return r } + r.collapse(true); + r.moveEnd("character", end); + r.moveStart("character", start); + return r + }; } + + function contains(parent, child) { + if (child.nodeType == 3) // Android browser always returns false when child is a textnode + { child = child.parentNode; } + if (parent.contains) + { return parent.contains(child) } + do { + if (child.nodeType == 11) { child = child.host; } + if (child == parent) { return true } + } while (child = child.parentNode) + } + + function activeElt() { + // IE and Edge may throw an "Unspecified Error" when accessing document.activeElement. + // IE < 10 will throw when accessed while the page is loading or in an iframe. + // IE > 9 and Edge will throw when accessed in an iframe if document.body is unavailable. 
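// A brief illustration (assuming standard shadow-DOM focus retargeting; not
// part of this patch): when focus sits inside a shadow tree,
// document.activeElement reports only the shadow *host*, so the loop below
// follows shadowRoot.activeElement to reach the element that actually holds
// focus, e.g. a textarea rendered inside a custom element.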
+ var activeElement; + try { + activeElement = document.activeElement; + } catch(e) { + activeElement = document.body || null; + } + while (activeElement && activeElement.shadowRoot && activeElement.shadowRoot.activeElement) + { activeElement = activeElement.shadowRoot.activeElement; } + return activeElement + } + + function addClass(node, cls) { + var current = node.className; + if (!classTest(cls).test(current)) { node.className += (current ? " " : "") + cls; } + } + function joinClasses(a, b) { + var as = a.split(" "); + for (var i = 0; i < as.length; i++) + { if (as[i] && !classTest(as[i]).test(b)) { b += " " + as[i]; } } + return b + } + + var selectInput = function(node) { node.select(); }; + if (ios) // Mobile Safari apparently has a bug where select() is broken. + { selectInput = function(node) { node.selectionStart = 0; node.selectionEnd = node.value.length; }; } + else if (ie) // Suppress mysterious IE10 errors + { selectInput = function(node) { try { node.select(); } catch(_e) {} }; } + + function bind(f) { + var args = Array.prototype.slice.call(arguments, 1); + return function(){return f.apply(null, args)} + } + + function copyObj(obj, target, overwrite) { + if (!target) { target = {}; } + for (var prop in obj) + { if (obj.hasOwnProperty(prop) && (overwrite !== false || !target.hasOwnProperty(prop))) + { target[prop] = obj[prop]; } } + return target + } + + // Counts the column offset in a string, taking tabs into account. + // Used mostly to find indentation. + function countColumn(string, end, tabSize, startIndex, startValue) { + if (end == null) { + end = string.search(/[^\s\u00a0]/); + if (end == -1) { end = string.length; } + } + for (var i = startIndex || 0, n = startValue || 0;;) { + var nextTab = string.indexOf("\t", i); + if (nextTab < 0 || nextTab >= end) + { return n + (end - i) } + n += nextTab - i; + n += tabSize - (n % tabSize); + i = nextTab + 1; + } + } + + var Delayed = function() { + this.id = null; + this.f = null; + this.time = 0; + this.handler = bind(this.onTimeout, this); + }; + Delayed.prototype.onTimeout = function (self) { + self.id = 0; + if (self.time <= +new Date) { + self.f(); + } else { + setTimeout(self.handler, self.time - +new Date); + } + }; + Delayed.prototype.set = function (ms, f) { + this.f = f; + var time = +new Date + ms; + if (!this.id || time < this.time) { + clearTimeout(this.id); + this.id = setTimeout(this.handler, ms); + this.time = time; + } + }; + + function indexOf(array, elt) { + for (var i = 0; i < array.length; ++i) + { if (array[i] == elt) { return i } } + return -1 + } + + // Number of pixels added to scroller and sizer to hide scrollbar + var scrollerGap = 30; + + // Returned or thrown by various protocols to signal 'I'm not + // handling this'. + var Pass = {toString: function(){return "CodeMirror.Pass"}}; + + // Reused option objects for setSelection & friends + var sel_dontScroll = {scroll: false}, sel_mouse = {origin: "*mouse"}, sel_move = {origin: "+move"}; + + // The inverse of countColumn -- find the offset that corresponds to + // a particular column. 
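// A hand-worked sketch of how the two helpers relate (values assume
// tabSize = 4 and are illustrative only, not taken from the patch):
//   countColumn("\tab", 2, 4) === 5   // offset 2 ('b') sits at column 5,
//                                     // since the tab expands to column 4
//   findColumn("\tab", 5, 4)  === 2   // and column 5 maps back to offset 2
// With end == null, countColumn measures the width of the leading
// whitespace, which is how indentation is derived from a line.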
+ function findColumn(string, goal, tabSize) { + for (var pos = 0, col = 0;;) { + var nextTab = string.indexOf("\t", pos); + if (nextTab == -1) { nextTab = string.length; } + var skipped = nextTab - pos; + if (nextTab == string.length || col + skipped >= goal) + { return pos + Math.min(skipped, goal - col) } + col += nextTab - pos; + col += tabSize - (col % tabSize); + pos = nextTab + 1; + if (col >= goal) { return pos } + } + } + + var spaceStrs = [""]; + function spaceStr(n) { + while (spaceStrs.length <= n) + { spaceStrs.push(lst(spaceStrs) + " "); } + return spaceStrs[n] + } + + function lst(arr) { return arr[arr.length-1] } + + function map(array, f) { + var out = []; + for (var i = 0; i < array.length; i++) { out[i] = f(array[i], i); } + return out + } + + function insertSorted(array, value, score) { + var pos = 0, priority = score(value); + while (pos < array.length && score(array[pos]) <= priority) { pos++; } + array.splice(pos, 0, value); + } + + function nothing() {} + + function createObj(base, props) { + var inst; + if (Object.create) { + inst = Object.create(base); + } else { + nothing.prototype = base; + inst = new nothing(); + } + if (props) { copyObj(props, inst); } + return inst + } + + var nonASCIISingleCaseWordChar = /[\u00df\u0587\u0590-\u05f4\u0600-\u06ff\u3040-\u309f\u30a0-\u30ff\u3400-\u4db5\u4e00-\u9fcc\uac00-\ud7af]/; + function isWordCharBasic(ch) { + return /\w/.test(ch) || ch > "\x80" && + (ch.toUpperCase() != ch.toLowerCase() || nonASCIISingleCaseWordChar.test(ch)) + } + function isWordChar(ch, helper) { + if (!helper) { return isWordCharBasic(ch) } + if (helper.source.indexOf("\\w") > -1 && isWordCharBasic(ch)) { return true } + return helper.test(ch) + } + + function isEmpty(obj) { + for (var n in obj) { if (obj.hasOwnProperty(n) && obj[n]) { return false } } + return true + } + + // Extending unicode characters. A series of a non-extending char + + // any number of extending chars is treated as a single unit as far + // as editing and measuring is concerned. This is not fully correct, + // since some scripts/fonts/browsers also treat other configurations + // of code points as a group. 
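// A small illustration (hand-picked code points, not from the patch):
//   isExtendingChar("\u0301") === true   // combining acute accent
// so "e" + "\u0301" (rendered as a single accented glyph) is skipped over
// as one unit by skipExtendingChars when the cursor moves across it.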
+ var extendingChars = /[\u0300-\u036f\u0483-\u0489\u0591-\u05bd\u05bf\u05c1\u05c2\u05c4\u05c5\u05c7\u0610-\u061a\u064b-\u065e\u0670\u06d6-\u06dc\u06de-\u06e4\u06e7\u06e8\u06ea-\u06ed\u0711\u0730-\u074a\u07a6-\u07b0\u07eb-\u07f3\u0816-\u0819\u081b-\u0823\u0825-\u0827\u0829-\u082d\u0900-\u0902\u093c\u0941-\u0948\u094d\u0951-\u0955\u0962\u0963\u0981\u09bc\u09be\u09c1-\u09c4\u09cd\u09d7\u09e2\u09e3\u0a01\u0a02\u0a3c\u0a41\u0a42\u0a47\u0a48\u0a4b-\u0a4d\u0a51\u0a70\u0a71\u0a75\u0a81\u0a82\u0abc\u0ac1-\u0ac5\u0ac7\u0ac8\u0acd\u0ae2\u0ae3\u0b01\u0b3c\u0b3e\u0b3f\u0b41-\u0b44\u0b4d\u0b56\u0b57\u0b62\u0b63\u0b82\u0bbe\u0bc0\u0bcd\u0bd7\u0c3e-\u0c40\u0c46-\u0c48\u0c4a-\u0c4d\u0c55\u0c56\u0c62\u0c63\u0cbc\u0cbf\u0cc2\u0cc6\u0ccc\u0ccd\u0cd5\u0cd6\u0ce2\u0ce3\u0d3e\u0d41-\u0d44\u0d4d\u0d57\u0d62\u0d63\u0dca\u0dcf\u0dd2-\u0dd4\u0dd6\u0ddf\u0e31\u0e34-\u0e3a\u0e47-\u0e4e\u0eb1\u0eb4-\u0eb9\u0ebb\u0ebc\u0ec8-\u0ecd\u0f18\u0f19\u0f35\u0f37\u0f39\u0f71-\u0f7e\u0f80-\u0f84\u0f86\u0f87\u0f90-\u0f97\u0f99-\u0fbc\u0fc6\u102d-\u1030\u1032-\u1037\u1039\u103a\u103d\u103e\u1058\u1059\u105e-\u1060\u1071-\u1074\u1082\u1085\u1086\u108d\u109d\u135f\u1712-\u1714\u1732-\u1734\u1752\u1753\u1772\u1773\u17b7-\u17bd\u17c6\u17c9-\u17d3\u17dd\u180b-\u180d\u18a9\u1920-\u1922\u1927\u1928\u1932\u1939-\u193b\u1a17\u1a18\u1a56\u1a58-\u1a5e\u1a60\u1a62\u1a65-\u1a6c\u1a73-\u1a7c\u1a7f\u1b00-\u1b03\u1b34\u1b36-\u1b3a\u1b3c\u1b42\u1b6b-\u1b73\u1b80\u1b81\u1ba2-\u1ba5\u1ba8\u1ba9\u1c2c-\u1c33\u1c36\u1c37\u1cd0-\u1cd2\u1cd4-\u1ce0\u1ce2-\u1ce8\u1ced\u1dc0-\u1de6\u1dfd-\u1dff\u200c\u200d\u20d0-\u20f0\u2cef-\u2cf1\u2de0-\u2dff\u302a-\u302f\u3099\u309a\ua66f-\ua672\ua67c\ua67d\ua6f0\ua6f1\ua802\ua806\ua80b\ua825\ua826\ua8c4\ua8e0-\ua8f1\ua926-\ua92d\ua947-\ua951\ua980-\ua982\ua9b3\ua9b6-\ua9b9\ua9bc\uaa29-\uaa2e\uaa31\uaa32\uaa35\uaa36\uaa43\uaa4c\uaab0\uaab2-\uaab4\uaab7\uaab8\uaabe\uaabf\uaac1\uabe5\uabe8\uabed\udc00-\udfff\ufb1e\ufe00-\ufe0f\ufe20-\ufe26\uff9e\uff9f]/; + function isExtendingChar(ch) { return ch.charCodeAt(0) >= 768 && extendingChars.test(ch) } + + // Returns a number from the range [`0`; `str.length`] unless `pos` is outside that range. + function skipExtendingChars(str, pos, dir) { + while ((dir < 0 ? pos > 0 : pos < str.length) && isExtendingChar(str.charAt(pos))) { pos += dir; } + return pos + } + + // Returns the value from the range [`from`; `to`] that satisfies + // `pred` and is closest to `from`. Assumes that at least `to` + // satisfies `pred`. Supports `from` being greater than `to`. + function findFirst(pred, from, to) { + // At any point we are certain `to` satisfies `pred`, don't know + // whether `from` does. + var dir = from > to ? -1 : 1; + for (;;) { + if (from == to) { return from } + var midF = (from + to) / 2, mid = dir < 0 ? Math.ceil(midF) : Math.floor(midF); + if (mid == from) { return pred(mid) ? from : to } + if (pred(mid)) { to = mid; } + else { from = mid + dir; } + } + } + + // BIDI HELPERS + + function iterateBidiSections(order, from, to, f) { + if (!order) { return f(from, to, "ltr", 0) } + var found = false; + for (var i = 0; i < order.length; ++i) { + var part = order[i]; + if (part.from < to && part.to > from || from == to && part.to == from) { + f(Math.max(part.from, from), Math.min(part.to, to), part.level == 1 ? 
"rtl" : "ltr", i); + found = true; + } + } + if (!found) { f(from, to, "ltr"); } + } + + var bidiOther = null; + function getBidiPartAt(order, ch, sticky) { + var found; + bidiOther = null; + for (var i = 0; i < order.length; ++i) { + var cur = order[i]; + if (cur.from < ch && cur.to > ch) { return i } + if (cur.to == ch) { + if (cur.from != cur.to && sticky == "before") { found = i; } + else { bidiOther = i; } + } + if (cur.from == ch) { + if (cur.from != cur.to && sticky != "before") { found = i; } + else { bidiOther = i; } + } + } + return found != null ? found : bidiOther + } + + // Bidirectional ordering algorithm + // See http://unicode.org/reports/tr9/tr9-13.html for the algorithm + // that this (partially) implements. + + // One-char codes used for character types: + // L (L): Left-to-Right + // R (R): Right-to-Left + // r (AL): Right-to-Left Arabic + // 1 (EN): European Number + // + (ES): European Number Separator + // % (ET): European Number Terminator + // n (AN): Arabic Number + // , (CS): Common Number Separator + // m (NSM): Non-Spacing Mark + // b (BN): Boundary Neutral + // s (B): Paragraph Separator + // t (S): Segment Separator + // w (WS): Whitespace + // N (ON): Other Neutrals + + // Returns null if characters are ordered as they appear + // (left-to-right), or an array of sections ({from, to, level} + // objects) in the order in which they occur visually. + var bidiOrdering = (function() { + // Character types for codepoints 0 to 0xff + var lowTypes = "bbbbbbbbbtstwsbbbbbbbbbbbbbbssstwNN%%%NNNNNN,N,N1111111111NNNNNNNLLLLLLLLLLLLLLLLLLLLLLLLLLNNNNNNLLLLLLLLLLLLLLLLLLLLLLLLLLNNNNbbbbbbsbbbbbbbbbbbbbbbbbbbbbbbbbb,N%%%%NNNNLNNNNN%%11NLNNN1LNNNNNLLLLLLLLLLLLLLLLLLLLLLLNLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLN"; + // Character types for codepoints 0x600 to 0x6f9 + var arabicTypes = "nnnnnnNNr%%r,rNNmmmmmmmmmmmrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrmmmmmmmmmmmmmmmmmmmmmnnnnnnnnnn%nnrrrmrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrmmmmmmmnNmmmmmmrrmmNmmmmrr1111111111"; + function charType(code) { + if (code <= 0xf7) { return lowTypes.charAt(code) } + else if (0x590 <= code && code <= 0x5f4) { return "R" } + else if (0x600 <= code && code <= 0x6f9) { return arabicTypes.charAt(code - 0x600) } + else if (0x6ee <= code && code <= 0x8ac) { return "r" } + else if (0x2000 <= code && code <= 0x200b) { return "w" } + else if (code == 0x200c) { return "b" } + else { return "L" } + } + + var bidiRE = /[\u0590-\u05f4\u0600-\u06ff\u0700-\u08ac]/; + var isNeutral = /[stwN]/, isStrong = /[LRr]/, countsAsLeft = /[Lb1n]/, countsAsNum = /[1n]/; + + function BidiSpan(level, from, to) { + this.level = level; + this.from = from; this.to = to; + } + + return function(str, direction) { + var outerType = direction == "ltr" ? "L" : "R"; + + if (str.length == 0 || direction == "ltr" && !bidiRE.test(str)) { return false } + var len = str.length, types = []; + for (var i = 0; i < len; ++i) + { types.push(charType(str.charCodeAt(i))); } + + // W1. Examine each non-spacing mark (NSM) in the level run, and + // change the type of the NSM to the type of the previous + // character. If the NSM is at the start of the level run, it will + // get the type of sor. + for (var i$1 = 0, prev = outerType; i$1 < len; ++i$1) { + var type = types[i$1]; + if (type == "m") { types[i$1] = prev; } + else { prev = type; } + } + + // W2. Search backwards from each instance of a European number + // until the first strong type (R, L, AL, or sor) is found. 
If an + // AL is found, change the type of the European number to Arabic + // number. + // W3. Change all ALs to R. + for (var i$2 = 0, cur = outerType; i$2 < len; ++i$2) { + var type$1 = types[i$2]; + if (type$1 == "1" && cur == "r") { types[i$2] = "n"; } + else if (isStrong.test(type$1)) { cur = type$1; if (type$1 == "r") { types[i$2] = "R"; } } + } + + // W4. A single European separator between two European numbers + // changes to a European number. A single common separator between + // two numbers of the same type changes to that type. + for (var i$3 = 1, prev$1 = types[0]; i$3 < len - 1; ++i$3) { + var type$2 = types[i$3]; + if (type$2 == "+" && prev$1 == "1" && types[i$3+1] == "1") { types[i$3] = "1"; } + else if (type$2 == "," && prev$1 == types[i$3+1] && + (prev$1 == "1" || prev$1 == "n")) { types[i$3] = prev$1; } + prev$1 = type$2; + } + + // W5. A sequence of European terminators adjacent to European + // numbers changes to all European numbers. + // W6. Otherwise, separators and terminators change to Other + // Neutral. + for (var i$4 = 0; i$4 < len; ++i$4) { + var type$3 = types[i$4]; + if (type$3 == ",") { types[i$4] = "N"; } + else if (type$3 == "%") { + var end = (void 0); + for (end = i$4 + 1; end < len && types[end] == "%"; ++end) {} + var replace = (i$4 && types[i$4-1] == "!") || (end < len && types[end] == "1") ? "1" : "N"; + for (var j = i$4; j < end; ++j) { types[j] = replace; } + i$4 = end - 1; + } + } + + // W7. Search backwards from each instance of a European number + // until the first strong type (R, L, or sor) is found. If an L is + // found, then change the type of the European number to L. + for (var i$5 = 0, cur$1 = outerType; i$5 < len; ++i$5) { + var type$4 = types[i$5]; + if (cur$1 == "L" && type$4 == "1") { types[i$5] = "L"; } + else if (isStrong.test(type$4)) { cur$1 = type$4; } + } + + // N1. A sequence of neutrals takes the direction of the + // surrounding strong text if the text on both sides has the same + // direction. European and Arabic numbers act as if they were R in + // terms of their influence on neutrals. Start-of-level-run (sor) + // and end-of-level-run (eor) are used at level run boundaries. + // N2. Any remaining neutrals take the embedding direction. + for (var i$6 = 0; i$6 < len; ++i$6) { + if (isNeutral.test(types[i$6])) { + var end$1 = (void 0); + for (end$1 = i$6 + 1; end$1 < len && isNeutral.test(types[end$1]); ++end$1) {} + var before = (i$6 ? types[i$6-1] : outerType) == "L"; + var after = (end$1 < len ? types[end$1] : outerType) == "L"; + var replace$1 = before == after ? (before ? "L" : "R") : outerType; + for (var j$1 = i$6; j$1 < end$1; ++j$1) { types[j$1] = replace$1; } + i$6 = end$1 - 1; + } + } + + // Here we depart from the documented algorithm, in order to avoid + // building up an actual levels array. Since there are only three + // levels (0, 1, 2) in an implementation that doesn't take + // explicit embedding into account, we can build up the order on + // the fly, without following the level-based algorithm. 
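// A hand-worked example of the resulting spans (illustrative only): for an
// "ltr" line consisting of "abc " followed by three Hebrew letters, the
// array built below comes out as
//   [BidiSpan(0, 0, 4), BidiSpan(1, 4, 7)]
// i.e. a level-0 (left-to-right) prefix and a level-1 (right-to-left) tail;
// digits embedded in a right-to-left run get their own level-2 span.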
+ var order = [], m; + for (var i$7 = 0; i$7 < len;) { + if (countsAsLeft.test(types[i$7])) { + var start = i$7; + for (++i$7; i$7 < len && countsAsLeft.test(types[i$7]); ++i$7) {} + order.push(new BidiSpan(0, start, i$7)); + } else { + var pos = i$7, at = order.length; + for (++i$7; i$7 < len && types[i$7] != "L"; ++i$7) {} + for (var j$2 = pos; j$2 < i$7;) { + if (countsAsNum.test(types[j$2])) { + if (pos < j$2) { order.splice(at, 0, new BidiSpan(1, pos, j$2)); } + var nstart = j$2; + for (++j$2; j$2 < i$7 && countsAsNum.test(types[j$2]); ++j$2) {} + order.splice(at, 0, new BidiSpan(2, nstart, j$2)); + pos = j$2; + } else { ++j$2; } + } + if (pos < i$7) { order.splice(at, 0, new BidiSpan(1, pos, i$7)); } + } + } + if (direction == "ltr") { + if (order[0].level == 1 && (m = str.match(/^\s+/))) { + order[0].from = m[0].length; + order.unshift(new BidiSpan(0, 0, m[0].length)); + } + if (lst(order).level == 1 && (m = str.match(/\s+$/))) { + lst(order).to -= m[0].length; + order.push(new BidiSpan(0, len - m[0].length, len)); + } + } + + return direction == "rtl" ? order.reverse() : order + } + })(); + + // Get the bidi ordering for the given line (and cache it). Returns + // false for lines that are fully left-to-right, and an array of + // BidiSpan objects otherwise. + function getOrder(line, direction) { + var order = line.order; + if (order == null) { order = line.order = bidiOrdering(line.text, direction); } + return order + } + + // EVENT HANDLING + + // Lightweight event framework. on/off also work on DOM nodes, + // registering native DOM handlers. + + var noHandlers = []; + + var on = function(emitter, type, f) { + if (emitter.addEventListener) { + emitter.addEventListener(type, f, false); + } else if (emitter.attachEvent) { + emitter.attachEvent("on" + type, f); + } else { + var map$$1 = emitter._handlers || (emitter._handlers = {}); + map$$1[type] = (map$$1[type] || noHandlers).concat(f); + } + }; + + function getHandlers(emitter, type) { + return emitter._handlers && emitter._handlers[type] || noHandlers + } + + function off(emitter, type, f) { + if (emitter.removeEventListener) { + emitter.removeEventListener(type, f, false); + } else if (emitter.detachEvent) { + emitter.detachEvent("on" + type, f); + } else { + var map$$1 = emitter._handlers, arr = map$$1 && map$$1[type]; + if (arr) { + var index = indexOf(arr, f); + if (index > -1) + { map$$1[type] = arr.slice(0, index).concat(arr.slice(index + 1)); } + } + } + } + + function signal(emitter, type /*, values...*/) { + var handlers = getHandlers(emitter, type); + if (!handlers.length) { return } + var args = Array.prototype.slice.call(arguments, 2); + for (var i = 0; i < handlers.length; ++i) { handlers[i].apply(null, args); } + } + + // The DOM events that CodeMirror handles can be overridden by + // registering a (non-DOM) handler on the editor for the event name, + // and preventDefault-ing the event in that handler. 
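// Illustrative usage (assumes a CodeMirror instance named `cm`; not part of
// this patch):
//   cm.on("contextmenu", function (cm, e) { e.preventDefault(); });
// signalDOMEvent below then reports the event as handled, and the calling
// DOM handler skips CodeMirror's built-in behaviour for it.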
+ function signalDOMEvent(cm, e, override) { + if (typeof e == "string") + { e = {type: e, preventDefault: function() { this.defaultPrevented = true; }}; } + signal(cm, override || e.type, cm, e); + return e_defaultPrevented(e) || e.codemirrorIgnore + } + + function signalCursorActivity(cm) { + var arr = cm._handlers && cm._handlers.cursorActivity; + if (!arr) { return } + var set = cm.curOp.cursorActivityHandlers || (cm.curOp.cursorActivityHandlers = []); + for (var i = 0; i < arr.length; ++i) { if (indexOf(set, arr[i]) == -1) + { set.push(arr[i]); } } + } + + function hasHandler(emitter, type) { + return getHandlers(emitter, type).length > 0 + } + + // Add on and off methods to a constructor's prototype, to make + // registering events on such objects more convenient. + function eventMixin(ctor) { + ctor.prototype.on = function(type, f) {on(this, type, f);}; + ctor.prototype.off = function(type, f) {off(this, type, f);}; + } + + // Due to the fact that we still support jurassic IE versions, some + // compatibility wrappers are needed. + + function e_preventDefault(e) { + if (e.preventDefault) { e.preventDefault(); } + else { e.returnValue = false; } + } + function e_stopPropagation(e) { + if (e.stopPropagation) { e.stopPropagation(); } + else { e.cancelBubble = true; } + } + function e_defaultPrevented(e) { + return e.defaultPrevented != null ? e.defaultPrevented : e.returnValue == false + } + function e_stop(e) {e_preventDefault(e); e_stopPropagation(e);} + + function e_target(e) {return e.target || e.srcElement} + function e_button(e) { + var b = e.which; + if (b == null) { + if (e.button & 1) { b = 1; } + else if (e.button & 2) { b = 3; } + else if (e.button & 4) { b = 2; } + } + if (mac && e.ctrlKey && b == 1) { b = 3; } + return b + } + + // Detect drag-and-drop + var dragAndDrop = function() { + // There is *some* kind of drag-and-drop support in IE6-8, but I + // couldn't get it to work yet. + if (ie && ie_version < 9) { return false } + var div = elt('div'); + return "draggable" in div || "dragDrop" in div + }(); + + var zwspSupported; + function zeroWidthElement(measure) { + if (zwspSupported == null) { + var test = elt("span", "\u200b"); + removeChildrenAndAdd(measure, elt("span", [test, document.createTextNode("x")])); + if (measure.firstChild.offsetHeight != 0) + { zwspSupported = test.offsetWidth <= 1 && test.offsetHeight > 2 && !(ie && ie_version < 8); } + } + var node = zwspSupported ? elt("span", "\u200b") : + elt("span", "\u00a0", null, "display: inline-block; width: 1px; margin-right: -1px"); + node.setAttribute("cm-text", ""); + return node + } + + // Feature-detect IE's crummy client rect reporting for bidi text + var badBidiRects; + function hasBadBidiRects(measure) { + if (badBidiRects != null) { return badBidiRects } + var txt = removeChildrenAndAdd(measure, document.createTextNode("A\u062eA")); + var r0 = range(txt, 0, 1).getBoundingClientRect(); + var r1 = range(txt, 1, 2).getBoundingClientRect(); + removeChildren(measure); + if (!r0 || r0.left == r0.right) { return false } // Safari returns null in some cases (#2780) + return badBidiRects = (r1.right - r0.right < 3) + } + + // See if "".split is the broken IE version, if so, provide an + // alternative way to split lines. + var splitLinesAuto = "\n\nb".split(/\n/).length != 3 ? function (string) { + var pos = 0, result = [], l = string.length; + while (pos <= l) { + var nl = string.indexOf("\n", pos); + if (nl == -1) { nl = string.length; } + var line = string.slice(pos, string.charAt(nl - 1) == "\r" ? 
nl - 1 : nl); + var rt = line.indexOf("\r"); + if (rt != -1) { + result.push(line.slice(0, rt)); + pos += rt + 1; + } else { + result.push(line); + pos = nl + 1; + } + } + return result + } : function (string) { return string.split(/\r\n?|\n/); }; + + var hasSelection = window.getSelection ? function (te) { + try { return te.selectionStart != te.selectionEnd } + catch(e) { return false } + } : function (te) { + var range$$1; + try {range$$1 = te.ownerDocument.selection.createRange();} + catch(e) {} + if (!range$$1 || range$$1.parentElement() != te) { return false } + return range$$1.compareEndPoints("StartToEnd", range$$1) != 0 + }; + + var hasCopyEvent = (function () { + var e = elt("div"); + if ("oncopy" in e) { return true } + e.setAttribute("oncopy", "return;"); + return typeof e.oncopy == "function" + })(); + + var badZoomedRects = null; + function hasBadZoomedRects(measure) { + if (badZoomedRects != null) { return badZoomedRects } + var node = removeChildrenAndAdd(measure, elt("span", "x")); + var normal = node.getBoundingClientRect(); + var fromRange = range(node, 0, 1).getBoundingClientRect(); + return badZoomedRects = Math.abs(normal.left - fromRange.left) > 1 + } + + // Known modes, by name and by MIME + var modes = {}, mimeModes = {}; + + // Extra arguments are stored as the mode's dependencies, which is + // used by (legacy) mechanisms like loadmode.js to automatically + // load a mode. (Preferred mechanism is the require/define calls.) + function defineMode(name, mode) { + if (arguments.length > 2) + { mode.dependencies = Array.prototype.slice.call(arguments, 2); } + modes[name] = mode; + } + + function defineMIME(mime, spec) { + mimeModes[mime] = spec; + } + + // Given a MIME type, a {name, ...options} config object, or a name + // string, return a mode config object. + function resolveMode(spec) { + if (typeof spec == "string" && mimeModes.hasOwnProperty(spec)) { + spec = mimeModes[spec]; + } else if (spec && typeof spec.name == "string" && mimeModes.hasOwnProperty(spec.name)) { + var found = mimeModes[spec.name]; + if (typeof found == "string") { found = {name: found}; } + spec = createObj(found, spec); + spec.name = found.name; + } else if (typeof spec == "string" && /^[\w\-]+\/[\w\-]+\+xml$/.test(spec)) { + return resolveMode("application/xml") + } else if (typeof spec == "string" && /^[\w\-]+\/[\w\-]+\+json$/.test(spec)) { + return resolveMode("application/json") + } + if (typeof spec == "string") { return {name: spec} } + else { return spec || {name: "null"} } + } + + // Given a mode spec (anything that resolveMode accepts), find and + // initialize an actual mode object. + function getMode(options, spec) { + spec = resolveMode(spec); + var mfactory = modes[spec.name]; + if (!mfactory) { return getMode(options, "text/plain") } + var modeObj = mfactory(options, spec); + if (modeExtensions.hasOwnProperty(spec.name)) { + var exts = modeExtensions[spec.name]; + for (var prop in exts) { + if (!exts.hasOwnProperty(prop)) { continue } + if (modeObj.hasOwnProperty(prop)) { modeObj["_" + prop] = modeObj[prop]; } + modeObj[prop] = exts[prop]; + } + } + modeObj.name = spec.name; + if (spec.helperType) { modeObj.helperType = spec.helperType; } + if (spec.modeProps) { for (var prop$1 in spec.modeProps) + { modeObj[prop$1] = spec.modeProps[prop$1]; } } + + return modeObj + } + + // This can be used to attach properties to mode objects from + // outside the actual mode definition. 
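+ // Editor's note (illustrative sketch, not part of upstream CodeMirror): how
+ // the mode registry above fits together with extendMode, defined just below.
+ // The mode name, MIME type and property are hypothetical, and the helper is
+ // never invoked by the library:
+ function editorsNoteRegisterExampleMode() {
+   defineMode("editors-note-plain", function () {
+     // token() must consume input on every call; this one eats the whole line.
+     return {token: function (stream) { stream.skipToEnd(); return null; }};
+   });
+   defineMIME("text/x-editors-note-plain", "editors-note-plain");
+   extendMode("editors-note-plain", {editorsNoteFlag: true});
+   // getMode resolves the MIME type, runs the factory and copies the extension in.
+   return getMode({}, "text/x-editors-note-plain");
+ }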
+ var modeExtensions = {}; + function extendMode(mode, properties) { + var exts = modeExtensions.hasOwnProperty(mode) ? modeExtensions[mode] : (modeExtensions[mode] = {}); + copyObj(properties, exts); + } + + function copyState(mode, state) { + if (state === true) { return state } + if (mode.copyState) { return mode.copyState(state) } + var nstate = {}; + for (var n in state) { + var val = state[n]; + if (val instanceof Array) { val = val.concat([]); } + nstate[n] = val; + } + return nstate + } + + // Given a mode and a state (for that mode), find the inner mode and + // state at the position that the state refers to. + function innerMode(mode, state) { + var info; + while (mode.innerMode) { + info = mode.innerMode(state); + if (!info || info.mode == mode) { break } + state = info.state; + mode = info.mode; + } + return info || {mode: mode, state: state} + } + + function startState(mode, a1, a2) { + return mode.startState ? mode.startState(a1, a2) : true + } + + // STRING STREAM + + // Fed to the mode parsers, provides helper functions to make + // parsers more succinct. + + var StringStream = function(string, tabSize, lineOracle) { + this.pos = this.start = 0; + this.string = string; + this.tabSize = tabSize || 8; + this.lastColumnPos = this.lastColumnValue = 0; + this.lineStart = 0; + this.lineOracle = lineOracle; + }; + + StringStream.prototype.eol = function () {return this.pos >= this.string.length}; + StringStream.prototype.sol = function () {return this.pos == this.lineStart}; + StringStream.prototype.peek = function () {return this.string.charAt(this.pos) || undefined}; + StringStream.prototype.next = function () { + if (this.pos < this.string.length) + { return this.string.charAt(this.pos++) } + }; + StringStream.prototype.eat = function (match) { + var ch = this.string.charAt(this.pos); + var ok; + if (typeof match == "string") { ok = ch == match; } + else { ok = ch && (match.test ? match.test(ch) : match(ch)); } + if (ok) {++this.pos; return ch} + }; + StringStream.prototype.eatWhile = function (match) { + var start = this.pos; + while (this.eat(match)){} + return this.pos > start + }; + StringStream.prototype.eatSpace = function () { + var this$1 = this; + + var start = this.pos; + while (/[\s\u00a0]/.test(this.string.charAt(this.pos))) { ++this$1.pos; } + return this.pos > start + }; + StringStream.prototype.skipToEnd = function () {this.pos = this.string.length;}; + StringStream.prototype.skipTo = function (ch) { + var found = this.string.indexOf(ch, this.pos); + if (found > -1) {this.pos = found; return true} + }; + StringStream.prototype.backUp = function (n) {this.pos -= n;}; + StringStream.prototype.column = function () { + if (this.lastColumnPos < this.start) { + this.lastColumnValue = countColumn(this.string, this.start, this.tabSize, this.lastColumnPos, this.lastColumnValue); + this.lastColumnPos = this.start; + } + return this.lastColumnValue - (this.lineStart ? countColumn(this.string, this.lineStart, this.tabSize) : 0) + }; + StringStream.prototype.indentation = function () { + return countColumn(this.string, null, this.tabSize) - + (this.lineStart ? countColumn(this.string, this.lineStart, this.tabSize) : 0) + }; + StringStream.prototype.match = function (pattern, consume, caseInsensitive) { + if (typeof pattern == "string") { + var cased = function (str) { return caseInsensitive ? 
str.toLowerCase() : str; }; + var substr = this.string.substr(this.pos, pattern.length); + if (cased(substr) == cased(pattern)) { + if (consume !== false) { this.pos += pattern.length; } + return true + } + } else { + var match = this.string.slice(this.pos).match(pattern); + if (match && match.index > 0) { return null } + if (match && consume !== false) { this.pos += match[0].length; } + return match + } + }; + StringStream.prototype.current = function (){return this.string.slice(this.start, this.pos)}; + StringStream.prototype.hideFirstChars = function (n, inner) { + this.lineStart += n; + try { return inner() } + finally { this.lineStart -= n; } + }; + StringStream.prototype.lookAhead = function (n) { + var oracle = this.lineOracle; + return oracle && oracle.lookAhead(n) + }; + StringStream.prototype.baseToken = function () { + var oracle = this.lineOracle; + return oracle && oracle.baseToken(this.pos) + }; + + // Find the line object corresponding to the given line number. + function getLine(doc, n) { + n -= doc.first; + if (n < 0 || n >= doc.size) { throw new Error("There is no line " + (n + doc.first) + " in the document.") } + var chunk = doc; + while (!chunk.lines) { + for (var i = 0;; ++i) { + var child = chunk.children[i], sz = child.chunkSize(); + if (n < sz) { chunk = child; break } + n -= sz; + } + } + return chunk.lines[n] + } + + // Get the part of a document between two positions, as an array of + // strings. + function getBetween(doc, start, end) { + var out = [], n = start.line; + doc.iter(start.line, end.line + 1, function (line) { + var text = line.text; + if (n == end.line) { text = text.slice(0, end.ch); } + if (n == start.line) { text = text.slice(start.ch); } + out.push(text); + ++n; + }); + return out + } + // Get the lines between from and to, as array of strings. + function getLines(doc, from, to) { + var out = []; + doc.iter(from, to, function (line) { out.push(line.text); }); // iter aborts when callback returns truthy value + return out + } + + // Update the height of a line, propagating the height change + // upwards to parent nodes. + function updateLineHeight(line, height) { + var diff = height - line.height; + if (diff) { for (var n = line; n; n = n.parent) { n.height += diff; } } + } + + // Given a line object, find its line number by walking up through + // its parent links. + function lineNo(line) { + if (line.parent == null) { return null } + var cur = line.parent, no = indexOf(cur.lines, line); + for (var chunk = cur.parent; chunk; cur = chunk, chunk = chunk.parent) { + for (var i = 0;; ++i) { + if (chunk.children[i] == cur) { break } + no += chunk.children[i].chunkSize(); + } + } + return no + cur.first + } + + // Find the line at the given vertical position, using the height + // information in the document tree. + function lineAtHeight(chunk, h) { + var n = chunk.first; + outer: do { + for (var i$1 = 0; i$1 < chunk.children.length; ++i$1) { + var child = chunk.children[i$1], ch = child.height; + if (h < ch) { chunk = child; continue outer } + h -= ch; + n += child.chunkSize(); + } + return n + } while (!chunk.lines) + var i = 0; + for (; i < chunk.lines.length; ++i) { + var line = chunk.lines[i], lh = line.height; + if (h < lh) { break } + h -= lh; + } + return n + i + } + + function isLine(doc, l) {return l >= doc.first && l < doc.first + doc.size} + + function lineNumberFor(options, i) { + return String(options.lineNumberFormatter(i + options.firstLineNumber)) + } + + // A Pos instance represents a position within the text. 
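+ // Editor's note (illustrative sketch, not part of upstream CodeMirror): Pos
+ // values are plain {line, ch} records, and cmp/minPos/maxPos below provide
+ // the ordering used throughout. Hypothetical, never-invoked helper:
+ function editorsNotePosOrdering() {
+   var a = Pos(3, 0), b = Pos(3, 12);
+   // cmp(a, b) < 0 here, so minPos picks a and maxPos picks b.
+   return cmp(a, b) < 0 ? [minPos(a, b), maxPos(a, b)] : [b, a];
+ }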
+ function Pos(line, ch, sticky) { + if ( sticky === void 0 ) sticky = null; + + if (!(this instanceof Pos)) { return new Pos(line, ch, sticky) } + this.line = line; + this.ch = ch; + this.sticky = sticky; + } + + // Compare two positions, return 0 if they are the same, a negative + // number when a is less, and a positive number otherwise. + function cmp(a, b) { return a.line - b.line || a.ch - b.ch } + + function equalCursorPos(a, b) { return a.sticky == b.sticky && cmp(a, b) == 0 } + + function copyPos(x) {return Pos(x.line, x.ch)} + function maxPos(a, b) { return cmp(a, b) < 0 ? b : a } + function minPos(a, b) { return cmp(a, b) < 0 ? a : b } + + // Most of the external API clips given positions to make sure they + // actually exist within the document. + function clipLine(doc, n) {return Math.max(doc.first, Math.min(n, doc.first + doc.size - 1))} + function clipPos(doc, pos) { + if (pos.line < doc.first) { return Pos(doc.first, 0) } + var last = doc.first + doc.size - 1; + if (pos.line > last) { return Pos(last, getLine(doc, last).text.length) } + return clipToLen(pos, getLine(doc, pos.line).text.length) + } + function clipToLen(pos, linelen) { + var ch = pos.ch; + if (ch == null || ch > linelen) { return Pos(pos.line, linelen) } + else if (ch < 0) { return Pos(pos.line, 0) } + else { return pos } + } + function clipPosArray(doc, array) { + var out = []; + for (var i = 0; i < array.length; i++) { out[i] = clipPos(doc, array[i]); } + return out + } + + var SavedContext = function(state, lookAhead) { + this.state = state; + this.lookAhead = lookAhead; + }; + + var Context = function(doc, state, line, lookAhead) { + this.state = state; + this.doc = doc; + this.line = line; + this.maxLookAhead = lookAhead || 0; + this.baseTokens = null; + this.baseTokenPos = 1; + }; + + Context.prototype.lookAhead = function (n) { + var line = this.doc.getLine(this.line + n); + if (line != null && n > this.maxLookAhead) { this.maxLookAhead = n; } + return line + }; + + Context.prototype.baseToken = function (n) { + var this$1 = this; + + if (!this.baseTokens) { return null } + while (this.baseTokens[this.baseTokenPos] <= n) + { this$1.baseTokenPos += 2; } + var type = this.baseTokens[this.baseTokenPos + 1]; + return {type: type && type.replace(/( |^)overlay .*/, ""), + size: this.baseTokens[this.baseTokenPos] - n} + }; + + Context.prototype.nextLine = function () { + this.line++; + if (this.maxLookAhead > 0) { this.maxLookAhead--; } + }; + + Context.fromSaved = function (doc, saved, line) { + if (saved instanceof SavedContext) + { return new Context(doc, copyState(doc.mode, saved.state), line, saved.lookAhead) } + else + { return new Context(doc, copyState(doc.mode, saved), line) } + }; + + Context.prototype.save = function (copy) { + var state = copy !== false ? copyState(this.doc.mode, this.state) : this.state; + return this.maxLookAhead > 0 ? new SavedContext(state, this.maxLookAhead) : state + }; + + + // Compute a style array (an array starting with a mode generation + // -- for invalidation -- followed by pairs of end positions and + // style strings), which is used to highlight the tokens on the + // line. + function highlightLine(cm, line, context, forceToEnd) { + // A styles array always starts with a number identifying the + // mode/overlays that it is based on (for easy invalidation). 
+ var st = [cm.state.modeGen], lineClasses = {}; + // Compute the base array of styles + runMode(cm, line.text, cm.doc.mode, context, function (end, style) { return st.push(end, style); }, + lineClasses, forceToEnd); + var state = context.state; + + // Run overlays, adjust style array. + var loop = function ( o ) { + context.baseTokens = st; + var overlay = cm.state.overlays[o], i = 1, at = 0; + context.state = true; + runMode(cm, line.text, overlay.mode, context, function (end, style) { + var start = i; + // Ensure there's a token end at the current position, and that i points at it + while (at < end) { + var i_end = st[i]; + if (i_end > end) + { st.splice(i, 1, end, st[i+1], i_end); } + i += 2; + at = Math.min(end, i_end); + } + if (!style) { return } + if (overlay.opaque) { + st.splice(start, i - start, end, "overlay " + style); + i = start + 2; + } else { + for (; start < i; start += 2) { + var cur = st[start+1]; + st[start+1] = (cur ? cur + " " : "") + "overlay " + style; + } + } + }, lineClasses); + context.state = state; + context.baseTokens = null; + context.baseTokenPos = 1; + }; + + for (var o = 0; o < cm.state.overlays.length; ++o) loop( o ); + + return {styles: st, classes: lineClasses.bgClass || lineClasses.textClass ? lineClasses : null} + } + + function getLineStyles(cm, line, updateFrontier) { + if (!line.styles || line.styles[0] != cm.state.modeGen) { + var context = getContextBefore(cm, lineNo(line)); + var resetState = line.text.length > cm.options.maxHighlightLength && copyState(cm.doc.mode, context.state); + var result = highlightLine(cm, line, context); + if (resetState) { context.state = resetState; } + line.stateAfter = context.save(!resetState); + line.styles = result.styles; + if (result.classes) { line.styleClasses = result.classes; } + else if (line.styleClasses) { line.styleClasses = null; } + if (updateFrontier === cm.doc.highlightFrontier) + { cm.doc.modeFrontier = Math.max(cm.doc.modeFrontier, ++cm.doc.highlightFrontier); } + } + return line.styles + } + + function getContextBefore(cm, n, precise) { + var doc = cm.doc, display = cm.display; + if (!doc.mode.startState) { return new Context(doc, true, n) } + var start = findStartLine(cm, n, precise); + var saved = start > doc.first && getLine(doc, start - 1).stateAfter; + var context = saved ? Context.fromSaved(doc, saved, start) : new Context(doc, startState(doc.mode), start); + + doc.iter(start, n, function (line) { + processLine(cm, line.text, context); + var pos = context.line; + line.stateAfter = pos == n - 1 || pos % 5 == 0 || pos >= display.viewFrom && pos < display.viewTo ? context.save() : null; + context.nextLine(); + }); + if (precise) { doc.modeFrontier = context.line; } + return context + } + + // Lightweight form of highlight -- proceed over this line and + // update state, but don't save a style array. Used for lines that + // aren't currently visible. 
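+ // Editor's note (illustrative sketch, not part of upstream CodeMirror):
+ // readToken, defined a little further down, requires mode.token() to advance
+ // the stream on every call. A well-behaved token function (hypothetical,
+ // never used here) looks like:
+ function editorsNoteExampleToken(stream) {
+   if (stream.eatSpace()) { return null }
+   if (stream.match(/^\d+/)) { return "number" }
+   stream.next();
+   return null
+ }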
+ function processLine(cm, text, context, startAt) { + var mode = cm.doc.mode; + var stream = new StringStream(text, cm.options.tabSize, context); + stream.start = stream.pos = startAt || 0; + if (text == "") { callBlankLine(mode, context.state); } + while (!stream.eol()) { + readToken(mode, stream, context.state); + stream.start = stream.pos; + } + } + + function callBlankLine(mode, state) { + if (mode.blankLine) { return mode.blankLine(state) } + if (!mode.innerMode) { return } + var inner = innerMode(mode, state); + if (inner.mode.blankLine) { return inner.mode.blankLine(inner.state) } + } + + function readToken(mode, stream, state, inner) { + for (var i = 0; i < 10; i++) { + if (inner) { inner[0] = innerMode(mode, state).mode; } + var style = mode.token(stream, state); + if (stream.pos > stream.start) { return style } + } + throw new Error("Mode " + mode.name + " failed to advance stream.") + } + + var Token = function(stream, type, state) { + this.start = stream.start; this.end = stream.pos; + this.string = stream.current(); + this.type = type || null; + this.state = state; + }; + + // Utility for getTokenAt and getLineTokens + function takeToken(cm, pos, precise, asArray) { + var doc = cm.doc, mode = doc.mode, style; + pos = clipPos(doc, pos); + var line = getLine(doc, pos.line), context = getContextBefore(cm, pos.line, precise); + var stream = new StringStream(line.text, cm.options.tabSize, context), tokens; + if (asArray) { tokens = []; } + while ((asArray || stream.pos < pos.ch) && !stream.eol()) { + stream.start = stream.pos; + style = readToken(mode, stream, context.state); + if (asArray) { tokens.push(new Token(stream, style, copyState(doc.mode, context.state))); } + } + return asArray ? tokens : new Token(stream, style, context.state) + } + + function extractLineClasses(type, output) { + if (type) { for (;;) { + var lineClass = type.match(/(?:^|\s+)line-(background-)?(\S+)/); + if (!lineClass) { break } + type = type.slice(0, lineClass.index) + type.slice(lineClass.index + lineClass[0].length); + var prop = lineClass[1] ? "bgClass" : "textClass"; + if (output[prop] == null) + { output[prop] = lineClass[2]; } + else if (!(new RegExp("(?:^|\s)" + lineClass[2] + "(?:$|\s)")).test(output[prop])) + { output[prop] += " " + lineClass[2]; } + } } + return type + } + + // Run the given mode's parser over a line, calling f for each token. + function runMode(cm, text, mode, context, f, lineClasses, forceToEnd) { + var flattenSpans = mode.flattenSpans; + if (flattenSpans == null) { flattenSpans = cm.options.flattenSpans; } + var curStart = 0, curStyle = null; + var stream = new StringStream(text, cm.options.tabSize, context), style; + var inner = cm.options.addModeClass && [null]; + if (text == "") { extractLineClasses(callBlankLine(mode, context.state), lineClasses); } + while (!stream.eol()) { + if (stream.pos > cm.options.maxHighlightLength) { + flattenSpans = false; + if (forceToEnd) { processLine(cm, text, context, stream.pos); } + stream.pos = text.length; + style = null; + } else { + style = extractLineClasses(readToken(mode, stream, context.state, inner), lineClasses); + } + if (inner) { + var mName = inner[0].name; + if (mName) { style = "m-" + (style ? 
mName + " " + style : mName); } + } + if (!flattenSpans || curStyle != style) { + while (curStart < stream.start) { + curStart = Math.min(stream.start, curStart + 5000); + f(curStart, curStyle); + } + curStyle = style; + } + stream.start = stream.pos; + } + while (curStart < stream.pos) { + // Webkit seems to refuse to render text nodes longer than 57444 + // characters, and returns inaccurate measurements in nodes + // starting around 5000 chars. + var pos = Math.min(stream.pos, curStart + 5000); + f(pos, curStyle); + curStart = pos; + } + } + + // Finds the line to start with when starting a parse. Tries to + // find a line with a stateAfter, so that it can start with a + // valid state. If that fails, it returns the line with the + // smallest indentation, which tends to need the least context to + // parse correctly. + function findStartLine(cm, n, precise) { + var minindent, minline, doc = cm.doc; + var lim = precise ? -1 : n - (cm.doc.mode.innerMode ? 1000 : 100); + for (var search = n; search > lim; --search) { + if (search <= doc.first) { return doc.first } + var line = getLine(doc, search - 1), after = line.stateAfter; + if (after && (!precise || search + (after instanceof SavedContext ? after.lookAhead : 0) <= doc.modeFrontier)) + { return search } + var indented = countColumn(line.text, null, cm.options.tabSize); + if (minline == null || minindent > indented) { + minline = search - 1; + minindent = indented; + } + } + return minline + } + + function retreatFrontier(doc, n) { + doc.modeFrontier = Math.min(doc.modeFrontier, n); + if (doc.highlightFrontier < n - 10) { return } + var start = doc.first; + for (var line = n - 1; line > start; line--) { + var saved = getLine(doc, line).stateAfter; + // change is on 3 + // state on line 1 looked ahead 2 -- so saw 3 + // test 1 + 2 < 3 should cover this + if (saved && (!(saved instanceof SavedContext) || line + saved.lookAhead < n)) { + start = line + 1; + break + } + } + doc.highlightFrontier = Math.min(doc.highlightFrontier, start); + } + // Optimize some code when these features are not used. var sawReadOnlySpans = false, sawCollapsedSpans = false; - // EDITOR CONSTRUCTOR - - // A CodeMirror instance represents an editor. This is the object - // that user code is usually dealing with. - - function CodeMirror(place, options) { - if (!(this instanceof CodeMirror)) return new CodeMirror(place, options); - - this.options = options = options ? copyObj(options) : {}; - // Determine effective options based on given values and defaults. 
- copyObj(defaults, options, false); - setGuttersForLineNumbers(options); - - var doc = options.value; - if (typeof doc == "string") doc = new Doc(doc, options.mode, null, options.lineSeparator); - this.doc = doc; - - var input = new CodeMirror.inputStyles[options.inputStyle](this); - var display = this.display = new Display(place, doc, input); - display.wrapper.CodeMirror = this; - updateGutters(this); - themeChanged(this); - if (options.lineWrapping) - this.display.wrapper.className += " CodeMirror-wrap"; - if (options.autofocus && !mobile) display.input.focus(); - initScrollbars(this); - - this.state = { - keyMaps: [], // stores maps added by addKeyMap - overlays: [], // highlighting overlays, as added by addOverlay - modeGen: 0, // bumped when mode/overlay changes, used to invalidate highlighting info - overwrite: false, - delayingBlurEvent: false, - focused: false, - suppressEdits: false, // used to disable editing during key handlers when in readOnly mode - pasteIncoming: false, cutIncoming: false, // help recognize paste/cut edits in input.poll - selectingText: false, - draggingText: false, - highlight: new Delayed(), // stores highlight worker timeout - keySeq: null, // Unfinished key sequence - specialChars: null + function seeReadOnlySpans() { + sawReadOnlySpans = true; + } + + function seeCollapsedSpans() { + sawCollapsedSpans = true; + } + + // TEXTMARKER SPANS + + function MarkedSpan(marker, from, to) { + this.marker = marker; + this.from = from; this.to = to; + } + + // Search an array of spans for a span matching the given marker. + function getMarkedSpanFor(spans, marker) { + if (spans) { for (var i = 0; i < spans.length; ++i) { + var span = spans[i]; + if (span.marker == marker) { return span } + } } + } + // Remove a span from an array, returning undefined if no spans are + // left (we don't store arrays for lines without spans). + function removeMarkedSpan(spans, span) { + var r; + for (var i = 0; i < spans.length; ++i) + { if (spans[i] != span) { (r || (r = [])).push(spans[i]); } } + return r + } + // Add a span to a line. + function addMarkedSpan(line, span) { + line.markedSpans = line.markedSpans ? line.markedSpans.concat([span]) : [span]; + span.marker.attachLine(line); + } + + // Used for the algorithm that adjusts markers for a change in the + // document. These functions cut an array of spans at a given + // character position, returning an array of remaining chunks (or + // undefined if nothing remains). + function markedSpansBefore(old, startCh, isInsert) { + var nw; + if (old) { for (var i = 0; i < old.length; ++i) { + var span = old[i], marker = span.marker; + var startsBefore = span.from == null || (marker.inclusiveLeft ? span.from <= startCh : span.from < startCh); + if (startsBefore || span.from == startCh && marker.type == "bookmark" && (!isInsert || !span.marker.insertLeft)) { + var endsAfter = span.to == null || (marker.inclusiveRight ? span.to >= startCh : span.to > startCh) + ;(nw || (nw = [])).push(new MarkedSpan(marker, span.from, endsAfter ? null : span.to)); + } + } } + return nw + } + function markedSpansAfter(old, endCh, isInsert) { + var nw; + if (old) { for (var i = 0; i < old.length; ++i) { + var span = old[i], marker = span.marker; + var endsAfter = span.to == null || (marker.inclusiveRight ? span.to >= endCh : span.to > endCh); + if (endsAfter || span.from == endCh && marker.type == "bookmark" && (!isInsert || span.marker.insertLeft)) { + var startsBefore = span.from == null || (marker.inclusiveLeft ? 
span.from <= endCh : span.from < endCh) + ;(nw || (nw = [])).push(new MarkedSpan(marker, startsBefore ? null : span.from - endCh, + span.to == null ? null : span.to - endCh)); + } + } } + return nw + } + + // Given a change object, compute the new set of marker spans that + // cover the line in which the change took place. Removes spans + // entirely within the change, reconnects spans belonging to the + // same marker that appear on both sides of the change, and cuts off + // spans partially within the change. Returns an array of span + // arrays with one element for each line in (after) the change. + function stretchSpansOverChange(doc, change) { + if (change.full) { return null } + var oldFirst = isLine(doc, change.from.line) && getLine(doc, change.from.line).markedSpans; + var oldLast = isLine(doc, change.to.line) && getLine(doc, change.to.line).markedSpans; + if (!oldFirst && !oldLast) { return null } + + var startCh = change.from.ch, endCh = change.to.ch, isInsert = cmp(change.from, change.to) == 0; + // Get the spans that 'stick out' on both sides + var first = markedSpansBefore(oldFirst, startCh, isInsert); + var last = markedSpansAfter(oldLast, endCh, isInsert); + + // Next, merge those two ends + var sameLine = change.text.length == 1, offset = lst(change.text).length + (sameLine ? startCh : 0); + if (first) { + // Fix up .to properties of first + for (var i = 0; i < first.length; ++i) { + var span = first[i]; + if (span.to == null) { + var found = getMarkedSpanFor(last, span.marker); + if (!found) { span.to = startCh; } + else if (sameLine) { span.to = found.to == null ? null : found.to + offset; } + } + } + } + if (last) { + // Fix up .from in last (or move them into first in case of sameLine) + for (var i$1 = 0; i$1 < last.length; ++i$1) { + var span$1 = last[i$1]; + if (span$1.to != null) { span$1.to += offset; } + if (span$1.from == null) { + var found$1 = getMarkedSpanFor(first, span$1.marker); + if (!found$1) { + span$1.from = offset; + if (sameLine) { (first || (first = [])).push(span$1); } + } + } else { + span$1.from += offset; + if (sameLine) { (first || (first = [])).push(span$1); } + } + } + } + // Make sure we didn't create any zero-length spans + if (first) { first = clearEmptySpans(first); } + if (last && last != first) { last = clearEmptySpans(last); } + + var newMarkers = [first]; + if (!sameLine) { + // Fill gap with whole-line-spans + var gap = change.text.length - 2, gapMarkers; + if (gap > 0 && first) + { for (var i$2 = 0; i$2 < first.length; ++i$2) + { if (first[i$2].to == null) + { (gapMarkers || (gapMarkers = [])).push(new MarkedSpan(first[i$2].marker, null, null)); } } } + for (var i$3 = 0; i$3 < gap; ++i$3) + { newMarkers.push(gapMarkers); } + newMarkers.push(last); + } + return newMarkers + } + + // Remove spans that are empty and don't have a clearWhenEmpty + // option of false. + function clearEmptySpans(spans) { + for (var i = 0; i < spans.length; ++i) { + var span = spans[i]; + if (span.from != null && span.from == span.to && span.marker.clearWhenEmpty !== false) + { spans.splice(i--, 1); } + } + if (!spans.length) { return null } + return spans + } + + // Used to 'clip' out readOnly ranges when making a change. 
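+ // Editor's note: e.g. deleting ch 0-20 of a line that holds a readOnly marker
+ // over ch 5-10 comes back from removeReadOnlyRanges as two parts, one ending
+ // at ch 5 and one starting at ch 10, so the protected text is left untouched.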
+ function removeReadOnlyRanges(doc, from, to) { + var markers = null; + doc.iter(from.line, to.line + 1, function (line) { + if (line.markedSpans) { for (var i = 0; i < line.markedSpans.length; ++i) { + var mark = line.markedSpans[i].marker; + if (mark.readOnly && (!markers || indexOf(markers, mark) == -1)) + { (markers || (markers = [])).push(mark); } + } } + }); + if (!markers) { return null } + var parts = [{from: from, to: to}]; + for (var i = 0; i < markers.length; ++i) { + var mk = markers[i], m = mk.find(0); + for (var j = 0; j < parts.length; ++j) { + var p = parts[j]; + if (cmp(p.to, m.from) < 0 || cmp(p.from, m.to) > 0) { continue } + var newParts = [j, 1], dfrom = cmp(p.from, m.from), dto = cmp(p.to, m.to); + if (dfrom < 0 || !mk.inclusiveLeft && !dfrom) + { newParts.push({from: p.from, to: m.from}); } + if (dto > 0 || !mk.inclusiveRight && !dto) + { newParts.push({from: m.to, to: p.to}); } + parts.splice.apply(parts, newParts); + j += newParts.length - 3; + } + } + return parts + } + + // Connect or disconnect spans from a line. + function detachMarkedSpans(line) { + var spans = line.markedSpans; + if (!spans) { return } + for (var i = 0; i < spans.length; ++i) + { spans[i].marker.detachLine(line); } + line.markedSpans = null; + } + function attachMarkedSpans(line, spans) { + if (!spans) { return } + for (var i = 0; i < spans.length; ++i) + { spans[i].marker.attachLine(line); } + line.markedSpans = spans; + } + + // Helpers used when computing which overlapping collapsed span + // counts as the larger one. + function extraLeft(marker) { return marker.inclusiveLeft ? -1 : 0 } + function extraRight(marker) { return marker.inclusiveRight ? 1 : 0 } + + // Returns a number indicating which of two overlapping collapsed + // spans is larger (and thus includes the other). Falls back to + // comparing ids when the spans cover exactly the same range. + function compareCollapsedMarkers(a, b) { + var lenDiff = a.lines.length - b.lines.length; + if (lenDiff != 0) { return lenDiff } + var aPos = a.find(), bPos = b.find(); + var fromCmp = cmp(aPos.from, bPos.from) || extraLeft(a) - extraLeft(b); + if (fromCmp) { return -fromCmp } + var toCmp = cmp(aPos.to, bPos.to) || extraRight(a) - extraRight(b); + if (toCmp) { return toCmp } + return b.id - a.id + } + + // Find out whether a line ends or starts in a collapsed span. If + // so, return the marker for that span. + function collapsedSpanAtSide(line, start) { + var sps = sawCollapsedSpans && line.markedSpans, found; + if (sps) { for (var sp = (void 0), i = 0; i < sps.length; ++i) { + sp = sps[i]; + if (sp.marker.collapsed && (start ? sp.from : sp.to) == null && + (!found || compareCollapsedMarkers(found, sp.marker) < 0)) + { found = sp.marker; } + } } + return found + } + function collapsedSpanAtStart(line) { return collapsedSpanAtSide(line, true) } + function collapsedSpanAtEnd(line) { return collapsedSpanAtSide(line, false) } + + function collapsedSpanAround(line, ch) { + var sps = sawCollapsedSpans && line.markedSpans, found; + if (sps) { for (var i = 0; i < sps.length; ++i) { + var sp = sps[i]; + if (sp.marker.collapsed && (sp.from == null || sp.from < ch) && (sp.to == null || sp.to > ch) && + (!found || compareCollapsedMarkers(found, sp.marker) < 0)) { found = sp.marker; } + } } + return found + } + + // Test whether there exists a collapsed span that partially + // overlaps (covers the start or end, but not both) of a new span. + // Such overlap is not allowed. 
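+ // Editor's note: e.g. an existing collapsed span over ch 2-8 conflicts with a
+ // new collapsed span over ch 5-12 (it covers the new span's start but not its
+ // end), while fully nested or fully containing ranges such as 3-6 or 0-10 are
+ // allowed.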
+ function conflictingCollapsedRange(doc, lineNo$$1, from, to, marker) { + var line = getLine(doc, lineNo$$1); + var sps = sawCollapsedSpans && line.markedSpans; + if (sps) { for (var i = 0; i < sps.length; ++i) { + var sp = sps[i]; + if (!sp.marker.collapsed) { continue } + var found = sp.marker.find(0); + var fromCmp = cmp(found.from, from) || extraLeft(sp.marker) - extraLeft(marker); + var toCmp = cmp(found.to, to) || extraRight(sp.marker) - extraRight(marker); + if (fromCmp >= 0 && toCmp <= 0 || fromCmp <= 0 && toCmp >= 0) { continue } + if (fromCmp <= 0 && (sp.marker.inclusiveRight && marker.inclusiveLeft ? cmp(found.to, from) >= 0 : cmp(found.to, from) > 0) || + fromCmp >= 0 && (sp.marker.inclusiveRight && marker.inclusiveLeft ? cmp(found.from, to) <= 0 : cmp(found.from, to) < 0)) + { return true } + } } + } + + // A visual line is a line as drawn on the screen. Folding, for + // example, can cause multiple logical lines to appear on the same + // visual line. This finds the start of the visual line that the + // given line is part of (usually that is the line itself). + function visualLine(line) { + var merged; + while (merged = collapsedSpanAtStart(line)) + { line = merged.find(-1, true).line; } + return line + } + + function visualLineEnd(line) { + var merged; + while (merged = collapsedSpanAtEnd(line)) + { line = merged.find(1, true).line; } + return line + } + + // Returns an array of logical lines that continue the visual line + // started by the argument, or undefined if there are no such lines. + function visualLineContinued(line) { + var merged, lines; + while (merged = collapsedSpanAtEnd(line)) { + line = merged.find(1, true).line + ;(lines || (lines = [])).push(line); + } + return lines + } + + // Get the line number of the start of the visual line that the + // given line number is part of. + function visualLineNo(doc, lineN) { + var line = getLine(doc, lineN), vis = visualLine(line); + if (line == vis) { return lineN } + return lineNo(vis) + } + + // Get the line number of the start of the next visual line after + // the given line. + function visualLineEndNo(doc, lineN) { + if (lineN > doc.lastLine()) { return lineN } + var line = getLine(doc, lineN), merged; + if (!lineIsHidden(doc, line)) { return lineN } + while (merged = collapsedSpanAtEnd(line)) + { line = merged.find(1, true).line; } + return lineNo(line) + 1 + } + + // Compute whether a line is hidden. Lines count as hidden when they + // are part of a visual line that starts with another line, or when + // they are entirely covered by collapsed, non-widget span. 
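+ // Editor's note: e.g. a collapsed, widget-less span running from the middle of
+ // line 10 to the middle of line 12 makes lines 11 and 12 hidden; they are drawn
+ // as part of line 10's visual line, so visualLineNo(doc, 12) == 10, while line
+ // 10 itself is not hidden.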
+ function lineIsHidden(doc, line) { + var sps = sawCollapsedSpans && line.markedSpans; + if (sps) { for (var sp = (void 0), i = 0; i < sps.length; ++i) { + sp = sps[i]; + if (!sp.marker.collapsed) { continue } + if (sp.from == null) { return true } + if (sp.marker.widgetNode) { continue } + if (sp.from == 0 && sp.marker.inclusiveLeft && lineIsHiddenInner(doc, line, sp)) + { return true } + } } + } + function lineIsHiddenInner(doc, line, span) { + if (span.to == null) { + var end = span.marker.find(1, true); + return lineIsHiddenInner(doc, end.line, getMarkedSpanFor(end.line.markedSpans, span.marker)) + } + if (span.marker.inclusiveRight && span.to == line.text.length) + { return true } + for (var sp = (void 0), i = 0; i < line.markedSpans.length; ++i) { + sp = line.markedSpans[i]; + if (sp.marker.collapsed && !sp.marker.widgetNode && sp.from == span.to && + (sp.to == null || sp.to != span.from) && + (sp.marker.inclusiveLeft || span.marker.inclusiveRight) && + lineIsHiddenInner(doc, line, sp)) { return true } + } + } + + // Find the height above the given line. + function heightAtLine(lineObj) { + lineObj = visualLine(lineObj); + + var h = 0, chunk = lineObj.parent; + for (var i = 0; i < chunk.lines.length; ++i) { + var line = chunk.lines[i]; + if (line == lineObj) { break } + else { h += line.height; } + } + for (var p = chunk.parent; p; chunk = p, p = chunk.parent) { + for (var i$1 = 0; i$1 < p.children.length; ++i$1) { + var cur = p.children[i$1]; + if (cur == chunk) { break } + else { h += cur.height; } + } + } + return h + } + + // Compute the character length of a line, taking into account + // collapsed ranges (see markText) that might hide parts, and join + // other lines onto it. + function lineLength(line) { + if (line.height == 0) { return 0 } + var len = line.text.length, merged, cur = line; + while (merged = collapsedSpanAtStart(cur)) { + var found = merged.find(0, true); + cur = found.from.line; + len += found.from.ch - found.to.ch; + } + cur = line; + while (merged = collapsedSpanAtEnd(cur)) { + var found$1 = merged.find(0, true); + len -= cur.text.length - found$1.from.ch; + cur = found$1.to.line; + len += cur.text.length - found$1.to.ch; + } + return len + } + + // Find the longest line in the document. + function findMaxLine(cm) { + var d = cm.display, doc = cm.doc; + d.maxLine = getLine(doc, doc.first); + d.maxLineLength = lineLength(d.maxLine); + d.maxLineChanged = true; + doc.iter(function (line) { + var len = lineLength(line); + if (len > d.maxLineLength) { + d.maxLineLength = len; + d.maxLine = line; + } + }); + } + + // LINE DATA STRUCTURE + + // Line objects. These hold state related to a line, including + // highlighting info (the styles array). + var Line = function(text, markedSpans, estimateHeight) { + this.text = text; + attachMarkedSpans(this, markedSpans); + this.height = estimateHeight ? estimateHeight(this) : 1; + }; + + Line.prototype.lineNo = function () { return lineNo(this) }; + eventMixin(Line); + + // Change the content (text, markers) of a line. Automatically + // invalidates cached information and tries to re-estimate the + // line's height. + function updateLine(line, text, markedSpans, estimateHeight) { + line.text = text; + if (line.stateAfter) { line.stateAfter = null; } + if (line.styles) { line.styles = null; } + if (line.order != null) { line.order = null; } + detachMarkedSpans(line); + attachMarkedSpans(line, markedSpans); + var estHeight = estimateHeight ? 
estimateHeight(line) : 1; + if (estHeight != line.height) { updateLineHeight(line, estHeight); } + } + + // Detach a line from the document tree and its markers. + function cleanUpLine(line) { + line.parent = null; + detachMarkedSpans(line); + } + + // Convert a style as returned by a mode (either null, or a string + // containing one or more styles) to a CSS style. This is cached, + // and also looks for line-wide styles. + var styleToClassCache = {}, styleToClassCacheWithMode = {}; + function interpretTokenStyle(style, options) { + if (!style || /^\s*$/.test(style)) { return null } + var cache = options.addModeClass ? styleToClassCacheWithMode : styleToClassCache; + return cache[style] || + (cache[style] = style.replace(/\S+/g, "cm-$&")) + } + + // Render the DOM representation of the text of a line. Also builds + // up a 'line map', which points at the DOM nodes that represent + // specific stretches of text, and is used by the measuring code. + // The returned object contains the DOM node, this map, and + // information about line-wide styles that were set by the mode. + function buildLineContent(cm, lineView) { + // The padding-right forces the element to have a 'border', which + // is needed on Webkit to be able to get line-level bounding + // rectangles for it (in measureChar). + var content = eltP("span", null, null, webkit ? "padding-right: .1px" : null); + var builder = {pre: eltP("pre", [content], "CodeMirror-line"), content: content, + col: 0, pos: 0, cm: cm, + trailingSpace: false, + splitSpaces: cm.getOption("lineWrapping")}; + lineView.measure = {}; + + // Iterate over the logical lines that make up this visual line. + for (var i = 0; i <= (lineView.rest ? lineView.rest.length : 0); i++) { + var line = i ? lineView.rest[i - 1] : lineView.line, order = (void 0); + builder.pos = 0; + builder.addToken = buildToken; + // Optionally wire in some hacks into the token-rendering + // algorithm, to deal with browser quirks. + if (hasBadBidiRects(cm.display.measure) && (order = getOrder(line, cm.doc.direction))) + { builder.addToken = buildTokenBadBidi(builder.addToken, order); } + builder.map = []; + var allowFrontierUpdate = lineView != cm.display.externalMeasured && lineNo(line); + insertLineContent(line, builder, getLineStyles(cm, line, allowFrontierUpdate)); + if (line.styleClasses) { + if (line.styleClasses.bgClass) + { builder.bgClass = joinClasses(line.styleClasses.bgClass, builder.bgClass || ""); } + if (line.styleClasses.textClass) + { builder.textClass = joinClasses(line.styleClasses.textClass, builder.textClass || ""); } + } + + // Ensure at least a single node is present, for measuring. 
+ if (builder.map.length == 0) + { builder.map.push(0, 0, builder.content.appendChild(zeroWidthElement(cm.display.measure))); } + + // Store the map and a cache object for the current logical line + if (i == 0) { + lineView.measure.map = builder.map; + lineView.measure.cache = {}; + } else { + (lineView.measure.maps || (lineView.measure.maps = [])).push(builder.map) + ;(lineView.measure.caches || (lineView.measure.caches = [])).push({}); + } + } + + // See issue #2901 + if (webkit) { + var last = builder.content.lastChild; + if (/\bcm-tab\b/.test(last.className) || (last.querySelector && last.querySelector(".cm-tab"))) + { builder.content.className = "cm-tab-wrap-hack"; } + } + + signal(cm, "renderLine", cm, lineView.line, builder.pre); + if (builder.pre.className) + { builder.textClass = joinClasses(builder.pre.className, builder.textClass || ""); } + + return builder + } + + function defaultSpecialCharPlaceholder(ch) { + var token = elt("span", "\u2022", "cm-invalidchar"); + token.title = "\\u" + ch.charCodeAt(0).toString(16); + token.setAttribute("aria-label", token.title); + return token + } + + // Build up the DOM representation for a single token, and add it to + // the line map. Takes care to render special characters separately. + function buildToken(builder, text, style, startStyle, endStyle, css, attributes) { + if (!text) { return } + var displayText = builder.splitSpaces ? splitSpaces(text, builder.trailingSpace) : text; + var special = builder.cm.state.specialChars, mustWrap = false; + var content; + if (!special.test(text)) { + builder.col += text.length; + content = document.createTextNode(displayText); + builder.map.push(builder.pos, builder.pos + text.length, content); + if (ie && ie_version < 9) { mustWrap = true; } + builder.pos += text.length; + } else { + content = document.createDocumentFragment(); + var pos = 0; + while (true) { + special.lastIndex = pos; + var m = special.exec(text); + var skipped = m ? m.index - pos : text.length - pos; + if (skipped) { + var txt = document.createTextNode(displayText.slice(pos, pos + skipped)); + if (ie && ie_version < 9) { content.appendChild(elt("span", [txt])); } + else { content.appendChild(txt); } + builder.map.push(builder.pos, builder.pos + skipped, txt); + builder.col += skipped; + builder.pos += skipped; + } + if (!m) { break } + pos += skipped + 1; + var txt$1 = (void 0); + if (m[0] == "\t") { + var tabSize = builder.cm.options.tabSize, tabWidth = tabSize - builder.col % tabSize; + txt$1 = content.appendChild(elt("span", spaceStr(tabWidth), "cm-tab")); + txt$1.setAttribute("role", "presentation"); + txt$1.setAttribute("cm-text", "\t"); + builder.col += tabWidth; + } else if (m[0] == "\r" || m[0] == "\n") { + txt$1 = content.appendChild(elt("span", m[0] == "\r" ? 
"\u240d" : "\u2424", "cm-invalidchar")); + txt$1.setAttribute("cm-text", m[0]); + builder.col += 1; + } else { + txt$1 = builder.cm.options.specialCharPlaceholder(m[0]); + txt$1.setAttribute("cm-text", m[0]); + if (ie && ie_version < 9) { content.appendChild(elt("span", [txt$1])); } + else { content.appendChild(txt$1); } + builder.col += 1; + } + builder.map.push(builder.pos, builder.pos + 1, txt$1); + builder.pos++; + } + } + builder.trailingSpace = displayText.charCodeAt(text.length - 1) == 32; + if (style || startStyle || endStyle || mustWrap || css) { + var fullStyle = style || ""; + if (startStyle) { fullStyle += startStyle; } + if (endStyle) { fullStyle += endStyle; } + var token = elt("span", [content], fullStyle, css); + if (attributes) { + for (var attr in attributes) { if (attributes.hasOwnProperty(attr) && attr != "style" && attr != "class") + { token.setAttribute(attr, attributes[attr]); } } + } + return builder.content.appendChild(token) + } + builder.content.appendChild(content); + } + + // Change some spaces to NBSP to prevent the browser from collapsing + // trailing spaces at the end of a line when rendering text (issue #1362). + function splitSpaces(text, trailingBefore) { + if (text.length > 1 && !/ /.test(text)) { return text } + var spaceBefore = trailingBefore, result = ""; + for (var i = 0; i < text.length; i++) { + var ch = text.charAt(i); + if (ch == " " && spaceBefore && (i == text.length - 1 || text.charCodeAt(i + 1) == 32)) + { ch = "\u00a0"; } + result += ch; + spaceBefore = ch == " "; + } + return result + } + + // Work around nonsense dimensions being reported for stretches of + // right-to-left text. + function buildTokenBadBidi(inner, order) { + return function (builder, text, style, startStyle, endStyle, css, attributes) { + style = style ? style + " cm-force-border" : "cm-force-border"; + var start = builder.pos, end = start + text.length; + for (;;) { + // Find the part that overlaps with the start of this text + var part = (void 0); + for (var i = 0; i < order.length; i++) { + part = order[i]; + if (part.to > start && part.from <= start) { break } + } + if (part.to >= end) { return inner(builder, text, style, startStyle, endStyle, css, attributes) } + inner(builder, text.slice(0, part.to - start), style, startStyle, null, css, attributes); + startStyle = null; + text = text.slice(part.to - start); + start = part.to; + } + } + } + + function buildCollapsedSpan(builder, size, marker, ignoreWidget) { + var widget = !ignoreWidget && marker.widgetNode; + if (widget) { builder.map.push(builder.pos, builder.pos + size, widget); } + if (!ignoreWidget && builder.cm.display.input.needsContentAttribute) { + if (!widget) + { widget = builder.content.appendChild(document.createElement("span")); } + widget.setAttribute("cm-marker", marker.id); + } + if (widget) { + builder.cm.display.input.setUneditable(widget); + builder.content.appendChild(widget); + } + builder.pos += size; + builder.trailingSpace = false; + } + + // Outputs a number of spans to make up a line, taking highlighting + // and marked text into account. 
+ function insertLineContent(line, builder, styles) { + var spans = line.markedSpans, allText = line.text, at = 0; + if (!spans) { + for (var i$1 = 1; i$1 < styles.length; i$1+=2) + { builder.addToken(builder, allText.slice(at, at = styles[i$1]), interpretTokenStyle(styles[i$1+1], builder.cm.options)); } + return + } + + var len = allText.length, pos = 0, i = 1, text = "", style, css; + var nextChange = 0, spanStyle, spanEndStyle, spanStartStyle, collapsed, attributes; + for (;;) { + if (nextChange == pos) { // Update current marker set + spanStyle = spanEndStyle = spanStartStyle = css = ""; + attributes = null; + collapsed = null; nextChange = Infinity; + var foundBookmarks = [], endStyles = (void 0); + for (var j = 0; j < spans.length; ++j) { + var sp = spans[j], m = sp.marker; + if (m.type == "bookmark" && sp.from == pos && m.widgetNode) { + foundBookmarks.push(m); + } else if (sp.from <= pos && (sp.to == null || sp.to > pos || m.collapsed && sp.to == pos && sp.from == pos)) { + if (sp.to != null && sp.to != pos && nextChange > sp.to) { + nextChange = sp.to; + spanEndStyle = ""; + } + if (m.className) { spanStyle += " " + m.className; } + if (m.css) { css = (css ? css + ";" : "") + m.css; } + if (m.startStyle && sp.from == pos) { spanStartStyle += " " + m.startStyle; } + if (m.endStyle && sp.to == nextChange) { (endStyles || (endStyles = [])).push(m.endStyle, sp.to); } + // support for the old title property + // https://github.com/codemirror/CodeMirror/pull/5673 + if (m.title) { (attributes || (attributes = {})).title = m.title; } + if (m.attributes) { + for (var attr in m.attributes) + { (attributes || (attributes = {}))[attr] = m.attributes[attr]; } + } + if (m.collapsed && (!collapsed || compareCollapsedMarkers(collapsed.marker, m) < 0)) + { collapsed = sp; } + } else if (sp.from > pos && nextChange > sp.from) { + nextChange = sp.from; + } + } + if (endStyles) { for (var j$1 = 0; j$1 < endStyles.length; j$1 += 2) + { if (endStyles[j$1 + 1] == nextChange) { spanEndStyle += " " + endStyles[j$1]; } } } + + if (!collapsed || collapsed.from == pos) { for (var j$2 = 0; j$2 < foundBookmarks.length; ++j$2) + { buildCollapsedSpan(builder, 0, foundBookmarks[j$2]); } } + if (collapsed && (collapsed.from || 0) == pos) { + buildCollapsedSpan(builder, (collapsed.to == null ? len + 1 : collapsed.to) - pos, + collapsed.marker, collapsed.from == null); + if (collapsed.to == null) { return } + if (collapsed.to == pos) { collapsed = false; } + } + } + if (pos >= len) { break } + + var upto = Math.min(len, nextChange); + while (true) { + if (text) { + var end = pos + text.length; + if (!collapsed) { + var tokenText = end > upto ? text.slice(0, upto - pos) : text; + builder.addToken(builder, tokenText, style ? style + spanStyle : spanStyle, + spanStartStyle, pos + tokenText.length == nextChange ? spanEndStyle : "", css, attributes); + } + if (end >= upto) {text = text.slice(upto - pos); pos = upto; break} + pos = end; + spanStartStyle = ""; + } + text = allText.slice(at, at = styles[i++]); + style = interpretTokenStyle(styles[i++], builder.cm.options); + } + } + } + + + // These objects are used to represent the visible (currently drawn) + // part of the document. A LineView may correspond to multiple + // logical lines, if those are connected by collapsed ranges. + function LineView(doc, line, lineN) { + // The starting line + this.line = line; + // Continuing lines, if any + this.rest = visualLineContinued(line); + // Number of logical lines in this visual line + this.size = this.rest ? 
lineNo(lst(this.rest)) - lineN + 1 : 1; + this.node = this.text = null; + this.hidden = lineIsHidden(doc, line); + } + + // Create a range of LineView objects for the given lines. + function buildViewArray(cm, from, to) { + var array = [], nextPos; + for (var pos = from; pos < to; pos = nextPos) { + var view = new LineView(cm.doc, getLine(cm.doc, pos), pos); + nextPos = pos + view.size; + array.push(view); + } + return array + } + + var operationGroup = null; + + function pushOperation(op) { + if (operationGroup) { + operationGroup.ops.push(op); + } else { + op.ownsGroup = operationGroup = { + ops: [op], + delayedCallbacks: [] + }; + } + } + + function fireCallbacksForOps(group) { + // Calls delayed callbacks and cursorActivity handlers until no + // new ones appear + var callbacks = group.delayedCallbacks, i = 0; + do { + for (; i < callbacks.length; i++) + { callbacks[i].call(null); } + for (var j = 0; j < group.ops.length; j++) { + var op = group.ops[j]; + if (op.cursorActivityHandlers) + { while (op.cursorActivityCalled < op.cursorActivityHandlers.length) + { op.cursorActivityHandlers[op.cursorActivityCalled++].call(null, op.cm); } } + } + } while (i < callbacks.length) + } + + function finishOperation(op, endCb) { + var group = op.ownsGroup; + if (!group) { return } + + try { fireCallbacksForOps(group); } + finally { + operationGroup = null; + endCb(group); + } + } + + var orphanDelayedCallbacks = null; + + // Often, we want to signal events at a point where we are in the + // middle of some work, but don't want the handler to start calling + // other methods on the editor, which might be in an inconsistent + // state or simply not expect any other events to happen. + // signalLater looks whether there are any handlers, and schedules + // them to be executed when the last operation ends, or, if no + // operation is active, when a timeout fires. + function signalLater(emitter, type /*, values...*/) { + var arr = getHandlers(emitter, type); + if (!arr.length) { return } + var args = Array.prototype.slice.call(arguments, 2), list; + if (operationGroup) { + list = operationGroup.delayedCallbacks; + } else if (orphanDelayedCallbacks) { + list = orphanDelayedCallbacks; + } else { + list = orphanDelayedCallbacks = []; + setTimeout(fireOrphanDelayed, 0); + } + var loop = function ( i ) { + list.push(function () { return arr[i].apply(null, args); }); }; - var cm = this; - - // Override magic textarea content restore that IE sometimes does - // on our hidden textarea on reload - if (ie && ie_version < 11) setTimeout(function() { cm.display.input.reset(true); }, 20); - - registerEventHandlers(this); - ensureGlobalHandlers(); - - startOperation(this); - this.curOp.forceUpdate = true; - attachDoc(this, doc); - - if ((options.autofocus && !mobile) || cm.hasFocus()) - setTimeout(bind(onFocus, this), 20); - else - onBlur(this); - - for (var opt in optionHandlers) if (optionHandlers.hasOwnProperty(opt)) - optionHandlers[opt](this, options[opt], Init); - maybeUpdateLineNumberWidth(this); - if (options.finishInit) options.finishInit(this); - for (var i = 0; i < initHooks.length; ++i) initHooks[i](this); - endOperation(this); - // Suppress optimizelegibility in Webkit, since it breaks text - // measuring on line wrapping boundaries. 
- if (webkit && options.lineWrapping && - getComputedStyle(display.lineDiv).textRendering == "optimizelegibility") - display.lineDiv.style.textRendering = "auto"; - } - - // DISPLAY CONSTRUCTOR + for (var i = 0; i < arr.length; ++i) + loop( i ); + } + + function fireOrphanDelayed() { + var delayed = orphanDelayedCallbacks; + orphanDelayedCallbacks = null; + for (var i = 0; i < delayed.length; ++i) { delayed[i](); } + } + + // When an aspect of a line changes, a string is added to + // lineView.changes. This updates the relevant part of the line's + // DOM structure. + function updateLineForChanges(cm, lineView, lineN, dims) { + for (var j = 0; j < lineView.changes.length; j++) { + var type = lineView.changes[j]; + if (type == "text") { updateLineText(cm, lineView); } + else if (type == "gutter") { updateLineGutter(cm, lineView, lineN, dims); } + else if (type == "class") { updateLineClasses(cm, lineView); } + else if (type == "widget") { updateLineWidgets(cm, lineView, dims); } + } + lineView.changes = null; + } + + // Lines with gutter elements, widgets or a background class need to + // be wrapped, and have the extra elements added to the wrapper div + function ensureLineWrapped(lineView) { + if (lineView.node == lineView.text) { + lineView.node = elt("div", null, null, "position: relative"); + if (lineView.text.parentNode) + { lineView.text.parentNode.replaceChild(lineView.node, lineView.text); } + lineView.node.appendChild(lineView.text); + if (ie && ie_version < 8) { lineView.node.style.zIndex = 2; } + } + return lineView.node + } + + function updateLineBackground(cm, lineView) { + var cls = lineView.bgClass ? lineView.bgClass + " " + (lineView.line.bgClass || "") : lineView.line.bgClass; + if (cls) { cls += " CodeMirror-linebackground"; } + if (lineView.background) { + if (cls) { lineView.background.className = cls; } + else { lineView.background.parentNode.removeChild(lineView.background); lineView.background = null; } + } else if (cls) { + var wrap = ensureLineWrapped(lineView); + lineView.background = wrap.insertBefore(elt("div", null, cls), wrap.firstChild); + cm.display.input.setUneditable(lineView.background); + } + } + + // Wrapper around buildLineContent which will reuse the structure + // in display.externalMeasured when possible. + function getLineContent(cm, lineView) { + var ext = cm.display.externalMeasured; + if (ext && ext.line == lineView.line) { + cm.display.externalMeasured = null; + lineView.measure = ext.measure; + return ext.built + } + return buildLineContent(cm, lineView) + } + + // Redraw the line's text. Interacts with the background and text + // classes because the mode may output tokens that influence these + // classes. + function updateLineText(cm, lineView) { + var cls = lineView.text.className; + var built = getLineContent(cm, lineView); + if (lineView.text == lineView.node) { lineView.node = built.pre; } + lineView.text.parentNode.replaceChild(built.pre, lineView.text); + lineView.text = built.pre; + if (built.bgClass != lineView.bgClass || built.textClass != lineView.textClass) { + lineView.bgClass = built.bgClass; + lineView.textClass = built.textClass; + updateLineClasses(cm, lineView); + } else if (cls) { + lineView.text.className = cls; + } + } + + function updateLineClasses(cm, lineView) { + updateLineBackground(cm, lineView); + if (lineView.line.wrapClass) + { ensureLineWrapped(lineView).className = lineView.line.wrapClass; } + else if (lineView.node != lineView.text) + { lineView.node.className = ""; } + var textClass = lineView.textClass ? 
lineView.textClass + " " + (lineView.line.textClass || "") : lineView.line.textClass; + lineView.text.className = textClass || ""; + } + + function updateLineGutter(cm, lineView, lineN, dims) { + if (lineView.gutter) { + lineView.node.removeChild(lineView.gutter); + lineView.gutter = null; + } + if (lineView.gutterBackground) { + lineView.node.removeChild(lineView.gutterBackground); + lineView.gutterBackground = null; + } + if (lineView.line.gutterClass) { + var wrap = ensureLineWrapped(lineView); + lineView.gutterBackground = elt("div", null, "CodeMirror-gutter-background " + lineView.line.gutterClass, + ("left: " + (cm.options.fixedGutter ? dims.fixedPos : -dims.gutterTotalWidth) + "px; width: " + (dims.gutterTotalWidth) + "px")); + cm.display.input.setUneditable(lineView.gutterBackground); + wrap.insertBefore(lineView.gutterBackground, lineView.text); + } + var markers = lineView.line.gutterMarkers; + if (cm.options.lineNumbers || markers) { + var wrap$1 = ensureLineWrapped(lineView); + var gutterWrap = lineView.gutter = elt("div", null, "CodeMirror-gutter-wrapper", ("left: " + (cm.options.fixedGutter ? dims.fixedPos : -dims.gutterTotalWidth) + "px")); + cm.display.input.setUneditable(gutterWrap); + wrap$1.insertBefore(gutterWrap, lineView.text); + if (lineView.line.gutterClass) + { gutterWrap.className += " " + lineView.line.gutterClass; } + if (cm.options.lineNumbers && (!markers || !markers["CodeMirror-linenumbers"])) + { lineView.lineNumber = gutterWrap.appendChild( + elt("div", lineNumberFor(cm.options, lineN), + "CodeMirror-linenumber CodeMirror-gutter-elt", + ("left: " + (dims.gutterLeft["CodeMirror-linenumbers"]) + "px; width: " + (cm.display.lineNumInnerWidth) + "px"))); } + if (markers) { for (var k = 0; k < cm.display.gutterSpecs.length; ++k) { + var id = cm.display.gutterSpecs[k].className, found = markers.hasOwnProperty(id) && markers[id]; + if (found) + { gutterWrap.appendChild(elt("div", [found], "CodeMirror-gutter-elt", + ("left: " + (dims.gutterLeft[id]) + "px; width: " + (dims.gutterWidth[id]) + "px"))); } + } } + } + } + + function updateLineWidgets(cm, lineView, dims) { + if (lineView.alignable) { lineView.alignable = null; } + for (var node = lineView.node.firstChild, next = (void 0); node; node = next) { + next = node.nextSibling; + if (node.className == "CodeMirror-linewidget") + { lineView.node.removeChild(node); } + } + insertLineWidgets(cm, lineView, dims); + } + + // Build a line's DOM representation from scratch + function buildLineElement(cm, lineView, lineN, dims) { + var built = getLineContent(cm, lineView); + lineView.text = lineView.node = built.pre; + if (built.bgClass) { lineView.bgClass = built.bgClass; } + if (built.textClass) { lineView.textClass = built.textClass; } + + updateLineClasses(cm, lineView); + updateLineGutter(cm, lineView, lineN, dims); + insertLineWidgets(cm, lineView, dims); + return lineView.node + } + + // A lineView may contain multiple logical lines (when merged by + // collapsed spans). The widgets for all of them need to be drawn. 
+ function insertLineWidgets(cm, lineView, dims) { + insertLineWidgetsFor(cm, lineView.line, lineView, dims, true); + if (lineView.rest) { for (var i = 0; i < lineView.rest.length; i++) + { insertLineWidgetsFor(cm, lineView.rest[i], lineView, dims, false); } } + } + + function insertLineWidgetsFor(cm, line, lineView, dims, allowAbove) { + if (!line.widgets) { return } + var wrap = ensureLineWrapped(lineView); + for (var i = 0, ws = line.widgets; i < ws.length; ++i) { + var widget = ws[i], node = elt("div", [widget.node], "CodeMirror-linewidget"); + if (!widget.handleMouseEvents) { node.setAttribute("cm-ignore-events", "true"); } + positionLineWidget(widget, node, lineView, dims); + cm.display.input.setUneditable(node); + if (allowAbove && widget.above) + { wrap.insertBefore(node, lineView.gutter || lineView.text); } + else + { wrap.appendChild(node); } + signalLater(widget, "redraw"); + } + } + + function positionLineWidget(widget, node, lineView, dims) { + if (widget.noHScroll) { + (lineView.alignable || (lineView.alignable = [])).push(node); + var width = dims.wrapperWidth; + node.style.left = dims.fixedPos + "px"; + if (!widget.coverGutter) { + width -= dims.gutterTotalWidth; + node.style.paddingLeft = dims.gutterTotalWidth + "px"; + } + node.style.width = width + "px"; + } + if (widget.coverGutter) { + node.style.zIndex = 5; + node.style.position = "relative"; + if (!widget.noHScroll) { node.style.marginLeft = -dims.gutterTotalWidth + "px"; } + } + } + + function widgetHeight(widget) { + if (widget.height != null) { return widget.height } + var cm = widget.doc.cm; + if (!cm) { return 0 } + if (!contains(document.body, widget.node)) { + var parentStyle = "position: relative;"; + if (widget.coverGutter) + { parentStyle += "margin-left: -" + cm.display.gutters.offsetWidth + "px;"; } + if (widget.noHScroll) + { parentStyle += "width: " + cm.display.wrapper.clientWidth + "px;"; } + removeChildrenAndAdd(cm.display.measure, elt("div", [widget.node], null, parentStyle)); + } + return widget.height = widget.node.parentNode.offsetHeight + } + + // Return true when the given mouse event happened in a widget + function eventInWidget(display, e) { + for (var n = e_target(e); n != display.wrapper; n = n.parentNode) { + if (!n || (n.nodeType == 1 && n.getAttribute("cm-ignore-events") == "true") || + (n.parentNode == display.sizer && n != display.mover)) + { return true } + } + } + + // POSITION MEASUREMENT + + function paddingTop(display) {return display.lineSpace.offsetTop} + function paddingVert(display) {return display.mover.offsetHeight - display.lineSpace.offsetHeight} + function paddingH(display) { + if (display.cachedPaddingH) { return display.cachedPaddingH } + var e = removeChildrenAndAdd(display.measure, elt("pre", "x", "CodeMirror-line-like")); + var style = window.getComputedStyle ? window.getComputedStyle(e) : e.currentStyle; + var data = {left: parseInt(style.paddingLeft), right: parseInt(style.paddingRight)}; + if (!isNaN(data.left) && !isNaN(data.right)) { display.cachedPaddingH = data; } + return data + } + + function scrollGap(cm) { return scrollerGap - cm.display.nativeBarWidth } + function displayWidth(cm) { + return cm.display.scroller.clientWidth - scrollGap(cm) - cm.display.barWidth + } + function displayHeight(cm) { + return cm.display.scroller.clientHeight - scrollGap(cm) - cm.display.barHeight + } + + // Ensure the lineView.wrapping.heights array is populated. This is + // an array of bottom offsets for the lines that make up a drawn + // line. 
When lineWrapping is on, there might be more than one + // height. + function ensureLineHeights(cm, lineView, rect) { + var wrapping = cm.options.lineWrapping; + var curWidth = wrapping && displayWidth(cm); + if (!lineView.measure.heights || wrapping && lineView.measure.width != curWidth) { + var heights = lineView.measure.heights = []; + if (wrapping) { + lineView.measure.width = curWidth; + var rects = lineView.text.firstChild.getClientRects(); + for (var i = 0; i < rects.length - 1; i++) { + var cur = rects[i], next = rects[i + 1]; + if (Math.abs(cur.bottom - next.bottom) > 2) + { heights.push((cur.bottom + next.top) / 2 - rect.top); } + } + } + heights.push(rect.bottom - rect.top); + } + } + + // Find a line map (mapping character offsets to text nodes) and a + // measurement cache for the given line number. (A line view might + // contain multiple lines when collapsed ranges are present.) + function mapFromLineView(lineView, line, lineN) { + if (lineView.line == line) + { return {map: lineView.measure.map, cache: lineView.measure.cache} } + for (var i = 0; i < lineView.rest.length; i++) + { if (lineView.rest[i] == line) + { return {map: lineView.measure.maps[i], cache: lineView.measure.caches[i]} } } + for (var i$1 = 0; i$1 < lineView.rest.length; i$1++) + { if (lineNo(lineView.rest[i$1]) > lineN) + { return {map: lineView.measure.maps[i$1], cache: lineView.measure.caches[i$1], before: true} } } + } + + // Render a line into the hidden node display.externalMeasured. Used + // when measurement is needed for a line that's not in the viewport. + function updateExternalMeasurement(cm, line) { + line = visualLine(line); + var lineN = lineNo(line); + var view = cm.display.externalMeasured = new LineView(cm.doc, line, lineN); + view.lineN = lineN; + var built = view.built = buildLineContent(cm, view); + view.text = built.pre; + removeChildrenAndAdd(cm.display.lineMeasure, built.pre); + return view + } + + // Get a {top, bottom, left, right} box (in line-local coordinates) + // for a given character. + function measureChar(cm, line, ch, bias) { + return measureCharPrepared(cm, prepareMeasureForLine(cm, line), ch, bias) + } + + // Find a line view that corresponds to the given line number. + function findViewForLine(cm, lineN) { + if (lineN >= cm.display.viewFrom && lineN < cm.display.viewTo) + { return cm.display.view[findViewIndex(cm, lineN)] } + var ext = cm.display.externalMeasured; + if (ext && lineN >= ext.lineN && lineN < ext.lineN + ext.size) + { return ext } + } + + // Measurement can be split in two steps, the set-up work that + // applies to the whole line, and the measurement of the actual + // character. Functions like coordsChar, that need to do a lot of + // measurements in a row, can thus ensure that the set-up work is + // only done once. + function prepareMeasureForLine(cm, line) { + var lineN = lineNo(line); + var view = findViewForLine(cm, lineN); + if (view && !view.text) { + view = null; + } else if (view && view.changes) { + updateLineForChanges(cm, view, lineN, getDimensions(cm)); + cm.curOp.forceUpdate = true; + } + if (!view) + { view = updateExternalMeasurement(cm, line); } + + var info = mapFromLineView(view, line, lineN); + return { + line: line, view: view, rect: null, + map: info.map, cache: info.cache, before: info.before, + hasHeights: false + } + } + + // Given a prepared measurement object, measures the position of an + // actual character (or fetches it from the cache). 
+ function measureCharPrepared(cm, prepared, ch, bias, varHeight) { + if (prepared.before) { ch = -1; } + var key = ch + (bias || ""), found; + if (prepared.cache.hasOwnProperty(key)) { + found = prepared.cache[key]; + } else { + if (!prepared.rect) + { prepared.rect = prepared.view.text.getBoundingClientRect(); } + if (!prepared.hasHeights) { + ensureLineHeights(cm, prepared.view, prepared.rect); + prepared.hasHeights = true; + } + found = measureCharInner(cm, prepared, ch, bias); + if (!found.bogus) { prepared.cache[key] = found; } + } + return {left: found.left, right: found.right, + top: varHeight ? found.rtop : found.top, + bottom: varHeight ? found.rbottom : found.bottom} + } + + var nullRect = {left: 0, right: 0, top: 0, bottom: 0}; + + function nodeAndOffsetInLineMap(map$$1, ch, bias) { + var node, start, end, collapse, mStart, mEnd; + // First, search the line map for the text node corresponding to, + // or closest to, the target character. + for (var i = 0; i < map$$1.length; i += 3) { + mStart = map$$1[i]; + mEnd = map$$1[i + 1]; + if (ch < mStart) { + start = 0; end = 1; + collapse = "left"; + } else if (ch < mEnd) { + start = ch - mStart; + end = start + 1; + } else if (i == map$$1.length - 3 || ch == mEnd && map$$1[i + 3] > ch) { + end = mEnd - mStart; + start = end - 1; + if (ch >= mEnd) { collapse = "right"; } + } + if (start != null) { + node = map$$1[i + 2]; + if (mStart == mEnd && bias == (node.insertLeft ? "left" : "right")) + { collapse = bias; } + if (bias == "left" && start == 0) + { while (i && map$$1[i - 2] == map$$1[i - 3] && map$$1[i - 1].insertLeft) { + node = map$$1[(i -= 3) + 2]; + collapse = "left"; + } } + if (bias == "right" && start == mEnd - mStart) + { while (i < map$$1.length - 3 && map$$1[i + 3] == map$$1[i + 4] && !map$$1[i + 5].insertLeft) { + node = map$$1[(i += 3) + 2]; + collapse = "right"; + } } + break + } + } + return {node: node, start: start, end: end, collapse: collapse, coverStart: mStart, coverEnd: mEnd} + } + + function getUsefulRect(rects, bias) { + var rect = nullRect; + if (bias == "left") { for (var i = 0; i < rects.length; i++) { + if ((rect = rects[i]).left != rect.right) { break } + } } else { for (var i$1 = rects.length - 1; i$1 >= 0; i$1--) { + if ((rect = rects[i$1]).left != rect.right) { break } + } } + return rect + } + + function measureCharInner(cm, prepared, ch, bias) { + var place = nodeAndOffsetInLineMap(prepared.map, ch, bias); + var node = place.node, start = place.start, end = place.end, collapse = place.collapse; + + var rect; + if (node.nodeType == 3) { // If it is a text node, use a range to retrieve the coordinates. + for (var i$1 = 0; i$1 < 4; i$1++) { // Retry a maximum of 4 times when nonsense rectangles are returned + while (start && isExtendingChar(prepared.line.text.charAt(place.coverStart + start))) { --start; } + while (place.coverStart + end < place.coverEnd && isExtendingChar(prepared.line.text.charAt(place.coverStart + end))) { ++end; } + if (ie && ie_version < 9 && start == 0 && end == place.coverEnd - place.coverStart) + { rect = node.parentNode.getBoundingClientRect(); } + else + { rect = getUsefulRect(range(node, start, end).getClientRects(), bias); } + if (rect.left || rect.right || start == 0) { break } + end = start; + start = start - 1; + collapse = "right"; + } + if (ie && ie_version < 11) { rect = maybeUpdateRectForZooming(cm.display.measure, rect); } + } else { // If it is a widget, simply get the box for the whole widget. 
+ if (start > 0) { collapse = bias = "right"; } + var rects; + if (cm.options.lineWrapping && (rects = node.getClientRects()).length > 1) + { rect = rects[bias == "right" ? rects.length - 1 : 0]; } + else + { rect = node.getBoundingClientRect(); } + } + if (ie && ie_version < 9 && !start && (!rect || !rect.left && !rect.right)) { + var rSpan = node.parentNode.getClientRects()[0]; + if (rSpan) + { rect = {left: rSpan.left, right: rSpan.left + charWidth(cm.display), top: rSpan.top, bottom: rSpan.bottom}; } + else + { rect = nullRect; } + } + + var rtop = rect.top - prepared.rect.top, rbot = rect.bottom - prepared.rect.top; + var mid = (rtop + rbot) / 2; + var heights = prepared.view.measure.heights; + var i = 0; + for (; i < heights.length - 1; i++) + { if (mid < heights[i]) { break } } + var top = i ? heights[i - 1] : 0, bot = heights[i]; + var result = {left: (collapse == "right" ? rect.right : rect.left) - prepared.rect.left, + right: (collapse == "left" ? rect.left : rect.right) - prepared.rect.left, + top: top, bottom: bot}; + if (!rect.left && !rect.right) { result.bogus = true; } + if (!cm.options.singleCursorHeightPerLine) { result.rtop = rtop; result.rbottom = rbot; } + + return result + } + + // Work around problem with bounding client rects on ranges being + // returned incorrectly when zoomed on IE10 and below. + function maybeUpdateRectForZooming(measure, rect) { + if (!window.screen || screen.logicalXDPI == null || + screen.logicalXDPI == screen.deviceXDPI || !hasBadZoomedRects(measure)) + { return rect } + var scaleX = screen.logicalXDPI / screen.deviceXDPI; + var scaleY = screen.logicalYDPI / screen.deviceYDPI; + return {left: rect.left * scaleX, right: rect.right * scaleX, + top: rect.top * scaleY, bottom: rect.bottom * scaleY} + } + + function clearLineMeasurementCacheFor(lineView) { + if (lineView.measure) { + lineView.measure.cache = {}; + lineView.measure.heights = null; + if (lineView.rest) { for (var i = 0; i < lineView.rest.length; i++) + { lineView.measure.caches[i] = {}; } } + } + } + + function clearLineMeasurementCache(cm) { + cm.display.externalMeasure = null; + removeChildren(cm.display.lineMeasure); + for (var i = 0; i < cm.display.view.length; i++) + { clearLineMeasurementCacheFor(cm.display.view[i]); } + } + + function clearCaches(cm) { + clearLineMeasurementCache(cm); + cm.display.cachedCharWidth = cm.display.cachedTextHeight = cm.display.cachedPaddingH = null; + if (!cm.options.lineWrapping) { cm.display.maxLineChanged = true; } + cm.display.lineNumChars = null; + } + + function pageScrollX() { + // Work around https://bugs.chromium.org/p/chromium/issues/detail?id=489206 + // which causes page_Offset and bounding client rects to use + // different reference viewports and invalidate our calculations. 
+ if (chrome && android) { return -(document.body.getBoundingClientRect().left - parseInt(getComputedStyle(document.body).marginLeft)) } + return window.pageXOffset || (document.documentElement || document.body).scrollLeft + } + function pageScrollY() { + if (chrome && android) { return -(document.body.getBoundingClientRect().top - parseInt(getComputedStyle(document.body).marginTop)) } + return window.pageYOffset || (document.documentElement || document.body).scrollTop + } + + function widgetTopHeight(lineObj) { + var height = 0; + if (lineObj.widgets) { for (var i = 0; i < lineObj.widgets.length; ++i) { if (lineObj.widgets[i].above) + { height += widgetHeight(lineObj.widgets[i]); } } } + return height + } + + // Converts a {top, bottom, left, right} box from line-local + // coordinates into another coordinate system. Context may be one of + // "line", "div" (display.lineDiv), "local"./null (editor), "window", + // or "page". + function intoCoordSystem(cm, lineObj, rect, context, includeWidgets) { + if (!includeWidgets) { + var height = widgetTopHeight(lineObj); + rect.top += height; rect.bottom += height; + } + if (context == "line") { return rect } + if (!context) { context = "local"; } + var yOff = heightAtLine(lineObj); + if (context == "local") { yOff += paddingTop(cm.display); } + else { yOff -= cm.display.viewOffset; } + if (context == "page" || context == "window") { + var lOff = cm.display.lineSpace.getBoundingClientRect(); + yOff += lOff.top + (context == "window" ? 0 : pageScrollY()); + var xOff = lOff.left + (context == "window" ? 0 : pageScrollX()); + rect.left += xOff; rect.right += xOff; + } + rect.top += yOff; rect.bottom += yOff; + return rect + } + + // Coverts a box from "div" coords to another coordinate system. + // Context may be "window", "page", "div", or "local"./null. + function fromCoordSystem(cm, coords, context) { + if (context == "div") { return coords } + var left = coords.left, top = coords.top; + // First move into "page" coordinate system + if (context == "page") { + left -= pageScrollX(); + top -= pageScrollY(); + } else if (context == "local" || !context) { + var localBox = cm.display.sizer.getBoundingClientRect(); + left += localBox.left; + top += localBox.top; + } + + var lineSpaceBox = cm.display.lineSpace.getBoundingClientRect(); + return {left: left - lineSpaceBox.left, top: top - lineSpaceBox.top} + } + + function charCoords(cm, pos, context, lineObj, bias) { + if (!lineObj) { lineObj = getLine(cm.doc, pos.line); } + return intoCoordSystem(cm, lineObj, measureChar(cm, lineObj, pos.ch, bias), context) + } + + // Returns a box for a given cursor position, which may have an + // 'other' property containing the position of the secondary cursor + // on a bidi boundary. + // A cursor Pos(line, char, "before") is on the same visual line as `char - 1` + // and after `char - 1` in writing order of `char - 1` + // A cursor Pos(line, char, "after") is on the same visual line as `char` + // and before `char` in writing order of `char` + // Examples (upper-case letters are RTL, lower-case are LTR): + // Pos(0, 1, ...) + // before after + // ab a|b a|b + // aB a|B aB| + // Ab |Ab A|b + // AB B|A B|A + // Every position after the last character on a line is considered to stick + // to the last character on the line. 
+ function cursorCoords(cm, pos, context, lineObj, preparedMeasure, varHeight) { + lineObj = lineObj || getLine(cm.doc, pos.line); + if (!preparedMeasure) { preparedMeasure = prepareMeasureForLine(cm, lineObj); } + function get(ch, right) { + var m = measureCharPrepared(cm, preparedMeasure, ch, right ? "right" : "left", varHeight); + if (right) { m.left = m.right; } else { m.right = m.left; } + return intoCoordSystem(cm, lineObj, m, context) + } + var order = getOrder(lineObj, cm.doc.direction), ch = pos.ch, sticky = pos.sticky; + if (ch >= lineObj.text.length) { + ch = lineObj.text.length; + sticky = "before"; + } else if (ch <= 0) { + ch = 0; + sticky = "after"; + } + if (!order) { return get(sticky == "before" ? ch - 1 : ch, sticky == "before") } + + function getBidi(ch, partPos, invert) { + var part = order[partPos], right = part.level == 1; + return get(invert ? ch - 1 : ch, right != invert) + } + var partPos = getBidiPartAt(order, ch, sticky); + var other = bidiOther; + var val = getBidi(ch, partPos, sticky == "before"); + if (other != null) { val.other = getBidi(ch, other, sticky != "before"); } + return val + } + + // Used to cheaply estimate the coordinates for a position. Used for + // intermediate scroll updates. + function estimateCoords(cm, pos) { + var left = 0; + pos = clipPos(cm.doc, pos); + if (!cm.options.lineWrapping) { left = charWidth(cm.display) * pos.ch; } + var lineObj = getLine(cm.doc, pos.line); + var top = heightAtLine(lineObj) + paddingTop(cm.display); + return {left: left, right: left, top: top, bottom: top + lineObj.height} + } + + // Positions returned by coordsChar contain some extra information. + // xRel is the relative x position of the input coordinates compared + // to the found position (so xRel > 0 means the coordinates are to + // the right of the character position, for example). When outside + // is true, that means the coordinates lie outside the line's + // vertical range. + function PosWithInfo(line, ch, sticky, outside, xRel) { + var pos = Pos(line, ch, sticky); + pos.xRel = xRel; + if (outside) { pos.outside = outside; } + return pos + } + + // Compute the character position closest to the given coordinates. + // Input must be lineSpace-local ("div" coordinate system). + function coordsChar(cm, x, y) { + var doc = cm.doc; + y += cm.display.viewOffset; + if (y < 0) { return PosWithInfo(doc.first, 0, null, -1, -1) } + var lineN = lineAtHeight(doc, y), last = doc.first + doc.size - 1; + if (lineN > last) + { return PosWithInfo(doc.first + doc.size - 1, getLine(doc, last).text.length, null, 1, 1) } + if (x < 0) { x = 0; } + + var lineObj = getLine(doc, lineN); + for (;;) { + var found = coordsCharInner(cm, lineObj, lineN, x, y); + var collapsed = collapsedSpanAround(lineObj, found.ch + (found.xRel > 0 || found.outside > 0 ? 
1 : 0)); + if (!collapsed) { return found } + var rangeEnd = collapsed.find(1); + if (rangeEnd.line == lineN) { return rangeEnd } + lineObj = getLine(doc, lineN = rangeEnd.line); + } + } + + function wrappedLineExtent(cm, lineObj, preparedMeasure, y) { + y -= widgetTopHeight(lineObj); + var end = lineObj.text.length; + var begin = findFirst(function (ch) { return measureCharPrepared(cm, preparedMeasure, ch - 1).bottom <= y; }, end, 0); + end = findFirst(function (ch) { return measureCharPrepared(cm, preparedMeasure, ch).top > y; }, begin, end); + return {begin: begin, end: end} + } + + function wrappedLineExtentChar(cm, lineObj, preparedMeasure, target) { + if (!preparedMeasure) { preparedMeasure = prepareMeasureForLine(cm, lineObj); } + var targetTop = intoCoordSystem(cm, lineObj, measureCharPrepared(cm, preparedMeasure, target), "line").top; + return wrappedLineExtent(cm, lineObj, preparedMeasure, targetTop) + } + + // Returns true if the given side of a box is after the given + // coordinates, in top-to-bottom, left-to-right order. + function boxIsAfter(box, x, y, left) { + return box.bottom <= y ? false : box.top > y ? true : (left ? box.left : box.right) > x + } + + function coordsCharInner(cm, lineObj, lineNo$$1, x, y) { + // Move y into line-local coordinate space + y -= heightAtLine(lineObj); + var preparedMeasure = prepareMeasureForLine(cm, lineObj); + // When directly calling `measureCharPrepared`, we have to adjust + // for the widgets at this line. + var widgetHeight$$1 = widgetTopHeight(lineObj); + var begin = 0, end = lineObj.text.length, ltr = true; + + var order = getOrder(lineObj, cm.doc.direction); + // If the line isn't plain left-to-right text, first figure out + // which bidi section the coordinates fall into. + if (order) { + var part = (cm.options.lineWrapping ? coordsBidiPartWrapped : coordsBidiPart) + (cm, lineObj, lineNo$$1, preparedMeasure, order, x, y); + ltr = part.level != 1; + // The awkward -1 offsets are needed because findFirst (called + // on these below) will treat its first bound as inclusive, + // second as exclusive, but we want to actually address the + // characters in the part's range + begin = ltr ? part.from : part.to - 1; + end = ltr ? part.to : part.from - 1; + } + + // A binary search to find the first character whose bounding box + // starts after the coordinates. If we run across any whose box wrap + // the coordinates, store that. + var chAround = null, boxAround = null; + var ch = findFirst(function (ch) { + var box = measureCharPrepared(cm, preparedMeasure, ch); + box.top += widgetHeight$$1; box.bottom += widgetHeight$$1; + if (!boxIsAfter(box, x, y, false)) { return false } + if (box.top <= y && box.left <= x) { + chAround = ch; + boxAround = box; + } + return true + }, begin, end); + + var baseX, sticky, outside = false; + // If a box around the coordinates was found, use that + if (boxAround) { + // Distinguish coordinates nearer to the left or right side of the box + var atLeft = x - boxAround.left < boxAround.right - x, atStart = atLeft == ltr; + ch = chAround + (atStart ? 0 : 1); + sticky = atStart ? "after" : "before"; + baseX = atLeft ? boxAround.left : boxAround.right; + } else { + // (Adjust for extended bound, if necessary.) + if (!ltr && (ch == end || ch == begin)) { ch++; } + // To determine which side to associate with, get the box to the + // left of the character and compare it's vertical position to the + // coordinates + sticky = ch == 0 ? "after" : ch == lineObj.text.length ? 
"before" : + (measureCharPrepared(cm, preparedMeasure, ch - (ltr ? 1 : 0)).bottom + widgetHeight$$1 <= y) == ltr ? + "after" : "before"; + // Now get accurate coordinates for this place, in order to get a + // base X position + var coords = cursorCoords(cm, Pos(lineNo$$1, ch, sticky), "line", lineObj, preparedMeasure); + baseX = coords.left; + outside = y < coords.top ? -1 : y >= coords.bottom ? 1 : 0; + } + + ch = skipExtendingChars(lineObj.text, ch, 1); + return PosWithInfo(lineNo$$1, ch, sticky, outside, x - baseX) + } + + function coordsBidiPart(cm, lineObj, lineNo$$1, preparedMeasure, order, x, y) { + // Bidi parts are sorted left-to-right, and in a non-line-wrapping + // situation, we can take this ordering to correspond to the visual + // ordering. This finds the first part whose end is after the given + // coordinates. + var index = findFirst(function (i) { + var part = order[i], ltr = part.level != 1; + return boxIsAfter(cursorCoords(cm, Pos(lineNo$$1, ltr ? part.to : part.from, ltr ? "before" : "after"), + "line", lineObj, preparedMeasure), x, y, true) + }, 0, order.length - 1); + var part = order[index]; + // If this isn't the first part, the part's start is also after + // the coordinates, and the coordinates aren't on the same line as + // that start, move one part back. + if (index > 0) { + var ltr = part.level != 1; + var start = cursorCoords(cm, Pos(lineNo$$1, ltr ? part.from : part.to, ltr ? "after" : "before"), + "line", lineObj, preparedMeasure); + if (boxIsAfter(start, x, y, true) && start.top > y) + { part = order[index - 1]; } + } + return part + } + + function coordsBidiPartWrapped(cm, lineObj, _lineNo, preparedMeasure, order, x, y) { + // In a wrapped line, rtl text on wrapping boundaries can do things + // that don't correspond to the ordering in our `order` array at + // all, so a binary search doesn't work, and we want to return a + // part that only spans one line so that the binary search in + // coordsCharInner is safe. As such, we first find the extent of the + // wrapped line, and then do a flat search in which we discard any + // spans that aren't on the line. + var ref = wrappedLineExtent(cm, lineObj, preparedMeasure, y); + var begin = ref.begin; + var end = ref.end; + if (/\s/.test(lineObj.text.charAt(end - 1))) { end--; } + var part = null, closestDist = null; + for (var i = 0; i < order.length; i++) { + var p = order[i]; + if (p.from >= end || p.to <= begin) { continue } + var ltr = p.level != 1; + var endX = measureCharPrepared(cm, preparedMeasure, ltr ? Math.min(end, p.to) - 1 : Math.max(begin, p.from)).right; + // Weigh against spans ending before this, so that they are only + // picked if nothing ends after + var dist = endX < x ? x - endX + 1e9 : endX - x; + if (!part || closestDist > dist) { + part = p; + closestDist = dist; + } + } + if (!part) { part = order[order.length - 1]; } + // Clip the part to the wrapped line. + if (part.from < begin) { part = {from: begin, to: part.to, level: part.level}; } + if (part.to > end) { part = {from: part.from, to: end, level: part.level}; } + return part + } + + var measureText; + // Compute the default text height. + function textHeight(display) { + if (display.cachedTextHeight != null) { return display.cachedTextHeight } + if (measureText == null) { + measureText = elt("pre", null, "CodeMirror-line-like"); + // Measure a bunch of lines, for browsers that compute + // fractional heights. 
+ for (var i = 0; i < 49; ++i) { + measureText.appendChild(document.createTextNode("x")); + measureText.appendChild(elt("br")); + } + measureText.appendChild(document.createTextNode("x")); + } + removeChildrenAndAdd(display.measure, measureText); + var height = measureText.offsetHeight / 50; + if (height > 3) { display.cachedTextHeight = height; } + removeChildren(display.measure); + return height || 1 + } + + // Compute the default character width. + function charWidth(display) { + if (display.cachedCharWidth != null) { return display.cachedCharWidth } + var anchor = elt("span", "xxxxxxxxxx"); + var pre = elt("pre", [anchor], "CodeMirror-line-like"); + removeChildrenAndAdd(display.measure, pre); + var rect = anchor.getBoundingClientRect(), width = (rect.right - rect.left) / 10; + if (width > 2) { display.cachedCharWidth = width; } + return width || 10 + } + + // Do a bulk-read of the DOM positions and sizes needed to draw the + // view, so that we don't interleave reading and writing to the DOM. + function getDimensions(cm) { + var d = cm.display, left = {}, width = {}; + var gutterLeft = d.gutters.clientLeft; + for (var n = d.gutters.firstChild, i = 0; n; n = n.nextSibling, ++i) { + var id = cm.display.gutterSpecs[i].className; + left[id] = n.offsetLeft + n.clientLeft + gutterLeft; + width[id] = n.clientWidth; + } + return {fixedPos: compensateForHScroll(d), + gutterTotalWidth: d.gutters.offsetWidth, + gutterLeft: left, + gutterWidth: width, + wrapperWidth: d.wrapper.clientWidth} + } + + // Computes display.scroller.scrollLeft + display.gutters.offsetWidth, + // but using getBoundingClientRect to get a sub-pixel-accurate + // result. + function compensateForHScroll(display) { + return display.scroller.getBoundingClientRect().left - display.sizer.getBoundingClientRect().left + } + + // Returns a function that estimates the height of a line, to use as + // first approximation until the line becomes visible (and is thus + // properly measurable). + function estimateHeight(cm) { + var th = textHeight(cm.display), wrapping = cm.options.lineWrapping; + var perLine = wrapping && Math.max(5, cm.display.scroller.clientWidth / charWidth(cm.display) - 3); + return function (line) { + if (lineIsHidden(cm.doc, line)) { return 0 } + + var widgetsHeight = 0; + if (line.widgets) { for (var i = 0; i < line.widgets.length; i++) { + if (line.widgets[i].height) { widgetsHeight += line.widgets[i].height; } + } } + + if (wrapping) + { return widgetsHeight + (Math.ceil(line.text.length / perLine) || 1) * th } + else + { return widgetsHeight + th } + } + } + + function estimateLineHeights(cm) { + var doc = cm.doc, est = estimateHeight(cm); + doc.iter(function (line) { + var estHeight = est(line); + if (estHeight != line.height) { updateLineHeight(line, estHeight); } + }); + } + + // Given a mouse event, find the corresponding position. If liberal + // is false, it checks whether a gutter or scrollbar was clicked, + // and returns null if it was. forRect is used by rectangular + // selections, and tries to estimate a character position even for + // coordinates beyond the right of the text. + function posFromMouse(cm, e, liberal, forRect) { + var display = cm.display; + if (!liberal && e_target(e).getAttribute("cm-not-content") == "true") { return null } + + var x, y, space = display.lineSpace.getBoundingClientRect(); + // Fails unpredictably on IE[67] when mouse is dragged around quickly. 
+ try { x = e.clientX - space.left; y = e.clientY - space.top; } + catch (e) { return null } + var coords = coordsChar(cm, x, y), line; + if (forRect && coords.xRel == 1 && (line = getLine(cm.doc, coords.line).text).length == coords.ch) { + var colDiff = countColumn(line, line.length, cm.options.tabSize) - line.length; + coords = Pos(coords.line, Math.max(0, Math.round((x - paddingH(cm.display).left) / charWidth(cm.display)) - colDiff)); + } + return coords + } + + // Find the view element corresponding to a given line. Return null + // when the line isn't visible. + function findViewIndex(cm, n) { + if (n >= cm.display.viewTo) { return null } + n -= cm.display.viewFrom; + if (n < 0) { return null } + var view = cm.display.view; + for (var i = 0; i < view.length; i++) { + n -= view[i].size; + if (n < 0) { return i } + } + } + + // Updates the display.view data structure for a given change to the + // document. From and to are in pre-change coordinates. Lendiff is + // the amount of lines added or subtracted by the change. This is + // used for changes that span multiple lines, or change the way + // lines are divided into visual lines. regLineChange (below) + // registers single-line changes. + function regChange(cm, from, to, lendiff) { + if (from == null) { from = cm.doc.first; } + if (to == null) { to = cm.doc.first + cm.doc.size; } + if (!lendiff) { lendiff = 0; } + + var display = cm.display; + if (lendiff && to < display.viewTo && + (display.updateLineNumbers == null || display.updateLineNumbers > from)) + { display.updateLineNumbers = from; } + + cm.curOp.viewChanged = true; + + if (from >= display.viewTo) { // Change after + if (sawCollapsedSpans && visualLineNo(cm.doc, from) < display.viewTo) + { resetView(cm); } + } else if (to <= display.viewFrom) { // Change before + if (sawCollapsedSpans && visualLineEndNo(cm.doc, to + lendiff) > display.viewFrom) { + resetView(cm); + } else { + display.viewFrom += lendiff; + display.viewTo += lendiff; + } + } else if (from <= display.viewFrom && to >= display.viewTo) { // Full overlap + resetView(cm); + } else if (from <= display.viewFrom) { // Top overlap + var cut = viewCuttingPoint(cm, to, to + lendiff, 1); + if (cut) { + display.view = display.view.slice(cut.index); + display.viewFrom = cut.lineN; + display.viewTo += lendiff; + } else { + resetView(cm); + } + } else if (to >= display.viewTo) { // Bottom overlap + var cut$1 = viewCuttingPoint(cm, from, from, -1); + if (cut$1) { + display.view = display.view.slice(0, cut$1.index); + display.viewTo = cut$1.lineN; + } else { + resetView(cm); + } + } else { // Gap in the middle + var cutTop = viewCuttingPoint(cm, from, from, -1); + var cutBot = viewCuttingPoint(cm, to, to + lendiff, 1); + if (cutTop && cutBot) { + display.view = display.view.slice(0, cutTop.index) + .concat(buildViewArray(cm, cutTop.lineN, cutBot.lineN)) + .concat(display.view.slice(cutBot.index)); + display.viewTo += lendiff; + } else { + resetView(cm); + } + } + + var ext = display.externalMeasured; + if (ext) { + if (to < ext.lineN) + { ext.lineN += lendiff; } + else if (from < ext.lineN + ext.size) + { display.externalMeasured = null; } + } + } + + // Register a change to a single line. 
Type must be one of "text", + // "gutter", "class", "widget" + function regLineChange(cm, line, type) { + cm.curOp.viewChanged = true; + var display = cm.display, ext = cm.display.externalMeasured; + if (ext && line >= ext.lineN && line < ext.lineN + ext.size) + { display.externalMeasured = null; } + + if (line < display.viewFrom || line >= display.viewTo) { return } + var lineView = display.view[findViewIndex(cm, line)]; + if (lineView.node == null) { return } + var arr = lineView.changes || (lineView.changes = []); + if (indexOf(arr, type) == -1) { arr.push(type); } + } + + // Clear the view. + function resetView(cm) { + cm.display.viewFrom = cm.display.viewTo = cm.doc.first; + cm.display.view = []; + cm.display.viewOffset = 0; + } + + function viewCuttingPoint(cm, oldN, newN, dir) { + var index = findViewIndex(cm, oldN), diff, view = cm.display.view; + if (!sawCollapsedSpans || newN == cm.doc.first + cm.doc.size) + { return {index: index, lineN: newN} } + var n = cm.display.viewFrom; + for (var i = 0; i < index; i++) + { n += view[i].size; } + if (n != oldN) { + if (dir > 0) { + if (index == view.length - 1) { return null } + diff = (n + view[index].size) - oldN; + index++; + } else { + diff = n - oldN; + } + oldN += diff; newN += diff; + } + while (visualLineNo(cm.doc, newN) != newN) { + if (index == (dir < 0 ? 0 : view.length - 1)) { return null } + newN += dir * view[index - (dir < 0 ? 1 : 0)].size; + index += dir; + } + return {index: index, lineN: newN} + } + + // Force the view to cover a given range, adding empty view element + // or clipping off existing ones as needed. + function adjustView(cm, from, to) { + var display = cm.display, view = display.view; + if (view.length == 0 || from >= display.viewTo || to <= display.viewFrom) { + display.view = buildViewArray(cm, from, to); + display.viewFrom = from; + } else { + if (display.viewFrom > from) + { display.view = buildViewArray(cm, from, display.viewFrom).concat(display.view); } + else if (display.viewFrom < from) + { display.view = display.view.slice(findViewIndex(cm, from)); } + display.viewFrom = from; + if (display.viewTo < to) + { display.view = display.view.concat(buildViewArray(cm, display.viewTo, to)); } + else if (display.viewTo > to) + { display.view = display.view.slice(0, findViewIndex(cm, to)); } + } + display.viewTo = to; + } + + // Count the number of lines in the view whose DOM representation is + // out of date (or nonexistent). 
+ function countDirtyView(cm) { + var view = cm.display.view, dirty = 0; + for (var i = 0; i < view.length; i++) { + var lineView = view[i]; + if (!lineView.hidden && (!lineView.node || lineView.changes)) { ++dirty; } + } + return dirty + } + + function updateSelection(cm) { + cm.display.input.showSelection(cm.display.input.prepareSelection()); + } + + function prepareSelection(cm, primary) { + if ( primary === void 0 ) primary = true; + + var doc = cm.doc, result = {}; + var curFragment = result.cursors = document.createDocumentFragment(); + var selFragment = result.selection = document.createDocumentFragment(); + + for (var i = 0; i < doc.sel.ranges.length; i++) { + if (!primary && i == doc.sel.primIndex) { continue } + var range$$1 = doc.sel.ranges[i]; + if (range$$1.from().line >= cm.display.viewTo || range$$1.to().line < cm.display.viewFrom) { continue } + var collapsed = range$$1.empty(); + if (collapsed || cm.options.showCursorWhenSelecting) + { drawSelectionCursor(cm, range$$1.head, curFragment); } + if (!collapsed) + { drawSelectionRange(cm, range$$1, selFragment); } + } + return result + } + + // Draws a cursor for the given range + function drawSelectionCursor(cm, head, output) { + var pos = cursorCoords(cm, head, "div", null, null, !cm.options.singleCursorHeightPerLine); + + var cursor = output.appendChild(elt("div", "\u00a0", "CodeMirror-cursor")); + cursor.style.left = pos.left + "px"; + cursor.style.top = pos.top + "px"; + cursor.style.height = Math.max(0, pos.bottom - pos.top) * cm.options.cursorHeight + "px"; + + if (pos.other) { + // Secondary cursor, shown when on a 'jump' in bi-directional text + var otherCursor = output.appendChild(elt("div", "\u00a0", "CodeMirror-cursor CodeMirror-secondarycursor")); + otherCursor.style.display = ""; + otherCursor.style.left = pos.other.left + "px"; + otherCursor.style.top = pos.other.top + "px"; + otherCursor.style.height = (pos.other.bottom - pos.other.top) * .85 + "px"; + } + } + + function cmpCoords(a, b) { return a.top - b.top || a.left - b.left } + + // Draws the given range as a highlighted selection + function drawSelectionRange(cm, range$$1, output) { + var display = cm.display, doc = cm.doc; + var fragment = document.createDocumentFragment(); + var padding = paddingH(cm.display), leftSide = padding.left; + var rightSide = Math.max(display.sizerWidth, displayWidth(cm) - display.sizer.offsetLeft) - padding.right; + var docLTR = doc.direction == "ltr"; + + function add(left, top, width, bottom) { + if (top < 0) { top = 0; } + top = Math.round(top); + bottom = Math.round(bottom); + fragment.appendChild(elt("div", null, "CodeMirror-selected", ("position: absolute; left: " + left + "px;\n top: " + top + "px; width: " + (width == null ? rightSide - left : width) + "px;\n height: " + (bottom - top) + "px"))); + } + + function drawForLine(line, fromArg, toArg) { + var lineObj = getLine(doc, line); + var lineLen = lineObj.text.length; + var start, end; + function coords(ch, bias) { + return charCoords(cm, Pos(line, ch), "div", lineObj, bias) + } + + function wrapX(pos, dir, side) { + var extent = wrappedLineExtentChar(cm, lineObj, null, pos); + var prop = (dir == "ltr") == (side == "after") ? "left" : "right"; + var ch = side == "after" ? extent.begin : extent.end - (/\s/.test(lineObj.text.charAt(extent.end - 1)) ? 2 : 1); + return coords(ch, prop)[prop] + } + + var order = getOrder(lineObj, doc.direction); + iterateBidiSections(order, fromArg || 0, toArg == null ? 
lineLen : toArg, function (from, to, dir, i) { + var ltr = dir == "ltr"; + var fromPos = coords(from, ltr ? "left" : "right"); + var toPos = coords(to - 1, ltr ? "right" : "left"); + + var openStart = fromArg == null && from == 0, openEnd = toArg == null && to == lineLen; + var first = i == 0, last = !order || i == order.length - 1; + if (toPos.top - fromPos.top <= 3) { // Single line + var openLeft = (docLTR ? openStart : openEnd) && first; + var openRight = (docLTR ? openEnd : openStart) && last; + var left = openLeft ? leftSide : (ltr ? fromPos : toPos).left; + var right = openRight ? rightSide : (ltr ? toPos : fromPos).right; + add(left, fromPos.top, right - left, fromPos.bottom); + } else { // Multiple lines + var topLeft, topRight, botLeft, botRight; + if (ltr) { + topLeft = docLTR && openStart && first ? leftSide : fromPos.left; + topRight = docLTR ? rightSide : wrapX(from, dir, "before"); + botLeft = docLTR ? leftSide : wrapX(to, dir, "after"); + botRight = docLTR && openEnd && last ? rightSide : toPos.right; + } else { + topLeft = !docLTR ? leftSide : wrapX(from, dir, "before"); + topRight = !docLTR && openStart && first ? rightSide : fromPos.right; + botLeft = !docLTR && openEnd && last ? leftSide : toPos.left; + botRight = !docLTR ? rightSide : wrapX(to, dir, "after"); + } + add(topLeft, fromPos.top, topRight - topLeft, fromPos.bottom); + if (fromPos.bottom < toPos.top) { add(leftSide, fromPos.bottom, null, toPos.top); } + add(botLeft, toPos.top, botRight - botLeft, toPos.bottom); + } + + if (!start || cmpCoords(fromPos, start) < 0) { start = fromPos; } + if (cmpCoords(toPos, start) < 0) { start = toPos; } + if (!end || cmpCoords(fromPos, end) < 0) { end = fromPos; } + if (cmpCoords(toPos, end) < 0) { end = toPos; } + }); + return {start: start, end: end} + } + + var sFrom = range$$1.from(), sTo = range$$1.to(); + if (sFrom.line == sTo.line) { + drawForLine(sFrom.line, sFrom.ch, sTo.ch); + } else { + var fromLine = getLine(doc, sFrom.line), toLine = getLine(doc, sTo.line); + var singleVLine = visualLine(fromLine) == visualLine(toLine); + var leftEnd = drawForLine(sFrom.line, sFrom.ch, singleVLine ? fromLine.text.length + 1 : null).end; + var rightStart = drawForLine(sTo.line, singleVLine ? 0 : null, sTo.ch).start; + if (singleVLine) { + if (leftEnd.top < rightStart.top - 2) { + add(leftEnd.right, leftEnd.top, null, leftEnd.bottom); + add(leftSide, rightStart.top, rightStart.left, rightStart.bottom); + } else { + add(leftEnd.right, leftEnd.top, rightStart.left - leftEnd.right, leftEnd.bottom); + } + } + if (leftEnd.bottom < rightStart.top) + { add(leftSide, leftEnd.bottom, null, rightStart.top); } + } + + output.appendChild(fragment); + } + + // Cursor-blinking + function restartBlink(cm) { + if (!cm.state.focused) { return } + var display = cm.display; + clearInterval(display.blinker); + var on = true; + display.cursorDiv.style.visibility = ""; + if (cm.options.cursorBlinkRate > 0) + { display.blinker = setInterval(function () { return display.cursorDiv.style.visibility = (on = !on) ? 
"" : "hidden"; }, + cm.options.cursorBlinkRate); } + else if (cm.options.cursorBlinkRate < 0) + { display.cursorDiv.style.visibility = "hidden"; } + } + + function ensureFocus(cm) { + if (!cm.state.focused) { cm.display.input.focus(); onFocus(cm); } + } + + function delayBlurEvent(cm) { + cm.state.delayingBlurEvent = true; + setTimeout(function () { if (cm.state.delayingBlurEvent) { + cm.state.delayingBlurEvent = false; + onBlur(cm); + } }, 100); + } + + function onFocus(cm, e) { + if (cm.state.delayingBlurEvent) { cm.state.delayingBlurEvent = false; } + + if (cm.options.readOnly == "nocursor") { return } + if (!cm.state.focused) { + signal(cm, "focus", cm, e); + cm.state.focused = true; + addClass(cm.display.wrapper, "CodeMirror-focused"); + // This test prevents this from firing when a context + // menu is closed (since the input reset would kill the + // select-all detection hack) + if (!cm.curOp && cm.display.selForContextMenu != cm.doc.sel) { + cm.display.input.reset(); + if (webkit) { setTimeout(function () { return cm.display.input.reset(true); }, 20); } // Issue #1730 + } + cm.display.input.receivedFocus(); + } + restartBlink(cm); + } + function onBlur(cm, e) { + if (cm.state.delayingBlurEvent) { return } + + if (cm.state.focused) { + signal(cm, "blur", cm, e); + cm.state.focused = false; + rmClass(cm.display.wrapper, "CodeMirror-focused"); + } + clearInterval(cm.display.blinker); + setTimeout(function () { if (!cm.state.focused) { cm.display.shift = false; } }, 150); + } + + // Read the actual heights of the rendered lines, and update their + // stored heights to match. + function updateHeightsInViewport(cm) { + var display = cm.display; + var prevBottom = display.lineDiv.offsetTop; + for (var i = 0; i < display.view.length; i++) { + var cur = display.view[i], wrapping = cm.options.lineWrapping; + var height = (void 0), width = 0; + if (cur.hidden) { continue } + if (ie && ie_version < 8) { + var bot = cur.node.offsetTop + cur.node.offsetHeight; + height = bot - prevBottom; + prevBottom = bot; + } else { + var box = cur.node.getBoundingClientRect(); + height = box.bottom - box.top; + // Check that lines don't extend past the right of the current + // editor width + if (!wrapping && cur.text.firstChild) + { width = cur.text.firstChild.getBoundingClientRect().right - box.left - 1; } + } + var diff = cur.line.height - height; + if (diff > .005 || diff < -.005) { + updateLineHeight(cur.line, height); + updateWidgetHeight(cur.line); + if (cur.rest) { for (var j = 0; j < cur.rest.length; j++) + { updateWidgetHeight(cur.rest[j]); } } + } + if (width > cm.display.sizerWidth) { + var chWidth = Math.ceil(width / charWidth(cm.display)); + if (chWidth > cm.display.maxLineLength) { + cm.display.maxLineLength = chWidth; + cm.display.maxLine = cur.line; + cm.display.maxLineChanged = true; + } + } + } + } + + // Read and store the height of line widgets associated with the + // given line. + function updateWidgetHeight(line) { + if (line.widgets) { for (var i = 0; i < line.widgets.length; ++i) { + var w = line.widgets[i], parent = w.node.parentNode; + if (parent) { w.height = parent.offsetHeight; } + } } + } + + // Compute the lines that are visible in a given viewport (defaults + // the the current scroll position). viewport may contain top, + // height, and ensure (see op.scrollToPos) properties. + function visibleLines(display, doc, viewport) { + var top = viewport && viewport.top != null ? 
Math.max(0, viewport.top) : display.scroller.scrollTop; + top = Math.floor(top - paddingTop(display)); + var bottom = viewport && viewport.bottom != null ? viewport.bottom : top + display.wrapper.clientHeight; + + var from = lineAtHeight(doc, top), to = lineAtHeight(doc, bottom); + // Ensure is a {from: {line, ch}, to: {line, ch}} object, and + // forces those lines into the viewport (if possible). + if (viewport && viewport.ensure) { + var ensureFrom = viewport.ensure.from.line, ensureTo = viewport.ensure.to.line; + if (ensureFrom < from) { + from = ensureFrom; + to = lineAtHeight(doc, heightAtLine(getLine(doc, ensureFrom)) + display.wrapper.clientHeight); + } else if (Math.min(ensureTo, doc.lastLine()) >= to) { + from = lineAtHeight(doc, heightAtLine(getLine(doc, ensureTo)) - display.wrapper.clientHeight); + to = ensureTo; + } + } + return {from: from, to: Math.max(to, from + 1)} + } + + // SCROLLING THINGS INTO VIEW + + // If an editor sits on the top or bottom of the window, partially + // scrolled out of view, this ensures that the cursor is visible. + function maybeScrollWindow(cm, rect) { + if (signalDOMEvent(cm, "scrollCursorIntoView")) { return } + + var display = cm.display, box = display.sizer.getBoundingClientRect(), doScroll = null; + if (rect.top + box.top < 0) { doScroll = true; } + else if (rect.bottom + box.top > (window.innerHeight || document.documentElement.clientHeight)) { doScroll = false; } + if (doScroll != null && !phantom) { + var scrollNode = elt("div", "\u200b", null, ("position: absolute;\n top: " + (rect.top - display.viewOffset - paddingTop(cm.display)) + "px;\n height: " + (rect.bottom - rect.top + scrollGap(cm) + display.barHeight) + "px;\n left: " + (rect.left) + "px; width: " + (Math.max(2, rect.right - rect.left)) + "px;")); + cm.display.lineSpace.appendChild(scrollNode); + scrollNode.scrollIntoView(doScroll); + cm.display.lineSpace.removeChild(scrollNode); + } + } + + // Scroll a given position into view (immediately), verifying that + // it actually became visible (as line heights are accurately + // measured, the position of something may 'drift' during drawing). + function scrollPosIntoView(cm, pos, end, margin) { + if (margin == null) { margin = 0; } + var rect; + if (!cm.options.lineWrapping && pos == end) { + // Set pos and end to the cursor positions around the character pos sticks to + // If pos.sticky == "before", that is around pos.ch - 1, otherwise around pos.ch + // If pos == Pos(_, 0, "before"), pos and end are unchanged + pos = pos.ch ? Pos(pos.line, pos.sticky == "before" ? pos.ch - 1 : pos.ch, "after") : pos; + end = pos.sticky == "before" ? Pos(pos.line, pos.ch + 1, "before") : pos; + } + for (var limit = 0; limit < 5; limit++) { + var changed = false; + var coords = cursorCoords(cm, pos); + var endCoords = !end || end == pos ? 
coords : cursorCoords(cm, end); + rect = {left: Math.min(coords.left, endCoords.left), + top: Math.min(coords.top, endCoords.top) - margin, + right: Math.max(coords.left, endCoords.left), + bottom: Math.max(coords.bottom, endCoords.bottom) + margin}; + var scrollPos = calculateScrollPos(cm, rect); + var startTop = cm.doc.scrollTop, startLeft = cm.doc.scrollLeft; + if (scrollPos.scrollTop != null) { + updateScrollTop(cm, scrollPos.scrollTop); + if (Math.abs(cm.doc.scrollTop - startTop) > 1) { changed = true; } + } + if (scrollPos.scrollLeft != null) { + setScrollLeft(cm, scrollPos.scrollLeft); + if (Math.abs(cm.doc.scrollLeft - startLeft) > 1) { changed = true; } + } + if (!changed) { break } + } + return rect + } + + // Scroll a given set of coordinates into view (immediately). + function scrollIntoView(cm, rect) { + var scrollPos = calculateScrollPos(cm, rect); + if (scrollPos.scrollTop != null) { updateScrollTop(cm, scrollPos.scrollTop); } + if (scrollPos.scrollLeft != null) { setScrollLeft(cm, scrollPos.scrollLeft); } + } + + // Calculate a new scroll position needed to scroll the given + // rectangle into view. Returns an object with scrollTop and + // scrollLeft properties. When these are undefined, the + // vertical/horizontal position does not need to be adjusted. + function calculateScrollPos(cm, rect) { + var display = cm.display, snapMargin = textHeight(cm.display); + if (rect.top < 0) { rect.top = 0; } + var screentop = cm.curOp && cm.curOp.scrollTop != null ? cm.curOp.scrollTop : display.scroller.scrollTop; + var screen = displayHeight(cm), result = {}; + if (rect.bottom - rect.top > screen) { rect.bottom = rect.top + screen; } + var docBottom = cm.doc.height + paddingVert(display); + var atTop = rect.top < snapMargin, atBottom = rect.bottom > docBottom - snapMargin; + if (rect.top < screentop) { + result.scrollTop = atTop ? 0 : rect.top; + } else if (rect.bottom > screentop + screen) { + var newTop = Math.min(rect.top, (atBottom ? docBottom : rect.bottom) - screen); + if (newTop != screentop) { result.scrollTop = newTop; } + } + + var screenleft = cm.curOp && cm.curOp.scrollLeft != null ? cm.curOp.scrollLeft : display.scroller.scrollLeft; + var screenw = displayWidth(cm) - (cm.options.fixedGutter ? display.gutters.offsetWidth : 0); + var tooWide = rect.right - rect.left > screenw; + if (tooWide) { rect.right = rect.left + screenw; } + if (rect.left < 10) + { result.scrollLeft = 0; } + else if (rect.left < screenleft) + { result.scrollLeft = Math.max(0, rect.left - (tooWide ? 0 : 10)); } + else if (rect.right > screenw + screenleft - 3) + { result.scrollLeft = rect.right + (tooWide ? 0 : 10) - screenw; } + return result + } + + // Store a relative adjustment to the scroll position in the current + // operation (to be applied when the operation finishes). + function addToScrollTop(cm, top) { + if (top == null) { return } + resolveScrollToPos(cm); + cm.curOp.scrollTop = (cm.curOp.scrollTop == null ? cm.doc.scrollTop : cm.curOp.scrollTop) + top; + } + + // Make sure that at the end of the operation the current cursor is + // shown. 
+ function ensureCursorVisible(cm) { + resolveScrollToPos(cm); + var cur = cm.getCursor(); + cm.curOp.scrollToPos = {from: cur, to: cur, margin: cm.options.cursorScrollMargin}; + } + + function scrollToCoords(cm, x, y) { + if (x != null || y != null) { resolveScrollToPos(cm); } + if (x != null) { cm.curOp.scrollLeft = x; } + if (y != null) { cm.curOp.scrollTop = y; } + } + + function scrollToRange(cm, range$$1) { + resolveScrollToPos(cm); + cm.curOp.scrollToPos = range$$1; + } + + // When an operation has its scrollToPos property set, and another + // scroll action is applied before the end of the operation, this + // 'simulates' scrolling that position into view in a cheap way, so + // that the effect of intermediate scroll commands is not ignored. + function resolveScrollToPos(cm) { + var range$$1 = cm.curOp.scrollToPos; + if (range$$1) { + cm.curOp.scrollToPos = null; + var from = estimateCoords(cm, range$$1.from), to = estimateCoords(cm, range$$1.to); + scrollToCoordsRange(cm, from, to, range$$1.margin); + } + } + + function scrollToCoordsRange(cm, from, to, margin) { + var sPos = calculateScrollPos(cm, { + left: Math.min(from.left, to.left), + top: Math.min(from.top, to.top) - margin, + right: Math.max(from.right, to.right), + bottom: Math.max(from.bottom, to.bottom) + margin + }); + scrollToCoords(cm, sPos.scrollLeft, sPos.scrollTop); + } + + // Sync the scrollable area and scrollbars, ensure the viewport + // covers the visible area. + function updateScrollTop(cm, val) { + if (Math.abs(cm.doc.scrollTop - val) < 2) { return } + if (!gecko) { updateDisplaySimple(cm, {top: val}); } + setScrollTop(cm, val, true); + if (gecko) { updateDisplaySimple(cm); } + startWorker(cm, 100); + } + + function setScrollTop(cm, val, forceScroll) { + val = Math.min(cm.display.scroller.scrollHeight - cm.display.scroller.clientHeight, val); + if (cm.display.scroller.scrollTop == val && !forceScroll) { return } + cm.doc.scrollTop = val; + cm.display.scrollbars.setScrollTop(val); + if (cm.display.scroller.scrollTop != val) { cm.display.scroller.scrollTop = val; } + } + + // Sync scroller and scrollbar, ensure the gutter elements are + // aligned. + function setScrollLeft(cm, val, isScroller, forceScroll) { + val = Math.min(val, cm.display.scroller.scrollWidth - cm.display.scroller.clientWidth); + if ((isScroller ? val == cm.doc.scrollLeft : Math.abs(cm.doc.scrollLeft - val) < 2) && !forceScroll) { return } + cm.doc.scrollLeft = val; + alignHorizontally(cm); + if (cm.display.scroller.scrollLeft != val) { cm.display.scroller.scrollLeft = val; } + cm.display.scrollbars.setScrollLeft(val); + } + + // SCROLLBARS + + // Prepare DOM reads needed to update the scrollbars. Done in one + // shot to minimize update/measure roundtrips. + function measureForScrollbars(cm) { + var d = cm.display, gutterW = d.gutters.offsetWidth; + var docH = Math.round(cm.doc.height + paddingVert(cm.display)); + return { + clientHeight: d.scroller.clientHeight, + viewHeight: d.wrapper.clientHeight, + scrollWidth: d.scroller.scrollWidth, clientWidth: d.scroller.clientWidth, + viewWidth: d.wrapper.clientWidth, + barLeft: cm.options.fixedGutter ? 
gutterW : 0, + docHeight: docH, + scrollHeight: docH + scrollGap(cm) + d.barHeight, + nativeBarWidth: d.nativeBarWidth, + gutterWidth: gutterW + } + } + + var NativeScrollbars = function(place, scroll, cm) { + this.cm = cm; + var vert = this.vert = elt("div", [elt("div", null, null, "min-width: 1px")], "CodeMirror-vscrollbar"); + var horiz = this.horiz = elt("div", [elt("div", null, null, "height: 100%; min-height: 1px")], "CodeMirror-hscrollbar"); + vert.tabIndex = horiz.tabIndex = -1; + place(vert); place(horiz); + + on(vert, "scroll", function () { + if (vert.clientHeight) { scroll(vert.scrollTop, "vertical"); } + }); + on(horiz, "scroll", function () { + if (horiz.clientWidth) { scroll(horiz.scrollLeft, "horizontal"); } + }); + + this.checkedZeroWidth = false; + // Need to set a minimum width to see the scrollbar on IE7 (but must not set it on IE8). + if (ie && ie_version < 8) { this.horiz.style.minHeight = this.vert.style.minWidth = "18px"; } + }; + + NativeScrollbars.prototype.update = function (measure) { + var needsH = measure.scrollWidth > measure.clientWidth + 1; + var needsV = measure.scrollHeight > measure.clientHeight + 1; + var sWidth = measure.nativeBarWidth; + + if (needsV) { + this.vert.style.display = "block"; + this.vert.style.bottom = needsH ? sWidth + "px" : "0"; + var totalHeight = measure.viewHeight - (needsH ? sWidth : 0); + // A bug in IE8 can cause this value to be negative, so guard it. + this.vert.firstChild.style.height = + Math.max(0, measure.scrollHeight - measure.clientHeight + totalHeight) + "px"; + } else { + this.vert.style.display = ""; + this.vert.firstChild.style.height = "0"; + } + + if (needsH) { + this.horiz.style.display = "block"; + this.horiz.style.right = needsV ? sWidth + "px" : "0"; + this.horiz.style.left = measure.barLeft + "px"; + var totalWidth = measure.viewWidth - measure.barLeft - (needsV ? sWidth : 0); + this.horiz.firstChild.style.width = + Math.max(0, measure.scrollWidth - measure.clientWidth + totalWidth) + "px"; + } else { + this.horiz.style.display = ""; + this.horiz.firstChild.style.width = "0"; + } + + if (!this.checkedZeroWidth && measure.clientHeight > 0) { + if (sWidth == 0) { this.zeroWidthHack(); } + this.checkedZeroWidth = true; + } + + return {right: needsV ? sWidth : 0, bottom: needsH ? sWidth : 0} + }; + + NativeScrollbars.prototype.setScrollLeft = function (pos) { + if (this.horiz.scrollLeft != pos) { this.horiz.scrollLeft = pos; } + if (this.disableHoriz) { this.enableZeroWidthBar(this.horiz, this.disableHoriz, "horiz"); } + }; + + NativeScrollbars.prototype.setScrollTop = function (pos) { + if (this.vert.scrollTop != pos) { this.vert.scrollTop = pos; } + if (this.disableVert) { this.enableZeroWidthBar(this.vert, this.disableVert, "vert"); } + }; + + NativeScrollbars.prototype.zeroWidthHack = function () { + var w = mac && !mac_geMountainLion ? "12px" : "18px"; + this.horiz.style.height = this.vert.style.width = w; + this.horiz.style.pointerEvents = this.vert.style.pointerEvents = "none"; + this.disableHoriz = new Delayed; + this.disableVert = new Delayed; + }; + + NativeScrollbars.prototype.enableZeroWidthBar = function (bar, delay, type) { + bar.style.pointerEvents = "auto"; + function maybeDisable() { + // To find out whether the scrollbar is still visible, we + // check whether the element under the pixel in the bottom + // right corner of the scrollbar box is the scrollbar box + // itself (when the bar is still visible) or its filler child + // (when the bar is hidden). 
If it is still visible, we keep + // it enabled, if it's hidden, we disable pointer events. + var box = bar.getBoundingClientRect(); + var elt$$1 = type == "vert" ? document.elementFromPoint(box.right - 1, (box.top + box.bottom) / 2) + : document.elementFromPoint((box.right + box.left) / 2, box.bottom - 1); + if (elt$$1 != bar) { bar.style.pointerEvents = "none"; } + else { delay.set(1000, maybeDisable); } + } + delay.set(1000, maybeDisable); + }; + + NativeScrollbars.prototype.clear = function () { + var parent = this.horiz.parentNode; + parent.removeChild(this.horiz); + parent.removeChild(this.vert); + }; + + var NullScrollbars = function () {}; + + NullScrollbars.prototype.update = function () { return {bottom: 0, right: 0} }; + NullScrollbars.prototype.setScrollLeft = function () {}; + NullScrollbars.prototype.setScrollTop = function () {}; + NullScrollbars.prototype.clear = function () {}; + + function updateScrollbars(cm, measure) { + if (!measure) { measure = measureForScrollbars(cm); } + var startWidth = cm.display.barWidth, startHeight = cm.display.barHeight; + updateScrollbarsInner(cm, measure); + for (var i = 0; i < 4 && startWidth != cm.display.barWidth || startHeight != cm.display.barHeight; i++) { + if (startWidth != cm.display.barWidth && cm.options.lineWrapping) + { updateHeightsInViewport(cm); } + updateScrollbarsInner(cm, measureForScrollbars(cm)); + startWidth = cm.display.barWidth; startHeight = cm.display.barHeight; + } + } + + // Re-synchronize the fake scrollbars with the actual size of the + // content. + function updateScrollbarsInner(cm, measure) { + var d = cm.display; + var sizes = d.scrollbars.update(measure); + + d.sizer.style.paddingRight = (d.barWidth = sizes.right) + "px"; + d.sizer.style.paddingBottom = (d.barHeight = sizes.bottom) + "px"; + d.heightForcer.style.borderBottom = sizes.bottom + "px solid transparent"; + + if (sizes.right && sizes.bottom) { + d.scrollbarFiller.style.display = "block"; + d.scrollbarFiller.style.height = sizes.bottom + "px"; + d.scrollbarFiller.style.width = sizes.right + "px"; + } else { d.scrollbarFiller.style.display = ""; } + if (sizes.bottom && cm.options.coverGutterNextToScrollbar && cm.options.fixedGutter) { + d.gutterFiller.style.display = "block"; + d.gutterFiller.style.height = sizes.bottom + "px"; + d.gutterFiller.style.width = measure.gutterWidth + "px"; + } else { d.gutterFiller.style.display = ""; } + } + + var scrollbarModel = {"native": NativeScrollbars, "null": NullScrollbars}; + + function initScrollbars(cm) { + if (cm.display.scrollbars) { + cm.display.scrollbars.clear(); + if (cm.display.scrollbars.addClass) + { rmClass(cm.display.wrapper, cm.display.scrollbars.addClass); } + } + + cm.display.scrollbars = new scrollbarModel[cm.options.scrollbarStyle](function (node) { + cm.display.wrapper.insertBefore(node, cm.display.scrollbarFiller); + // Prevent clicks in the scrollbars from killing focus + on(node, "mousedown", function () { + if (cm.state.focused) { setTimeout(function () { return cm.display.input.focus(); }, 0); } + }); + node.setAttribute("cm-not-content", "true"); + }, function (pos, axis) { + if (axis == "horizontal") { setScrollLeft(cm, pos); } + else { updateScrollTop(cm, pos); } + }, cm); + if (cm.display.scrollbars.addClass) + { addClass(cm.display.wrapper, cm.display.scrollbars.addClass); } + } + + // Operations are used to wrap a series of changes to the editor + // state in such a way that each change won't have to update the + // cursor and display (which would be awkward, slow, and + // 
error-prone). Instead, display updates are batched and then all + // combined and executed at once. + + var nextOpId = 0; + // Start a new operation. + function startOperation(cm) { + cm.curOp = { + cm: cm, + viewChanged: false, // Flag that indicates that lines might need to be redrawn + startHeight: cm.doc.height, // Used to detect need to update scrollbar + forceUpdate: false, // Used to force a redraw + updateInput: 0, // Whether to reset the input textarea + typing: false, // Whether this reset should be careful to leave existing text (for compositing) + changeObjs: null, // Accumulated changes, for firing change events + cursorActivityHandlers: null, // Set of handlers to fire cursorActivity on + cursorActivityCalled: 0, // Tracks which cursorActivity handlers have been called already + selectionChanged: false, // Whether the selection needs to be redrawn + updateMaxLine: false, // Set when the widest line needs to be determined anew + scrollLeft: null, scrollTop: null, // Intermediate scroll position, not pushed to DOM yet + scrollToPos: null, // Used to scroll to a specific position + focus: false, + id: ++nextOpId // Unique ID + }; + pushOperation(cm.curOp); + } + + // Finish an operation, updating the display and signalling delayed events + function endOperation(cm) { + var op = cm.curOp; + if (op) { finishOperation(op, function (group) { + for (var i = 0; i < group.ops.length; i++) + { group.ops[i].cm.curOp = null; } + endOperations(group); + }); } + } + + // The DOM updates done when an operation finishes are batched so + // that the minimum number of relayouts are required. + function endOperations(group) { + var ops = group.ops; + for (var i = 0; i < ops.length; i++) // Read DOM + { endOperation_R1(ops[i]); } + for (var i$1 = 0; i$1 < ops.length; i$1++) // Write DOM (maybe) + { endOperation_W1(ops[i$1]); } + for (var i$2 = 0; i$2 < ops.length; i$2++) // Read DOM + { endOperation_R2(ops[i$2]); } + for (var i$3 = 0; i$3 < ops.length; i$3++) // Write DOM (maybe) + { endOperation_W2(ops[i$3]); } + for (var i$4 = 0; i$4 < ops.length; i$4++) // Read DOM + { endOperation_finish(ops[i$4]); } + } + + function endOperation_R1(op) { + var cm = op.cm, display = cm.display; + maybeClipScrollbars(cm); + if (op.updateMaxLine) { findMaxLine(cm); } + + op.mustUpdate = op.viewChanged || op.forceUpdate || op.scrollTop != null || + op.scrollToPos && (op.scrollToPos.from.line < display.viewFrom || + op.scrollToPos.to.line >= display.viewTo) || + display.maxLineChanged && cm.options.lineWrapping; + op.update = op.mustUpdate && + new DisplayUpdate(cm, op.mustUpdate && {top: op.scrollTop, ensure: op.scrollToPos}, op.forceUpdate); + } + + function endOperation_W1(op) { + op.updatedDisplay = op.mustUpdate && updateDisplayIfNeeded(op.cm, op.update); + } + + function endOperation_R2(op) { + var cm = op.cm, display = cm.display; + if (op.updatedDisplay) { updateHeightsInViewport(cm); } + + op.barMeasure = measureForScrollbars(cm); + + // If the max line changed since it was last measured, measure it, + // and ensure the document's width matches it. 
+ // updateDisplay_W2 will use these properties to do the actual resizing + if (display.maxLineChanged && !cm.options.lineWrapping) { + op.adjustWidthTo = measureChar(cm, display.maxLine, display.maxLine.text.length).left + 3; + cm.display.sizerWidth = op.adjustWidthTo; + op.barMeasure.scrollWidth = + Math.max(display.scroller.clientWidth, display.sizer.offsetLeft + op.adjustWidthTo + scrollGap(cm) + cm.display.barWidth); + op.maxScrollLeft = Math.max(0, display.sizer.offsetLeft + op.adjustWidthTo - displayWidth(cm)); + } + + if (op.updatedDisplay || op.selectionChanged) + { op.preparedSelection = display.input.prepareSelection(); } + } + + function endOperation_W2(op) { + var cm = op.cm; + + if (op.adjustWidthTo != null) { + cm.display.sizer.style.minWidth = op.adjustWidthTo + "px"; + if (op.maxScrollLeft < cm.doc.scrollLeft) + { setScrollLeft(cm, Math.min(cm.display.scroller.scrollLeft, op.maxScrollLeft), true); } + cm.display.maxLineChanged = false; + } + + var takeFocus = op.focus && op.focus == activeElt(); + if (op.preparedSelection) + { cm.display.input.showSelection(op.preparedSelection, takeFocus); } + if (op.updatedDisplay || op.startHeight != cm.doc.height) + { updateScrollbars(cm, op.barMeasure); } + if (op.updatedDisplay) + { setDocumentHeight(cm, op.barMeasure); } + + if (op.selectionChanged) { restartBlink(cm); } + + if (cm.state.focused && op.updateInput) + { cm.display.input.reset(op.typing); } + if (takeFocus) { ensureFocus(op.cm); } + } + + function endOperation_finish(op) { + var cm = op.cm, display = cm.display, doc = cm.doc; + + if (op.updatedDisplay) { postUpdateDisplay(cm, op.update); } + + // Abort mouse wheel delta measurement, when scrolling explicitly + if (display.wheelStartX != null && (op.scrollTop != null || op.scrollLeft != null || op.scrollToPos)) + { display.wheelStartX = display.wheelStartY = null; } + + // Propagate the scroll position to the actual DOM scroller + if (op.scrollTop != null) { setScrollTop(cm, op.scrollTop, op.forceScroll); } + + if (op.scrollLeft != null) { setScrollLeft(cm, op.scrollLeft, true, true); } + // If we need to scroll a specific position into view, do so. + if (op.scrollToPos) { + var rect = scrollPosIntoView(cm, clipPos(doc, op.scrollToPos.from), + clipPos(doc, op.scrollToPos.to), op.scrollToPos.margin); + maybeScrollWindow(cm, rect); + } + + // Fire events for markers that are hidden/unidden by editing or + // undoing + var hidden = op.maybeHiddenMarkers, unhidden = op.maybeUnhiddenMarkers; + if (hidden) { for (var i = 0; i < hidden.length; ++i) + { if (!hidden[i].lines.length) { signal(hidden[i], "hide"); } } } + if (unhidden) { for (var i$1 = 0; i$1 < unhidden.length; ++i$1) + { if (unhidden[i$1].lines.length) { signal(unhidden[i$1], "unhide"); } } } + + if (display.wrapper.offsetHeight) + { doc.scrollTop = cm.display.scroller.scrollTop; } + + // Fire change events, and delayed event handlers + if (op.changeObjs) + { signal(cm, "changes", cm, op.changeObjs); } + if (op.update) + { op.update.finish(); } + } + + // Run the given function in an operation + function runInOp(cm, f) { + if (cm.curOp) { return f() } + startOperation(cm); + try { return f() } + finally { endOperation(cm); } + } + // Wraps a function in an operation. Returns the wrapped function. 
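  // Editor's note -- illustrative sketch only, not part of this changeset. It shows the
  // point of the operation machinery above: several editor mutations are wrapped in
  // runInOp() so the display is measured and redrawn once, in the batched
  // R1/W1/R2/W2/finish passes of endOperation(), instead of after every call.
  // replaceRange/setCursor are public CodeMirror methods; the surrounding function is a
  // hypothetical caller added for the example.
  function exampleBatchedEdit(cm) {
    runInOp(cm, function () {
      cm.replaceRange("TODO: ", Pos(0, 0));   // change is queued on cm.curOp
      cm.setCursor(Pos(0, 6));                // selection change queued as well
      // nothing has touched the DOM yet; endOperation() flushes both at once
    });
  }
  // operation()/methodOp()/docMethodOp() below build reusable wrappers that do the same
  // start/end bookkeeping automatically.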
+ function operation(cm, f) { + return function() { + if (cm.curOp) { return f.apply(cm, arguments) } + startOperation(cm); + try { return f.apply(cm, arguments) } + finally { endOperation(cm); } + } + } + // Used to add methods to editor and doc instances, wrapping them in + // operations. + function methodOp(f) { + return function() { + if (this.curOp) { return f.apply(this, arguments) } + startOperation(this); + try { return f.apply(this, arguments) } + finally { endOperation(this); } + } + } + function docMethodOp(f) { + return function() { + var cm = this.cm; + if (!cm || cm.curOp) { return f.apply(this, arguments) } + startOperation(cm); + try { return f.apply(this, arguments) } + finally { endOperation(cm); } + } + } + + // HIGHLIGHT WORKER + + function startWorker(cm, time) { + if (cm.doc.highlightFrontier < cm.display.viewTo) + { cm.state.highlight.set(time, bind(highlightWorker, cm)); } + } + + function highlightWorker(cm) { + var doc = cm.doc; + if (doc.highlightFrontier >= cm.display.viewTo) { return } + var end = +new Date + cm.options.workTime; + var context = getContextBefore(cm, doc.highlightFrontier); + var changedLines = []; + + doc.iter(context.line, Math.min(doc.first + doc.size, cm.display.viewTo + 500), function (line) { + if (context.line >= cm.display.viewFrom) { // Visible + var oldStyles = line.styles; + var resetState = line.text.length > cm.options.maxHighlightLength ? copyState(doc.mode, context.state) : null; + var highlighted = highlightLine(cm, line, context, true); + if (resetState) { context.state = resetState; } + line.styles = highlighted.styles; + var oldCls = line.styleClasses, newCls = highlighted.classes; + if (newCls) { line.styleClasses = newCls; } + else if (oldCls) { line.styleClasses = null; } + var ischange = !oldStyles || oldStyles.length != line.styles.length || + oldCls != newCls && (!oldCls || !newCls || oldCls.bgClass != newCls.bgClass || oldCls.textClass != newCls.textClass); + for (var i = 0; !ischange && i < oldStyles.length; ++i) { ischange = oldStyles[i] != line.styles[i]; } + if (ischange) { changedLines.push(context.line); } + line.stateAfter = context.save(); + context.nextLine(); + } else { + if (line.text.length <= cm.options.maxHighlightLength) + { processLine(cm, line.text, context); } + line.stateAfter = context.line % 5 == 0 ? 
context.save() : null; + context.nextLine(); + } + if (+new Date > end) { + startWorker(cm, cm.options.workDelay); + return true + } + }); + doc.highlightFrontier = context.line; + doc.modeFrontier = Math.max(doc.modeFrontier, context.line); + if (changedLines.length) { runInOp(cm, function () { + for (var i = 0; i < changedLines.length; i++) + { regLineChange(cm, changedLines[i], "text"); } + }); } + } + + // DISPLAY DRAWING + + var DisplayUpdate = function(cm, viewport, force) { + var display = cm.display; + + this.viewport = viewport; + // Store some values that we'll need later (but don't want to force a relayout for) + this.visible = visibleLines(display, cm.doc, viewport); + this.editorIsHidden = !display.wrapper.offsetWidth; + this.wrapperHeight = display.wrapper.clientHeight; + this.wrapperWidth = display.wrapper.clientWidth; + this.oldDisplayWidth = displayWidth(cm); + this.force = force; + this.dims = getDimensions(cm); + this.events = []; + }; + + DisplayUpdate.prototype.signal = function (emitter, type) { + if (hasHandler(emitter, type)) + { this.events.push(arguments); } + }; + DisplayUpdate.prototype.finish = function () { + var this$1 = this; + + for (var i = 0; i < this.events.length; i++) + { signal.apply(null, this$1.events[i]); } + }; + + function maybeClipScrollbars(cm) { + var display = cm.display; + if (!display.scrollbarsClipped && display.scroller.offsetWidth) { + display.nativeBarWidth = display.scroller.offsetWidth - display.scroller.clientWidth; + display.heightForcer.style.height = scrollGap(cm) + "px"; + display.sizer.style.marginBottom = -display.nativeBarWidth + "px"; + display.sizer.style.borderRightWidth = scrollGap(cm) + "px"; + display.scrollbarsClipped = true; + } + } + + function selectionSnapshot(cm) { + if (cm.hasFocus()) { return null } + var active = activeElt(); + if (!active || !contains(cm.display.lineDiv, active)) { return null } + var result = {activeElt: active}; + if (window.getSelection) { + var sel = window.getSelection(); + if (sel.anchorNode && sel.extend && contains(cm.display.lineDiv, sel.anchorNode)) { + result.anchorNode = sel.anchorNode; + result.anchorOffset = sel.anchorOffset; + result.focusNode = sel.focusNode; + result.focusOffset = sel.focusOffset; + } + } + return result + } + + function restoreSelection(snapshot) { + if (!snapshot || !snapshot.activeElt || snapshot.activeElt == activeElt()) { return } + snapshot.activeElt.focus(); + if (snapshot.anchorNode && contains(document.body, snapshot.anchorNode) && contains(document.body, snapshot.focusNode)) { + var sel = window.getSelection(), range$$1 = document.createRange(); + range$$1.setEnd(snapshot.anchorNode, snapshot.anchorOffset); + range$$1.collapse(false); + sel.removeAllRanges(); + sel.addRange(range$$1); + sel.extend(snapshot.focusNode, snapshot.focusOffset); + } + } + + // Does the actual updating of the line display. Bails out + // (returning false) when there is nothing to be done and forced is + // false. + function updateDisplayIfNeeded(cm, update) { + var display = cm.display, doc = cm.doc; + + if (update.editorIsHidden) { + resetView(cm); + return false + } + + // Bail out if the visible area is already rendered and nothing changed. 
+ if (!update.force && + update.visible.from >= display.viewFrom && update.visible.to <= display.viewTo && + (display.updateLineNumbers == null || display.updateLineNumbers >= display.viewTo) && + display.renderedView == display.view && countDirtyView(cm) == 0) + { return false } + + if (maybeUpdateLineNumberWidth(cm)) { + resetView(cm); + update.dims = getDimensions(cm); + } + + // Compute a suitable new viewport (from & to) + var end = doc.first + doc.size; + var from = Math.max(update.visible.from - cm.options.viewportMargin, doc.first); + var to = Math.min(end, update.visible.to + cm.options.viewportMargin); + if (display.viewFrom < from && from - display.viewFrom < 20) { from = Math.max(doc.first, display.viewFrom); } + if (display.viewTo > to && display.viewTo - to < 20) { to = Math.min(end, display.viewTo); } + if (sawCollapsedSpans) { + from = visualLineNo(cm.doc, from); + to = visualLineEndNo(cm.doc, to); + } + + var different = from != display.viewFrom || to != display.viewTo || + display.lastWrapHeight != update.wrapperHeight || display.lastWrapWidth != update.wrapperWidth; + adjustView(cm, from, to); + + display.viewOffset = heightAtLine(getLine(cm.doc, display.viewFrom)); + // Position the mover div to align with the current scroll position + cm.display.mover.style.top = display.viewOffset + "px"; + + var toUpdate = countDirtyView(cm); + if (!different && toUpdate == 0 && !update.force && display.renderedView == display.view && + (display.updateLineNumbers == null || display.updateLineNumbers >= display.viewTo)) + { return false } + + // For big changes, we hide the enclosing element during the + // update, since that speeds up the operations on most browsers. + var selSnapshot = selectionSnapshot(cm); + if (toUpdate > 4) { display.lineDiv.style.display = "none"; } + patchDisplay(cm, display.updateLineNumbers, update.dims); + if (toUpdate > 4) { display.lineDiv.style.display = ""; } + display.renderedView = display.view; + // There might have been a widget with a focused element that got + // hidden or updated, if so re-focus it. + restoreSelection(selSnapshot); + + // Prevent selection and cursors from interfering with the scroll + // width and height. + removeChildren(display.cursorDiv); + removeChildren(display.selectionDiv); + display.gutters.style.height = display.sizer.style.minHeight = 0; + + if (different) { + display.lastWrapHeight = update.wrapperHeight; + display.lastWrapWidth = update.wrapperWidth; + startWorker(cm, 400); + } + + display.updateLineNumbers = null; + + return true + } + + function postUpdateDisplay(cm, update) { + var viewport = update.viewport; + + for (var first = true;; first = false) { + if (!first || !cm.options.lineWrapping || update.oldDisplayWidth == displayWidth(cm)) { + // Clip forced viewport to actual scrollable area. + if (viewport && viewport.top != null) + { viewport = {top: Math.min(cm.doc.height + paddingVert(cm.display) - displayHeight(cm), viewport.top)}; } + // Updated line heights might result in the drawn area not + // actually covering the viewport. Keep looping until it does. 
+ update.visible = visibleLines(cm.display, cm.doc, viewport); + if (update.visible.from >= cm.display.viewFrom && update.visible.to <= cm.display.viewTo) + { break } + } + if (!updateDisplayIfNeeded(cm, update)) { break } + updateHeightsInViewport(cm); + var barMeasure = measureForScrollbars(cm); + updateSelection(cm); + updateScrollbars(cm, barMeasure); + setDocumentHeight(cm, barMeasure); + update.force = false; + } + + update.signal(cm, "update", cm); + if (cm.display.viewFrom != cm.display.reportedViewFrom || cm.display.viewTo != cm.display.reportedViewTo) { + update.signal(cm, "viewportChange", cm, cm.display.viewFrom, cm.display.viewTo); + cm.display.reportedViewFrom = cm.display.viewFrom; cm.display.reportedViewTo = cm.display.viewTo; + } + } + + function updateDisplaySimple(cm, viewport) { + var update = new DisplayUpdate(cm, viewport); + if (updateDisplayIfNeeded(cm, update)) { + updateHeightsInViewport(cm); + postUpdateDisplay(cm, update); + var barMeasure = measureForScrollbars(cm); + updateSelection(cm); + updateScrollbars(cm, barMeasure); + setDocumentHeight(cm, barMeasure); + update.finish(); + } + } + + // Sync the actual display DOM structure with display.view, removing + // nodes for lines that are no longer in view, and creating the ones + // that are not there yet, and updating the ones that are out of + // date. + function patchDisplay(cm, updateNumbersFrom, dims) { + var display = cm.display, lineNumbers = cm.options.lineNumbers; + var container = display.lineDiv, cur = container.firstChild; + + function rm(node) { + var next = node.nextSibling; + // Works around a throw-scroll bug in OS X Webkit + if (webkit && mac && cm.display.currentWheelTarget == node) + { node.style.display = "none"; } + else + { node.parentNode.removeChild(node); } + return next + } + + var view = display.view, lineN = display.viewFrom; + // Loop over the elements in the view, syncing cur (the DOM nodes + // in display.lineDiv) with the view as we go. + for (var i = 0; i < view.length; i++) { + var lineView = view[i]; + if (lineView.hidden) ; else if (!lineView.node || lineView.node.parentNode != container) { // Not drawn yet + var node = buildLineElement(cm, lineView, lineN, dims); + container.insertBefore(node, cur); + } else { // Already drawn + while (cur != lineView.node) { cur = rm(cur); } + var updateNumber = lineNumbers && updateNumbersFrom != null && + updateNumbersFrom <= lineN && lineView.lineNumber; + if (lineView.changes) { + if (indexOf(lineView.changes, "gutter") > -1) { updateNumber = false; } + updateLineForChanges(cm, lineView, lineN, dims); + } + if (updateNumber) { + removeChildren(lineView.lineNumber); + lineView.lineNumber.appendChild(document.createTextNode(lineNumberFor(cm.options, lineN))); + } + cur = lineView.node.nextSibling; + } + lineN += lineView.size; + } + while (cur) { cur = rm(cur); } + } + + function updateGutterSpace(display) { + var width = display.gutters.offsetWidth; + display.sizer.style.marginLeft = width + "px"; + } + + function setDocumentHeight(cm, measure) { + cm.display.sizer.style.minHeight = measure.docHeight + "px"; + cm.display.heightForcer.style.top = measure.docHeight + "px"; + cm.display.gutters.style.height = (measure.docHeight + cm.display.barHeight + scrollGap(cm)) + "px"; + } + + // Re-align line numbers and gutter marks to compensate for + // horizontal scrolling. 
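  // Editor's note -- illustrative sketch only, not part of this changeset. With
  // fixedGutter enabled (the default), horizontal scrolling is routed through
  // setScrollLeft() above, which calls alignHorizontally() below so the line-number
  // gutter and any "alignable" widgets stay visually pinned while the code area
  // scrolls. The function name and its host parameter are assumptions for the example.
  function exampleFixedGutterSetup(host) {
    var editor = CodeMirror(host, {
      lineNumbers: true,
      fixedGutter: true,     // gutter stays pinned on horizontal scroll
      lineWrapping: false    // long lines force horizontal scrolling
    });
    editor.scrollTo(200, null); // ends up in setScrollLeft() -> alignHorizontally()
    return editor;
  }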
+ function alignHorizontally(cm) { + var display = cm.display, view = display.view; + if (!display.alignWidgets && (!display.gutters.firstChild || !cm.options.fixedGutter)) { return } + var comp = compensateForHScroll(display) - display.scroller.scrollLeft + cm.doc.scrollLeft; + var gutterW = display.gutters.offsetWidth, left = comp + "px"; + for (var i = 0; i < view.length; i++) { if (!view[i].hidden) { + if (cm.options.fixedGutter) { + if (view[i].gutter) + { view[i].gutter.style.left = left; } + if (view[i].gutterBackground) + { view[i].gutterBackground.style.left = left; } + } + var align = view[i].alignable; + if (align) { for (var j = 0; j < align.length; j++) + { align[j].style.left = left; } } + } } + if (cm.options.fixedGutter) + { display.gutters.style.left = (comp + gutterW) + "px"; } + } + + // Used to ensure that the line number gutter is still the right + // size for the current document size. Returns true when an update + // is needed. + function maybeUpdateLineNumberWidth(cm) { + if (!cm.options.lineNumbers) { return false } + var doc = cm.doc, last = lineNumberFor(cm.options, doc.first + doc.size - 1), display = cm.display; + if (last.length != display.lineNumChars) { + var test = display.measure.appendChild(elt("div", [elt("div", last)], + "CodeMirror-linenumber CodeMirror-gutter-elt")); + var innerW = test.firstChild.offsetWidth, padding = test.offsetWidth - innerW; + display.lineGutter.style.width = ""; + display.lineNumInnerWidth = Math.max(innerW, display.lineGutter.offsetWidth - padding) + 1; + display.lineNumWidth = display.lineNumInnerWidth + padding; + display.lineNumChars = display.lineNumInnerWidth ? last.length : -1; + display.lineGutter.style.width = display.lineNumWidth + "px"; + updateGutterSpace(cm.display); + return true + } + return false + } + + function getGutters(gutters, lineNumbers) { + var result = [], sawLineNumbers = false; + for (var i = 0; i < gutters.length; i++) { + var name = gutters[i], style = null; + if (typeof name != "string") { style = name.style; name = name.className; } + if (name == "CodeMirror-linenumbers") { + if (!lineNumbers) { continue } + else { sawLineNumbers = true; } + } + result.push({className: name, style: style}); + } + if (lineNumbers && !sawLineNumbers) { result.push({className: "CodeMirror-linenumbers", style: null}); } + return result + } + + // Rebuild the gutter elements, ensure the margin to the left of the + // code matches their width. + function renderGutters(display) { + var gutters = display.gutters, specs = display.gutterSpecs; + removeChildren(gutters); + display.lineGutter = null; + for (var i = 0; i < specs.length; ++i) { + var ref = specs[i]; + var className = ref.className; + var style = ref.style; + var gElt = gutters.appendChild(elt("div", null, "CodeMirror-gutter " + className)); + if (style) { gElt.style.cssText = style; } + if (className == "CodeMirror-linenumbers") { + display.lineGutter = gElt; + gElt.style.width = (display.lineNumWidth || 1) + "px"; + } + } + gutters.style.display = specs.length ? "" : "none"; + updateGutterSpace(display); + } + + function updateGutters(cm) { + renderGutters(cm.display); + regChange(cm); + alignHorizontally(cm); + } // The display handles the DOM integration, both for input reading // and content drawing. It holds references to DOM nodes and // display-related state. 
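  // Editor's note -- illustrative sketch only, not part of this changeset. The Display
  // constructor in the hunk below assembles roughly this DOM nesting; the inner part is
  // visible in the hunk itself, while the outer wrapper/scroller layering comes from the
  // surrounding (unchanged) code and is included here as an assumption for orientation:
  //
  //   div.CodeMirror            (d.wrapper)
  //     div.CodeMirror-scroll   (d.scroller)
  //       div.CodeMirror-sizer  (d.sizer)
  //         div                 (d.mover)
  //           div.CodeMirror-lines
  //             div             (d.lineSpace)
  //               d.measure, d.lineMeasure, d.selectionDiv, d.cursorDiv,
  //               div.CodeMirror-code (d.lineDiv)   <- the rendered lines
  //       d.gutters, d.heightForcer
  //     d.scrollbarFiller, d.gutterFiller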
- function Display(place, doc, input) { + function Display(place, doc, input, options) { var d = this; this.input = input; @@ -145,7 +4340,7 @@ d.gutterFiller = elt("div", null, "CodeMirror-gutter-filler"); d.gutterFiller.setAttribute("cm-not-content", "true"); // Will contain the actual code, positioned to cover the viewport. - d.lineDiv = elt("div", null, "CodeMirror-code"); + d.lineDiv = eltP("div", null, "CodeMirror-code"); // Elements are added to these to represent selection and cursors. d.selectionDiv = elt("div", null, null, "position: relative; z-index: 1"); d.cursorDiv = elt("div", null, "CodeMirror-cursors"); @@ -154,10 +4349,11 @@ // When lines outside of the viewport are measured, they are drawn in this. d.lineMeasure = elt("div", null, "CodeMirror-measure"); // Wraps everything that needs to exist inside the vertically-padded coordinate system - d.lineSpace = elt("div", [d.measure, d.lineMeasure, d.selectionDiv, d.cursorDiv, d.lineDiv], + d.lineSpace = eltP("div", [d.measure, d.lineMeasure, d.selectionDiv, d.cursorDiv, d.lineDiv], null, "position: relative; outline: none"); + var lines = eltP("div", [d.lineSpace], "CodeMirror-lines"); // Moved around its parent to cover visible view. - d.mover = elt("div", [elt("div", [d.lineSpace], "CodeMirror-lines")], null, "position: relative"); + d.mover = elt("div", [lines], null, "position: relative"); // Set to the height of the document, allowing scrolling. d.sizer = elt("div", [d.mover], "CodeMirror-sizer"); d.sizerWidth = null; @@ -176,11 +4372,11 @@ // Work around IE7 z-index bug (not perfect, hence IE7 not really being supported) if (ie && ie_version < 8) { d.gutters.style.zIndex = -1; d.scroller.style.paddingRight = 0; } - if (!webkit && !(gecko && mobile)) d.scroller.draggable = true; + if (!webkit && !(gecko && mobile)) { d.scroller.draggable = true; } if (place) { - if (place.appendChild) place.appendChild(d.wrapper); - else place(d.wrapper); + if (place.appendChild) { place.appendChild(d.wrapper); } + else { place(d.wrapper); } } // Current rendered range (may be bigger than the view window). @@ -228,3738 +4424,12 @@ d.activeTouch = null; + d.gutterSpecs = getGutters(options.gutters, options.lineNumbers); + renderGutters(d); + input.init(d); } - // STATE UPDATES - - // Used to get the editor into a consistent state again when options change. - - function loadMode(cm) { - cm.doc.mode = CodeMirror.getMode(cm.options, cm.doc.modeOption); - resetModeState(cm); - } - - function resetModeState(cm) { - cm.doc.iter(function(line) { - if (line.stateAfter) line.stateAfter = null; - if (line.styles) line.styles = null; - }); - cm.doc.frontier = cm.doc.first; - startWorker(cm, 100); - cm.state.modeGen++; - if (cm.curOp) regChange(cm); - } - - function wrappingChanged(cm) { - if (cm.options.lineWrapping) { - addClass(cm.display.wrapper, "CodeMirror-wrap"); - cm.display.sizer.style.minWidth = ""; - cm.display.sizerWidth = null; - } else { - rmClass(cm.display.wrapper, "CodeMirror-wrap"); - findMaxLine(cm); - } - estimateLineHeights(cm); - regChange(cm); - clearCaches(cm); - setTimeout(function(){updateScrollbars(cm);}, 100); - } - - // Returns a function that estimates the height of a line, to use as - // first approximation until the line becomes visible (and is thus - // properly measurable). 
- function estimateHeight(cm) { - var th = textHeight(cm.display), wrapping = cm.options.lineWrapping; - var perLine = wrapping && Math.max(5, cm.display.scroller.clientWidth / charWidth(cm.display) - 3); - return function(line) { - if (lineIsHidden(cm.doc, line)) return 0; - - var widgetsHeight = 0; - if (line.widgets) for (var i = 0; i < line.widgets.length; i++) { - if (line.widgets[i].height) widgetsHeight += line.widgets[i].height; - } - - if (wrapping) - return widgetsHeight + (Math.ceil(line.text.length / perLine) || 1) * th; - else - return widgetsHeight + th; - }; - } - - function estimateLineHeights(cm) { - var doc = cm.doc, est = estimateHeight(cm); - doc.iter(function(line) { - var estHeight = est(line); - if (estHeight != line.height) updateLineHeight(line, estHeight); - }); - } - - function themeChanged(cm) { - cm.display.wrapper.className = cm.display.wrapper.className.replace(/\s*cm-s-\S+/g, "") + - cm.options.theme.replace(/(^|\s)\s*/g, " cm-s-"); - clearCaches(cm); - } - - function guttersChanged(cm) { - updateGutters(cm); - regChange(cm); - setTimeout(function(){alignHorizontally(cm);}, 20); - } - - // Rebuild the gutter elements, ensure the margin to the left of the - // code matches their width. - function updateGutters(cm) { - var gutters = cm.display.gutters, specs = cm.options.gutters; - removeChildren(gutters); - for (var i = 0; i < specs.length; ++i) { - var gutterClass = specs[i]; - var gElt = gutters.appendChild(elt("div", null, "CodeMirror-gutter " + gutterClass)); - if (gutterClass == "CodeMirror-linenumbers") { - cm.display.lineGutter = gElt; - gElt.style.width = (cm.display.lineNumWidth || 1) + "px"; - } - } - gutters.style.display = i ? "" : "none"; - updateGutterSpace(cm); - } - - function updateGutterSpace(cm) { - var width = cm.display.gutters.offsetWidth; - cm.display.sizer.style.marginLeft = width + "px"; - } - - // Compute the character length of a line, taking into account - // collapsed ranges (see markText) that might hide parts, and join - // other lines onto it. - function lineLength(line) { - if (line.height == 0) return 0; - var len = line.text.length, merged, cur = line; - while (merged = collapsedSpanAtStart(cur)) { - var found = merged.find(0, true); - cur = found.from.line; - len += found.from.ch - found.to.ch; - } - cur = line; - while (merged = collapsedSpanAtEnd(cur)) { - var found = merged.find(0, true); - len -= cur.text.length - found.from.ch; - cur = found.to.line; - len += cur.text.length - found.to.ch; - } - return len; - } - - // Find the longest line in the document. - function findMaxLine(cm) { - var d = cm.display, doc = cm.doc; - d.maxLine = getLine(doc, doc.first); - d.maxLineLength = lineLength(d.maxLine); - d.maxLineChanged = true; - doc.iter(function(line) { - var len = lineLength(line); - if (len > d.maxLineLength) { - d.maxLineLength = len; - d.maxLine = line; - } - }); - } - - // Make sure the gutters options contains the element - // "CodeMirror-linenumbers" when the lineNumbers option is true. - function setGuttersForLineNumbers(options) { - var found = indexOf(options.gutters, "CodeMirror-linenumbers"); - if (found == -1 && options.lineNumbers) { - options.gutters = options.gutters.concat(["CodeMirror-linenumbers"]); - } else if (found > -1 && !options.lineNumbers) { - options.gutters = options.gutters.slice(0); - options.gutters.splice(found, 1); - } - } - - // SCROLLBARS - - // Prepare DOM reads needed to update the scrollbars. Done in one - // shot to minimize update/measure roundtrips. 
- function measureForScrollbars(cm) { - var d = cm.display, gutterW = d.gutters.offsetWidth; - var docH = Math.round(cm.doc.height + paddingVert(cm.display)); - return { - clientHeight: d.scroller.clientHeight, - viewHeight: d.wrapper.clientHeight, - scrollWidth: d.scroller.scrollWidth, clientWidth: d.scroller.clientWidth, - viewWidth: d.wrapper.clientWidth, - barLeft: cm.options.fixedGutter ? gutterW : 0, - docHeight: docH, - scrollHeight: docH + scrollGap(cm) + d.barHeight, - nativeBarWidth: d.nativeBarWidth, - gutterWidth: gutterW - }; - } - - function NativeScrollbars(place, scroll, cm) { - this.cm = cm; - var vert = this.vert = elt("div", [elt("div", null, null, "min-width: 1px")], "CodeMirror-vscrollbar"); - var horiz = this.horiz = elt("div", [elt("div", null, null, "height: 100%; min-height: 1px")], "CodeMirror-hscrollbar"); - place(vert); place(horiz); - - on(vert, "scroll", function() { - if (vert.clientHeight) scroll(vert.scrollTop, "vertical"); - }); - on(horiz, "scroll", function() { - if (horiz.clientWidth) scroll(horiz.scrollLeft, "horizontal"); - }); - - this.checkedZeroWidth = false; - // Need to set a minimum width to see the scrollbar on IE7 (but must not set it on IE8). - if (ie && ie_version < 8) this.horiz.style.minHeight = this.vert.style.minWidth = "18px"; - } - - NativeScrollbars.prototype = copyObj({ - update: function(measure) { - var needsH = measure.scrollWidth > measure.clientWidth + 1; - var needsV = measure.scrollHeight > measure.clientHeight + 1; - var sWidth = measure.nativeBarWidth; - - if (needsV) { - this.vert.style.display = "block"; - this.vert.style.bottom = needsH ? sWidth + "px" : "0"; - var totalHeight = measure.viewHeight - (needsH ? sWidth : 0); - // A bug in IE8 can cause this value to be negative, so guard it. - this.vert.firstChild.style.height = - Math.max(0, measure.scrollHeight - measure.clientHeight + totalHeight) + "px"; - } else { - this.vert.style.display = ""; - this.vert.firstChild.style.height = "0"; - } - - if (needsH) { - this.horiz.style.display = "block"; - this.horiz.style.right = needsV ? sWidth + "px" : "0"; - this.horiz.style.left = measure.barLeft + "px"; - var totalWidth = measure.viewWidth - measure.barLeft - (needsV ? sWidth : 0); - this.horiz.firstChild.style.width = - (measure.scrollWidth - measure.clientWidth + totalWidth) + "px"; - } else { - this.horiz.style.display = ""; - this.horiz.firstChild.style.width = "0"; - } - - if (!this.checkedZeroWidth && measure.clientHeight > 0) { - if (sWidth == 0) this.zeroWidthHack(); - this.checkedZeroWidth = true; - } - - return {right: needsV ? sWidth : 0, bottom: needsH ? sWidth : 0}; - }, - setScrollLeft: function(pos) { - if (this.horiz.scrollLeft != pos) this.horiz.scrollLeft = pos; - if (this.disableHoriz) this.enableZeroWidthBar(this.horiz, this.disableHoriz); - }, - setScrollTop: function(pos) { - if (this.vert.scrollTop != pos) this.vert.scrollTop = pos; - if (this.disableVert) this.enableZeroWidthBar(this.vert, this.disableVert); - }, - zeroWidthHack: function() { - var w = mac && !mac_geMountainLion ? 
"12px" : "18px"; - this.horiz.style.height = this.vert.style.width = w; - this.horiz.style.pointerEvents = this.vert.style.pointerEvents = "none"; - this.disableHoriz = new Delayed; - this.disableVert = new Delayed; - }, - enableZeroWidthBar: function(bar, delay) { - bar.style.pointerEvents = "auto"; - function maybeDisable() { - // To find out whether the scrollbar is still visible, we - // check whether the element under the pixel in the bottom - // left corner of the scrollbar box is the scrollbar box - // itself (when the bar is still visible) or its filler child - // (when the bar is hidden). If it is still visible, we keep - // it enabled, if it's hidden, we disable pointer events. - var box = bar.getBoundingClientRect(); - var elt = document.elementFromPoint(box.left + 1, box.bottom - 1); - if (elt != bar) bar.style.pointerEvents = "none"; - else delay.set(1000, maybeDisable); - } - delay.set(1000, maybeDisable); - }, - clear: function() { - var parent = this.horiz.parentNode; - parent.removeChild(this.horiz); - parent.removeChild(this.vert); - } - }, NativeScrollbars.prototype); - - function NullScrollbars() {} - - NullScrollbars.prototype = copyObj({ - update: function() { return {bottom: 0, right: 0}; }, - setScrollLeft: function() {}, - setScrollTop: function() {}, - clear: function() {} - }, NullScrollbars.prototype); - - CodeMirror.scrollbarModel = {"native": NativeScrollbars, "null": NullScrollbars}; - - function initScrollbars(cm) { - if (cm.display.scrollbars) { - cm.display.scrollbars.clear(); - if (cm.display.scrollbars.addClass) - rmClass(cm.display.wrapper, cm.display.scrollbars.addClass); - } - - cm.display.scrollbars = new CodeMirror.scrollbarModel[cm.options.scrollbarStyle](function(node) { - cm.display.wrapper.insertBefore(node, cm.display.scrollbarFiller); - // Prevent clicks in the scrollbars from killing focus - on(node, "mousedown", function() { - if (cm.state.focused) setTimeout(function() { cm.display.input.focus(); }, 0); - }); - node.setAttribute("cm-not-content", "true"); - }, function(pos, axis) { - if (axis == "horizontal") setScrollLeft(cm, pos); - else setScrollTop(cm, pos); - }, cm); - if (cm.display.scrollbars.addClass) - addClass(cm.display.wrapper, cm.display.scrollbars.addClass); - } - - function updateScrollbars(cm, measure) { - if (!measure) measure = measureForScrollbars(cm); - var startWidth = cm.display.barWidth, startHeight = cm.display.barHeight; - updateScrollbarsInner(cm, measure); - for (var i = 0; i < 4 && startWidth != cm.display.barWidth || startHeight != cm.display.barHeight; i++) { - if (startWidth != cm.display.barWidth && cm.options.lineWrapping) - updateHeightsInViewport(cm); - updateScrollbarsInner(cm, measureForScrollbars(cm)); - startWidth = cm.display.barWidth; startHeight = cm.display.barHeight; - } - } - - // Re-synchronize the fake scrollbars with the actual size of the - // content. 
- function updateScrollbarsInner(cm, measure) { - var d = cm.display; - var sizes = d.scrollbars.update(measure); - - d.sizer.style.paddingRight = (d.barWidth = sizes.right) + "px"; - d.sizer.style.paddingBottom = (d.barHeight = sizes.bottom) + "px"; - - if (sizes.right && sizes.bottom) { - d.scrollbarFiller.style.display = "block"; - d.scrollbarFiller.style.height = sizes.bottom + "px"; - d.scrollbarFiller.style.width = sizes.right + "px"; - } else d.scrollbarFiller.style.display = ""; - if (sizes.bottom && cm.options.coverGutterNextToScrollbar && cm.options.fixedGutter) { - d.gutterFiller.style.display = "block"; - d.gutterFiller.style.height = sizes.bottom + "px"; - d.gutterFiller.style.width = measure.gutterWidth + "px"; - } else d.gutterFiller.style.display = ""; - } - - // Compute the lines that are visible in a given viewport (defaults - // the the current scroll position). viewport may contain top, - // height, and ensure (see op.scrollToPos) properties. - function visibleLines(display, doc, viewport) { - var top = viewport && viewport.top != null ? Math.max(0, viewport.top) : display.scroller.scrollTop; - top = Math.floor(top - paddingTop(display)); - var bottom = viewport && viewport.bottom != null ? viewport.bottom : top + display.wrapper.clientHeight; - - var from = lineAtHeight(doc, top), to = lineAtHeight(doc, bottom); - // Ensure is a {from: {line, ch}, to: {line, ch}} object, and - // forces those lines into the viewport (if possible). - if (viewport && viewport.ensure) { - var ensureFrom = viewport.ensure.from.line, ensureTo = viewport.ensure.to.line; - if (ensureFrom < from) { - from = ensureFrom; - to = lineAtHeight(doc, heightAtLine(getLine(doc, ensureFrom)) + display.wrapper.clientHeight); - } else if (Math.min(ensureTo, doc.lastLine()) >= to) { - from = lineAtHeight(doc, heightAtLine(getLine(doc, ensureTo)) - display.wrapper.clientHeight); - to = ensureTo; - } - } - return {from: from, to: Math.max(to, from + 1)}; - } - - // LINE NUMBERS - - // Re-align line numbers and gutter marks to compensate for - // horizontal scrolling. - function alignHorizontally(cm) { - var display = cm.display, view = display.view; - if (!display.alignWidgets && (!display.gutters.firstChild || !cm.options.fixedGutter)) return; - var comp = compensateForHScroll(display) - display.scroller.scrollLeft + cm.doc.scrollLeft; - var gutterW = display.gutters.offsetWidth, left = comp + "px"; - for (var i = 0; i < view.length; i++) if (!view[i].hidden) { - if (cm.options.fixedGutter && view[i].gutter) - view[i].gutter.style.left = left; - var align = view[i].alignable; - if (align) for (var j = 0; j < align.length; j++) - align[j].style.left = left; - } - if (cm.options.fixedGutter) - display.gutters.style.left = (comp + gutterW) + "px"; - } - - // Used to ensure that the line number gutter is still the right - // size for the current document size. Returns true when an update - // is needed. 
- function maybeUpdateLineNumberWidth(cm) { - if (!cm.options.lineNumbers) return false; - var doc = cm.doc, last = lineNumberFor(cm.options, doc.first + doc.size - 1), display = cm.display; - if (last.length != display.lineNumChars) { - var test = display.measure.appendChild(elt("div", [elt("div", last)], - "CodeMirror-linenumber CodeMirror-gutter-elt")); - var innerW = test.firstChild.offsetWidth, padding = test.offsetWidth - innerW; - display.lineGutter.style.width = ""; - display.lineNumInnerWidth = Math.max(innerW, display.lineGutter.offsetWidth - padding) + 1; - display.lineNumWidth = display.lineNumInnerWidth + padding; - display.lineNumChars = display.lineNumInnerWidth ? last.length : -1; - display.lineGutter.style.width = display.lineNumWidth + "px"; - updateGutterSpace(cm); - return true; - } - return false; - } - - function lineNumberFor(options, i) { - return String(options.lineNumberFormatter(i + options.firstLineNumber)); - } - - // Computes display.scroller.scrollLeft + display.gutters.offsetWidth, - // but using getBoundingClientRect to get a sub-pixel-accurate - // result. - function compensateForHScroll(display) { - return display.scroller.getBoundingClientRect().left - display.sizer.getBoundingClientRect().left; - } - - // DISPLAY DRAWING - - function DisplayUpdate(cm, viewport, force) { - var display = cm.display; - - this.viewport = viewport; - // Store some values that we'll need later (but don't want to force a relayout for) - this.visible = visibleLines(display, cm.doc, viewport); - this.editorIsHidden = !display.wrapper.offsetWidth; - this.wrapperHeight = display.wrapper.clientHeight; - this.wrapperWidth = display.wrapper.clientWidth; - this.oldDisplayWidth = displayWidth(cm); - this.force = force; - this.dims = getDimensions(cm); - this.events = []; - } - - DisplayUpdate.prototype.signal = function(emitter, type) { - if (hasHandler(emitter, type)) - this.events.push(arguments); - }; - DisplayUpdate.prototype.finish = function() { - for (var i = 0; i < this.events.length; i++) - signal.apply(null, this.events[i]); - }; - - function maybeClipScrollbars(cm) { - var display = cm.display; - if (!display.scrollbarsClipped && display.scroller.offsetWidth) { - display.nativeBarWidth = display.scroller.offsetWidth - display.scroller.clientWidth; - display.heightForcer.style.height = scrollGap(cm) + "px"; - display.sizer.style.marginBottom = -display.nativeBarWidth + "px"; - display.sizer.style.borderRightWidth = scrollGap(cm) + "px"; - display.scrollbarsClipped = true; - } - } - - // Does the actual updating of the line display. Bails out - // (returning false) when there is nothing to be done and forced is - // false. - function updateDisplayIfNeeded(cm, update) { - var display = cm.display, doc = cm.doc; - - if (update.editorIsHidden) { - resetView(cm); - return false; - } - - // Bail out if the visible area is already rendered and nothing changed. 
- if (!update.force && - update.visible.from >= display.viewFrom && update.visible.to <= display.viewTo && - (display.updateLineNumbers == null || display.updateLineNumbers >= display.viewTo) && - display.renderedView == display.view && countDirtyView(cm) == 0) - return false; - - if (maybeUpdateLineNumberWidth(cm)) { - resetView(cm); - update.dims = getDimensions(cm); - } - - // Compute a suitable new viewport (from & to) - var end = doc.first + doc.size; - var from = Math.max(update.visible.from - cm.options.viewportMargin, doc.first); - var to = Math.min(end, update.visible.to + cm.options.viewportMargin); - if (display.viewFrom < from && from - display.viewFrom < 20) from = Math.max(doc.first, display.viewFrom); - if (display.viewTo > to && display.viewTo - to < 20) to = Math.min(end, display.viewTo); - if (sawCollapsedSpans) { - from = visualLineNo(cm.doc, from); - to = visualLineEndNo(cm.doc, to); - } - - var different = from != display.viewFrom || to != display.viewTo || - display.lastWrapHeight != update.wrapperHeight || display.lastWrapWidth != update.wrapperWidth; - adjustView(cm, from, to); - - display.viewOffset = heightAtLine(getLine(cm.doc, display.viewFrom)); - // Position the mover div to align with the current scroll position - cm.display.mover.style.top = display.viewOffset + "px"; - - var toUpdate = countDirtyView(cm); - if (!different && toUpdate == 0 && !update.force && display.renderedView == display.view && - (display.updateLineNumbers == null || display.updateLineNumbers >= display.viewTo)) - return false; - - // For big changes, we hide the enclosing element during the - // update, since that speeds up the operations on most browsers. - var focused = activeElt(); - if (toUpdate > 4) display.lineDiv.style.display = "none"; - patchDisplay(cm, display.updateLineNumbers, update.dims); - if (toUpdate > 4) display.lineDiv.style.display = ""; - display.renderedView = display.view; - // There might have been a widget with a focused element that got - // hidden or updated, if so re-focus it. - if (focused && activeElt() != focused && focused.offsetHeight) focused.focus(); - - // Prevent selection and cursors from interfering with the scroll - // width and height. - removeChildren(display.cursorDiv); - removeChildren(display.selectionDiv); - display.gutters.style.height = display.sizer.style.minHeight = 0; - - if (different) { - display.lastWrapHeight = update.wrapperHeight; - display.lastWrapWidth = update.wrapperWidth; - startWorker(cm, 400); - } - - display.updateLineNumbers = null; - - return true; - } - - function postUpdateDisplay(cm, update) { - var viewport = update.viewport; - for (var first = true;; first = false) { - if (!first || !cm.options.lineWrapping || update.oldDisplayWidth == displayWidth(cm)) { - // Clip forced viewport to actual scrollable area. - if (viewport && viewport.top != null) - viewport = {top: Math.min(cm.doc.height + paddingVert(cm.display) - displayHeight(cm), viewport.top)}; - // Updated line heights might result in the drawn area not - // actually covering the viewport. Keep looping until it does. 
- update.visible = visibleLines(cm.display, cm.doc, viewport); - if (update.visible.from >= cm.display.viewFrom && update.visible.to <= cm.display.viewTo) - break; - } - if (!updateDisplayIfNeeded(cm, update)) break; - updateHeightsInViewport(cm); - var barMeasure = measureForScrollbars(cm); - updateSelection(cm); - setDocumentHeight(cm, barMeasure); - updateScrollbars(cm, barMeasure); - } - - update.signal(cm, "update", cm); - if (cm.display.viewFrom != cm.display.reportedViewFrom || cm.display.viewTo != cm.display.reportedViewTo) { - update.signal(cm, "viewportChange", cm, cm.display.viewFrom, cm.display.viewTo); - cm.display.reportedViewFrom = cm.display.viewFrom; cm.display.reportedViewTo = cm.display.viewTo; - } - } - - function updateDisplaySimple(cm, viewport) { - var update = new DisplayUpdate(cm, viewport); - if (updateDisplayIfNeeded(cm, update)) { - updateHeightsInViewport(cm); - postUpdateDisplay(cm, update); - var barMeasure = measureForScrollbars(cm); - updateSelection(cm); - setDocumentHeight(cm, barMeasure); - updateScrollbars(cm, barMeasure); - update.finish(); - } - } - - function setDocumentHeight(cm, measure) { - cm.display.sizer.style.minHeight = measure.docHeight + "px"; - var total = measure.docHeight + cm.display.barHeight; - cm.display.heightForcer.style.top = total + "px"; - cm.display.gutters.style.height = Math.max(total + scrollGap(cm), measure.clientHeight) + "px"; - } - - // Read the actual heights of the rendered lines, and update their - // stored heights to match. - function updateHeightsInViewport(cm) { - var display = cm.display; - var prevBottom = display.lineDiv.offsetTop; - for (var i = 0; i < display.view.length; i++) { - var cur = display.view[i], height; - if (cur.hidden) continue; - if (ie && ie_version < 8) { - var bot = cur.node.offsetTop + cur.node.offsetHeight; - height = bot - prevBottom; - prevBottom = bot; - } else { - var box = cur.node.getBoundingClientRect(); - height = box.bottom - box.top; - } - var diff = cur.line.height - height; - if (height < 2) height = textHeight(display); - if (diff > .001 || diff < -.001) { - updateLineHeight(cur.line, height); - updateWidgetHeight(cur.line); - if (cur.rest) for (var j = 0; j < cur.rest.length; j++) - updateWidgetHeight(cur.rest[j]); - } - } - } - - // Read and store the height of line widgets associated with the - // given line. - function updateWidgetHeight(line) { - if (line.widgets) for (var i = 0; i < line.widgets.length; ++i) - line.widgets[i].height = line.widgets[i].node.parentNode.offsetHeight; - } - - // Do a bulk-read of the DOM positions and sizes needed to draw the - // view, so that we don't interleave reading and writing to the DOM. - function getDimensions(cm) { - var d = cm.display, left = {}, width = {}; - var gutterLeft = d.gutters.clientLeft; - for (var n = d.gutters.firstChild, i = 0; n; n = n.nextSibling, ++i) { - left[cm.options.gutters[i]] = n.offsetLeft + n.clientLeft + gutterLeft; - width[cm.options.gutters[i]] = n.clientWidth; - } - return {fixedPos: compensateForHScroll(d), - gutterTotalWidth: d.gutters.offsetWidth, - gutterLeft: left, - gutterWidth: width, - wrapperWidth: d.wrapper.clientWidth}; - } - - // Sync the actual display DOM structure with display.view, removing - // nodes for lines that are no longer in view, and creating the ones - // that are not there yet, and updating the ones that are out of - // date. 
- function patchDisplay(cm, updateNumbersFrom, dims) { - var display = cm.display, lineNumbers = cm.options.lineNumbers; - var container = display.lineDiv, cur = container.firstChild; - - function rm(node) { - var next = node.nextSibling; - // Works around a throw-scroll bug in OS X Webkit - if (webkit && mac && cm.display.currentWheelTarget == node) - node.style.display = "none"; - else - node.parentNode.removeChild(node); - return next; - } - - var view = display.view, lineN = display.viewFrom; - // Loop over the elements in the view, syncing cur (the DOM nodes - // in display.lineDiv) with the view as we go. - for (var i = 0; i < view.length; i++) { - var lineView = view[i]; - if (lineView.hidden) { - } else if (!lineView.node || lineView.node.parentNode != container) { // Not drawn yet - var node = buildLineElement(cm, lineView, lineN, dims); - container.insertBefore(node, cur); - } else { // Already drawn - while (cur != lineView.node) cur = rm(cur); - var updateNumber = lineNumbers && updateNumbersFrom != null && - updateNumbersFrom <= lineN && lineView.lineNumber; - if (lineView.changes) { - if (indexOf(lineView.changes, "gutter") > -1) updateNumber = false; - updateLineForChanges(cm, lineView, lineN, dims); - } - if (updateNumber) { - removeChildren(lineView.lineNumber); - lineView.lineNumber.appendChild(document.createTextNode(lineNumberFor(cm.options, lineN))); - } - cur = lineView.node.nextSibling; - } - lineN += lineView.size; - } - while (cur) cur = rm(cur); - } - - // When an aspect of a line changes, a string is added to - // lineView.changes. This updates the relevant part of the line's - // DOM structure. - function updateLineForChanges(cm, lineView, lineN, dims) { - for (var j = 0; j < lineView.changes.length; j++) { - var type = lineView.changes[j]; - if (type == "text") updateLineText(cm, lineView); - else if (type == "gutter") updateLineGutter(cm, lineView, lineN, dims); - else if (type == "class") updateLineClasses(lineView); - else if (type == "widget") updateLineWidgets(cm, lineView, dims); - } - lineView.changes = null; - } - - // Lines with gutter elements, widgets or a background class need to - // be wrapped, and have the extra elements added to the wrapper div - function ensureLineWrapped(lineView) { - if (lineView.node == lineView.text) { - lineView.node = elt("div", null, null, "position: relative"); - if (lineView.text.parentNode) - lineView.text.parentNode.replaceChild(lineView.node, lineView.text); - lineView.node.appendChild(lineView.text); - if (ie && ie_version < 8) lineView.node.style.zIndex = 2; - } - return lineView.node; - } - - function updateLineBackground(lineView) { - var cls = lineView.bgClass ? lineView.bgClass + " " + (lineView.line.bgClass || "") : lineView.line.bgClass; - if (cls) cls += " CodeMirror-linebackground"; - if (lineView.background) { - if (cls) lineView.background.className = cls; - else { lineView.background.parentNode.removeChild(lineView.background); lineView.background = null; } - } else if (cls) { - var wrap = ensureLineWrapped(lineView); - lineView.background = wrap.insertBefore(elt("div", null, cls), wrap.firstChild); - } - } - - // Wrapper around buildLineContent which will reuse the structure - // in display.externalMeasured when possible. 
- function getLineContent(cm, lineView) { - var ext = cm.display.externalMeasured; - if (ext && ext.line == lineView.line) { - cm.display.externalMeasured = null; - lineView.measure = ext.measure; - return ext.built; - } - return buildLineContent(cm, lineView); - } - - // Redraw the line's text. Interacts with the background and text - // classes because the mode may output tokens that influence these - // classes. - function updateLineText(cm, lineView) { - var cls = lineView.text.className; - var built = getLineContent(cm, lineView); - if (lineView.text == lineView.node) lineView.node = built.pre; - lineView.text.parentNode.replaceChild(built.pre, lineView.text); - lineView.text = built.pre; - if (built.bgClass != lineView.bgClass || built.textClass != lineView.textClass) { - lineView.bgClass = built.bgClass; - lineView.textClass = built.textClass; - updateLineClasses(lineView); - } else if (cls) { - lineView.text.className = cls; - } - } - - function updateLineClasses(lineView) { - updateLineBackground(lineView); - if (lineView.line.wrapClass) - ensureLineWrapped(lineView).className = lineView.line.wrapClass; - else if (lineView.node != lineView.text) - lineView.node.className = ""; - var textClass = lineView.textClass ? lineView.textClass + " " + (lineView.line.textClass || "") : lineView.line.textClass; - lineView.text.className = textClass || ""; - } - - function updateLineGutter(cm, lineView, lineN, dims) { - if (lineView.gutter) { - lineView.node.removeChild(lineView.gutter); - lineView.gutter = null; - } - if (lineView.gutterBackground) { - lineView.node.removeChild(lineView.gutterBackground); - lineView.gutterBackground = null; - } - if (lineView.line.gutterClass) { - var wrap = ensureLineWrapped(lineView); - lineView.gutterBackground = elt("div", null, "CodeMirror-gutter-background " + lineView.line.gutterClass, - "left: " + (cm.options.fixedGutter ? dims.fixedPos : -dims.gutterTotalWidth) + - "px; width: " + dims.gutterTotalWidth + "px"); - wrap.insertBefore(lineView.gutterBackground, lineView.text); - } - var markers = lineView.line.gutterMarkers; - if (cm.options.lineNumbers || markers) { - var wrap = ensureLineWrapped(lineView); - var gutterWrap = lineView.gutter = elt("div", null, "CodeMirror-gutter-wrapper", "left: " + - (cm.options.fixedGutter ? 
dims.fixedPos : -dims.gutterTotalWidth) + "px"); - cm.display.input.setUneditable(gutterWrap); - wrap.insertBefore(gutterWrap, lineView.text); - if (lineView.line.gutterClass) - gutterWrap.className += " " + lineView.line.gutterClass; - if (cm.options.lineNumbers && (!markers || !markers["CodeMirror-linenumbers"])) - lineView.lineNumber = gutterWrap.appendChild( - elt("div", lineNumberFor(cm.options, lineN), - "CodeMirror-linenumber CodeMirror-gutter-elt", - "left: " + dims.gutterLeft["CodeMirror-linenumbers"] + "px; width: " - + cm.display.lineNumInnerWidth + "px")); - if (markers) for (var k = 0; k < cm.options.gutters.length; ++k) { - var id = cm.options.gutters[k], found = markers.hasOwnProperty(id) && markers[id]; - if (found) - gutterWrap.appendChild(elt("div", [found], "CodeMirror-gutter-elt", "left: " + - dims.gutterLeft[id] + "px; width: " + dims.gutterWidth[id] + "px")); - } - } - } - - function updateLineWidgets(cm, lineView, dims) { - if (lineView.alignable) lineView.alignable = null; - for (var node = lineView.node.firstChild, next; node; node = next) { - var next = node.nextSibling; - if (node.className == "CodeMirror-linewidget") - lineView.node.removeChild(node); - } - insertLineWidgets(cm, lineView, dims); - } - - // Build a line's DOM representation from scratch - function buildLineElement(cm, lineView, lineN, dims) { - var built = getLineContent(cm, lineView); - lineView.text = lineView.node = built.pre; - if (built.bgClass) lineView.bgClass = built.bgClass; - if (built.textClass) lineView.textClass = built.textClass; - - updateLineClasses(lineView); - updateLineGutter(cm, lineView, lineN, dims); - insertLineWidgets(cm, lineView, dims); - return lineView.node; - } - - // A lineView may contain multiple logical lines (when merged by - // collapsed spans). The widgets for all of them need to be drawn. - function insertLineWidgets(cm, lineView, dims) { - insertLineWidgetsFor(cm, lineView.line, lineView, dims, true); - if (lineView.rest) for (var i = 0; i < lineView.rest.length; i++) - insertLineWidgetsFor(cm, lineView.rest[i], lineView, dims, false); - } - - function insertLineWidgetsFor(cm, line, lineView, dims, allowAbove) { - if (!line.widgets) return; - var wrap = ensureLineWrapped(lineView); - for (var i = 0, ws = line.widgets; i < ws.length; ++i) { - var widget = ws[i], node = elt("div", [widget.node], "CodeMirror-linewidget"); - if (!widget.handleMouseEvents) node.setAttribute("cm-ignore-events", "true"); - positionLineWidget(widget, node, lineView, dims); - cm.display.input.setUneditable(node); - if (allowAbove && widget.above) - wrap.insertBefore(node, lineView.gutter || lineView.text); - else - wrap.appendChild(node); - signalLater(widget, "redraw"); - } - } - - function positionLineWidget(widget, node, lineView, dims) { - if (widget.noHScroll) { - (lineView.alignable || (lineView.alignable = [])).push(node); - var width = dims.wrapperWidth; - node.style.left = dims.fixedPos + "px"; - if (!widget.coverGutter) { - width -= dims.gutterTotalWidth; - node.style.paddingLeft = dims.gutterTotalWidth + "px"; - } - node.style.width = width + "px"; - } - if (widget.coverGutter) { - node.style.zIndex = 5; - node.style.position = "relative"; - if (!widget.noHScroll) node.style.marginLeft = -dims.gutterTotalWidth + "px"; - } - } - - // POSITION OBJECT - - // A Pos instance represents a position within the text. 
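// A minimal usage sketch of the position helpers introduced here (illustrative
// only; Pos, cmp, minPos and maxPos are the functions defined just below).
// Positions are plain {line, ch} pairs, ordered first by line and then by
// character offset:
var earlier = Pos(5, 10);                  // line 5, character 10
var later = Pos(5, 12);                    // same line, two characters further on
console.assert(cmp(earlier, later) < 0);   // earlier sorts before later
console.assert(minPos(earlier, later) == earlier);
console.assert(maxPos(earlier, later) == later);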
- var Pos = CodeMirror.Pos = function(line, ch) { - if (!(this instanceof Pos)) return new Pos(line, ch); - this.line = line; this.ch = ch; - }; - - // Compare two positions, return 0 if they are the same, a negative - // number when a is less, and a positive number otherwise. - var cmp = CodeMirror.cmpPos = function(a, b) { return a.line - b.line || a.ch - b.ch; }; - - function copyPos(x) {return Pos(x.line, x.ch);} - function maxPos(a, b) { return cmp(a, b) < 0 ? b : a; } - function minPos(a, b) { return cmp(a, b) < 0 ? a : b; } - - // INPUT HANDLING - - function ensureFocus(cm) { - if (!cm.state.focused) { cm.display.input.focus(); onFocus(cm); } - } - - // This will be set to an array of strings when copying, so that, - // when pasting, we know what kind of selections the copied text - // was made out of. - var lastCopied = null; - - function applyTextInput(cm, inserted, deleted, sel, origin) { - var doc = cm.doc; - cm.display.shift = false; - if (!sel) sel = doc.sel; - - var paste = cm.state.pasteIncoming || origin == "paste"; - var textLines = doc.splitLines(inserted), multiPaste = null; - // When pasing N lines into N selections, insert one line per selection - if (paste && sel.ranges.length > 1) { - if (lastCopied && lastCopied.join("\n") == inserted) { - if (sel.ranges.length % lastCopied.length == 0) { - multiPaste = []; - for (var i = 0; i < lastCopied.length; i++) - multiPaste.push(doc.splitLines(lastCopied[i])); - } - } else if (textLines.length == sel.ranges.length) { - multiPaste = map(textLines, function(l) { return [l]; }); - } - } - - // Normal behavior is to insert the new text into every selection - for (var i = sel.ranges.length - 1; i >= 0; i--) { - var range = sel.ranges[i]; - var from = range.from(), to = range.to(); - if (range.empty()) { - if (deleted && deleted > 0) // Handle deletion - from = Pos(from.line, from.ch - deleted); - else if (cm.state.overwrite && !paste) // Handle overwrite - to = Pos(to.line, Math.min(getLine(doc, to.line).text.length, to.ch + lst(textLines).length)); - } - var updateInput = cm.curOp.updateInput; - var changeEvent = {from: from, to: to, text: multiPaste ? multiPaste[i % multiPaste.length] : textLines, - origin: origin || (paste ? "paste" : cm.state.cutIncoming ? 
"cut" : "+input")}; - makeChange(cm.doc, changeEvent); - signalLater(cm, "inputRead", cm, changeEvent); - } - if (inserted && !paste) - triggerElectric(cm, inserted); - - ensureCursorVisible(cm); - cm.curOp.updateInput = updateInput; - cm.curOp.typing = true; - cm.state.pasteIncoming = cm.state.cutIncoming = false; - } - - function handlePaste(e, cm) { - var pasted = e.clipboardData && e.clipboardData.getData("text/plain"); - if (pasted) { - e.preventDefault(); - if (!cm.isReadOnly() && !cm.options.disableInput) - runInOp(cm, function() { applyTextInput(cm, pasted, 0, null, "paste"); }); - return true; - } - } - - function triggerElectric(cm, inserted) { - // When an 'electric' character is inserted, immediately trigger a reindent - if (!cm.options.electricChars || !cm.options.smartIndent) return; - var sel = cm.doc.sel; - - for (var i = sel.ranges.length - 1; i >= 0; i--) { - var range = sel.ranges[i]; - if (range.head.ch > 100 || (i && sel.ranges[i - 1].head.line == range.head.line)) continue; - var mode = cm.getModeAt(range.head); - var indented = false; - if (mode.electricChars) { - for (var j = 0; j < mode.electricChars.length; j++) - if (inserted.indexOf(mode.electricChars.charAt(j)) > -1) { - indented = indentLine(cm, range.head.line, "smart"); - break; - } - } else if (mode.electricInput) { - if (mode.electricInput.test(getLine(cm.doc, range.head.line).text.slice(0, range.head.ch))) - indented = indentLine(cm, range.head.line, "smart"); - } - if (indented) signalLater(cm, "electricInput", cm, range.head.line); - } - } - - function copyableRanges(cm) { - var text = [], ranges = []; - for (var i = 0; i < cm.doc.sel.ranges.length; i++) { - var line = cm.doc.sel.ranges[i].head.line; - var lineRange = {anchor: Pos(line, 0), head: Pos(line + 1, 0)}; - ranges.push(lineRange); - text.push(cm.getRange(lineRange.anchor, lineRange.head)); - } - return {text: text, ranges: ranges}; - } - - function disableBrowserMagic(field) { - field.setAttribute("autocorrect", "off"); - field.setAttribute("autocapitalize", "off"); - field.setAttribute("spellcheck", "false"); - } - - // TEXTAREA INPUT STYLE - - function TextareaInput(cm) { - this.cm = cm; - // See input.poll and input.reset - this.prevInput = ""; - - // Flag that indicates whether we expect input to appear real soon - // now (after some event like 'keypress' or 'input') and are - // polling intensively. - this.pollingFast = false; - // Self-resetting timeout for the poller - this.polling = new Delayed(); - // Tracks when input.reset has punted to just putting a short - // string into the textarea instead of the full selection. - this.inaccurateSelection = false; - // Used to work around IE issue with selection being forgotten when focus moves away from textarea - this.hasSelection = false; - this.composing = null; - }; - - function hiddenTextarea() { - var te = elt("textarea", null, null, "position: absolute; padding: 0; width: 1px; height: 1em; outline: none"); - var div = elt("div", [te], null, "overflow: hidden; position: relative; width: 3px; height: 0px;"); - // The textarea is kept positioned near the cursor to prevent the - // fact that it'll be scrolled into view on input from scrolling - // our fake cursor out of view. On webkit, when wrap=off, paste is - // very slow. So make the area wide instead. 
- if (webkit) te.style.width = "1000px"; - else te.setAttribute("wrap", "off"); - // If border: 0; -- iOS fails to open keyboard (issue #1287) - if (ios) te.style.border = "1px solid black"; - disableBrowserMagic(te); - return div; - } - - TextareaInput.prototype = copyObj({ - init: function(display) { - var input = this, cm = this.cm; - - // Wraps and hides input textarea - var div = this.wrapper = hiddenTextarea(); - // The semihidden textarea that is focused when the editor is - // focused, and receives input. - var te = this.textarea = div.firstChild; - display.wrapper.insertBefore(div, display.wrapper.firstChild); - - // Needed to hide big blue blinking cursor on Mobile Safari (doesn't seem to work in iOS 8 anymore) - if (ios) te.style.width = "0px"; - - on(te, "input", function() { - if (ie && ie_version >= 9 && input.hasSelection) input.hasSelection = null; - input.poll(); - }); - - on(te, "paste", function(e) { - if (signalDOMEvent(cm, e) || handlePaste(e, cm)) return - - cm.state.pasteIncoming = true; - input.fastPoll(); - }); - - function prepareCopyCut(e) { - if (signalDOMEvent(cm, e)) return - if (cm.somethingSelected()) { - lastCopied = cm.getSelections(); - if (input.inaccurateSelection) { - input.prevInput = ""; - input.inaccurateSelection = false; - te.value = lastCopied.join("\n"); - selectInput(te); - } - } else if (!cm.options.lineWiseCopyCut) { - return; - } else { - var ranges = copyableRanges(cm); - lastCopied = ranges.text; - if (e.type == "cut") { - cm.setSelections(ranges.ranges, null, sel_dontScroll); - } else { - input.prevInput = ""; - te.value = ranges.text.join("\n"); - selectInput(te); - } - } - if (e.type == "cut") cm.state.cutIncoming = true; - } - on(te, "cut", prepareCopyCut); - on(te, "copy", prepareCopyCut); - - on(display.scroller, "paste", function(e) { - if (eventInWidget(display, e) || signalDOMEvent(cm, e)) return; - cm.state.pasteIncoming = true; - input.focus(); - }); - - // Prevent normal selection in the editor (we handle our own) - on(display.lineSpace, "selectstart", function(e) { - if (!eventInWidget(display, e)) e_preventDefault(e); - }); - - on(te, "compositionstart", function() { - var start = cm.getCursor("from"); - if (input.composing) input.composing.range.clear() - input.composing = { - start: start, - range: cm.markText(start, cm.getCursor("to"), {className: "CodeMirror-composing"}) - }; - }); - on(te, "compositionend", function() { - if (input.composing) { - input.poll(); - input.composing.range.clear(); - input.composing = null; - } - }); - }, - - prepareSelection: function() { - // Redraw the selection and/or cursor - var cm = this.cm, display = cm.display, doc = cm.doc; - var result = prepareSelection(cm); - - // Move the hidden textarea near the cursor to prevent scrolling artifacts - if (cm.options.moveInputWithCursor) { - var headPos = cursorCoords(cm, doc.sel.primary().head, "div"); - var wrapOff = display.wrapper.getBoundingClientRect(), lineOff = display.lineDiv.getBoundingClientRect(); - result.teTop = Math.max(0, Math.min(display.wrapper.clientHeight - 10, - headPos.top + lineOff.top - wrapOff.top)); - result.teLeft = Math.max(0, Math.min(display.wrapper.clientWidth - 10, - headPos.left + lineOff.left - wrapOff.left)); - } - - return result; - }, - - showSelection: function(drawn) { - var cm = this.cm, display = cm.display; - removeChildrenAndAdd(display.cursorDiv, drawn.cursors); - removeChildrenAndAdd(display.selectionDiv, drawn.selection); - if (drawn.teTop != null) { - this.wrapper.style.top = drawn.teTop + "px"; - 
this.wrapper.style.left = drawn.teLeft + "px"; - } - }, - - // Reset the input to correspond to the selection (or to be empty, - // when not typing and nothing is selected) - reset: function(typing) { - if (this.contextMenuPending) return; - var minimal, selected, cm = this.cm, doc = cm.doc; - if (cm.somethingSelected()) { - this.prevInput = ""; - var range = doc.sel.primary(); - minimal = hasCopyEvent && - (range.to().line - range.from().line > 100 || (selected = cm.getSelection()).length > 1000); - var content = minimal ? "-" : selected || cm.getSelection(); - this.textarea.value = content; - if (cm.state.focused) selectInput(this.textarea); - if (ie && ie_version >= 9) this.hasSelection = content; - } else if (!typing) { - this.prevInput = this.textarea.value = ""; - if (ie && ie_version >= 9) this.hasSelection = null; - } - this.inaccurateSelection = minimal; - }, - - getField: function() { return this.textarea; }, - - supportsTouch: function() { return false; }, - - focus: function() { - if (this.cm.options.readOnly != "nocursor" && (!mobile || activeElt() != this.textarea)) { - try { this.textarea.focus(); } - catch (e) {} // IE8 will throw if the textarea is display: none or not in DOM - } - }, - - blur: function() { this.textarea.blur(); }, - - resetPosition: function() { - this.wrapper.style.top = this.wrapper.style.left = 0; - }, - - receivedFocus: function() { this.slowPoll(); }, - - // Poll for input changes, using the normal rate of polling. This - // runs as long as the editor is focused. - slowPoll: function() { - var input = this; - if (input.pollingFast) return; - input.polling.set(this.cm.options.pollInterval, function() { - input.poll(); - if (input.cm.state.focused) input.slowPoll(); - }); - }, - - // When an event has just come in that is likely to add or change - // something in the input textarea, we poll faster, to ensure that - // the change appears on the screen quickly. - fastPoll: function() { - var missed = false, input = this; - input.pollingFast = true; - function p() { - var changed = input.poll(); - if (!changed && !missed) {missed = true; input.polling.set(60, p);} - else {input.pollingFast = false; input.slowPoll();} - } - input.polling.set(20, p); - }, - - // Read input from the textarea, and update the document to match. - // When something is selected, it is present in the textarea, and - // selected (unless it is huge, in which case a placeholder is - // used). When nothing is selected, the cursor sits after previously - // seen text (can be empty), which is stored in prevInput (we must - // not reset the textarea when typing, because that breaks IME). - poll: function() { - var cm = this.cm, input = this.textarea, prevInput = this.prevInput; - // Since this is called a *lot*, try to bail out as cheaply as - // possible when it is clear that nothing happened. hasSelection - // will be the case when there is a lot of text in the textarea, - // in which case reading its value would be expensive. - if (this.contextMenuPending || !cm.state.focused || - (hasSelection(input) && !prevInput && !this.composing) || - cm.isReadOnly() || cm.options.disableInput || cm.state.keySeq) - return false; - - var text = input.value; - // If nothing changed, bail. - if (text == prevInput && !cm.somethingSelected()) return false; - // Work around nonsensical selection resetting in IE9/10, and - // inexplicable appearance of private area unicode characters on - // some key combos in Mac (#2689). 
- if (ie && ie_version >= 9 && this.hasSelection === text || - mac && /[\uf700-\uf7ff]/.test(text)) { - cm.display.input.reset(); - return false; - } - - if (cm.doc.sel == cm.display.selForContextMenu) { - var first = text.charCodeAt(0); - if (first == 0x200b && !prevInput) prevInput = "\u200b"; - if (first == 0x21da) { this.reset(); return this.cm.execCommand("undo"); } - } - // Find the part of the input that is actually new - var same = 0, l = Math.min(prevInput.length, text.length); - while (same < l && prevInput.charCodeAt(same) == text.charCodeAt(same)) ++same; - - var self = this; - runInOp(cm, function() { - applyTextInput(cm, text.slice(same), prevInput.length - same, - null, self.composing ? "*compose" : null); - - // Don't leave long text in the textarea, since it makes further polling slow - if (text.length > 1000 || text.indexOf("\n") > -1) input.value = self.prevInput = ""; - else self.prevInput = text; - - if (self.composing) { - self.composing.range.clear(); - self.composing.range = cm.markText(self.composing.start, cm.getCursor("to"), - {className: "CodeMirror-composing"}); - } - }); - return true; - }, - - ensurePolled: function() { - if (this.pollingFast && this.poll()) this.pollingFast = false; - }, - - onKeyPress: function() { - if (ie && ie_version >= 9) this.hasSelection = null; - this.fastPoll(); - }, - - onContextMenu: function(e) { - var input = this, cm = input.cm, display = cm.display, te = input.textarea; - var pos = posFromMouse(cm, e), scrollPos = display.scroller.scrollTop; - if (!pos || presto) return; // Opera is difficult. - - // Reset the current text selection only if the click is done outside of the selection - // and 'resetSelectionOnContextMenu' option is true. - var reset = cm.options.resetSelectionOnContextMenu; - if (reset && cm.doc.sel.contains(pos) == -1) - operation(cm, setSelection)(cm.doc, simpleSelection(pos), sel_dontScroll); - - var oldCSS = te.style.cssText; - input.wrapper.style.position = "absolute"; - te.style.cssText = "position: fixed; width: 30px; height: 30px; top: " + (e.clientY - 5) + - "px; left: " + (e.clientX - 5) + "px; z-index: 1000; background: " + - (ie ? "rgba(255, 255, 255, .05)" : "transparent") + - "; outline: none; border-width: 0; outline: none; overflow: hidden; opacity: .05; filter: alpha(opacity=5);"; - if (webkit) var oldScrollY = window.scrollY; // Work around Chrome issue (#2712) - display.input.focus(); - if (webkit) window.scrollTo(null, oldScrollY); - display.input.reset(); - // Adds "Select all" to context menu in FF - if (!cm.somethingSelected()) te.value = input.prevInput = " "; - input.contextMenuPending = true; - display.selForContextMenu = cm.doc.sel; - clearTimeout(display.detectingSelectAll); - - // Select-all will be greyed out if there's nothing to select, so - // this adds a zero-width space so that we can later check whether - // it got selected. - function prepareSelectAllHack() { - if (te.selectionStart != null) { - var selected = cm.somethingSelected(); - var extval = "\u200b" + (selected ? te.value : ""); - te.value = "\u21da"; // Used to catch context-menu undo - te.value = extval; - input.prevInput = selected ? "" : "\u200b"; - te.selectionStart = 1; te.selectionEnd = extval.length; - // Re-set this, in case some other handler touched the - // selection in the meantime. 
- display.selForContextMenu = cm.doc.sel; - } - } - function rehide() { - input.contextMenuPending = false; - input.wrapper.style.position = "relative"; - te.style.cssText = oldCSS; - if (ie && ie_version < 9) display.scrollbars.setScrollTop(display.scroller.scrollTop = scrollPos); - - // Try to detect the user choosing select-all - if (te.selectionStart != null) { - if (!ie || (ie && ie_version < 9)) prepareSelectAllHack(); - var i = 0, poll = function() { - if (display.selForContextMenu == cm.doc.sel && te.selectionStart == 0 && - te.selectionEnd > 0 && input.prevInput == "\u200b") - operation(cm, commands.selectAll)(cm); - else if (i++ < 10) display.detectingSelectAll = setTimeout(poll, 500); - else display.input.reset(); - }; - display.detectingSelectAll = setTimeout(poll, 200); - } - } - - if (ie && ie_version >= 9) prepareSelectAllHack(); - if (captureRightClick) { - e_stop(e); - var mouseup = function() { - off(window, "mouseup", mouseup); - setTimeout(rehide, 20); - }; - on(window, "mouseup", mouseup); - } else { - setTimeout(rehide, 50); - } - }, - - readOnlyChanged: function(val) { - if (!val) this.reset(); - }, - - setUneditable: nothing, - - needsContentAttribute: false - }, TextareaInput.prototype); - - // CONTENTEDITABLE INPUT STYLE - - function ContentEditableInput(cm) { - this.cm = cm; - this.lastAnchorNode = this.lastAnchorOffset = this.lastFocusNode = this.lastFocusOffset = null; - this.polling = new Delayed(); - this.gracePeriod = false; - } - - ContentEditableInput.prototype = copyObj({ - init: function(display) { - var input = this, cm = input.cm; - var div = input.div = display.lineDiv; - disableBrowserMagic(div); - - on(div, "paste", function(e) { - if (!signalDOMEvent(cm, e)) handlePaste(e, cm); - }) - - on(div, "compositionstart", function(e) { - var data = e.data; - input.composing = {sel: cm.doc.sel, data: data, startData: data}; - if (!data) return; - var prim = cm.doc.sel.primary(); - var line = cm.getLine(prim.head.line); - var found = line.indexOf(data, Math.max(0, prim.head.ch - data.length)); - if (found > -1 && found <= prim.head.ch) - input.composing.sel = simpleSelection(Pos(prim.head.line, found), - Pos(prim.head.line, found + data.length)); - }); - on(div, "compositionupdate", function(e) { - input.composing.data = e.data; - }); - on(div, "compositionend", function(e) { - var ours = input.composing; - if (!ours) return; - if (e.data != ours.startData && !/\u200b/.test(e.data)) - ours.data = e.data; - // Need a small delay to prevent other code (input event, - // selection polling) from doing damage when fired right after - // compositionend. 
- setTimeout(function() { - if (!ours.handled) - input.applyComposition(ours); - if (input.composing == ours) - input.composing = null; - }, 50); - }); - - on(div, "touchstart", function() { - input.forceCompositionEnd(); - }); - - on(div, "input", function() { - if (input.composing) return; - if (cm.isReadOnly() || !input.pollContent()) - runInOp(input.cm, function() {regChange(cm);}); - }); - - function onCopyCut(e) { - if (signalDOMEvent(cm, e)) return - if (cm.somethingSelected()) { - lastCopied = cm.getSelections(); - if (e.type == "cut") cm.replaceSelection("", null, "cut"); - } else if (!cm.options.lineWiseCopyCut) { - return; - } else { - var ranges = copyableRanges(cm); - lastCopied = ranges.text; - if (e.type == "cut") { - cm.operation(function() { - cm.setSelections(ranges.ranges, 0, sel_dontScroll); - cm.replaceSelection("", null, "cut"); - }); - } - } - // iOS exposes the clipboard API, but seems to discard content inserted into it - if (e.clipboardData && !ios) { - e.preventDefault(); - e.clipboardData.clearData(); - e.clipboardData.setData("text/plain", lastCopied.join("\n")); - } else { - // Old-fashioned briefly-focus-a-textarea hack - var kludge = hiddenTextarea(), te = kludge.firstChild; - cm.display.lineSpace.insertBefore(kludge, cm.display.lineSpace.firstChild); - te.value = lastCopied.join("\n"); - var hadFocus = document.activeElement; - selectInput(te); - setTimeout(function() { - cm.display.lineSpace.removeChild(kludge); - hadFocus.focus(); - }, 50); - } - } - on(div, "copy", onCopyCut); - on(div, "cut", onCopyCut); - }, - - prepareSelection: function() { - var result = prepareSelection(this.cm, false); - result.focus = this.cm.state.focused; - return result; - }, - - showSelection: function(info) { - if (!info || !this.cm.display.view.length) return; - if (info.focus) this.showPrimarySelection(); - this.showMultipleSelections(info); - }, - - showPrimarySelection: function() { - var sel = window.getSelection(), prim = this.cm.doc.sel.primary(); - var curAnchor = domToPos(this.cm, sel.anchorNode, sel.anchorOffset); - var curFocus = domToPos(this.cm, sel.focusNode, sel.focusOffset); - if (curAnchor && !curAnchor.bad && curFocus && !curFocus.bad && - cmp(minPos(curAnchor, curFocus), prim.from()) == 0 && - cmp(maxPos(curAnchor, curFocus), prim.to()) == 0) - return; - - var start = posToDOM(this.cm, prim.from()); - var end = posToDOM(this.cm, prim.to()); - if (!start && !end) return; - - var view = this.cm.display.view; - var old = sel.rangeCount && sel.getRangeAt(0); - if (!start) { - start = {node: view[0].measure.map[2], offset: 0}; - } else if (!end) { // FIXME dangerously hacky - var measure = view[view.length - 1].measure; - var map = measure.maps ? 
measure.maps[measure.maps.length - 1] : measure.map; - end = {node: map[map.length - 1], offset: map[map.length - 2] - map[map.length - 3]}; - } - - try { var rng = range(start.node, start.offset, end.offset, end.node); } - catch(e) {} // Our model of the DOM might be outdated, in which case the range we try to set can be impossible - if (rng) { - if (!gecko && this.cm.state.focused) { - sel.collapse(start.node, start.offset); - if (!rng.collapsed) sel.addRange(rng); - } else { - sel.removeAllRanges(); - sel.addRange(rng); - } - if (old && sel.anchorNode == null) sel.addRange(old); - else if (gecko) this.startGracePeriod(); - } - this.rememberSelection(); - }, - - startGracePeriod: function() { - var input = this; - clearTimeout(this.gracePeriod); - this.gracePeriod = setTimeout(function() { - input.gracePeriod = false; - if (input.selectionChanged()) - input.cm.operation(function() { input.cm.curOp.selectionChanged = true; }); - }, 20); - }, - - showMultipleSelections: function(info) { - removeChildrenAndAdd(this.cm.display.cursorDiv, info.cursors); - removeChildrenAndAdd(this.cm.display.selectionDiv, info.selection); - }, - - rememberSelection: function() { - var sel = window.getSelection(); - this.lastAnchorNode = sel.anchorNode; this.lastAnchorOffset = sel.anchorOffset; - this.lastFocusNode = sel.focusNode; this.lastFocusOffset = sel.focusOffset; - }, - - selectionInEditor: function() { - var sel = window.getSelection(); - if (!sel.rangeCount) return false; - var node = sel.getRangeAt(0).commonAncestorContainer; - return contains(this.div, node); - }, - - focus: function() { - if (this.cm.options.readOnly != "nocursor") this.div.focus(); - }, - blur: function() { this.div.blur(); }, - getField: function() { return this.div; }, - - supportsTouch: function() { return true; }, - - receivedFocus: function() { - var input = this; - if (this.selectionInEditor()) - this.pollSelection(); - else - runInOp(this.cm, function() { input.cm.curOp.selectionChanged = true; }); - - function poll() { - if (input.cm.state.focused) { - input.pollSelection(); - input.polling.set(input.cm.options.pollInterval, poll); - } - } - this.polling.set(this.cm.options.pollInterval, poll); - }, - - selectionChanged: function() { - var sel = window.getSelection(); - return sel.anchorNode != this.lastAnchorNode || sel.anchorOffset != this.lastAnchorOffset || - sel.focusNode != this.lastFocusNode || sel.focusOffset != this.lastFocusOffset; - }, - - pollSelection: function() { - if (!this.composing && !this.gracePeriod && this.selectionChanged()) { - var sel = window.getSelection(), cm = this.cm; - this.rememberSelection(); - var anchor = domToPos(cm, sel.anchorNode, sel.anchorOffset); - var head = domToPos(cm, sel.focusNode, sel.focusOffset); - if (anchor && head) runInOp(cm, function() { - setSelection(cm.doc, simpleSelection(anchor, head), sel_dontScroll); - if (anchor.bad || head.bad) cm.curOp.selectionChanged = true; - }); - } - }, - - pollContent: function() { - var cm = this.cm, display = cm.display, sel = cm.doc.sel.primary(); - var from = sel.from(), to = sel.to(); - if (from.line < display.viewFrom || to.line > display.viewTo - 1) return false; - - var fromIndex; - if (from.line == display.viewFrom || (fromIndex = findViewIndex(cm, from.line)) == 0) { - var fromLine = lineNo(display.view[0].line); - var fromNode = display.view[0].node; - } else { - var fromLine = lineNo(display.view[fromIndex].line); - var fromNode = display.view[fromIndex - 1].node.nextSibling; - } - var toIndex = findViewIndex(cm, to.line); - 
if (toIndex == display.view.length - 1) { - var toLine = display.viewTo - 1; - var toNode = display.lineDiv.lastChild; - } else { - var toLine = lineNo(display.view[toIndex + 1].line) - 1; - var toNode = display.view[toIndex + 1].node.previousSibling; - } - - var newText = cm.doc.splitLines(domTextBetween(cm, fromNode, toNode, fromLine, toLine)); - var oldText = getBetween(cm.doc, Pos(fromLine, 0), Pos(toLine, getLine(cm.doc, toLine).text.length)); - while (newText.length > 1 && oldText.length > 1) { - if (lst(newText) == lst(oldText)) { newText.pop(); oldText.pop(); toLine--; } - else if (newText[0] == oldText[0]) { newText.shift(); oldText.shift(); fromLine++; } - else break; - } - - var cutFront = 0, cutEnd = 0; - var newTop = newText[0], oldTop = oldText[0], maxCutFront = Math.min(newTop.length, oldTop.length); - while (cutFront < maxCutFront && newTop.charCodeAt(cutFront) == oldTop.charCodeAt(cutFront)) - ++cutFront; - var newBot = lst(newText), oldBot = lst(oldText); - var maxCutEnd = Math.min(newBot.length - (newText.length == 1 ? cutFront : 0), - oldBot.length - (oldText.length == 1 ? cutFront : 0)); - while (cutEnd < maxCutEnd && - newBot.charCodeAt(newBot.length - cutEnd - 1) == oldBot.charCodeAt(oldBot.length - cutEnd - 1)) - ++cutEnd; - - newText[newText.length - 1] = newBot.slice(0, newBot.length - cutEnd); - newText[0] = newText[0].slice(cutFront); - - var chFrom = Pos(fromLine, cutFront); - var chTo = Pos(toLine, oldText.length ? lst(oldText).length - cutEnd : 0); - if (newText.length > 1 || newText[0] || cmp(chFrom, chTo)) { - replaceRange(cm.doc, newText, chFrom, chTo, "+input"); - return true; - } - }, - - ensurePolled: function() { - this.forceCompositionEnd(); - }, - reset: function() { - this.forceCompositionEnd(); - }, - forceCompositionEnd: function() { - if (!this.composing || this.composing.handled) return; - this.applyComposition(this.composing); - this.composing.handled = true; - this.div.blur(); - this.div.focus(); - }, - applyComposition: function(composing) { - if (this.cm.isReadOnly()) - operation(this.cm, regChange)(this.cm) - else if (composing.data && composing.data != composing.startData) - operation(this.cm, applyTextInput)(this.cm, composing.data, 0, composing.sel); - }, - - setUneditable: function(node) { - node.contentEditable = "false" - }, - - onKeyPress: function(e) { - e.preventDefault(); - if (!this.cm.isReadOnly()) - operation(this.cm, applyTextInput)(this.cm, String.fromCharCode(e.charCode == null ? e.keyCode : e.charCode), 0); - }, - - readOnlyChanged: function(val) { - this.div.contentEditable = String(val != "nocursor") - }, - - onContextMenu: nothing, - resetPosition: nothing, - - needsContentAttribute: true - }, ContentEditableInput.prototype); - - function posToDOM(cm, pos) { - var view = findViewForLine(cm, pos.line); - if (!view || view.hidden) return null; - var line = getLine(cm.doc, pos.line); - var info = mapFromLineView(view, line, pos.line); - - var order = getOrder(line), side = "left"; - if (order) { - var partPos = getBidiPartAt(order, pos.ch); - side = partPos % 2 ? "right" : "left"; - } - var result = nodeAndOffsetInLineMap(info.map, pos.ch, side); - result.offset = result.collapse == "right" ? 
result.end : result.start; - return result; - } - - function badPos(pos, bad) { if (bad) pos.bad = true; return pos; } - - function domToPos(cm, node, offset) { - var lineNode; - if (node == cm.display.lineDiv) { - lineNode = cm.display.lineDiv.childNodes[offset]; - if (!lineNode) return badPos(cm.clipPos(Pos(cm.display.viewTo - 1)), true); - node = null; offset = 0; - } else { - for (lineNode = node;; lineNode = lineNode.parentNode) { - if (!lineNode || lineNode == cm.display.lineDiv) return null; - if (lineNode.parentNode && lineNode.parentNode == cm.display.lineDiv) break; - } - } - for (var i = 0; i < cm.display.view.length; i++) { - var lineView = cm.display.view[i]; - if (lineView.node == lineNode) - return locateNodeInLineView(lineView, node, offset); - } - } - - function locateNodeInLineView(lineView, node, offset) { - var wrapper = lineView.text.firstChild, bad = false; - if (!node || !contains(wrapper, node)) return badPos(Pos(lineNo(lineView.line), 0), true); - if (node == wrapper) { - bad = true; - node = wrapper.childNodes[offset]; - offset = 0; - if (!node) { - var line = lineView.rest ? lst(lineView.rest) : lineView.line; - return badPos(Pos(lineNo(line), line.text.length), bad); - } - } - - var textNode = node.nodeType == 3 ? node : null, topNode = node; - if (!textNode && node.childNodes.length == 1 && node.firstChild.nodeType == 3) { - textNode = node.firstChild; - if (offset) offset = textNode.nodeValue.length; - } - while (topNode.parentNode != wrapper) topNode = topNode.parentNode; - var measure = lineView.measure, maps = measure.maps; - - function find(textNode, topNode, offset) { - for (var i = -1; i < (maps ? maps.length : 0); i++) { - var map = i < 0 ? measure.map : maps[i]; - for (var j = 0; j < map.length; j += 3) { - var curNode = map[j + 2]; - if (curNode == textNode || curNode == topNode) { - var line = lineNo(i < 0 ? lineView.line : lineView.rest[i]); - var ch = map[j] + offset; - if (offset < 0 || curNode != textNode) ch = map[j + (offset ? 1 : 0)]; - return Pos(line, ch); - } - } - } - } - var found = find(textNode, topNode, offset); - if (found) return badPos(found, bad); - - // FIXME this is all really shaky. might handle the few cases it needs to handle, but likely to cause problems - for (var after = topNode.nextSibling, dist = textNode ? 
textNode.nodeValue.length - offset : 0; after; after = after.nextSibling) { - found = find(after, after.firstChild, 0); - if (found) - return badPos(Pos(found.line, found.ch - dist), bad); - else - dist += after.textContent.length; - } - for (var before = topNode.previousSibling, dist = offset; before; before = before.previousSibling) { - found = find(before, before.firstChild, -1); - if (found) - return badPos(Pos(found.line, found.ch + dist), bad); - else - dist += after.textContent.length; - } - } - - function domTextBetween(cm, from, to, fromLine, toLine) { - var text = "", closing = false, lineSep = cm.doc.lineSeparator(); - function recognizeMarker(id) { return function(marker) { return marker.id == id; }; } - function walk(node) { - if (node.nodeType == 1) { - var cmText = node.getAttribute("cm-text"); - if (cmText != null) { - if (cmText == "") cmText = node.textContent.replace(/\u200b/g, ""); - text += cmText; - return; - } - var markerID = node.getAttribute("cm-marker"), range; - if (markerID) { - var found = cm.findMarks(Pos(fromLine, 0), Pos(toLine + 1, 0), recognizeMarker(+markerID)); - if (found.length && (range = found[0].find())) - text += getBetween(cm.doc, range.from, range.to).join(lineSep); - return; - } - if (node.getAttribute("contenteditable") == "false") return; - for (var i = 0; i < node.childNodes.length; i++) - walk(node.childNodes[i]); - if (/^(pre|div|p)$/i.test(node.nodeName)) - closing = true; - } else if (node.nodeType == 3) { - var val = node.nodeValue; - if (!val) return; - if (closing) { - text += lineSep; - closing = false; - } - text += val; - } - } - for (;;) { - walk(from); - if (from == to) break; - from = from.nextSibling; - } - return text; - } - - CodeMirror.inputStyles = {"textarea": TextareaInput, "contenteditable": ContentEditableInput}; - - // SELECTION / CURSOR - - // Selection objects are immutable. A new one is created every time - // the selection changes. A selection is one or more non-overlapping - // (and non-touching) ranges, sorted, and an integer that indicates - // which one is the primary selection (the one that's scrolled into - // view, that getCursor returns, etc). 
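// A short sketch of the selection model described above (illustrative only;
// Selection, Range and Pos are the constructors defined below). A selection is
// a sorted list of non-overlapping ranges plus the index of the primary one:
var sel = new Selection([new Range(Pos(0, 0), Pos(0, 5)),   // five characters on line 0
                         new Range(Pos(2, 1), Pos(2, 1))],  // an empty cursor on line 2
                        0);                                 // primIndex
console.assert(sel.primary().to().ch == 5);   // primary range ends at line 0, ch 5
console.assert(sel.somethingSelected());      // the first range is non-empty
console.assert(sel.contains(Pos(0, 3)) == 0); // Pos(0, 3) falls inside range 0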
- function Selection(ranges, primIndex) { - this.ranges = ranges; - this.primIndex = primIndex; - } - - Selection.prototype = { - primary: function() { return this.ranges[this.primIndex]; }, - equals: function(other) { - if (other == this) return true; - if (other.primIndex != this.primIndex || other.ranges.length != this.ranges.length) return false; - for (var i = 0; i < this.ranges.length; i++) { - var here = this.ranges[i], there = other.ranges[i]; - if (cmp(here.anchor, there.anchor) != 0 || cmp(here.head, there.head) != 0) return false; - } - return true; - }, - deepCopy: function() { - for (var out = [], i = 0; i < this.ranges.length; i++) - out[i] = new Range(copyPos(this.ranges[i].anchor), copyPos(this.ranges[i].head)); - return new Selection(out, this.primIndex); - }, - somethingSelected: function() { - for (var i = 0; i < this.ranges.length; i++) - if (!this.ranges[i].empty()) return true; - return false; - }, - contains: function(pos, end) { - if (!end) end = pos; - for (var i = 0; i < this.ranges.length; i++) { - var range = this.ranges[i]; - if (cmp(end, range.from()) >= 0 && cmp(pos, range.to()) <= 0) - return i; - } - return -1; - } - }; - - function Range(anchor, head) { - this.anchor = anchor; this.head = head; - } - - Range.prototype = { - from: function() { return minPos(this.anchor, this.head); }, - to: function() { return maxPos(this.anchor, this.head); }, - empty: function() { - return this.head.line == this.anchor.line && this.head.ch == this.anchor.ch; - } - }; - - // Take an unsorted, potentially overlapping set of ranges, and - // build a selection out of it. 'Consumes' ranges array (modifying - // it). - function normalizeSelection(ranges, primIndex) { - var prim = ranges[primIndex]; - ranges.sort(function(a, b) { return cmp(a.from(), b.from()); }); - primIndex = indexOf(ranges, prim); - for (var i = 1; i < ranges.length; i++) { - var cur = ranges[i], prev = ranges[i - 1]; - if (cmp(prev.to(), cur.from()) >= 0) { - var from = minPos(prev.from(), cur.from()), to = maxPos(prev.to(), cur.to()); - var inv = prev.empty() ? cur.from() == cur.head : prev.from() == prev.head; - if (i <= primIndex) --primIndex; - ranges.splice(--i, 2, new Range(inv ? to : from, inv ? from : to)); - } - } - return new Selection(ranges, primIndex); - } - - function simpleSelection(anchor, head) { - return new Selection([new Range(anchor, head || anchor)], 0); - } - - // Most of the external API clips given positions to make sure they - // actually exist within the document. - function clipLine(doc, n) {return Math.max(doc.first, Math.min(n, doc.first + doc.size - 1));} - function clipPos(doc, pos) { - if (pos.line < doc.first) return Pos(doc.first, 0); - var last = doc.first + doc.size - 1; - if (pos.line > last) return Pos(last, getLine(doc, last).text.length); - return clipToLen(pos, getLine(doc, pos.line).text.length); - } - function clipToLen(pos, linelen) { - var ch = pos.ch; - if (ch == null || ch > linelen) return Pos(pos.line, linelen); - else if (ch < 0) return Pos(pos.line, 0); - else return pos; - } - function isLine(doc, l) {return l >= doc.first && l < doc.first + doc.size;} - function clipPosArray(doc, array) { - for (var out = [], i = 0; i < array.length; i++) out[i] = clipPos(doc, array[i]); - return out; - } - - // SELECTION UPDATES - - // The 'scroll' parameter given to many of these indicated whether - // the new cursor position should be scrolled into view after - // modifying the selection. 
- - // If shift is held or the extend flag is set, extends a range to - // include a given position (and optionally a second position). - // Otherwise, simply returns the range between the given positions. - // Used for cursor motion and such. - function extendRange(doc, range, head, other) { - if (doc.cm && doc.cm.display.shift || doc.extend) { - var anchor = range.anchor; - if (other) { - var posBefore = cmp(head, anchor) < 0; - if (posBefore != (cmp(other, anchor) < 0)) { - anchor = head; - head = other; - } else if (posBefore != (cmp(head, other) < 0)) { - head = other; - } - } - return new Range(anchor, head); - } else { - return new Range(other || head, head); - } - } - - // Extend the primary selection range, discard the rest. - function extendSelection(doc, head, other, options) { - setSelection(doc, new Selection([extendRange(doc, doc.sel.primary(), head, other)], 0), options); - } - - // Extend all selections (pos is an array of selections with length - // equal the number of selections) - function extendSelections(doc, heads, options) { - for (var out = [], i = 0; i < doc.sel.ranges.length; i++) - out[i] = extendRange(doc, doc.sel.ranges[i], heads[i], null); - var newSel = normalizeSelection(out, doc.sel.primIndex); - setSelection(doc, newSel, options); - } - - // Updates a single range in the selection. - function replaceOneSelection(doc, i, range, options) { - var ranges = doc.sel.ranges.slice(0); - ranges[i] = range; - setSelection(doc, normalizeSelection(ranges, doc.sel.primIndex), options); - } - - // Reset the selection to a single range. - function setSimpleSelection(doc, anchor, head, options) { - setSelection(doc, simpleSelection(anchor, head), options); - } - - // Give beforeSelectionChange handlers a change to influence a - // selection update. - function filterSelectionChange(doc, sel, options) { - var obj = { - ranges: sel.ranges, - update: function(ranges) { - this.ranges = []; - for (var i = 0; i < ranges.length; i++) - this.ranges[i] = new Range(clipPos(doc, ranges[i].anchor), - clipPos(doc, ranges[i].head)); - }, - origin: options && options.origin - }; - signal(doc, "beforeSelectionChange", doc, obj); - if (doc.cm) signal(doc.cm, "beforeSelectionChange", doc.cm, obj); - if (obj.ranges != sel.ranges) return normalizeSelection(obj.ranges, obj.ranges.length - 1); - else return sel; - } - - function setSelectionReplaceHistory(doc, sel, options) { - var done = doc.history.done, last = lst(done); - if (last && last.ranges) { - done[done.length - 1] = sel; - setSelectionNoUndo(doc, sel, options); - } else { - setSelection(doc, sel, options); - } - } - - // Set a new selection. - function setSelection(doc, sel, options) { - setSelectionNoUndo(doc, sel, options); - addSelectionToHistory(doc, doc.sel, doc.cm ? doc.cm.curOp.id : NaN, options); - } - - function setSelectionNoUndo(doc, sel, options) { - if (hasHandler(doc, "beforeSelectionChange") || doc.cm && hasHandler(doc.cm, "beforeSelectionChange")) - sel = filterSelectionChange(doc, sel, options); - - var bias = options && options.bias || - (cmp(sel.primary().head, doc.sel.primary().head) < 0 ? 
-1 : 1); - setSelectionInner(doc, skipAtomicInSelection(doc, sel, bias, true)); - - if (!(options && options.scroll === false) && doc.cm) - ensureCursorVisible(doc.cm); - } - - function setSelectionInner(doc, sel) { - if (sel.equals(doc.sel)) return; - - doc.sel = sel; - - if (doc.cm) { - doc.cm.curOp.updateInput = doc.cm.curOp.selectionChanged = true; - signalCursorActivity(doc.cm); - } - signalLater(doc, "cursorActivity", doc); - } - - // Verify that the selection does not partially select any atomic - // marked ranges. - function reCheckSelection(doc) { - setSelectionInner(doc, skipAtomicInSelection(doc, doc.sel, null, false), sel_dontScroll); - } - - // Return a selection that does not partially select any atomic - // ranges. - function skipAtomicInSelection(doc, sel, bias, mayClear) { - var out; - for (var i = 0; i < sel.ranges.length; i++) { - var range = sel.ranges[i]; - var old = sel.ranges.length == doc.sel.ranges.length && doc.sel.ranges[i]; - var newAnchor = skipAtomic(doc, range.anchor, old && old.anchor, bias, mayClear); - var newHead = skipAtomic(doc, range.head, old && old.head, bias, mayClear); - if (out || newAnchor != range.anchor || newHead != range.head) { - if (!out) out = sel.ranges.slice(0, i); - out[i] = new Range(newAnchor, newHead); - } - } - return out ? normalizeSelection(out, sel.primIndex) : sel; - } - - function skipAtomicInner(doc, pos, oldPos, dir, mayClear) { - var line = getLine(doc, pos.line); - if (line.markedSpans) for (var i = 0; i < line.markedSpans.length; ++i) { - var sp = line.markedSpans[i], m = sp.marker; - if ((sp.from == null || (m.inclusiveLeft ? sp.from <= pos.ch : sp.from < pos.ch)) && - (sp.to == null || (m.inclusiveRight ? sp.to >= pos.ch : sp.to > pos.ch))) { - if (mayClear) { - signal(m, "beforeCursorEnter"); - if (m.explicitlyCleared) { - if (!line.markedSpans) break; - else {--i; continue;} - } - } - if (!m.atomic) continue; - - if (oldPos) { - var near = m.find(dir < 0 ? 1 : -1), diff; - if (dir < 0 ? m.inclusiveRight : m.inclusiveLeft) near = movePos(doc, near, -dir, line); - if (near && near.line == pos.line && (diff = cmp(near, oldPos)) && (dir < 0 ? diff < 0 : diff > 0)) - return skipAtomicInner(doc, near, pos, dir, mayClear); - } - - var far = m.find(dir < 0 ? -1 : 1); - if (dir < 0 ? m.inclusiveLeft : m.inclusiveRight) far = movePos(doc, far, dir, line); - return far ? skipAtomicInner(doc, far, pos, dir, mayClear) : null; - } - } - return pos; - } - - // Ensure a given position is not inside an atomic range. 
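// The helpers below implement the rule that the cursor can never come to rest
// inside an atomic marked range. A rough sketch of how that surfaces through
// the public API (here `editor` stands for an existing CodeMirror instance;
// markText's atomic option and setCursor are documented CodeMirror calls, and
// the positions are made up for illustration):
var marker = editor.markText(Pos(3, 2), Pos(3, 8), {atomic: true});
editor.setCursor(Pos(3, 5));   // the cursor ends up at (3, 2) or (3, 8), never inside
// skipAtomic() below is what pushes a position to one side of such a range or,
// when no legal spot exists, returns the document start and flags doc.cantEdit.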
- function skipAtomic(doc, pos, oldPos, bias, mayClear) { - var dir = bias || 1; - var found = skipAtomicInner(doc, pos, oldPos, dir, mayClear) || - (!mayClear && skipAtomicInner(doc, pos, oldPos, dir, true)) || - skipAtomicInner(doc, pos, oldPos, -dir, mayClear) || - (!mayClear && skipAtomicInner(doc, pos, oldPos, -dir, true)); - if (!found) { - doc.cantEdit = true; - return Pos(doc.first, 0); - } - return found; - } - - function movePos(doc, pos, dir, line) { - if (dir < 0 && pos.ch == 0) { - if (pos.line > doc.first) return clipPos(doc, Pos(pos.line - 1)); - else return null; - } else if (dir > 0 && pos.ch == (line || getLine(doc, pos.line)).text.length) { - if (pos.line < doc.first + doc.size - 1) return Pos(pos.line + 1, 0); - else return null; - } else { - return new Pos(pos.line, pos.ch + dir); - } - } - - // SELECTION DRAWING - - function updateSelection(cm) { - cm.display.input.showSelection(cm.display.input.prepareSelection()); - } - - function prepareSelection(cm, primary) { - var doc = cm.doc, result = {}; - var curFragment = result.cursors = document.createDocumentFragment(); - var selFragment = result.selection = document.createDocumentFragment(); - - for (var i = 0; i < doc.sel.ranges.length; i++) { - if (primary === false && i == doc.sel.primIndex) continue; - var range = doc.sel.ranges[i]; - var collapsed = range.empty(); - if (collapsed || cm.options.showCursorWhenSelecting) - drawSelectionCursor(cm, range.head, curFragment); - if (!collapsed) - drawSelectionRange(cm, range, selFragment); - } - return result; - } - - // Draws a cursor for the given range - function drawSelectionCursor(cm, head, output) { - var pos = cursorCoords(cm, head, "div", null, null, !cm.options.singleCursorHeightPerLine); - - var cursor = output.appendChild(elt("div", "\u00a0", "CodeMirror-cursor")); - cursor.style.left = pos.left + "px"; - cursor.style.top = pos.top + "px"; - cursor.style.height = Math.max(0, pos.bottom - pos.top) * cm.options.cursorHeight + "px"; - - if (pos.other) { - // Secondary cursor, shown when on a 'jump' in bi-directional text - var otherCursor = output.appendChild(elt("div", "\u00a0", "CodeMirror-cursor CodeMirror-secondarycursor")); - otherCursor.style.display = ""; - otherCursor.style.left = pos.other.left + "px"; - otherCursor.style.top = pos.other.top + "px"; - otherCursor.style.height = (pos.other.bottom - pos.other.top) * .85 + "px"; - } - } - - // Draws the given range as a highlighted selection - function drawSelectionRange(cm, range, output) { - var display = cm.display, doc = cm.doc; - var fragment = document.createDocumentFragment(); - var padding = paddingH(cm.display), leftSide = padding.left; - var rightSide = Math.max(display.sizerWidth, displayWidth(cm) - display.sizer.offsetLeft) - padding.right; - - function add(left, top, width, bottom) { - if (top < 0) top = 0; - top = Math.round(top); - bottom = Math.round(bottom); - fragment.appendChild(elt("div", null, "CodeMirror-selected", "position: absolute; left: " + left + - "px; top: " + top + "px; width: " + (width == null ? rightSide - left : width) + - "px; height: " + (bottom - top) + "px")); - } - - function drawForLine(line, fromArg, toArg) { - var lineObj = getLine(doc, line); - var lineLen = lineObj.text.length; - var start, end; - function coords(ch, bias) { - return charCoords(cm, Pos(line, ch), "div", lineObj, bias); - } - - iterateBidiSections(getOrder(lineObj), fromArg || 0, toArg == null ? 
lineLen : toArg, function(from, to, dir) { - var leftPos = coords(from, "left"), rightPos, left, right; - if (from == to) { - rightPos = leftPos; - left = right = leftPos.left; - } else { - rightPos = coords(to - 1, "right"); - if (dir == "rtl") { var tmp = leftPos; leftPos = rightPos; rightPos = tmp; } - left = leftPos.left; - right = rightPos.right; - } - if (fromArg == null && from == 0) left = leftSide; - if (rightPos.top - leftPos.top > 3) { // Different lines, draw top part - add(left, leftPos.top, null, leftPos.bottom); - left = leftSide; - if (leftPos.bottom < rightPos.top) add(left, leftPos.bottom, null, rightPos.top); - } - if (toArg == null && to == lineLen) right = rightSide; - if (!start || leftPos.top < start.top || leftPos.top == start.top && leftPos.left < start.left) - start = leftPos; - if (!end || rightPos.bottom > end.bottom || rightPos.bottom == end.bottom && rightPos.right > end.right) - end = rightPos; - if (left < leftSide + 1) left = leftSide; - add(left, rightPos.top, right - left, rightPos.bottom); - }); - return {start: start, end: end}; - } - - var sFrom = range.from(), sTo = range.to(); - if (sFrom.line == sTo.line) { - drawForLine(sFrom.line, sFrom.ch, sTo.ch); - } else { - var fromLine = getLine(doc, sFrom.line), toLine = getLine(doc, sTo.line); - var singleVLine = visualLine(fromLine) == visualLine(toLine); - var leftEnd = drawForLine(sFrom.line, sFrom.ch, singleVLine ? fromLine.text.length + 1 : null).end; - var rightStart = drawForLine(sTo.line, singleVLine ? 0 : null, sTo.ch).start; - if (singleVLine) { - if (leftEnd.top < rightStart.top - 2) { - add(leftEnd.right, leftEnd.top, null, leftEnd.bottom); - add(leftSide, rightStart.top, rightStart.left, rightStart.bottom); - } else { - add(leftEnd.right, leftEnd.top, rightStart.left - leftEnd.right, leftEnd.bottom); - } - } - if (leftEnd.bottom < rightStart.top) - add(leftSide, leftEnd.bottom, null, rightStart.top); - } - - output.appendChild(fragment); - } - - // Cursor-blinking - function restartBlink(cm) { - if (!cm.state.focused) return; - var display = cm.display; - clearInterval(display.blinker); - var on = true; - display.cursorDiv.style.visibility = ""; - if (cm.options.cursorBlinkRate > 0) - display.blinker = setInterval(function() { - display.cursorDiv.style.visibility = (on = !on) ? "" : "hidden"; - }, cm.options.cursorBlinkRate); - else if (cm.options.cursorBlinkRate < 0) - display.cursorDiv.style.visibility = "hidden"; - } - - // HIGHLIGHT WORKER - - function startWorker(cm, time) { - if (cm.doc.mode.startState && cm.doc.frontier < cm.display.viewTo) - cm.state.highlight.set(time, bind(highlightWorker, cm)); - } - - function highlightWorker(cm) { - var doc = cm.doc; - if (doc.frontier < doc.first) doc.frontier = doc.first; - if (doc.frontier >= cm.display.viewTo) return; - var end = +new Date + cm.options.workTime; - var state = copyState(doc.mode, getStateBefore(cm, doc.frontier)); - var changedLines = []; - - doc.iter(doc.frontier, Math.min(doc.first + doc.size, cm.display.viewTo + 500), function(line) { - if (doc.frontier >= cm.display.viewFrom) { // Visible - var oldStyles = line.styles, tooLong = line.text.length > cm.options.maxHighlightLength; - var highlighted = highlightLine(cm, line, tooLong ? 
copyState(doc.mode, state) : state, true); - line.styles = highlighted.styles; - var oldCls = line.styleClasses, newCls = highlighted.classes; - if (newCls) line.styleClasses = newCls; - else if (oldCls) line.styleClasses = null; - var ischange = !oldStyles || oldStyles.length != line.styles.length || - oldCls != newCls && (!oldCls || !newCls || oldCls.bgClass != newCls.bgClass || oldCls.textClass != newCls.textClass); - for (var i = 0; !ischange && i < oldStyles.length; ++i) ischange = oldStyles[i] != line.styles[i]; - if (ischange) changedLines.push(doc.frontier); - line.stateAfter = tooLong ? state : copyState(doc.mode, state); - } else { - if (line.text.length <= cm.options.maxHighlightLength) - processLine(cm, line.text, state); - line.stateAfter = doc.frontier % 5 == 0 ? copyState(doc.mode, state) : null; - } - ++doc.frontier; - if (+new Date > end) { - startWorker(cm, cm.options.workDelay); - return true; - } - }); - if (changedLines.length) runInOp(cm, function() { - for (var i = 0; i < changedLines.length; i++) - regLineChange(cm, changedLines[i], "text"); - }); - } - - // Finds the line to start with when starting a parse. Tries to - // find a line with a stateAfter, so that it can start with a - // valid state. If that fails, it returns the line with the - // smallest indentation, which tends to need the least context to - // parse correctly. - function findStartLine(cm, n, precise) { - var minindent, minline, doc = cm.doc; - var lim = precise ? -1 : n - (cm.doc.mode.innerMode ? 1000 : 100); - for (var search = n; search > lim; --search) { - if (search <= doc.first) return doc.first; - var line = getLine(doc, search - 1); - if (line.stateAfter && (!precise || search <= doc.frontier)) return search; - var indented = countColumn(line.text, null, cm.options.tabSize); - if (minline == null || minindent > indented) { - minline = search - 1; - minindent = indented; - } - } - return minline; - } - - function getStateBefore(cm, n, precise) { - var doc = cm.doc, display = cm.display; - if (!doc.mode.startState) return true; - var pos = findStartLine(cm, n, precise), state = pos > doc.first && getLine(doc, pos-1).stateAfter; - if (!state) state = startState(doc.mode); - else state = copyState(doc.mode, state); - doc.iter(pos, n, function(line) { - processLine(cm, line.text, state); - var save = pos == n - 1 || pos % 5 == 0 || pos >= display.viewFrom && pos < display.viewTo; - line.stateAfter = save ? copyState(doc.mode, state) : null; - ++pos; - }); - if (precise) doc.frontier = pos; - return state; - } - - // POSITION MEASUREMENT - - function paddingTop(display) {return display.lineSpace.offsetTop;} - function paddingVert(display) {return display.mover.offsetHeight - display.lineSpace.offsetHeight;} - function paddingH(display) { - if (display.cachedPaddingH) return display.cachedPaddingH; - var e = removeChildrenAndAdd(display.measure, elt("pre", "x")); - var style = window.getComputedStyle ? window.getComputedStyle(e) : e.currentStyle; - var data = {left: parseInt(style.paddingLeft), right: parseInt(style.paddingRight)}; - if (!isNaN(data.left) && !isNaN(data.right)) display.cachedPaddingH = data; - return data; - } - - function scrollGap(cm) { return scrollerGap - cm.display.nativeBarWidth; } - function displayWidth(cm) { - return cm.display.scroller.clientWidth - scrollGap(cm) - cm.display.barWidth; - } - function displayHeight(cm) { - return cm.display.scroller.clientHeight - scrollGap(cm) - cm.display.barHeight; - } - - // Ensure the lineView.wrapping.heights array is populated. 
This is - // an array of bottom offsets for the lines that make up a drawn - // line. When lineWrapping is on, there might be more than one - // height. - function ensureLineHeights(cm, lineView, rect) { - var wrapping = cm.options.lineWrapping; - var curWidth = wrapping && displayWidth(cm); - if (!lineView.measure.heights || wrapping && lineView.measure.width != curWidth) { - var heights = lineView.measure.heights = []; - if (wrapping) { - lineView.measure.width = curWidth; - var rects = lineView.text.firstChild.getClientRects(); - for (var i = 0; i < rects.length - 1; i++) { - var cur = rects[i], next = rects[i + 1]; - if (Math.abs(cur.bottom - next.bottom) > 2) - heights.push((cur.bottom + next.top) / 2 - rect.top); - } - } - heights.push(rect.bottom - rect.top); - } - } - - // Find a line map (mapping character offsets to text nodes) and a - // measurement cache for the given line number. (A line view might - // contain multiple lines when collapsed ranges are present.) - function mapFromLineView(lineView, line, lineN) { - if (lineView.line == line) - return {map: lineView.measure.map, cache: lineView.measure.cache}; - for (var i = 0; i < lineView.rest.length; i++) - if (lineView.rest[i] == line) - return {map: lineView.measure.maps[i], cache: lineView.measure.caches[i]}; - for (var i = 0; i < lineView.rest.length; i++) - if (lineNo(lineView.rest[i]) > lineN) - return {map: lineView.measure.maps[i], cache: lineView.measure.caches[i], before: true}; - } - - // Render a line into the hidden node display.externalMeasured. Used - // when measurement is needed for a line that's not in the viewport. - function updateExternalMeasurement(cm, line) { - line = visualLine(line); - var lineN = lineNo(line); - var view = cm.display.externalMeasured = new LineView(cm.doc, line, lineN); - view.lineN = lineN; - var built = view.built = buildLineContent(cm, view); - view.text = built.pre; - removeChildrenAndAdd(cm.display.lineMeasure, built.pre); - return view; - } - - // Get a {top, bottom, left, right} box (in line-local coordinates) - // for a given character. - function measureChar(cm, line, ch, bias) { - return measureCharPrepared(cm, prepareMeasureForLine(cm, line), ch, bias); - } - - // Find a line view that corresponds to the given line number. - function findViewForLine(cm, lineN) { - if (lineN >= cm.display.viewFrom && lineN < cm.display.viewTo) - return cm.display.view[findViewIndex(cm, lineN)]; - var ext = cm.display.externalMeasured; - if (ext && lineN >= ext.lineN && lineN < ext.lineN + ext.size) - return ext; - } - - // Measurement can be split in two steps, the set-up work that - // applies to the whole line, and the measurement of the actual - // character. Functions like coordsChar, that need to do a lot of - // measurements in a row, can thus ensure that the set-up work is - // only done once. - function prepareMeasureForLine(cm, line) { - var lineN = lineNo(line); - var view = findViewForLine(cm, lineN); - if (view && !view.text) { - view = null; - } else if (view && view.changes) { - updateLineForChanges(cm, view, lineN, getDimensions(cm)); - cm.curOp.forceUpdate = true; - } - if (!view) - view = updateExternalMeasurement(cm, line); - - var info = mapFromLineView(view, line, lineN); - return { - line: line, view: view, rect: null, - map: info.map, cache: info.cache, before: info.before, - hasHeights: false - }; - } - - // Given a prepared measurement object, measures the position of an - // actual character (or fetches it from the cache). 
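// Sketch of the prepare-once / measure-many split described above and
// implemented just below: per-line setup (bounding rect, row heights) happens
// once, then individual character measurements are cached under a key built
// from the character index plus the bias. setupOnce/measureOne and the shape
// of the returned box are assumptions for illustration, not CodeMirror's API.
function makePreparedMeasure(setupOnce, measureOne) {
  var prepared = null;
  return function measure(ch, bias) {
    if (!prepared) prepared = {setup: setupOnce(), cache: {}};
    var key = ch + (bias || "");
    if (Object.prototype.hasOwnProperty.call(prepared.cache, key)) {
      return prepared.cache[key];
    }
    var found = measureOne(prepared.setup, ch, bias);
    // Only trustworthy results are cached; bogus (zero-size) rects get re-measured.
    if (!found.bogus) prepared.cache[key] = found;
    return found;
  };
}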
- function measureCharPrepared(cm, prepared, ch, bias, varHeight) { - if (prepared.before) ch = -1; - var key = ch + (bias || ""), found; - if (prepared.cache.hasOwnProperty(key)) { - found = prepared.cache[key]; - } else { - if (!prepared.rect) - prepared.rect = prepared.view.text.getBoundingClientRect(); - if (!prepared.hasHeights) { - ensureLineHeights(cm, prepared.view, prepared.rect); - prepared.hasHeights = true; - } - found = measureCharInner(cm, prepared, ch, bias); - if (!found.bogus) prepared.cache[key] = found; - } - return {left: found.left, right: found.right, - top: varHeight ? found.rtop : found.top, - bottom: varHeight ? found.rbottom : found.bottom}; - } - - var nullRect = {left: 0, right: 0, top: 0, bottom: 0}; - - function nodeAndOffsetInLineMap(map, ch, bias) { - var node, start, end, collapse; - // First, search the line map for the text node corresponding to, - // or closest to, the target character. - for (var i = 0; i < map.length; i += 3) { - var mStart = map[i], mEnd = map[i + 1]; - if (ch < mStart) { - start = 0; end = 1; - collapse = "left"; - } else if (ch < mEnd) { - start = ch - mStart; - end = start + 1; - } else if (i == map.length - 3 || ch == mEnd && map[i + 3] > ch) { - end = mEnd - mStart; - start = end - 1; - if (ch >= mEnd) collapse = "right"; - } - if (start != null) { - node = map[i + 2]; - if (mStart == mEnd && bias == (node.insertLeft ? "left" : "right")) - collapse = bias; - if (bias == "left" && start == 0) - while (i && map[i - 2] == map[i - 3] && map[i - 1].insertLeft) { - node = map[(i -= 3) + 2]; - collapse = "left"; - } - if (bias == "right" && start == mEnd - mStart) - while (i < map.length - 3 && map[i + 3] == map[i + 4] && !map[i + 5].insertLeft) { - node = map[(i += 3) + 2]; - collapse = "right"; - } - break; - } - } - return {node: node, start: start, end: end, collapse: collapse, coverStart: mStart, coverEnd: mEnd}; - } - - function measureCharInner(cm, prepared, ch, bias) { - var place = nodeAndOffsetInLineMap(prepared.map, ch, bias); - var node = place.node, start = place.start, end = place.end, collapse = place.collapse; - - var rect; - if (node.nodeType == 3) { // If it is a text node, use a range to retrieve the coordinates. - for (var i = 0; i < 4; i++) { // Retry a maximum of 4 times when nonsense rectangles are returned - while (start && isExtendingChar(prepared.line.text.charAt(place.coverStart + start))) --start; - while (place.coverStart + end < place.coverEnd && isExtendingChar(prepared.line.text.charAt(place.coverStart + end))) ++end; - if (ie && ie_version < 9 && start == 0 && end == place.coverEnd - place.coverStart) { - rect = node.parentNode.getBoundingClientRect(); - } else if (ie && cm.options.lineWrapping) { - var rects = range(node, start, end).getClientRects(); - if (rects.length) - rect = rects[bias == "right" ? rects.length - 1 : 0]; - else - rect = nullRect; - } else { - rect = range(node, start, end).getBoundingClientRect() || nullRect; - } - if (rect.left || rect.right || start == 0) break; - end = start; - start = start - 1; - collapse = "right"; - } - if (ie && ie_version < 11) rect = maybeUpdateRectForZooming(cm.display.measure, rect); - } else { // If it is a widget, simply get the box for the whole widget. - if (start > 0) collapse = bias = "right"; - var rects; - if (cm.options.lineWrapping && (rects = node.getClientRects()).length > 1) - rect = rects[bias == "right" ? 
rects.length - 1 : 0]; - else - rect = node.getBoundingClientRect(); - } - if (ie && ie_version < 9 && !start && (!rect || !rect.left && !rect.right)) { - var rSpan = node.parentNode.getClientRects()[0]; - if (rSpan) - rect = {left: rSpan.left, right: rSpan.left + charWidth(cm.display), top: rSpan.top, bottom: rSpan.bottom}; - else - rect = nullRect; - } - - var rtop = rect.top - prepared.rect.top, rbot = rect.bottom - prepared.rect.top; - var mid = (rtop + rbot) / 2; - var heights = prepared.view.measure.heights; - for (var i = 0; i < heights.length - 1; i++) - if (mid < heights[i]) break; - var top = i ? heights[i - 1] : 0, bot = heights[i]; - var result = {left: (collapse == "right" ? rect.right : rect.left) - prepared.rect.left, - right: (collapse == "left" ? rect.left : rect.right) - prepared.rect.left, - top: top, bottom: bot}; - if (!rect.left && !rect.right) result.bogus = true; - if (!cm.options.singleCursorHeightPerLine) { result.rtop = rtop; result.rbottom = rbot; } - - return result; - } - - // Work around problem with bounding client rects on ranges being - // returned incorrectly when zoomed on IE10 and below. - function maybeUpdateRectForZooming(measure, rect) { - if (!window.screen || screen.logicalXDPI == null || - screen.logicalXDPI == screen.deviceXDPI || !hasBadZoomedRects(measure)) - return rect; - var scaleX = screen.logicalXDPI / screen.deviceXDPI; - var scaleY = screen.logicalYDPI / screen.deviceYDPI; - return {left: rect.left * scaleX, right: rect.right * scaleX, - top: rect.top * scaleY, bottom: rect.bottom * scaleY}; - } - - function clearLineMeasurementCacheFor(lineView) { - if (lineView.measure) { - lineView.measure.cache = {}; - lineView.measure.heights = null; - if (lineView.rest) for (var i = 0; i < lineView.rest.length; i++) - lineView.measure.caches[i] = {}; - } - } - - function clearLineMeasurementCache(cm) { - cm.display.externalMeasure = null; - removeChildren(cm.display.lineMeasure); - for (var i = 0; i < cm.display.view.length; i++) - clearLineMeasurementCacheFor(cm.display.view[i]); - } - - function clearCaches(cm) { - clearLineMeasurementCache(cm); - cm.display.cachedCharWidth = cm.display.cachedTextHeight = cm.display.cachedPaddingH = null; - if (!cm.options.lineWrapping) cm.display.maxLineChanged = true; - cm.display.lineNumChars = null; - } - - function pageScrollX() { return window.pageXOffset || (document.documentElement || document.body).scrollLeft; } - function pageScrollY() { return window.pageYOffset || (document.documentElement || document.body).scrollTop; } - - // Converts a {top, bottom, left, right} box from line-local - // coordinates into another coordinate system. Context may be one of - // "line", "div" (display.lineDiv), "local"/null (editor), "window", - // or "page". - function intoCoordSystem(cm, lineObj, rect, context) { - if (lineObj.widgets) for (var i = 0; i < lineObj.widgets.length; ++i) if (lineObj.widgets[i].above) { - var size = widgetHeight(lineObj.widgets[i]); - rect.top += size; rect.bottom += size; - } - if (context == "line") return rect; - if (!context) context = "local"; - var yOff = heightAtLine(lineObj); - if (context == "local") yOff += paddingTop(cm.display); - else yOff -= cm.display.viewOffset; - if (context == "page" || context == "window") { - var lOff = cm.display.lineSpace.getBoundingClientRect(); - yOff += lOff.top + (context == "window" ? 0 : pageScrollY()); - var xOff = lOff.left + (context == "window" ? 
0 : pageScrollX()); - rect.left += xOff; rect.right += xOff; - } - rect.top += yOff; rect.bottom += yOff; - return rect; - } - - // Coverts a box from "div" coords to another coordinate system. - // Context may be "window", "page", "div", or "local"/null. - function fromCoordSystem(cm, coords, context) { - if (context == "div") return coords; - var left = coords.left, top = coords.top; - // First move into "page" coordinate system - if (context == "page") { - left -= pageScrollX(); - top -= pageScrollY(); - } else if (context == "local" || !context) { - var localBox = cm.display.sizer.getBoundingClientRect(); - left += localBox.left; - top += localBox.top; - } - - var lineSpaceBox = cm.display.lineSpace.getBoundingClientRect(); - return {left: left - lineSpaceBox.left, top: top - lineSpaceBox.top}; - } - - function charCoords(cm, pos, context, lineObj, bias) { - if (!lineObj) lineObj = getLine(cm.doc, pos.line); - return intoCoordSystem(cm, lineObj, measureChar(cm, lineObj, pos.ch, bias), context); - } - - // Returns a box for a given cursor position, which may have an - // 'other' property containing the position of the secondary cursor - // on a bidi boundary. - function cursorCoords(cm, pos, context, lineObj, preparedMeasure, varHeight) { - lineObj = lineObj || getLine(cm.doc, pos.line); - if (!preparedMeasure) preparedMeasure = prepareMeasureForLine(cm, lineObj); - function get(ch, right) { - var m = measureCharPrepared(cm, preparedMeasure, ch, right ? "right" : "left", varHeight); - if (right) m.left = m.right; else m.right = m.left; - return intoCoordSystem(cm, lineObj, m, context); - } - function getBidi(ch, partPos) { - var part = order[partPos], right = part.level % 2; - if (ch == bidiLeft(part) && partPos && part.level < order[partPos - 1].level) { - part = order[--partPos]; - ch = bidiRight(part) - (part.level % 2 ? 0 : 1); - right = true; - } else if (ch == bidiRight(part) && partPos < order.length - 1 && part.level < order[partPos + 1].level) { - part = order[++partPos]; - ch = bidiLeft(part) - part.level % 2; - right = false; - } - if (right && ch == part.to && ch > part.from) return get(ch - 1); - return get(ch, right); - } - var order = getOrder(lineObj), ch = pos.ch; - if (!order) return get(ch); - var partPos = getBidiPartAt(order, ch); - var val = getBidi(ch, partPos); - if (bidiOther != null) val.other = getBidi(ch, bidiOther); - return val; - } - - // Used to cheaply estimate the coordinates for a position. Used for - // intermediate scroll updates. - function estimateCoords(cm, pos) { - var left = 0, pos = clipPos(cm.doc, pos); - if (!cm.options.lineWrapping) left = charWidth(cm.display) * pos.ch; - var lineObj = getLine(cm.doc, pos.line); - var top = heightAtLine(lineObj) + paddingTop(cm.display); - return {left: left, right: left, top: top, bottom: top + lineObj.height}; - } - - // Positions returned by coordsChar contain some extra information. - // xRel is the relative x position of the input coordinates compared - // to the found position (so xRel > 0 means the coordinates are to - // the right of the character position, for example). When outside - // is true, that means the coordinates lie outside the line's - // vertical range. - function PosWithInfo(line, ch, outside, xRel) { - var pos = Pos(line, ch); - pos.xRel = xRel; - if (outside) pos.outside = true; - return pos; - } - - // Compute the character position closest to the given coordinates. - // Input must be lineSpace-local ("div" coordinate system). 
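// Sketch of the coordinate-to-character search performed by the function
// below: given a monotonic measureX(ch) that maps a character index on the
// line to an x coordinate, binary-search for the character closest to the
// target x. measureX is a hypothetical stand-in for the prepared per-line
// measurement shown earlier.
function closestChar(measureX, lineLength, x) {
  var from = 0, to = lineLength;
  while (to - from > 1) {
    var middle = from + Math.ceil((to - from) / 2);
    if (measureX(middle) > x) to = middle;
    else from = middle;
  }
  var fromX = measureX(from), toX = measureX(to);
  return x - fromX <= toX - x ? from : to;
}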
- function coordsChar(cm, x, y) { - var doc = cm.doc; - y += cm.display.viewOffset; - if (y < 0) return PosWithInfo(doc.first, 0, true, -1); - var lineN = lineAtHeight(doc, y), last = doc.first + doc.size - 1; - if (lineN > last) - return PosWithInfo(doc.first + doc.size - 1, getLine(doc, last).text.length, true, 1); - if (x < 0) x = 0; - - var lineObj = getLine(doc, lineN); - for (;;) { - var found = coordsCharInner(cm, lineObj, lineN, x, y); - var merged = collapsedSpanAtEnd(lineObj); - var mergedPos = merged && merged.find(0, true); - if (merged && (found.ch > mergedPos.from.ch || found.ch == mergedPos.from.ch && found.xRel > 0)) - lineN = lineNo(lineObj = mergedPos.to.line); - else - return found; - } - } - - function coordsCharInner(cm, lineObj, lineNo, x, y) { - var innerOff = y - heightAtLine(lineObj); - var wrongLine = false, adjust = 2 * cm.display.wrapper.clientWidth; - var preparedMeasure = prepareMeasureForLine(cm, lineObj); - - function getX(ch) { - var sp = cursorCoords(cm, Pos(lineNo, ch), "line", lineObj, preparedMeasure); - wrongLine = true; - if (innerOff > sp.bottom) return sp.left - adjust; - else if (innerOff < sp.top) return sp.left + adjust; - else wrongLine = false; - return sp.left; - } - - var bidi = getOrder(lineObj), dist = lineObj.text.length; - var from = lineLeft(lineObj), to = lineRight(lineObj); - var fromX = getX(from), fromOutside = wrongLine, toX = getX(to), toOutside = wrongLine; - - if (x > toX) return PosWithInfo(lineNo, to, toOutside, 1); - // Do a binary search between these bounds. - for (;;) { - if (bidi ? to == from || to == moveVisually(lineObj, from, 1) : to - from <= 1) { - var ch = x < fromX || x - fromX <= toX - x ? from : to; - var xDiff = x - (ch == from ? fromX : toX); - while (isExtendingChar(lineObj.text.charAt(ch))) ++ch; - var pos = PosWithInfo(lineNo, ch, ch == from ? fromOutside : toOutside, - xDiff < -1 ? -1 : xDiff > 1 ? 1 : 0); - return pos; - } - var step = Math.ceil(dist / 2), middle = from + step; - if (bidi) { - middle = from; - for (var i = 0; i < step; ++i) middle = moveVisually(lineObj, middle, 1); - } - var middleX = getX(middle); - if (middleX > x) {to = middle; toX = middleX; if (toOutside = wrongLine) toX += 1000; dist = step;} - else {from = middle; fromX = middleX; fromOutside = wrongLine; dist -= step;} - } - } - - var measureText; - // Compute the default text height. - function textHeight(display) { - if (display.cachedTextHeight != null) return display.cachedTextHeight; - if (measureText == null) { - measureText = elt("pre"); - // Measure a bunch of lines, for browsers that compute - // fractional heights. - for (var i = 0; i < 49; ++i) { - measureText.appendChild(document.createTextNode("x")); - measureText.appendChild(elt("br")); - } - measureText.appendChild(document.createTextNode("x")); - } - removeChildrenAndAdd(display.measure, measureText); - var height = measureText.offsetHeight / 50; - if (height > 3) display.cachedTextHeight = height; - removeChildren(display.measure); - return height || 1; - } - - // Compute the default character width. 
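// Sketch of the "measure many, divide" trick used above for the default text
// height, and again just below for the default character width (ten "x"
// characters divided by ten): browsers report fractional sizes, so a block of
// N identical samples is measured and divided by N. The container element is
// an assumption; any hidden node styled like the editor works. Browser-only.
function measureLineHeight(container, samples) {
  var pre = document.createElement("pre");
  for (var i = 0; i < samples; i++) {
    pre.appendChild(document.createTextNode("x"));
    pre.appendChild(document.createElement("br"));
  }
  pre.appendChild(document.createTextNode("x"));
  container.appendChild(pre);
  var height = pre.offsetHeight / (samples + 1);
  container.removeChild(pre);
  return height || 1;
}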
- function charWidth(display) { - if (display.cachedCharWidth != null) return display.cachedCharWidth; - var anchor = elt("span", "xxxxxxxxxx"); - var pre = elt("pre", [anchor]); - removeChildrenAndAdd(display.measure, pre); - var rect = anchor.getBoundingClientRect(), width = (rect.right - rect.left) / 10; - if (width > 2) display.cachedCharWidth = width; - return width || 10; - } - - // OPERATIONS - - // Operations are used to wrap a series of changes to the editor - // state in such a way that each change won't have to update the - // cursor and display (which would be awkward, slow, and - // error-prone). Instead, display updates are batched and then all - // combined and executed at once. - - var operationGroup = null; - - var nextOpId = 0; - // Start a new operation. - function startOperation(cm) { - cm.curOp = { - cm: cm, - viewChanged: false, // Flag that indicates that lines might need to be redrawn - startHeight: cm.doc.height, // Used to detect need to update scrollbar - forceUpdate: false, // Used to force a redraw - updateInput: null, // Whether to reset the input textarea - typing: false, // Whether this reset should be careful to leave existing text (for compositing) - changeObjs: null, // Accumulated changes, for firing change events - cursorActivityHandlers: null, // Set of handlers to fire cursorActivity on - cursorActivityCalled: 0, // Tracks which cursorActivity handlers have been called already - selectionChanged: false, // Whether the selection needs to be redrawn - updateMaxLine: false, // Set when the widest line needs to be determined anew - scrollLeft: null, scrollTop: null, // Intermediate scroll position, not pushed to DOM yet - scrollToPos: null, // Used to scroll to a specific position - focus: false, - id: ++nextOpId // Unique ID - }; - if (operationGroup) { - operationGroup.ops.push(cm.curOp); - } else { - cm.curOp.ownsGroup = operationGroup = { - ops: [cm.curOp], - delayedCallbacks: [] - }; - } - } - - function fireCallbacksForOps(group) { - // Calls delayed callbacks and cursorActivity handlers until no - // new ones appear - var callbacks = group.delayedCallbacks, i = 0; - do { - for (; i < callbacks.length; i++) - callbacks[i].call(null); - for (var j = 0; j < group.ops.length; j++) { - var op = group.ops[j]; - if (op.cursorActivityHandlers) - while (op.cursorActivityCalled < op.cursorActivityHandlers.length) - op.cursorActivityHandlers[op.cursorActivityCalled++].call(null, op.cm); - } - } while (i < callbacks.length); - } - - // Finish an operation, updating the display and signalling delayed events - function endOperation(cm) { - var op = cm.curOp, group = op.ownsGroup; - if (!group) return; - - try { fireCallbacksForOps(group); } - finally { - operationGroup = null; - for (var i = 0; i < group.ops.length; i++) - group.ops[i].cm.curOp = null; - endOperations(group); - } - } - - // The DOM updates done when an operation finishes are batched so - // that the minimum number of relayouts are required. 
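// Sketch of the batched read/write phases described above: to keep relayouts
// to a minimum, every pending operation first performs its DOM reads, and only
// then do all of them perform their DOM writes. The op objects here (plain
// read()/write() methods) are a simplification of the endOperation_R*/W* split
// below, not its actual interface.
function flushOperations(ops) {
  var measurements = [];
  for (var i = 0; i < ops.length; i++) measurements.push(ops[i].read());   // read phase
  for (var j = 0; j < ops.length; j++) ops[j].write(measurements[j]);      // write phase
}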
- function endOperations(group) { - var ops = group.ops; - for (var i = 0; i < ops.length; i++) // Read DOM - endOperation_R1(ops[i]); - for (var i = 0; i < ops.length; i++) // Write DOM (maybe) - endOperation_W1(ops[i]); - for (var i = 0; i < ops.length; i++) // Read DOM - endOperation_R2(ops[i]); - for (var i = 0; i < ops.length; i++) // Write DOM (maybe) - endOperation_W2(ops[i]); - for (var i = 0; i < ops.length; i++) // Read DOM - endOperation_finish(ops[i]); - } - - function endOperation_R1(op) { - var cm = op.cm, display = cm.display; - maybeClipScrollbars(cm); - if (op.updateMaxLine) findMaxLine(cm); - - op.mustUpdate = op.viewChanged || op.forceUpdate || op.scrollTop != null || - op.scrollToPos && (op.scrollToPos.from.line < display.viewFrom || - op.scrollToPos.to.line >= display.viewTo) || - display.maxLineChanged && cm.options.lineWrapping; - op.update = op.mustUpdate && - new DisplayUpdate(cm, op.mustUpdate && {top: op.scrollTop, ensure: op.scrollToPos}, op.forceUpdate); - } - - function endOperation_W1(op) { - op.updatedDisplay = op.mustUpdate && updateDisplayIfNeeded(op.cm, op.update); - } - - function endOperation_R2(op) { - var cm = op.cm, display = cm.display; - if (op.updatedDisplay) updateHeightsInViewport(cm); - - op.barMeasure = measureForScrollbars(cm); - - // If the max line changed since it was last measured, measure it, - // and ensure the document's width matches it. - // updateDisplay_W2 will use these properties to do the actual resizing - if (display.maxLineChanged && !cm.options.lineWrapping) { - op.adjustWidthTo = measureChar(cm, display.maxLine, display.maxLine.text.length).left + 3; - cm.display.sizerWidth = op.adjustWidthTo; - op.barMeasure.scrollWidth = - Math.max(display.scroller.clientWidth, display.sizer.offsetLeft + op.adjustWidthTo + scrollGap(cm) + cm.display.barWidth); - op.maxScrollLeft = Math.max(0, display.sizer.offsetLeft + op.adjustWidthTo - displayWidth(cm)); - } - - if (op.updatedDisplay || op.selectionChanged) - op.preparedSelection = display.input.prepareSelection(); - } - - function endOperation_W2(op) { - var cm = op.cm; - - if (op.adjustWidthTo != null) { - cm.display.sizer.style.minWidth = op.adjustWidthTo + "px"; - if (op.maxScrollLeft < cm.doc.scrollLeft) - setScrollLeft(cm, Math.min(cm.display.scroller.scrollLeft, op.maxScrollLeft), true); - cm.display.maxLineChanged = false; - } - - if (op.preparedSelection) - cm.display.input.showSelection(op.preparedSelection); - if (op.updatedDisplay) - setDocumentHeight(cm, op.barMeasure); - if (op.updatedDisplay || op.startHeight != cm.doc.height) - updateScrollbars(cm, op.barMeasure); - - if (op.selectionChanged) restartBlink(cm); - - if (cm.state.focused && op.updateInput) - cm.display.input.reset(op.typing); - if (op.focus && op.focus == activeElt() && (!document.hasFocus || document.hasFocus())) - ensureFocus(op.cm); - } - - function endOperation_finish(op) { - var cm = op.cm, display = cm.display, doc = cm.doc; - - if (op.updatedDisplay) postUpdateDisplay(cm, op.update); - - // Abort mouse wheel delta measurement, when scrolling explicitly - if (display.wheelStartX != null && (op.scrollTop != null || op.scrollLeft != null || op.scrollToPos)) - display.wheelStartX = display.wheelStartY = null; - - // Propagate the scroll position to the actual DOM scroller - if (op.scrollTop != null && (display.scroller.scrollTop != op.scrollTop || op.forceScroll)) { - doc.scrollTop = Math.max(0, Math.min(display.scroller.scrollHeight - display.scroller.clientHeight, op.scrollTop)); - 
display.scrollbars.setScrollTop(doc.scrollTop); - display.scroller.scrollTop = doc.scrollTop; - } - if (op.scrollLeft != null && (display.scroller.scrollLeft != op.scrollLeft || op.forceScroll)) { - doc.scrollLeft = Math.max(0, Math.min(display.scroller.scrollWidth - displayWidth(cm), op.scrollLeft)); - display.scrollbars.setScrollLeft(doc.scrollLeft); - display.scroller.scrollLeft = doc.scrollLeft; - alignHorizontally(cm); - } - // If we need to scroll a specific position into view, do so. - if (op.scrollToPos) { - var coords = scrollPosIntoView(cm, clipPos(doc, op.scrollToPos.from), - clipPos(doc, op.scrollToPos.to), op.scrollToPos.margin); - if (op.scrollToPos.isCursor && cm.state.focused) maybeScrollWindow(cm, coords); - } - - // Fire events for markers that are hidden/unidden by editing or - // undoing - var hidden = op.maybeHiddenMarkers, unhidden = op.maybeUnhiddenMarkers; - if (hidden) for (var i = 0; i < hidden.length; ++i) - if (!hidden[i].lines.length) signal(hidden[i], "hide"); - if (unhidden) for (var i = 0; i < unhidden.length; ++i) - if (unhidden[i].lines.length) signal(unhidden[i], "unhide"); - - if (display.wrapper.offsetHeight) - doc.scrollTop = cm.display.scroller.scrollTop; - - // Fire change events, and delayed event handlers - if (op.changeObjs) - signal(cm, "changes", cm, op.changeObjs); - if (op.update) - op.update.finish(); - } - - // Run the given function in an operation - function runInOp(cm, f) { - if (cm.curOp) return f(); - startOperation(cm); - try { return f(); } - finally { endOperation(cm); } - } - // Wraps a function in an operation. Returns the wrapped function. - function operation(cm, f) { - return function() { - if (cm.curOp) return f.apply(cm, arguments); - startOperation(cm); - try { return f.apply(cm, arguments); } - finally { endOperation(cm); } - }; - } - // Used to add methods to editor and doc instances, wrapping them in - // operations. - function methodOp(f) { - return function() { - if (this.curOp) return f.apply(this, arguments); - startOperation(this); - try { return f.apply(this, arguments); } - finally { endOperation(this); } - }; - } - function docMethodOp(f) { - return function() { - var cm = this.cm; - if (!cm || cm.curOp) return f.apply(this, arguments); - startOperation(cm); - try { return f.apply(this, arguments); } - finally { endOperation(cm); } - }; - } - - // VIEW TRACKING - - // These objects are used to represent the visible (currently drawn) - // part of the document. A LineView may correspond to multiple - // logical lines, if those are connected by collapsed ranges. - function LineView(doc, line, lineN) { - // The starting line - this.line = line; - // Continuing lines, if any - this.rest = visualLineContinued(line); - // Number of logical lines in this visual line - this.size = this.rest ? lineNo(lst(this.rest)) - lineN + 1 : 1; - this.node = this.text = null; - this.hidden = lineIsHidden(doc, line); - } - - // Create a range of LineView objects for the given lines. - function buildViewArray(cm, from, to) { - var array = [], nextPos; - for (var pos = from; pos < to; pos = nextPos) { - var view = new LineView(cm.doc, getLine(cm.doc, pos), pos); - nextPos = pos + view.size; - array.push(view); - } - return array; - } - - // Updates the display.view data structure for a given change to the - // document. From and to are in pre-change coordinates. Lendiff is - // the amount of lines added or subtracted by the change. 
This is - // used for changes that span multiple lines, or change the way - // lines are divided into visual lines. regLineChange (below) - // registers single-line changes. - function regChange(cm, from, to, lendiff) { - if (from == null) from = cm.doc.first; - if (to == null) to = cm.doc.first + cm.doc.size; - if (!lendiff) lendiff = 0; - - var display = cm.display; - if (lendiff && to < display.viewTo && - (display.updateLineNumbers == null || display.updateLineNumbers > from)) - display.updateLineNumbers = from; - - cm.curOp.viewChanged = true; - - if (from >= display.viewTo) { // Change after - if (sawCollapsedSpans && visualLineNo(cm.doc, from) < display.viewTo) - resetView(cm); - } else if (to <= display.viewFrom) { // Change before - if (sawCollapsedSpans && visualLineEndNo(cm.doc, to + lendiff) > display.viewFrom) { - resetView(cm); - } else { - display.viewFrom += lendiff; - display.viewTo += lendiff; - } - } else if (from <= display.viewFrom && to >= display.viewTo) { // Full overlap - resetView(cm); - } else if (from <= display.viewFrom) { // Top overlap - var cut = viewCuttingPoint(cm, to, to + lendiff, 1); - if (cut) { - display.view = display.view.slice(cut.index); - display.viewFrom = cut.lineN; - display.viewTo += lendiff; - } else { - resetView(cm); - } - } else if (to >= display.viewTo) { // Bottom overlap - var cut = viewCuttingPoint(cm, from, from, -1); - if (cut) { - display.view = display.view.slice(0, cut.index); - display.viewTo = cut.lineN; - } else { - resetView(cm); - } - } else { // Gap in the middle - var cutTop = viewCuttingPoint(cm, from, from, -1); - var cutBot = viewCuttingPoint(cm, to, to + lendiff, 1); - if (cutTop && cutBot) { - display.view = display.view.slice(0, cutTop.index) - .concat(buildViewArray(cm, cutTop.lineN, cutBot.lineN)) - .concat(display.view.slice(cutBot.index)); - display.viewTo += lendiff; - } else { - resetView(cm); - } - } - - var ext = display.externalMeasured; - if (ext) { - if (to < ext.lineN) - ext.lineN += lendiff; - else if (from < ext.lineN + ext.size) - display.externalMeasured = null; - } - } - - // Register a change to a single line. Type must be one of "text", - // "gutter", "class", "widget" - function regLineChange(cm, line, type) { - cm.curOp.viewChanged = true; - var display = cm.display, ext = cm.display.externalMeasured; - if (ext && line >= ext.lineN && line < ext.lineN + ext.size) - display.externalMeasured = null; - - if (line < display.viewFrom || line >= display.viewTo) return; - var lineView = display.view[findViewIndex(cm, line)]; - if (lineView.node == null) return; - var arr = lineView.changes || (lineView.changes = []); - if (indexOf(arr, type) == -1) arr.push(type); - } - - // Clear the view. - function resetView(cm) { - cm.display.viewFrom = cm.display.viewTo = cm.doc.first; - cm.display.view = []; - cm.display.viewOffset = 0; - } - - // Find the view element corresponding to a given line. Return null - // when the line isn't visible. 
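// Sketch of the view-index lookup the function below performs: the drawn view
// is an array of entries that may each cover several document lines (because
// of collapsed ranges), so finding the entry for a line means walking the
// array and subtracting entry sizes. viewFrom/viewTo and the .size fields
// mirror display.viewFrom/viewTo and LineView.size.
function findViewIndexSketch(view, viewFrom, viewTo, lineNo) {
  if (lineNo < viewFrom || lineNo >= viewTo) return null;  // line not currently drawn
  var n = lineNo - viewFrom;
  for (var i = 0; i < view.length; i++) {
    n -= view[i].size;
    if (n < 0) return i;
  }
  return null;
}
// Example: entries covering 1, 2 and 1 document lines starting at line 10;
// findViewIndexSketch([{size: 1}, {size: 2}, {size: 1}], 10, 14, 12) === 1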
- function findViewIndex(cm, n) { - if (n >= cm.display.viewTo) return null; - n -= cm.display.viewFrom; - if (n < 0) return null; - var view = cm.display.view; - for (var i = 0; i < view.length; i++) { - n -= view[i].size; - if (n < 0) return i; - } - } - - function viewCuttingPoint(cm, oldN, newN, dir) { - var index = findViewIndex(cm, oldN), diff, view = cm.display.view; - if (!sawCollapsedSpans || newN == cm.doc.first + cm.doc.size) - return {index: index, lineN: newN}; - for (var i = 0, n = cm.display.viewFrom; i < index; i++) - n += view[i].size; - if (n != oldN) { - if (dir > 0) { - if (index == view.length - 1) return null; - diff = (n + view[index].size) - oldN; - index++; - } else { - diff = n - oldN; - } - oldN += diff; newN += diff; - } - while (visualLineNo(cm.doc, newN) != newN) { - if (index == (dir < 0 ? 0 : view.length - 1)) return null; - newN += dir * view[index - (dir < 0 ? 1 : 0)].size; - index += dir; - } - return {index: index, lineN: newN}; - } - - // Force the view to cover a given range, adding empty view element - // or clipping off existing ones as needed. - function adjustView(cm, from, to) { - var display = cm.display, view = display.view; - if (view.length == 0 || from >= display.viewTo || to <= display.viewFrom) { - display.view = buildViewArray(cm, from, to); - display.viewFrom = from; - } else { - if (display.viewFrom > from) - display.view = buildViewArray(cm, from, display.viewFrom).concat(display.view); - else if (display.viewFrom < from) - display.view = display.view.slice(findViewIndex(cm, from)); - display.viewFrom = from; - if (display.viewTo < to) - display.view = display.view.concat(buildViewArray(cm, display.viewTo, to)); - else if (display.viewTo > to) - display.view = display.view.slice(0, findViewIndex(cm, to)); - } - display.viewTo = to; - } - - // Count the number of lines in the view whose DOM representation is - // out of date (or nonexistent). - function countDirtyView(cm) { - var view = cm.display.view, dirty = 0; - for (var i = 0; i < view.length; i++) { - var lineView = view[i]; - if (!lineView.hidden && (!lineView.node || lineView.changes)) ++dirty; - } - return dirty; - } - - // EVENT HANDLERS - - // Attach the necessary event handlers when initializing the editor - function registerEventHandlers(cm) { - var d = cm.display; - on(d.scroller, "mousedown", operation(cm, onMouseDown)); - // Older IE's will not fire a second mousedown for a double click - if (ie && ie_version < 11) - on(d.scroller, "dblclick", operation(cm, function(e) { - if (signalDOMEvent(cm, e)) return; - var pos = posFromMouse(cm, e); - if (!pos || clickInGutter(cm, e) || eventInWidget(cm.display, e)) return; - e_preventDefault(e); - var word = cm.findWordAt(pos); - extendSelection(cm.doc, word.anchor, word.head); - })); - else - on(d.scroller, "dblclick", function(e) { signalDOMEvent(cm, e) || e_preventDefault(e); }); - // Some browsers fire contextmenu *after* opening the menu, at - // which point we can't mess with it anymore. Context menu is - // handled in onMouseDown for these browsers. 
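// Sketch of the tap classification used by the touch handlers below: a tap
// counts as a double (or triple) tap when it follows the previous one within
// 300ms and lands within roughly 20px of it. The touch objects here carry
// plain page-pixel left/top fields plus a .prev link, a simplified stand-in
// for d.activeTouch.
function classifyTap(touch, prev) {
  function farAway(a, b) {
    if (b == null || b.left == null) return true;
    var dx = b.left - a.left, dy = b.top - a.top;
    return dx * dx + dy * dy > 20 * 20;
  }
  if (!prev || farAway(touch, prev)) return "single";
  if (!prev.prev || farAway(touch, prev.prev)) return "double";
  return "triple";
}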
- if (!captureRightClick) on(d.scroller, "contextmenu", function(e) {onContextMenu(cm, e);}); - - // Used to suppress mouse event handling when a touch happens - var touchFinished, prevTouch = {end: 0}; - function finishTouch() { - if (d.activeTouch) { - touchFinished = setTimeout(function() {d.activeTouch = null;}, 1000); - prevTouch = d.activeTouch; - prevTouch.end = +new Date; - } - }; - function isMouseLikeTouchEvent(e) { - if (e.touches.length != 1) return false; - var touch = e.touches[0]; - return touch.radiusX <= 1 && touch.radiusY <= 1; - } - function farAway(touch, other) { - if (other.left == null) return true; - var dx = other.left - touch.left, dy = other.top - touch.top; - return dx * dx + dy * dy > 20 * 20; - } - on(d.scroller, "touchstart", function(e) { - if (!signalDOMEvent(cm, e) && !isMouseLikeTouchEvent(e)) { - clearTimeout(touchFinished); - var now = +new Date; - d.activeTouch = {start: now, moved: false, - prev: now - prevTouch.end <= 300 ? prevTouch : null}; - if (e.touches.length == 1) { - d.activeTouch.left = e.touches[0].pageX; - d.activeTouch.top = e.touches[0].pageY; - } - } - }); - on(d.scroller, "touchmove", function() { - if (d.activeTouch) d.activeTouch.moved = true; - }); - on(d.scroller, "touchend", function(e) { - var touch = d.activeTouch; - if (touch && !eventInWidget(d, e) && touch.left != null && - !touch.moved && new Date - touch.start < 300) { - var pos = cm.coordsChar(d.activeTouch, "page"), range; - if (!touch.prev || farAway(touch, touch.prev)) // Single tap - range = new Range(pos, pos); - else if (!touch.prev.prev || farAway(touch, touch.prev.prev)) // Double tap - range = cm.findWordAt(pos); - else // Triple tap - range = new Range(Pos(pos.line, 0), clipPos(cm.doc, Pos(pos.line + 1, 0))); - cm.setSelection(range.anchor, range.head); - cm.focus(); - e_preventDefault(e); - } - finishTouch(); - }); - on(d.scroller, "touchcancel", finishTouch); - - // Sync scrolling between fake scrollbars and real scrollable - // area, ensure viewport is updated when scrolling. - on(d.scroller, "scroll", function() { - if (d.scroller.clientHeight) { - setScrollTop(cm, d.scroller.scrollTop); - setScrollLeft(cm, d.scroller.scrollLeft, true); - signal(cm, "scroll", cm); - } - }); - - // Listen to wheel events in order to try and update the viewport on time. - on(d.scroller, "mousewheel", function(e){onScrollWheel(cm, e);}); - on(d.scroller, "DOMMouseScroll", function(e){onScrollWheel(cm, e);}); - - // Prevent wrapper from ever scrolling - on(d.wrapper, "scroll", function() { d.wrapper.scrollTop = d.wrapper.scrollLeft = 0; }); - - d.dragFunctions = { - enter: function(e) {if (!signalDOMEvent(cm, e)) e_stop(e);}, - over: function(e) {if (!signalDOMEvent(cm, e)) { onDragOver(cm, e); e_stop(e); }}, - start: function(e){onDragStart(cm, e);}, - drop: operation(cm, onDrop), - leave: function() {clearDragCursor(cm);} - }; - - var inp = d.input.getField(); - on(inp, "keyup", function(e) { onKeyUp.call(cm, e); }); - on(inp, "keydown", operation(cm, onKeyDown)); - on(inp, "keypress", operation(cm, onKeyPress)); - on(inp, "focus", bind(onFocus, cm)); - on(inp, "blur", bind(onBlur, cm)); - } - - function dragDropChanged(cm, value, old) { - var wasOn = old && old != CodeMirror.Init; - if (!value != !wasOn) { - var funcs = cm.display.dragFunctions; - var toggle = value ? 
on : off; - toggle(cm.display.scroller, "dragstart", funcs.start); - toggle(cm.display.scroller, "dragenter", funcs.enter); - toggle(cm.display.scroller, "dragover", funcs.over); - toggle(cm.display.scroller, "dragleave", funcs.leave); - toggle(cm.display.scroller, "drop", funcs.drop); - } - } - - // Called when the window resizes - function onResize(cm) { - var d = cm.display; - if (d.lastWrapHeight == d.wrapper.clientHeight && d.lastWrapWidth == d.wrapper.clientWidth) - return; - // Might be a text scaling operation, clear size caches. - d.cachedCharWidth = d.cachedTextHeight = d.cachedPaddingH = null; - d.scrollbarsClipped = false; - cm.setSize(); - } - - // MOUSE EVENTS - - // Return true when the given mouse event happened in a widget - function eventInWidget(display, e) { - for (var n = e_target(e); n != display.wrapper; n = n.parentNode) { - if (!n || (n.nodeType == 1 && n.getAttribute("cm-ignore-events") == "true") || - (n.parentNode == display.sizer && n != display.mover)) - return true; - } - } - - // Given a mouse event, find the corresponding position. If liberal - // is false, it checks whether a gutter or scrollbar was clicked, - // and returns null if it was. forRect is used by rectangular - // selections, and tries to estimate a character position even for - // coordinates beyond the right of the text. - function posFromMouse(cm, e, liberal, forRect) { - var display = cm.display; - if (!liberal && e_target(e).getAttribute("cm-not-content") == "true") return null; - - var x, y, space = display.lineSpace.getBoundingClientRect(); - // Fails unpredictably on IE[67] when mouse is dragged around quickly. - try { x = e.clientX - space.left; y = e.clientY - space.top; } - catch (e) { return null; } - var coords = coordsChar(cm, x, y), line; - if (forRect && coords.xRel == 1 && (line = getLine(cm.doc, coords.line).text).length == coords.ch) { - var colDiff = countColumn(line, line.length, cm.options.tabSize) - line.length; - coords = Pos(coords.line, Math.max(0, Math.round((x - paddingH(cm.display).left) / charWidth(cm.display)) - colDiff)); - } - return coords; - } - - // A mouse down can be a single click, double click, triple click, - // start of selection drag, start of text drag, new cursor - // (ctrl-click), rectangle drag (alt-drag), or xwin - // middle-click-paste. Or it might be a click on something we should - // not interfere with, such as a scrollbar or widget. - function onMouseDown(e) { - var cm = this, display = cm.display; - if (signalDOMEvent(cm, e) || display.activeTouch && display.input.supportsTouch()) return; - display.shift = e.shiftKey; - - if (eventInWidget(display, e)) { - if (!webkit) { - // Briefly turn off draggability, to allow widgets to do - // normal dragging things. 
- display.scroller.draggable = false; - setTimeout(function(){display.scroller.draggable = true;}, 100); - } - return; - } - if (clickInGutter(cm, e)) return; - var start = posFromMouse(cm, e); - window.focus(); - - switch (e_button(e)) { - case 1: - // #3261: make sure, that we're not starting a second selection - if (cm.state.selectingText) - cm.state.selectingText(e); - else if (start) - leftButtonDown(cm, e, start); - else if (e_target(e) == display.scroller) - e_preventDefault(e); - break; - case 2: - if (webkit) cm.state.lastMiddleDown = +new Date; - if (start) extendSelection(cm.doc, start); - setTimeout(function() {display.input.focus();}, 20); - e_preventDefault(e); - break; - case 3: - if (captureRightClick) onContextMenu(cm, e); - else delayBlurEvent(cm); - break; - } - } - - var lastClick, lastDoubleClick; - function leftButtonDown(cm, e, start) { - if (ie) setTimeout(bind(ensureFocus, cm), 0); - else cm.curOp.focus = activeElt(); - - var now = +new Date, type; - if (lastDoubleClick && lastDoubleClick.time > now - 400 && cmp(lastDoubleClick.pos, start) == 0) { - type = "triple"; - } else if (lastClick && lastClick.time > now - 400 && cmp(lastClick.pos, start) == 0) { - type = "double"; - lastDoubleClick = {time: now, pos: start}; - } else { - type = "single"; - lastClick = {time: now, pos: start}; - } - - var sel = cm.doc.sel, modifier = mac ? e.metaKey : e.ctrlKey, contained; - if (cm.options.dragDrop && dragAndDrop && !cm.isReadOnly() && - type == "single" && (contained = sel.contains(start)) > -1 && - (cmp((contained = sel.ranges[contained]).from(), start) < 0 || start.xRel > 0) && - (cmp(contained.to(), start) > 0 || start.xRel < 0)) - leftButtonStartDrag(cm, e, start, modifier); - else - leftButtonSelect(cm, e, start, type, modifier); - } - - // Start a text drag. When it ends, see if any dragging actually - // happen, and treat as a click if it didn't. - function leftButtonStartDrag(cm, e, start, modifier) { - var display = cm.display, startTime = +new Date; - var dragEnd = operation(cm, function(e2) { - if (webkit) display.scroller.draggable = false; - cm.state.draggingText = false; - off(document, "mouseup", dragEnd); - off(display.scroller, "drop", dragEnd); - if (Math.abs(e.clientX - e2.clientX) + Math.abs(e.clientY - e2.clientY) < 10) { - e_preventDefault(e2); - if (!modifier && +new Date - 200 < startTime) - extendSelection(cm.doc, start); - // Work around unexplainable focus problem in IE9 (#2127) and Chrome (#3081) - if (webkit || ie && ie_version == 9) - setTimeout(function() {document.body.focus(); display.input.focus();}, 20); - else - display.input.focus(); - } - }); - // Let the drag handler handle this. - if (webkit) display.scroller.draggable = true; - cm.state.draggingText = dragEnd; - // IE's approach to draggable - if (display.scroller.dragDrop) display.scroller.dragDrop(); - on(document, "mouseup", dragEnd); - on(display.scroller, "drop", dragEnd); - } - - // Normal selection, as opposed to text dragging. 
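// Sketch of the click-type detection above: a click within 400ms of the last
// one at the same position upgrades to "double", and one more to "triple".
// samePos is a stand-in for CodeMirror's cmp() on {line, ch} positions.
function makeClickClassifier(samePos) {
  var lastClick = null, lastDouble = null;
  return function classify(pos, now) {
    if (lastDouble && now - lastDouble.time < 400 && samePos(lastDouble.pos, pos)) {
      return "triple";
    } else if (lastClick && now - lastClick.time < 400 && samePos(lastClick.pos, pos)) {
      lastDouble = {time: now, pos: pos};
      return "double";
    }
    lastClick = {time: now, pos: pos};
    return "single";
  };
}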
- function leftButtonSelect(cm, e, start, type, addNew) { - var display = cm.display, doc = cm.doc; - e_preventDefault(e); - - var ourRange, ourIndex, startSel = doc.sel, ranges = startSel.ranges; - if (addNew && !e.shiftKey) { - ourIndex = doc.sel.contains(start); - if (ourIndex > -1) - ourRange = ranges[ourIndex]; - else - ourRange = new Range(start, start); - } else { - ourRange = doc.sel.primary(); - ourIndex = doc.sel.primIndex; - } - - if (e.altKey) { - type = "rect"; - if (!addNew) ourRange = new Range(start, start); - start = posFromMouse(cm, e, true, true); - ourIndex = -1; - } else if (type == "double") { - var word = cm.findWordAt(start); - if (cm.display.shift || doc.extend) - ourRange = extendRange(doc, ourRange, word.anchor, word.head); - else - ourRange = word; - } else if (type == "triple") { - var line = new Range(Pos(start.line, 0), clipPos(doc, Pos(start.line + 1, 0))); - if (cm.display.shift || doc.extend) - ourRange = extendRange(doc, ourRange, line.anchor, line.head); - else - ourRange = line; - } else { - ourRange = extendRange(doc, ourRange, start); - } - - if (!addNew) { - ourIndex = 0; - setSelection(doc, new Selection([ourRange], 0), sel_mouse); - startSel = doc.sel; - } else if (ourIndex == -1) { - ourIndex = ranges.length; - setSelection(doc, normalizeSelection(ranges.concat([ourRange]), ourIndex), - {scroll: false, origin: "*mouse"}); - } else if (ranges.length > 1 && ranges[ourIndex].empty() && type == "single" && !e.shiftKey) { - setSelection(doc, normalizeSelection(ranges.slice(0, ourIndex).concat(ranges.slice(ourIndex + 1)), 0), - {scroll: false, origin: "*mouse"}); - startSel = doc.sel; - } else { - replaceOneSelection(doc, ourIndex, ourRange, sel_mouse); - } - - var lastPos = start; - function extendTo(pos) { - if (cmp(lastPos, pos) == 0) return; - lastPos = pos; - - if (type == "rect") { - var ranges = [], tabSize = cm.options.tabSize; - var startCol = countColumn(getLine(doc, start.line).text, start.ch, tabSize); - var posCol = countColumn(getLine(doc, pos.line).text, pos.ch, tabSize); - var left = Math.min(startCol, posCol), right = Math.max(startCol, posCol); - for (var line = Math.min(start.line, pos.line), end = Math.min(cm.lastLine(), Math.max(start.line, pos.line)); - line <= end; line++) { - var text = getLine(doc, line).text, leftPos = findColumn(text, left, tabSize); - if (left == right) - ranges.push(new Range(Pos(line, leftPos), Pos(line, leftPos))); - else if (text.length > leftPos) - ranges.push(new Range(Pos(line, leftPos), Pos(line, findColumn(text, right, tabSize)))); - } - if (!ranges.length) ranges.push(new Range(start, start)); - setSelection(doc, normalizeSelection(startSel.ranges.slice(0, ourIndex).concat(ranges), ourIndex), - {origin: "*mouse", scroll: false}); - cm.scrollIntoView(pos); - } else { - var oldRange = ourRange; - var anchor = oldRange.anchor, head = pos; - if (type != "single") { - if (type == "double") - var range = cm.findWordAt(pos); - else - var range = new Range(Pos(pos.line, 0), clipPos(doc, Pos(pos.line + 1, 0))); - if (cmp(range.anchor, anchor) > 0) { - head = range.head; - anchor = minPos(oldRange.from(), range.anchor); - } else { - head = range.anchor; - anchor = maxPos(oldRange.to(), range.head); - } - } - var ranges = startSel.ranges.slice(0); - ranges[ourIndex] = new Range(clipPos(doc, anchor), head); - setSelection(doc, normalizeSelection(ranges, ourIndex), sel_mouse); - } - } - - var editorSize = display.wrapper.getBoundingClientRect(); - // Used to ensure timeout re-tries don't fire when another extend 
- // happened in the meantime (clearTimeout isn't reliable -- at - // least on Chrome, the timeouts still happen even when cleared, - // if the clear happens after their scheduled firing time). - var counter = 0; - - function extend(e) { - var curCount = ++counter; - var cur = posFromMouse(cm, e, true, type == "rect"); - if (!cur) return; - if (cmp(cur, lastPos) != 0) { - cm.curOp.focus = activeElt(); - extendTo(cur); - var visible = visibleLines(display, doc); - if (cur.line >= visible.to || cur.line < visible.from) - setTimeout(operation(cm, function(){if (counter == curCount) extend(e);}), 150); - } else { - var outside = e.clientY < editorSize.top ? -20 : e.clientY > editorSize.bottom ? 20 : 0; - if (outside) setTimeout(operation(cm, function() { - if (counter != curCount) return; - display.scroller.scrollTop += outside; - extend(e); - }), 50); - } - } - - function done(e) { - cm.state.selectingText = false; - counter = Infinity; - e_preventDefault(e); - display.input.focus(); - off(document, "mousemove", move); - off(document, "mouseup", up); - doc.history.lastSelOrigin = null; - } - - var move = operation(cm, function(e) { - if (!e_button(e)) done(e); - else extend(e); - }); - var up = operation(cm, done); - cm.state.selectingText = up; - on(document, "mousemove", move); - on(document, "mouseup", up); - } - - // Determines whether an event happened in the gutter, and fires the - // handlers for the corresponding event. - function gutterEvent(cm, e, type, prevent) { - try { var mX = e.clientX, mY = e.clientY; } - catch(e) { return false; } - if (mX >= Math.floor(cm.display.gutters.getBoundingClientRect().right)) return false; - if (prevent) e_preventDefault(e); - - var display = cm.display; - var lineBox = display.lineDiv.getBoundingClientRect(); - - if (mY > lineBox.bottom || !hasHandler(cm, type)) return e_defaultPrevented(e); - mY -= lineBox.top - display.viewOffset; - - for (var i = 0; i < cm.options.gutters.length; ++i) { - var g = display.gutters.childNodes[i]; - if (g && g.getBoundingClientRect().right >= mX) { - var line = lineAtHeight(cm.doc, mY); - var gutter = cm.options.gutters[i]; - signal(cm, type, cm, line, gutter, e); - return e_defaultPrevented(e); - } - } - } - - function clickInGutter(cm, e) { - return gutterEvent(cm, e, "gutterClick", true); - } - - // Kludge to work around strange IE behavior where it'll sometimes - // re-fire a series of drag-related events right after the drop (#1551) - var lastDrop = 0; - - function onDrop(e) { - var cm = this; - clearDragCursor(cm); - if (signalDOMEvent(cm, e) || eventInWidget(cm.display, e)) - return; - e_preventDefault(e); - if (ie) lastDrop = +new Date; - var pos = posFromMouse(cm, e, true), files = e.dataTransfer.files; - if (!pos || cm.isReadOnly()) return; - // Might be a file drop, in which case we simply extract the text - // and insert it. 
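// Sketch of the "read all dropped files, keep their order" pattern used just
// below: results are stored by index and handed over only once every reader
// has finished. onAllRead is a hypothetical callback; FileReader is the
// standard browser API.
function readDroppedFiles(files, onAllRead) {
  var n = files.length, text = new Array(n), read = 0;
  Array.prototype.forEach.call(files, function (file, i) {
    var reader = new FileReader();
    reader.onload = function () {
      text[i] = reader.result;
      if (++read == n) onAllRead(text);
    };
    reader.readAsText(file);
  });
}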
- if (files && files.length && window.FileReader && window.File) { - var n = files.length, text = Array(n), read = 0; - var loadFile = function(file, i) { - if (cm.options.allowDropFileTypes && - indexOf(cm.options.allowDropFileTypes, file.type) == -1) - return; - - var reader = new FileReader; - reader.onload = operation(cm, function() { - var content = reader.result; - if (/[\x00-\x08\x0e-\x1f]{2}/.test(content)) content = ""; - text[i] = content; - if (++read == n) { - pos = clipPos(cm.doc, pos); - var change = {from: pos, to: pos, - text: cm.doc.splitLines(text.join(cm.doc.lineSeparator())), - origin: "paste"}; - makeChange(cm.doc, change); - setSelectionReplaceHistory(cm.doc, simpleSelection(pos, changeEnd(change))); - } - }); - reader.readAsText(file); - }; - for (var i = 0; i < n; ++i) loadFile(files[i], i); - } else { // Normal drop - // Don't do a replace if the drop happened inside of the selected text. - if (cm.state.draggingText && cm.doc.sel.contains(pos) > -1) { - cm.state.draggingText(e); - // Ensure the editor is re-focused - setTimeout(function() {cm.display.input.focus();}, 20); - return; - } - try { - var text = e.dataTransfer.getData("Text"); - if (text) { - if (cm.state.draggingText && !(mac ? e.altKey : e.ctrlKey)) - var selected = cm.listSelections(); - setSelectionNoUndo(cm.doc, simpleSelection(pos, pos)); - if (selected) for (var i = 0; i < selected.length; ++i) - replaceRange(cm.doc, "", selected[i].anchor, selected[i].head, "drag"); - cm.replaceSelection(text, "around", "paste"); - cm.display.input.focus(); - } - } - catch(e){} - } - } - - function onDragStart(cm, e) { - if (ie && (!cm.state.draggingText || +new Date - lastDrop < 100)) { e_stop(e); return; } - if (signalDOMEvent(cm, e) || eventInWidget(cm.display, e)) return; - - e.dataTransfer.setData("Text", cm.getSelection()); - - // Use dummy image instead of default browsers image. - // Recent Safari (~6.0.2) have a tendency to segfault when this happens, so we don't do it there. - if (e.dataTransfer.setDragImage && !safari) { - var img = elt("img", null, null, "position: fixed; left: 0; top: 0;"); - img.src = "data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw=="; - if (presto) { - img.width = img.height = 1; - cm.display.wrapper.appendChild(img); - // Force a relayout, or Opera won't use our image for some obscure reason - img._top = img.offsetTop; - } - e.dataTransfer.setDragImage(img, 0, 0); - if (presto) img.parentNode.removeChild(img); - } - } - - function onDragOver(cm, e) { - var pos = posFromMouse(cm, e); - if (!pos) return; - var frag = document.createDocumentFragment(); - drawSelectionCursor(cm, pos, frag); - if (!cm.display.dragCursor) { - cm.display.dragCursor = elt("div", null, "CodeMirror-cursors CodeMirror-dragcursors"); - cm.display.lineSpace.insertBefore(cm.display.dragCursor, cm.display.cursorDiv); - } - removeChildrenAndAdd(cm.display.dragCursor, frag); - } - - function clearDragCursor(cm) { - if (cm.display.dragCursor) { - cm.display.lineSpace.removeChild(cm.display.dragCursor); - cm.display.dragCursor = null; - } - } - - // SCROLL EVENTS - - // Sync the scrollable area and scrollbars, ensure the viewport - // covers the visible area. 
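// Sketch of the scroll syncing described above: the document's notion of
// scrollTop, the real scroller element and the fake scrollbars all have to
// agree, and tiny (under 2px) differences are ignored to avoid feedback loops.
// The state/scroller/scrollbars objects here are hypothetical stand-ins for
// cm.doc, display.scroller and display.scrollbars.
function syncScrollTop(state, scroller, scrollbars, val) {
  if (Math.abs(state.scrollTop - val) < 2) return;   // already close enough
  state.scrollTop = val;
  if (scroller.scrollTop != val) scroller.scrollTop = val;
  scrollbars.setScrollTop(val);
}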
- function setScrollTop(cm, val) { - if (Math.abs(cm.doc.scrollTop - val) < 2) return; - cm.doc.scrollTop = val; - if (!gecko) updateDisplaySimple(cm, {top: val}); - if (cm.display.scroller.scrollTop != val) cm.display.scroller.scrollTop = val; - cm.display.scrollbars.setScrollTop(val); - if (gecko) updateDisplaySimple(cm); - startWorker(cm, 100); - } - // Sync scroller and scrollbar, ensure the gutter elements are - // aligned. - function setScrollLeft(cm, val, isScroller) { - if (isScroller ? val == cm.doc.scrollLeft : Math.abs(cm.doc.scrollLeft - val) < 2) return; - val = Math.min(val, cm.display.scroller.scrollWidth - cm.display.scroller.clientWidth); - cm.doc.scrollLeft = val; - alignHorizontally(cm); - if (cm.display.scroller.scrollLeft != val) cm.display.scroller.scrollLeft = val; - cm.display.scrollbars.setScrollLeft(val); - } - // Since the delta values reported on mouse wheel events are // unstandardized between browsers and even browser versions, and // generally horribly unpredictable, this code starts by measuring @@ -3976,24 +4446,24 @@ // know one. These don't have to be accurate -- the result of them // being wrong would just be a slight flicker on the first wheel // scroll (if it is large enough). - if (ie) wheelPixelsPerUnit = -.53; - else if (gecko) wheelPixelsPerUnit = 15; - else if (chrome) wheelPixelsPerUnit = -.7; - else if (safari) wheelPixelsPerUnit = -1/3; - - var wheelEventDelta = function(e) { + if (ie) { wheelPixelsPerUnit = -.53; } + else if (gecko) { wheelPixelsPerUnit = 15; } + else if (chrome) { wheelPixelsPerUnit = -.7; } + else if (safari) { wheelPixelsPerUnit = -1/3; } + + function wheelEventDelta(e) { var dx = e.wheelDeltaX, dy = e.wheelDeltaY; - if (dx == null && e.detail && e.axis == e.HORIZONTAL_AXIS) dx = e.detail; - if (dy == null && e.detail && e.axis == e.VERTICAL_AXIS) dy = e.detail; - else if (dy == null) dy = e.wheelDelta; - return {x: dx, y: dy}; - }; - CodeMirror.wheelEventPixels = function(e) { + if (dx == null && e.detail && e.axis == e.HORIZONTAL_AXIS) { dx = e.detail; } + if (dy == null && e.detail && e.axis == e.VERTICAL_AXIS) { dy = e.detail; } + else if (dy == null) { dy = e.wheelDelta; } + return {x: dx, y: dy} + } + function wheelEventPixels(e) { var delta = wheelEventDelta(e); delta.x *= wheelPixelsPerUnit; delta.y *= wheelPixelsPerUnit; - return delta; - }; + return delta + } function onScrollWheel(cm, e) { var delta = wheelEventDelta(e), dx = delta.x, dy = delta.y; @@ -4002,7 +4472,7 @@ // Quit if there's nothing to scroll here var canScrollX = scroll.scrollWidth > scroll.clientWidth; var canScrollY = scroll.scrollHeight > scroll.clientHeight; - if (!(dx && canScrollX || dy && canScrollY)) return; + if (!(dx && canScrollX || dy && canScrollY)) { return } // Webkit browsers on OS X abort momentum scrolls when the target // of the scroll event is removed from the scrollable element. @@ -4013,7 +4483,7 @@ for (var i = 0; i < view.length; i++) { if (view[i].node == cur) { cm.display.currentWheelTarget = cur; - break outer; + break outer } } } @@ -4027,16 +4497,16 @@ // better than glitching out. 
if (dx && !gecko && !presto && wheelPixelsPerUnit != null) { if (dy && canScrollY) - setScrollTop(cm, Math.max(0, Math.min(scroll.scrollTop + dy * wheelPixelsPerUnit, scroll.scrollHeight - scroll.clientHeight))); - setScrollLeft(cm, Math.max(0, Math.min(scroll.scrollLeft + dx * wheelPixelsPerUnit, scroll.scrollWidth - scroll.clientWidth))); + { updateScrollTop(cm, Math.max(0, scroll.scrollTop + dy * wheelPixelsPerUnit)); } + setScrollLeft(cm, Math.max(0, scroll.scrollLeft + dx * wheelPixelsPerUnit)); // Only prevent default scrolling if vertical scrolling is // actually possible. Otherwise, it causes vertical scroll // jitter on OSX trackpads when deltaX is small and deltaY // is large (issue #3579) if (!dy || (dy && canScrollY)) - e_preventDefault(e); + { e_preventDefault(e); } display.wheelStartX = null; // Abort measurement, if in progress - return; + return } // 'Project' the visible viewport to cover the area that is being @@ -4044,8 +4514,8 @@ if (dy && wheelPixelsPerUnit != null) { var pixels = dy * wheelPixelsPerUnit; var top = cm.doc.scrollTop, bot = top + display.wrapper.clientHeight; - if (pixels < 0) top = Math.max(0, top + pixels - 50); - else bot = Math.min(cm.doc.height, bot + pixels + 50); + if (pixels < 0) { top = Math.max(0, top + pixels - 50); } + else { bot = Math.min(cm.doc.height, bot + pixels + 50); } updateDisplaySimple(cm, {top: top, bottom: bot}); } @@ -4053,14 +4523,14 @@ if (display.wheelStartX == null) { display.wheelStartX = scroll.scrollLeft; display.wheelStartY = scroll.scrollTop; display.wheelDX = dx; display.wheelDY = dy; - setTimeout(function() { - if (display.wheelStartX == null) return; + setTimeout(function () { + if (display.wheelStartX == null) { return } var movedX = scroll.scrollLeft - display.wheelStartX; var movedY = scroll.scrollTop - display.wheelStartY; var sample = (movedY && display.wheelDY && movedY / display.wheelDY) || (movedX && display.wheelDX && movedX / display.wheelDX); display.wheelStartX = display.wheelStartY = null; - if (!sample) return; + if (!sample) { return } wheelPixelsPerUnit = (wheelPixelsPerUnit * wheelSamples + sample) / (wheelSamples + 1); ++wheelSamples; }, 200); @@ -4070,226 +4540,109 @@ } } - // KEY EVENTS - - // Run a handler that was bound to a key. 
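// Sketch of the binding-resolution step right below: a key binding may be a
// command name (looked up in a commands table) or a function, and it signals
// "I did not handle this" by returning a special Pass value. The commands map
// and the Pass sentinel here are stand-ins for CodeMirror's own.
var Pass = {toString: function () { return "Pass"; }};
function runBinding(bound, commands, editor) {
  if (typeof bound == "string") bound = commands[bound];
  if (!bound) return false;               // unknown command name
  return bound(editor) != Pass;           // Pass means "try the next binding"
}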
- function doHandleBinding(cm, bound, dropShift) { - if (typeof bound == "string") { - bound = commands[bound]; - if (!bound) return false; - } - // Ensure previous input has been read, so that the handler sees a - // consistent view of the document - cm.display.input.ensurePolled(); - var prevShift = cm.display.shift, done = false; - try { - if (cm.isReadOnly()) cm.state.suppressEdits = true; - if (dropShift) cm.display.shift = false; - done = bound(cm) != Pass; - } finally { - cm.display.shift = prevShift; - cm.state.suppressEdits = false; - } - return done; - } - - function lookupKeyForEditor(cm, name, handle) { - for (var i = 0; i < cm.state.keyMaps.length; i++) { - var result = lookupKey(name, cm.state.keyMaps[i], handle, cm); - if (result) return result; - } - return (cm.options.extraKeys && lookupKey(name, cm.options.extraKeys, handle, cm)) - || lookupKey(name, cm.options.keyMap, handle, cm); - } - - var stopSeq = new Delayed; - function dispatchKey(cm, name, e, handle) { - var seq = cm.state.keySeq; - if (seq) { - if (isModifierKey(name)) return "handled"; - stopSeq.set(50, function() { - if (cm.state.keySeq == seq) { - cm.state.keySeq = null; - cm.display.input.reset(); - } - }); - name = seq + " " + name; - } - var result = lookupKeyForEditor(cm, name, handle); - - if (result == "multi") - cm.state.keySeq = name; - if (result == "handled") - signalLater(cm, "keyHandled", cm, name, e); - - if (result == "handled" || result == "multi") { - e_preventDefault(e); - restartBlink(cm); - } - - if (seq && !result && /\'$/.test(name)) { - e_preventDefault(e); - return true; - } - return !!result; - } - - // Handle a key from the keydown event. - function handleKeyBinding(cm, e) { - var name = keyName(e, true); - if (!name) return false; - - if (e.shiftKey && !cm.state.keySeq) { - // First try to resolve full name (including 'Shift-'). Failing - // that, see if there is a cursor-motion command (starting with - // 'go') bound to the keyname without 'Shift-'. - return dispatchKey(cm, "Shift-" + name, e, function(b) {return doHandleBinding(cm, b, true);}) - || dispatchKey(cm, name, e, function(b) { - if (typeof b == "string" ? /^go[A-Z]/.test(b) : b.motion) - return doHandleBinding(cm, b); - }); - } else { - return dispatchKey(cm, name, e, function(b) { return doHandleBinding(cm, b); }); - } - } - - // Handle a key from the keypress event - function handleCharBinding(cm, e, ch) { - return dispatchKey(cm, "'" + ch + "'", e, - function(b) { return doHandleBinding(cm, b, true); }); - } - - var lastStoppedKey = null; - function onKeyDown(e) { - var cm = this; - cm.curOp.focus = activeElt(); - if (signalDOMEvent(cm, e)) return; - // IE does strange things with escape. - if (ie && ie_version < 11 && e.keyCode == 27) e.returnValue = false; - var code = e.keyCode; - cm.display.shift = code == 16 || e.shiftKey; - var handled = handleKeyBinding(cm, e); - if (presto) { - lastStoppedKey = handled ? code : null; - // Opera has no cut event... we try to at least catch the key combo - if (!handled && code == 88 && !hasCopyEvent && (mac ? e.metaKey : e.ctrlKey)) - cm.replaceSelection("", null, "cut"); - } - - // Turn mouse into crosshair when Alt is held on Mac. 
- if (code == 18 && !/\bCodeMirror-crosshair\b/.test(cm.display.lineDiv.className)) - showCrossHair(cm); - } - - function showCrossHair(cm) { - var lineDiv = cm.display.lineDiv; - addClass(lineDiv, "CodeMirror-crosshair"); - - function up(e) { - if (e.keyCode == 18 || !e.altKey) { - rmClass(lineDiv, "CodeMirror-crosshair"); - off(document, "keyup", up); - off(document, "mouseover", up); - } - } - on(document, "keyup", up); - on(document, "mouseover", up); - } - - function onKeyUp(e) { - if (e.keyCode == 16) this.doc.sel.shift = false; - signalDOMEvent(this, e); - } - - function onKeyPress(e) { - var cm = this; - if (eventInWidget(cm.display, e) || signalDOMEvent(cm, e) || e.ctrlKey && !e.altKey || mac && e.metaKey) return; - var keyCode = e.keyCode, charCode = e.charCode; - if (presto && keyCode == lastStoppedKey) {lastStoppedKey = null; e_preventDefault(e); return;} - if ((presto && (!e.which || e.which < 10)) && handleKeyBinding(cm, e)) return; - var ch = String.fromCharCode(charCode == null ? keyCode : charCode); - if (handleCharBinding(cm, e, ch)) return; - cm.display.input.onKeyPress(e); - } - - // FOCUS/BLUR EVENTS - - function delayBlurEvent(cm) { - cm.state.delayingBlurEvent = true; - setTimeout(function() { - if (cm.state.delayingBlurEvent) { - cm.state.delayingBlurEvent = false; - onBlur(cm); - } - }, 100); - } - - function onFocus(cm) { - if (cm.state.delayingBlurEvent) cm.state.delayingBlurEvent = false; - - if (cm.options.readOnly == "nocursor") return; - if (!cm.state.focused) { - signal(cm, "focus", cm); - cm.state.focused = true; - addClass(cm.display.wrapper, "CodeMirror-focused"); - // This test prevents this from firing when a context - // menu is closed (since the input reset would kill the - // select-all detection hack) - if (!cm.curOp && cm.display.selForContextMenu != cm.doc.sel) { - cm.display.input.reset(); - if (webkit) setTimeout(function() { cm.display.input.reset(true); }, 20); // Issue #1730 - } - cm.display.input.receivedFocus(); - } - restartBlink(cm); - } - function onBlur(cm) { - if (cm.state.delayingBlurEvent) return; - - if (cm.state.focused) { - signal(cm, "blur", cm); - cm.state.focused = false; - rmClass(cm.display.wrapper, "CodeMirror-focused"); - } - clearInterval(cm.display.blinker); - setTimeout(function() {if (!cm.state.focused) cm.display.shift = false;}, 150); - } - - // CONTEXT MENU HANDLING - - // To make the context menu work, we need to briefly unhide the - // textarea (making it as unobtrusive as possible) to let the - // right-click take effect on it. - function onContextMenu(cm, e) { - if (eventInWidget(cm.display, e) || contextMenuInGutter(cm, e)) return; - if (signalDOMEvent(cm, e, "contextmenu")) return; - cm.display.input.onContextMenu(e); - } - - function contextMenuInGutter(cm, e) { - if (!hasHandler(cm, "gutterContextMenu")) return false; - return gutterEvent(cm, e, "gutterContextMenu", false); - } - - // UPDATING + // Selection objects are immutable. A new one is created every time + // the selection changes. A selection is one or more non-overlapping + // (and non-touching) ranges, sorted, and an integer that indicates + // which one is the primary selection (the one that's scrolled into + // view, that getCursor returns, etc). 
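// Editor's note (illustration only, not part of the vendored diff): the immutable
// Selection/Range model described in the comment above and defined just below
// surfaces through the public multi-selection API. A hedged usage sketch, assuming
// CodeMirror 5.x is loaded globally and a <textarea id="code"> exists in the page:
var cmSketch = CodeMirror.fromTextArea(document.getElementById("code"));
cmSketch.setValue("first line\nsecond line\nthird line");
cmSketch.setSelections([
  {anchor: CodeMirror.Pos(0, 0), head: CodeMirror.Pos(0, 5)},
  {anchor: CodeMirror.Pos(2, 0), head: CodeMirror.Pos(2, 5)}
], 1);                                          // index 1 becomes the primary range
console.log(cmSketch.listSelections().length);  // 2 -- sorted, non-overlapping ranges
console.log(cmSketch.getCursor());              // head of the primary range: Pos(2, 5)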
+ var Selection = function(ranges, primIndex) { + this.ranges = ranges; + this.primIndex = primIndex; + }; + + Selection.prototype.primary = function () { return this.ranges[this.primIndex] }; + + Selection.prototype.equals = function (other) { + var this$1 = this; + + if (other == this) { return true } + if (other.primIndex != this.primIndex || other.ranges.length != this.ranges.length) { return false } + for (var i = 0; i < this.ranges.length; i++) { + var here = this$1.ranges[i], there = other.ranges[i]; + if (!equalCursorPos(here.anchor, there.anchor) || !equalCursorPos(here.head, there.head)) { return false } + } + return true + }; + + Selection.prototype.deepCopy = function () { + var this$1 = this; + + var out = []; + for (var i = 0; i < this.ranges.length; i++) + { out[i] = new Range(copyPos(this$1.ranges[i].anchor), copyPos(this$1.ranges[i].head)); } + return new Selection(out, this.primIndex) + }; + + Selection.prototype.somethingSelected = function () { + var this$1 = this; + + for (var i = 0; i < this.ranges.length; i++) + { if (!this$1.ranges[i].empty()) { return true } } + return false + }; + + Selection.prototype.contains = function (pos, end) { + var this$1 = this; + + if (!end) { end = pos; } + for (var i = 0; i < this.ranges.length; i++) { + var range = this$1.ranges[i]; + if (cmp(end, range.from()) >= 0 && cmp(pos, range.to()) <= 0) + { return i } + } + return -1 + }; + + var Range = function(anchor, head) { + this.anchor = anchor; this.head = head; + }; + + Range.prototype.from = function () { return minPos(this.anchor, this.head) }; + Range.prototype.to = function () { return maxPos(this.anchor, this.head) }; + Range.prototype.empty = function () { return this.head.line == this.anchor.line && this.head.ch == this.anchor.ch }; + + // Take an unsorted, potentially overlapping set of ranges, and + // build a selection out of it. 'Consumes' ranges array (modifying + // it). + function normalizeSelection(cm, ranges, primIndex) { + var mayTouch = cm && cm.options.selectionsMayTouch; + var prim = ranges[primIndex]; + ranges.sort(function (a, b) { return cmp(a.from(), b.from()); }); + primIndex = indexOf(ranges, prim); + for (var i = 1; i < ranges.length; i++) { + var cur = ranges[i], prev = ranges[i - 1]; + var diff = cmp(prev.to(), cur.from()); + if (mayTouch && !cur.empty() ? diff > 0 : diff >= 0) { + var from = minPos(prev.from(), cur.from()), to = maxPos(prev.to(), cur.to()); + var inv = prev.empty() ? cur.from() == cur.head : prev.from() == prev.head; + if (i <= primIndex) { --primIndex; } + ranges.splice(--i, 2, new Range(inv ? to : from, inv ? from : to)); + } + } + return new Selection(ranges, primIndex) + } + + function simpleSelection(anchor, head) { + return new Selection([new Range(anchor, head || anchor)], 0) + } // Compute the position of the end of a change (its 'to' property // refers to the pre-change end). - var changeEnd = CodeMirror.changeEnd = function(change) { - if (!change.text) return change.to; + function changeEnd(change) { + if (!change.text) { return change.to } return Pos(change.from.line + change.text.length - 1, - lst(change.text).length + (change.text.length == 1 ? change.from.ch : 0)); - }; + lst(change.text).length + (change.text.length == 1 ? change.from.ch : 0)) + } // Adjust a position to refer to the post-change position of the // same text, or the end of the change if the change covers it. 
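// Editor's note (illustration only, not part of the vendored diff): a standalone
// restatement of the changeEnd() arithmetic above, using plain {line, ch} objects
// so the two cases (single-line vs multi-line replacement text) can be checked by hand:
function changeEndSketch(change) {
  if (!change.text) { return change.to; }
  var lastLine = change.text[change.text.length - 1];
  return {line: change.from.line + change.text.length - 1,
          ch: lastLine.length + (change.text.length == 1 ? change.from.ch : 0)};
}
// Replacing the span (1,2)-(1,5) with ["abc", "de"] ends at {line: 2, ch: 2};
// replacing the same span with the single line ["xy"] ends at {line: 1, ch: 4}.
console.log(changeEndSketch({from: {line: 1, ch: 2}, to: {line: 1, ch: 5}, text: ["abc", "de"]}));
console.log(changeEndSketch({from: {line: 1, ch: 2}, to: {line: 1, ch: 5}, text: ["xy"]}));
// adjustForChange() below leans on this to shift positions that sit past the change.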
function adjustForChange(pos, change) { - if (cmp(pos, change.from) < 0) return pos; - if (cmp(pos, change.to) <= 0) return changeEnd(change); + if (cmp(pos, change.from) < 0) { return pos } + if (cmp(pos, change.to) <= 0) { return changeEnd(change) } var line = pos.line + change.text.length - (change.to.line - change.from.line) - 1, ch = pos.ch; - if (pos.line == change.to.line) ch += changeEnd(change).ch - change.to.ch; - return Pos(line, ch); + if (pos.line == change.to.line) { ch += changeEnd(change).ch - change.to.ch; } + return Pos(line, ch) } function computeSelAfterChange(doc, change) { @@ -4299,14 +4652,14 @@ out.push(new Range(adjustForChange(range.anchor, change), adjustForChange(range.head, change))); } - return normalizeSelection(out, doc.sel.primIndex); + return normalizeSelection(doc.cm, out, doc.sel.primIndex) } function offsetPos(pos, old, nw) { if (pos.line == old.line) - return Pos(nw.line, pos.ch - old.ch + nw.ch); + { return Pos(nw.line, pos.ch - old.ch + nw.ch) } else - return Pos(nw.line + (pos.line - old.line), pos.ch); + { return Pos(nw.line + (pos.line - old.line), pos.ch) } } // Used by replaceSelections to allow moving the selection to the @@ -4327,2823 +4680,25 @@ out[i] = new Range(from, from); } } - return new Selection(out, doc.sel.primIndex); - } - - // Allow "beforeChange" event handlers to influence a change - function filterChange(doc, change, update) { - var obj = { - canceled: false, - from: change.from, - to: change.to, - text: change.text, - origin: change.origin, - cancel: function() { this.canceled = true; } - }; - if (update) obj.update = function(from, to, text, origin) { - if (from) this.from = clipPos(doc, from); - if (to) this.to = clipPos(doc, to); - if (text) this.text = text; - if (origin !== undefined) this.origin = origin; - }; - signal(doc, "beforeChange", doc, obj); - if (doc.cm) signal(doc.cm, "beforeChange", doc.cm, obj); - - if (obj.canceled) return null; - return {from: obj.from, to: obj.to, text: obj.text, origin: obj.origin}; - } - - // Apply a change to a document, and add it to the document's - // history, and propagating it to all linked documents. - function makeChange(doc, change, ignoreReadOnly) { - if (doc.cm) { - if (!doc.cm.curOp) return operation(doc.cm, makeChange)(doc, change, ignoreReadOnly); - if (doc.cm.state.suppressEdits) return; - } - - if (hasHandler(doc, "beforeChange") || doc.cm && hasHandler(doc.cm, "beforeChange")) { - change = filterChange(doc, change, true); - if (!change) return; - } - - // Possibly split or suppress the update based on the presence - // of read-only spans in its range. - var split = sawReadOnlySpans && !ignoreReadOnly && removeReadOnlyRanges(doc, change.from, change.to); - if (split) { - for (var i = split.length - 1; i >= 0; --i) - makeChangeInner(doc, {from: split[i].from, to: split[i].to, text: i ? [""] : change.text}); - } else { - makeChangeInner(doc, change); - } - } - - function makeChangeInner(doc, change) { - if (change.text.length == 1 && change.text[0] == "" && cmp(change.from, change.to) == 0) return; - var selAfter = computeSelAfterChange(doc, change); - addChangeToHistory(doc, change, selAfter, doc.cm ? 
doc.cm.curOp.id : NaN); - - makeChangeSingleDoc(doc, change, selAfter, stretchSpansOverChange(doc, change)); - var rebased = []; - - linkedDocs(doc, function(doc, sharedHist) { - if (!sharedHist && indexOf(rebased, doc.history) == -1) { - rebaseHist(doc.history, change); - rebased.push(doc.history); - } - makeChangeSingleDoc(doc, change, null, stretchSpansOverChange(doc, change)); - }); - } - - // Revert a change stored in a document's history. - function makeChangeFromHistory(doc, type, allowSelectionOnly) { - if (doc.cm && doc.cm.state.suppressEdits) return; - - var hist = doc.history, event, selAfter = doc.sel; - var source = type == "undo" ? hist.done : hist.undone, dest = type == "undo" ? hist.undone : hist.done; - - // Verify that there is a useable event (so that ctrl-z won't - // needlessly clear selection events) - for (var i = 0; i < source.length; i++) { - event = source[i]; - if (allowSelectionOnly ? event.ranges && !event.equals(doc.sel) : !event.ranges) - break; - } - if (i == source.length) return; - hist.lastOrigin = hist.lastSelOrigin = null; - - for (;;) { - event = source.pop(); - if (event.ranges) { - pushSelectionToHistory(event, dest); - if (allowSelectionOnly && !event.equals(doc.sel)) { - setSelection(doc, event, {clearRedo: false}); - return; - } - selAfter = event; - } - else break; - } - - // Build up a reverse change object to add to the opposite history - // stack (redo when undoing, and vice versa). - var antiChanges = []; - pushSelectionToHistory(selAfter, dest); - dest.push({changes: antiChanges, generation: hist.generation}); - hist.generation = event.generation || ++hist.maxGeneration; - - var filter = hasHandler(doc, "beforeChange") || doc.cm && hasHandler(doc.cm, "beforeChange"); - - for (var i = event.changes.length - 1; i >= 0; --i) { - var change = event.changes[i]; - change.origin = type; - if (filter && !filterChange(doc, change, false)) { - source.length = 0; - return; - } - - antiChanges.push(historyChangeFromChange(doc, change)); - - var after = i ? computeSelAfterChange(doc, change) : lst(source); - makeChangeSingleDoc(doc, change, after, mergeOldSpans(doc, change)); - if (!i && doc.cm) doc.cm.scrollIntoView({from: change.from, to: changeEnd(change)}); - var rebased = []; - - // Propagate to the linked documents - linkedDocs(doc, function(doc, sharedHist) { - if (!sharedHist && indexOf(rebased, doc.history) == -1) { - rebaseHist(doc.history, change); - rebased.push(doc.history); - } - makeChangeSingleDoc(doc, change, null, mergeOldSpans(doc, change)); - }); - } - } - - // Sub-views need their line numbers shifted when text is added - // above or below them in the parent document. - function shiftDoc(doc, distance) { - if (distance == 0) return; - doc.first += distance; - doc.sel = new Selection(map(doc.sel.ranges, function(range) { - return new Range(Pos(range.anchor.line + distance, range.anchor.ch), - Pos(range.head.line + distance, range.head.ch)); - }), doc.sel.primIndex); - if (doc.cm) { - regChange(doc.cm, doc.first, doc.first - distance, distance); - for (var d = doc.cm.display, l = d.viewFrom; l < d.viewTo; l++) - regLineChange(doc.cm, l, "gutter"); - } - } - - // More lower-level change function, handling only a single document - // (not linked ones). 
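// Editor's note (illustration only, not part of the vendored diff): the history
// machinery above (hist.done / hist.undone, makeChangeFromHistory) backs the public
// undo/redo API. A hedged sketch, reusing the hypothetical cmSketch editor from the
// earlier note:
cmSketch.replaceRange("hello ", CodeMirror.Pos(0, 0), null, "+input");
console.log(cmSketch.historySize());   // pending events, e.g. {undo: 1, redo: 0} on a fresh editor
cmSketch.undo();                       // pops hist.done, pushes the inverse change onto hist.undone
cmSketch.redo();                       // replays it back the other way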
- function makeChangeSingleDoc(doc, change, selAfter, spans) { - if (doc.cm && !doc.cm.curOp) - return operation(doc.cm, makeChangeSingleDoc)(doc, change, selAfter, spans); - - if (change.to.line < doc.first) { - shiftDoc(doc, change.text.length - 1 - (change.to.line - change.from.line)); - return; - } - if (change.from.line > doc.lastLine()) return; - - // Clip the change to the size of this doc - if (change.from.line < doc.first) { - var shift = change.text.length - 1 - (doc.first - change.from.line); - shiftDoc(doc, shift); - change = {from: Pos(doc.first, 0), to: Pos(change.to.line + shift, change.to.ch), - text: [lst(change.text)], origin: change.origin}; - } - var last = doc.lastLine(); - if (change.to.line > last) { - change = {from: change.from, to: Pos(last, getLine(doc, last).text.length), - text: [change.text[0]], origin: change.origin}; - } - - change.removed = getBetween(doc, change.from, change.to); - - if (!selAfter) selAfter = computeSelAfterChange(doc, change); - if (doc.cm) makeChangeSingleDocInEditor(doc.cm, change, spans); - else updateDoc(doc, change, spans); - setSelectionNoUndo(doc, selAfter, sel_dontScroll); - } - - // Handle the interaction of a change to a document with the editor - // that this document is part of. - function makeChangeSingleDocInEditor(cm, change, spans) { - var doc = cm.doc, display = cm.display, from = change.from, to = change.to; - - var recomputeMaxLength = false, checkWidthStart = from.line; - if (!cm.options.lineWrapping) { - checkWidthStart = lineNo(visualLine(getLine(doc, from.line))); - doc.iter(checkWidthStart, to.line + 1, function(line) { - if (line == display.maxLine) { - recomputeMaxLength = true; - return true; - } - }); - } - - if (doc.sel.contains(change.from, change.to) > -1) - signalCursorActivity(cm); - - updateDoc(doc, change, spans, estimateHeight(cm)); - - if (!cm.options.lineWrapping) { - doc.iter(checkWidthStart, from.line + change.text.length, function(line) { - var len = lineLength(line); - if (len > display.maxLineLength) { - display.maxLine = line; - display.maxLineLength = len; - display.maxLineChanged = true; - recomputeMaxLength = false; - } - }); - if (recomputeMaxLength) cm.curOp.updateMaxLine = true; - } - - // Adjust frontier, schedule worker - doc.frontier = Math.min(doc.frontier, from.line); - startWorker(cm, 400); - - var lendiff = change.text.length - (to.line - from.line) - 1; - // Remember that these lines changed, for updating the display - if (change.full) - regChange(cm); - else if (from.line == to.line && change.text.length == 1 && !isWholeLineUpdate(cm.doc, change)) - regLineChange(cm, from.line, "text"); - else - regChange(cm, from.line, to.line + 1, lendiff); - - var changesHandler = hasHandler(cm, "changes"), changeHandler = hasHandler(cm, "change"); - if (changeHandler || changesHandler) { - var obj = { - from: from, to: to, - text: change.text, - removed: change.removed, - origin: change.origin - }; - if (changeHandler) signalLater(cm, "change", cm, obj); - if (changesHandler) (cm.curOp.changeObjs || (cm.curOp.changeObjs = [])).push(obj); - } - cm.display.selForContextMenu = null; - } - - function replaceRange(doc, code, from, to, origin) { - if (!to) to = from; - if (cmp(to, from) < 0) { var tmp = to; to = from; from = tmp; } - if (typeof code == "string") code = doc.splitLines(code); - makeChange(doc, {from: from, to: to, text: code, origin: origin}); - } - - // SCROLLING THINGS INTO VIEW - - // If an editor sits on the top or bottom of the window, partially - // scrolled out of view, this 
ensures that the cursor is visible. - function maybeScrollWindow(cm, coords) { - if (signalDOMEvent(cm, "scrollCursorIntoView")) return; - - var display = cm.display, box = display.sizer.getBoundingClientRect(), doScroll = null; - if (coords.top + box.top < 0) doScroll = true; - else if (coords.bottom + box.top > (window.innerHeight || document.documentElement.clientHeight)) doScroll = false; - if (doScroll != null && !phantom) { - var scrollNode = elt("div", "\u200b", null, "position: absolute; top: " + - (coords.top - display.viewOffset - paddingTop(cm.display)) + "px; height: " + - (coords.bottom - coords.top + scrollGap(cm) + display.barHeight) + "px; left: " + - coords.left + "px; width: 2px;"); - cm.display.lineSpace.appendChild(scrollNode); - scrollNode.scrollIntoView(doScroll); - cm.display.lineSpace.removeChild(scrollNode); - } - } - - // Scroll a given position into view (immediately), verifying that - // it actually became visible (as line heights are accurately - // measured, the position of something may 'drift' during drawing). - function scrollPosIntoView(cm, pos, end, margin) { - if (margin == null) margin = 0; - for (var limit = 0; limit < 5; limit++) { - var changed = false, coords = cursorCoords(cm, pos); - var endCoords = !end || end == pos ? coords : cursorCoords(cm, end); - var scrollPos = calculateScrollPos(cm, Math.min(coords.left, endCoords.left), - Math.min(coords.top, endCoords.top) - margin, - Math.max(coords.left, endCoords.left), - Math.max(coords.bottom, endCoords.bottom) + margin); - var startTop = cm.doc.scrollTop, startLeft = cm.doc.scrollLeft; - if (scrollPos.scrollTop != null) { - setScrollTop(cm, scrollPos.scrollTop); - if (Math.abs(cm.doc.scrollTop - startTop) > 1) changed = true; - } - if (scrollPos.scrollLeft != null) { - setScrollLeft(cm, scrollPos.scrollLeft); - if (Math.abs(cm.doc.scrollLeft - startLeft) > 1) changed = true; - } - if (!changed) break; - } - return coords; - } - - // Scroll a given set of coordinates into view (immediately). - function scrollIntoView(cm, x1, y1, x2, y2) { - var scrollPos = calculateScrollPos(cm, x1, y1, x2, y2); - if (scrollPos.scrollTop != null) setScrollTop(cm, scrollPos.scrollTop); - if (scrollPos.scrollLeft != null) setScrollLeft(cm, scrollPos.scrollLeft); - } - - // Calculate a new scroll position needed to scroll the given - // rectangle into view. Returns an object with scrollTop and - // scrollLeft properties. When these are undefined, the - // vertical/horizontal position does not need to be adjusted. - function calculateScrollPos(cm, x1, y1, x2, y2) { - var display = cm.display, snapMargin = textHeight(cm.display); - if (y1 < 0) y1 = 0; - var screentop = cm.curOp && cm.curOp.scrollTop != null ? cm.curOp.scrollTop : display.scroller.scrollTop; - var screen = displayHeight(cm), result = {}; - if (y2 - y1 > screen) y2 = y1 + screen; - var docBottom = cm.doc.height + paddingVert(display); - var atTop = y1 < snapMargin, atBottom = y2 > docBottom - snapMargin; - if (y1 < screentop) { - result.scrollTop = atTop ? 0 : y1; - } else if (y2 > screentop + screen) { - var newTop = Math.min(y1, (atBottom ? docBottom : y2) - screen); - if (newTop != screentop) result.scrollTop = newTop; - } - - var screenleft = cm.curOp && cm.curOp.scrollLeft != null ? cm.curOp.scrollLeft : display.scroller.scrollLeft; - var screenw = displayWidth(cm) - (cm.options.fixedGutter ? 
display.gutters.offsetWidth : 0); - var tooWide = x2 - x1 > screenw; - if (tooWide) x2 = x1 + screenw; - if (x1 < 10) - result.scrollLeft = 0; - else if (x1 < screenleft) - result.scrollLeft = Math.max(0, x1 - (tooWide ? 0 : 10)); - else if (x2 > screenw + screenleft - 3) - result.scrollLeft = x2 + (tooWide ? 0 : 10) - screenw; - return result; - } - - // Store a relative adjustment to the scroll position in the current - // operation (to be applied when the operation finishes). - function addToScrollPos(cm, left, top) { - if (left != null || top != null) resolveScrollToPos(cm); - if (left != null) - cm.curOp.scrollLeft = (cm.curOp.scrollLeft == null ? cm.doc.scrollLeft : cm.curOp.scrollLeft) + left; - if (top != null) - cm.curOp.scrollTop = (cm.curOp.scrollTop == null ? cm.doc.scrollTop : cm.curOp.scrollTop) + top; - } - - // Make sure that at the end of the operation the current cursor is - // shown. - function ensureCursorVisible(cm) { - resolveScrollToPos(cm); - var cur = cm.getCursor(), from = cur, to = cur; - if (!cm.options.lineWrapping) { - from = cur.ch ? Pos(cur.line, cur.ch - 1) : cur; - to = Pos(cur.line, cur.ch + 1); - } - cm.curOp.scrollToPos = {from: from, to: to, margin: cm.options.cursorScrollMargin, isCursor: true}; - } - - // When an operation has its scrollToPos property set, and another - // scroll action is applied before the end of the operation, this - // 'simulates' scrolling that position into view in a cheap way, so - // that the effect of intermediate scroll commands is not ignored. - function resolveScrollToPos(cm) { - var range = cm.curOp.scrollToPos; - if (range) { - cm.curOp.scrollToPos = null; - var from = estimateCoords(cm, range.from), to = estimateCoords(cm, range.to); - var sPos = calculateScrollPos(cm, Math.min(from.left, to.left), - Math.min(from.top, to.top) - range.margin, - Math.max(from.right, to.right), - Math.max(from.bottom, to.bottom) + range.margin); - cm.scrollTo(sPos.scrollLeft, sPos.scrollTop); - } - } - - // API UTILITIES - - // Indent the given line. The how parameter can be "smart", - // "add"/null, "subtract", or "prev". When aggressive is false - // (typically set to true for forced single-line indents), empty - // lines are not indented, and places where the mode returns Pass - // are left alone. - function indentLine(cm, n, how, aggressive) { - var doc = cm.doc, state; - if (how == null) how = "add"; - if (how == "smart") { - // Fall back to "prev" when the mode doesn't have an indentation - // method. 
- if (!doc.mode.indent) how = "prev"; - else state = getStateBefore(cm, n); - } - - var tabSize = cm.options.tabSize; - var line = getLine(doc, n), curSpace = countColumn(line.text, null, tabSize); - if (line.stateAfter) line.stateAfter = null; - var curSpaceString = line.text.match(/^\s*/)[0], indentation; - if (!aggressive && !/\S/.test(line.text)) { - indentation = 0; - how = "not"; - } else if (how == "smart") { - indentation = doc.mode.indent(state, line.text.slice(curSpaceString.length), line.text); - if (indentation == Pass || indentation > 150) { - if (!aggressive) return; - how = "prev"; - } - } - if (how == "prev") { - if (n > doc.first) indentation = countColumn(getLine(doc, n-1).text, null, tabSize); - else indentation = 0; - } else if (how == "add") { - indentation = curSpace + cm.options.indentUnit; - } else if (how == "subtract") { - indentation = curSpace - cm.options.indentUnit; - } else if (typeof how == "number") { - indentation = curSpace + how; - } - indentation = Math.max(0, indentation); - - var indentString = "", pos = 0; - if (cm.options.indentWithTabs) - for (var i = Math.floor(indentation / tabSize); i; --i) {pos += tabSize; indentString += "\t";} - if (pos < indentation) indentString += spaceStr(indentation - pos); - - if (indentString != curSpaceString) { - replaceRange(doc, indentString, Pos(n, 0), Pos(n, curSpaceString.length), "+input"); - line.stateAfter = null; - return true; - } else { - // Ensure that, if the cursor was in the whitespace at the start - // of the line, it is moved to the end of that space. - for (var i = 0; i < doc.sel.ranges.length; i++) { - var range = doc.sel.ranges[i]; - if (range.head.line == n && range.head.ch < curSpaceString.length) { - var pos = Pos(n, curSpaceString.length); - replaceOneSelection(doc, i, new Range(pos, pos)); - break; - } - } - } - } - - // Utility for applying a change to a line by handle or number, - // returning the number and optionally registering the line as - // changed. - function changeLine(doc, handle, changeType, op) { - var no = handle, line = handle; - if (typeof handle == "number") line = getLine(doc, clipLine(doc, handle)); - else no = lineNo(handle); - if (no == null) return null; - if (op(line, no) && doc.cm) regLineChange(doc.cm, no, changeType); - return line; - } - - // Helper for deleting text near the selection(s), used to implement - // backspace, delete, and similar functionality. - function deleteNearSelection(cm, compute) { - var ranges = cm.doc.sel.ranges, kill = []; - // Build up a set of ranges to kill first, merging overlapping - // ranges. - for (var i = 0; i < ranges.length; i++) { - var toKill = compute(ranges[i]); - while (kill.length && cmp(toKill.from, lst(kill).to) <= 0) { - var replaced = kill.pop(); - if (cmp(replaced.from, toKill.from) < 0) { - toKill.from = replaced.from; - break; - } - } - kill.push(toKill); - } - // Next, remove those actual ranges. - runInOp(cm, function() { - for (var i = kill.length - 1; i >= 0; i--) - replaceRange(cm.doc, "", kill[i].from, kill[i].to, "+delete"); - ensureCursorVisible(cm); - }); - } - - // Used for horizontal relative motion. Dir is -1 or 1 (left or - // right), unit can be "char", "column" (like char, but doesn't - // cross line boundaries), "word" (across next word), or "group" (to - // the start of next group of word or non-word-non-whitespace - // chars). 
The visually param controls whether, in right-to-left - // text, direction 1 means to move towards the next index in the - // string, or towards the character to the right of the current - // position. The resulting position will have a hitSide=true - // property if it reached the end of the document. - function findPosH(doc, pos, dir, unit, visually) { - var line = pos.line, ch = pos.ch, origDir = dir; - var lineObj = getLine(doc, line); - function findNextLine() { - var l = line + dir; - if (l < doc.first || l >= doc.first + doc.size) return false - line = l; - return lineObj = getLine(doc, l); - } - function moveOnce(boundToLine) { - var next = (visually ? moveVisually : moveLogically)(lineObj, ch, dir, true); - if (next == null) { - if (!boundToLine && findNextLine()) { - if (visually) ch = (dir < 0 ? lineRight : lineLeft)(lineObj); - else ch = dir < 0 ? lineObj.text.length : 0; - } else return false - } else ch = next; - return true; - } - - if (unit == "char") { - moveOnce() - } else if (unit == "column") { - moveOnce(true) - } else if (unit == "word" || unit == "group") { - var sawType = null, group = unit == "group"; - var helper = doc.cm && doc.cm.getHelper(pos, "wordChars"); - for (var first = true;; first = false) { - if (dir < 0 && !moveOnce(!first)) break; - var cur = lineObj.text.charAt(ch) || "\n"; - var type = isWordChar(cur, helper) ? "w" - : group && cur == "\n" ? "n" - : !group || /\s/.test(cur) ? null - : "p"; - if (group && !first && !type) type = "s"; - if (sawType && sawType != type) { - if (dir < 0) {dir = 1; moveOnce();} - break; - } - - if (type) sawType = type; - if (dir > 0 && !moveOnce(!first)) break; - } - } - var result = skipAtomic(doc, Pos(line, ch), pos, origDir, true); - if (!cmp(pos, result)) result.hitSide = true; - return result; - } - - // For relative vertical movement. Dir may be -1 or 1. Unit can be - // "page" or "line". The resulting position will have a hitSide=true - // property if it reached the end of the document. - function findPosV(cm, pos, dir, unit) { - var doc = cm.doc, x = pos.left, y; - if (unit == "page") { - var pageSize = Math.min(cm.display.wrapper.clientHeight, window.innerHeight || document.documentElement.clientHeight); - y = pos.top + dir * (pageSize - (dir < 0 ? 1.5 : .5) * textHeight(cm.display)); - } else if (unit == "line") { - y = dir > 0 ? pos.bottom + 3 : pos.top - 3; - } - for (;;) { - var target = coordsChar(cm, x, y); - if (!target.outside) break; - if (dir < 0 ? y <= 0 : y >= doc.height) { target.hitSide = true; break; } - y += dir * 5; - } - return target; - } - - // EDITOR METHODS - - // The publicly visible API. Note that methodOp(f) means - // 'wrap f in an operation, performed on its `this` parameter'. - - // This is not the complete set of editor methods. Most of the - // methods defined on the Doc type are also injected into - // CodeMirror.prototype, for backwards compatibility and - // convenience. - - CodeMirror.prototype = { - constructor: CodeMirror, - focus: function(){window.focus(); this.display.input.focus();}, - - setOption: function(option, value) { - var options = this.options, old = options[option]; - if (options[option] == value && option != "mode") return; - options[option] = value; - if (optionHandlers.hasOwnProperty(option)) - operation(this, optionHandlers[option])(this, value, old); - }, - - getOption: function(option) {return this.options[option];}, - getDoc: function() {return this.doc;}, - - addKeyMap: function(map, bottom) { - this.state.keyMaps[bottom ? 
"push" : "unshift"](getKeyMap(map)); - }, - removeKeyMap: function(map) { - var maps = this.state.keyMaps; - for (var i = 0; i < maps.length; ++i) - if (maps[i] == map || maps[i].name == map) { - maps.splice(i, 1); - return true; - } - }, - - addOverlay: methodOp(function(spec, options) { - var mode = spec.token ? spec : CodeMirror.getMode(this.options, spec); - if (mode.startState) throw new Error("Overlays may not be stateful."); - this.state.overlays.push({mode: mode, modeSpec: spec, opaque: options && options.opaque}); - this.state.modeGen++; - regChange(this); - }), - removeOverlay: methodOp(function(spec) { - var overlays = this.state.overlays; - for (var i = 0; i < overlays.length; ++i) { - var cur = overlays[i].modeSpec; - if (cur == spec || typeof spec == "string" && cur.name == spec) { - overlays.splice(i, 1); - this.state.modeGen++; - regChange(this); - return; - } - } - }), - - indentLine: methodOp(function(n, dir, aggressive) { - if (typeof dir != "string" && typeof dir != "number") { - if (dir == null) dir = this.options.smartIndent ? "smart" : "prev"; - else dir = dir ? "add" : "subtract"; - } - if (isLine(this.doc, n)) indentLine(this, n, dir, aggressive); - }), - indentSelection: methodOp(function(how) { - var ranges = this.doc.sel.ranges, end = -1; - for (var i = 0; i < ranges.length; i++) { - var range = ranges[i]; - if (!range.empty()) { - var from = range.from(), to = range.to(); - var start = Math.max(end, from.line); - end = Math.min(this.lastLine(), to.line - (to.ch ? 0 : 1)) + 1; - for (var j = start; j < end; ++j) - indentLine(this, j, how); - var newRanges = this.doc.sel.ranges; - if (from.ch == 0 && ranges.length == newRanges.length && newRanges[i].from().ch > 0) - replaceOneSelection(this.doc, i, new Range(from, newRanges[i].to()), sel_dontScroll); - } else if (range.head.line > end) { - indentLine(this, range.head.line, how, true); - end = range.head.line; - if (i == this.doc.sel.primIndex) ensureCursorVisible(this); - } - } - }), - - // Fetch the parser token for a given character. Useful for hacks - // that want to inspect the mode state (say, for completion). - getTokenAt: function(pos, precise) { - return takeToken(this, pos, precise); - }, - - getLineTokens: function(line, precise) { - return takeToken(this, Pos(line), precise, true); - }, - - getTokenTypeAt: function(pos) { - pos = clipPos(this.doc, pos); - var styles = getLineStyles(this, getLine(this.doc, pos.line)); - var before = 0, after = (styles.length - 1) / 2, ch = pos.ch; - var type; - if (ch == 0) type = styles[2]; - else for (;;) { - var mid = (before + after) >> 1; - if ((mid ? styles[mid * 2 - 1] : 0) >= ch) after = mid; - else if (styles[mid * 2 + 1] < ch) before = mid + 1; - else { type = styles[mid * 2 + 2]; break; } - } - var cut = type ? type.indexOf("cm-overlay ") : -1; - return cut < 0 ? type : cut == 0 ? 
null : type.slice(0, cut - 1); - }, - - getModeAt: function(pos) { - var mode = this.doc.mode; - if (!mode.innerMode) return mode; - return CodeMirror.innerMode(mode, this.getTokenAt(pos).state).mode; - }, - - getHelper: function(pos, type) { - return this.getHelpers(pos, type)[0]; - }, - - getHelpers: function(pos, type) { - var found = []; - if (!helpers.hasOwnProperty(type)) return found; - var help = helpers[type], mode = this.getModeAt(pos); - if (typeof mode[type] == "string") { - if (help[mode[type]]) found.push(help[mode[type]]); - } else if (mode[type]) { - for (var i = 0; i < mode[type].length; i++) { - var val = help[mode[type][i]]; - if (val) found.push(val); - } - } else if (mode.helperType && help[mode.helperType]) { - found.push(help[mode.helperType]); - } else if (help[mode.name]) { - found.push(help[mode.name]); - } - for (var i = 0; i < help._global.length; i++) { - var cur = help._global[i]; - if (cur.pred(mode, this) && indexOf(found, cur.val) == -1) - found.push(cur.val); - } - return found; - }, - - getStateAfter: function(line, precise) { - var doc = this.doc; - line = clipLine(doc, line == null ? doc.first + doc.size - 1: line); - return getStateBefore(this, line + 1, precise); - }, - - cursorCoords: function(start, mode) { - var pos, range = this.doc.sel.primary(); - if (start == null) pos = range.head; - else if (typeof start == "object") pos = clipPos(this.doc, start); - else pos = start ? range.from() : range.to(); - return cursorCoords(this, pos, mode || "page"); - }, - - charCoords: function(pos, mode) { - return charCoords(this, clipPos(this.doc, pos), mode || "page"); - }, - - coordsChar: function(coords, mode) { - coords = fromCoordSystem(this, coords, mode || "page"); - return coordsChar(this, coords.left, coords.top); - }, - - lineAtHeight: function(height, mode) { - height = fromCoordSystem(this, {top: height, left: 0}, mode || "page").top; - return lineAtHeight(this.doc, height + this.display.viewOffset); - }, - heightAtLine: function(line, mode) { - var end = false, lineObj; - if (typeof line == "number") { - var last = this.doc.first + this.doc.size - 1; - if (line < this.doc.first) line = this.doc.first; - else if (line > last) { line = last; end = true; } - lineObj = getLine(this.doc, line); - } else { - lineObj = line; - } - return intoCoordSystem(this, lineObj, {top: 0, left: 0}, mode || "page").top + - (end ? 
this.doc.height - heightAtLine(lineObj) : 0); - }, - - defaultTextHeight: function() { return textHeight(this.display); }, - defaultCharWidth: function() { return charWidth(this.display); }, - - setGutterMarker: methodOp(function(line, gutterID, value) { - return changeLine(this.doc, line, "gutter", function(line) { - var markers = line.gutterMarkers || (line.gutterMarkers = {}); - markers[gutterID] = value; - if (!value && isEmpty(markers)) line.gutterMarkers = null; - return true; - }); - }), - - clearGutter: methodOp(function(gutterID) { - var cm = this, doc = cm.doc, i = doc.first; - doc.iter(function(line) { - if (line.gutterMarkers && line.gutterMarkers[gutterID]) { - line.gutterMarkers[gutterID] = null; - regLineChange(cm, i, "gutter"); - if (isEmpty(line.gutterMarkers)) line.gutterMarkers = null; - } - ++i; - }); - }), - - lineInfo: function(line) { - if (typeof line == "number") { - if (!isLine(this.doc, line)) return null; - var n = line; - line = getLine(this.doc, line); - if (!line) return null; - } else { - var n = lineNo(line); - if (n == null) return null; - } - return {line: n, handle: line, text: line.text, gutterMarkers: line.gutterMarkers, - textClass: line.textClass, bgClass: line.bgClass, wrapClass: line.wrapClass, - widgets: line.widgets}; - }, - - getViewport: function() { return {from: this.display.viewFrom, to: this.display.viewTo};}, - - addWidget: function(pos, node, scroll, vert, horiz) { - var display = this.display; - pos = cursorCoords(this, clipPos(this.doc, pos)); - var top = pos.bottom, left = pos.left; - node.style.position = "absolute"; - node.setAttribute("cm-ignore-events", "true"); - this.display.input.setUneditable(node); - display.sizer.appendChild(node); - if (vert == "over") { - top = pos.top; - } else if (vert == "above" || vert == "near") { - var vspace = Math.max(display.wrapper.clientHeight, this.doc.height), - hspace = Math.max(display.sizer.clientWidth, display.lineSpace.clientWidth); - // Default to positioning above (if specified and possible); otherwise default to positioning below - if ((vert == 'above' || pos.bottom + node.offsetHeight > vspace) && pos.top > node.offsetHeight) - top = pos.top - node.offsetHeight; - else if (pos.bottom + node.offsetHeight <= vspace) - top = pos.bottom; - if (left + node.offsetWidth > hspace) - left = hspace - node.offsetWidth; - } - node.style.top = top + "px"; - node.style.left = node.style.right = ""; - if (horiz == "right") { - left = display.sizer.clientWidth - node.offsetWidth; - node.style.right = "0px"; - } else { - if (horiz == "left") left = 0; - else if (horiz == "middle") left = (display.sizer.clientWidth - node.offsetWidth) / 2; - node.style.left = left + "px"; - } - if (scroll) - scrollIntoView(this, left, top, left + node.offsetWidth, top + node.offsetHeight); - }, - - triggerOnKeyDown: methodOp(onKeyDown), - triggerOnKeyPress: methodOp(onKeyPress), - triggerOnKeyUp: onKeyUp, - - execCommand: function(cmd) { - if (commands.hasOwnProperty(cmd)) - return commands[cmd].call(null, this); - }, - - triggerElectric: methodOp(function(text) { triggerElectric(this, text); }), - - findPosH: function(from, amount, unit, visually) { - var dir = 1; - if (amount < 0) { dir = -1; amount = -amount; } - for (var i = 0, cur = clipPos(this.doc, from); i < amount; ++i) { - cur = findPosH(this.doc, cur, dir, unit, visually); - if (cur.hitSide) break; - } - return cur; - }, - - moveH: methodOp(function(dir, unit) { - var cm = this; - cm.extendSelectionsBy(function(range) { - if (cm.display.shift || cm.doc.extend 
|| range.empty()) - return findPosH(cm.doc, range.head, dir, unit, cm.options.rtlMoveVisually); - else - return dir < 0 ? range.from() : range.to(); - }, sel_move); - }), - - deleteH: methodOp(function(dir, unit) { - var sel = this.doc.sel, doc = this.doc; - if (sel.somethingSelected()) - doc.replaceSelection("", null, "+delete"); - else - deleteNearSelection(this, function(range) { - var other = findPosH(doc, range.head, dir, unit, false); - return dir < 0 ? {from: other, to: range.head} : {from: range.head, to: other}; - }); - }), - - findPosV: function(from, amount, unit, goalColumn) { - var dir = 1, x = goalColumn; - if (amount < 0) { dir = -1; amount = -amount; } - for (var i = 0, cur = clipPos(this.doc, from); i < amount; ++i) { - var coords = cursorCoords(this, cur, "div"); - if (x == null) x = coords.left; - else coords.left = x; - cur = findPosV(this, coords, dir, unit); - if (cur.hitSide) break; - } - return cur; - }, - - moveV: methodOp(function(dir, unit) { - var cm = this, doc = this.doc, goals = []; - var collapse = !cm.display.shift && !doc.extend && doc.sel.somethingSelected(); - doc.extendSelectionsBy(function(range) { - if (collapse) - return dir < 0 ? range.from() : range.to(); - var headPos = cursorCoords(cm, range.head, "div"); - if (range.goalColumn != null) headPos.left = range.goalColumn; - goals.push(headPos.left); - var pos = findPosV(cm, headPos, dir, unit); - if (unit == "page" && range == doc.sel.primary()) - addToScrollPos(cm, null, charCoords(cm, pos, "div").top - headPos.top); - return pos; - }, sel_move); - if (goals.length) for (var i = 0; i < doc.sel.ranges.length; i++) - doc.sel.ranges[i].goalColumn = goals[i]; - }), - - // Find the word at the given position (as returned by coordsChar). - findWordAt: function(pos) { - var doc = this.doc, line = getLine(doc, pos.line).text; - var start = pos.ch, end = pos.ch; - if (line) { - var helper = this.getHelper(pos, "wordChars"); - if ((pos.xRel < 0 || end == line.length) && start) --start; else ++end; - var startChar = line.charAt(start); - var check = isWordChar(startChar, helper) - ? function(ch) { return isWordChar(ch, helper); } - : /\s/.test(startChar) ? 
function(ch) {return /\s/.test(ch);} - : function(ch) {return !/\s/.test(ch) && !isWordChar(ch);}; - while (start > 0 && check(line.charAt(start - 1))) --start; - while (end < line.length && check(line.charAt(end))) ++end; - } - return new Range(Pos(pos.line, start), Pos(pos.line, end)); - }, - - toggleOverwrite: function(value) { - if (value != null && value == this.state.overwrite) return; - if (this.state.overwrite = !this.state.overwrite) - addClass(this.display.cursorDiv, "CodeMirror-overwrite"); - else - rmClass(this.display.cursorDiv, "CodeMirror-overwrite"); - - signal(this, "overwriteToggle", this, this.state.overwrite); - }, - hasFocus: function() { return this.display.input.getField() == activeElt(); }, - isReadOnly: function() { return !!(this.options.readOnly || this.doc.cantEdit); }, - - scrollTo: methodOp(function(x, y) { - if (x != null || y != null) resolveScrollToPos(this); - if (x != null) this.curOp.scrollLeft = x; - if (y != null) this.curOp.scrollTop = y; - }), - getScrollInfo: function() { - var scroller = this.display.scroller; - return {left: scroller.scrollLeft, top: scroller.scrollTop, - height: scroller.scrollHeight - scrollGap(this) - this.display.barHeight, - width: scroller.scrollWidth - scrollGap(this) - this.display.barWidth, - clientHeight: displayHeight(this), clientWidth: displayWidth(this)}; - }, - - scrollIntoView: methodOp(function(range, margin) { - if (range == null) { - range = {from: this.doc.sel.primary().head, to: null}; - if (margin == null) margin = this.options.cursorScrollMargin; - } else if (typeof range == "number") { - range = {from: Pos(range, 0), to: null}; - } else if (range.from == null) { - range = {from: range, to: null}; - } - if (!range.to) range.to = range.from; - range.margin = margin || 0; - - if (range.from.line != null) { - resolveScrollToPos(this); - this.curOp.scrollToPos = range; - } else { - var sPos = calculateScrollPos(this, Math.min(range.from.left, range.to.left), - Math.min(range.from.top, range.to.top) - range.margin, - Math.max(range.from.right, range.to.right), - Math.max(range.from.bottom, range.to.bottom) + range.margin); - this.scrollTo(sPos.scrollLeft, sPos.scrollTop); - } - }), - - setSize: methodOp(function(width, height) { - var cm = this; - function interpret(val) { - return typeof val == "number" || /^\d+$/.test(String(val)) ? 
val + "px" : val; - } - if (width != null) cm.display.wrapper.style.width = interpret(width); - if (height != null) cm.display.wrapper.style.height = interpret(height); - if (cm.options.lineWrapping) clearLineMeasurementCache(this); - var lineNo = cm.display.viewFrom; - cm.doc.iter(lineNo, cm.display.viewTo, function(line) { - if (line.widgets) for (var i = 0; i < line.widgets.length; i++) - if (line.widgets[i].noHScroll) { regLineChange(cm, lineNo, "widget"); break; } - ++lineNo; - }); - cm.curOp.forceUpdate = true; - signal(cm, "refresh", this); - }), - - operation: function(f){return runInOp(this, f);}, - - refresh: methodOp(function() { - var oldHeight = this.display.cachedTextHeight; - regChange(this); - this.curOp.forceUpdate = true; - clearCaches(this); - this.scrollTo(this.doc.scrollLeft, this.doc.scrollTop); - updateGutterSpace(this); - if (oldHeight == null || Math.abs(oldHeight - textHeight(this.display)) > .5) - estimateLineHeights(this); - signal(this, "refresh", this); - }), - - swapDoc: methodOp(function(doc) { - var old = this.doc; - old.cm = null; - attachDoc(this, doc); - clearCaches(this); - this.display.input.reset(); - this.scrollTo(doc.scrollLeft, doc.scrollTop); - this.curOp.forceScroll = true; - signalLater(this, "swapDoc", this, old); - return old; - }), - - getInputField: function(){return this.display.input.getField();}, - getWrapperElement: function(){return this.display.wrapper;}, - getScrollerElement: function(){return this.display.scroller;}, - getGutterElement: function(){return this.display.gutters;} - }; - eventMixin(CodeMirror); - - // OPTION DEFAULTS - - // The default configuration options. - var defaults = CodeMirror.defaults = {}; - // Functions to run when options are changed. - var optionHandlers = CodeMirror.optionHandlers = {}; - - function option(name, deflt, handle, notOnInit) { - CodeMirror.defaults[name] = deflt; - if (handle) optionHandlers[name] = - notOnInit ? function(cm, val, old) {if (old != Init) handle(cm, val, old);} : handle; - } - - // Passed to option handlers when there is no old value. - var Init = CodeMirror.Init = {toString: function(){return "CodeMirror.Init";}}; - - // These two are, on init, called from the constructor because they - // have to be initialized before the editor can start at all. - option("value", "", function(cm, val) { - cm.setValue(val); - }, true); - option("mode", null, function(cm, val) { - cm.doc.modeOption = val; - loadMode(cm); - }, true); - - option("indentUnit", 2, loadMode, true); - option("indentWithTabs", false); - option("smartIndent", true); - option("tabSize", 4, function(cm) { + return new Selection(out, doc.sel.primIndex) + } + + // Used to get the editor into a consistent state again when options change. 
+ + function loadMode(cm) { + cm.doc.mode = getMode(cm.options, cm.doc.modeOption); resetModeState(cm); - clearCaches(cm); - regChange(cm); - }, true); - option("lineSeparator", null, function(cm, val) { - cm.doc.lineSep = val; - if (!val) return; - var newBreaks = [], lineNo = cm.doc.first; - cm.doc.iter(function(line) { - for (var pos = 0;;) { - var found = line.text.indexOf(val, pos); - if (found == -1) break; - pos = found + val.length; - newBreaks.push(Pos(lineNo, found)); - } - lineNo++; + } + + function resetModeState(cm) { + cm.doc.iter(function (line) { + if (line.stateAfter) { line.stateAfter = null; } + if (line.styles) { line.styles = null; } }); - for (var i = newBreaks.length - 1; i >= 0; i--) - replaceRange(cm.doc, val, newBreaks[i], Pos(newBreaks[i].line, newBreaks[i].ch + val.length)) - }); - option("specialChars", /[\t\u0000-\u0019\u00ad\u200b-\u200f\u2028\u2029\ufeff]/g, function(cm, val, old) { - cm.state.specialChars = new RegExp(val.source + (val.test("\t") ? "" : "|\t"), "g"); - if (old != CodeMirror.Init) cm.refresh(); - }); - option("specialCharPlaceholder", defaultSpecialCharPlaceholder, function(cm) {cm.refresh();}, true); - option("electricChars", true); - option("inputStyle", mobile ? "contenteditable" : "textarea", function() { - throw new Error("inputStyle can not (yet) be changed in a running editor"); // FIXME - }, true); - option("rtlMoveVisually", !windows); - option("wholeLineUpdateBefore", true); - - option("theme", "default", function(cm) { - themeChanged(cm); - guttersChanged(cm); - }, true); - option("keyMap", "default", function(cm, val, old) { - var next = getKeyMap(val); - var prev = old != CodeMirror.Init && getKeyMap(old); - if (prev && prev.detach) prev.detach(cm, next); - if (next.attach) next.attach(cm, prev || null); - }); - option("extraKeys", null); - - option("lineWrapping", false, wrappingChanged, true); - option("gutters", [], function(cm) { - setGuttersForLineNumbers(cm.options); - guttersChanged(cm); - }, true); - option("fixedGutter", true, function(cm, val) { - cm.display.gutters.style.left = val ? 
compensateForHScroll(cm.display) + "px" : "0"; - cm.refresh(); - }, true); - option("coverGutterNextToScrollbar", false, function(cm) {updateScrollbars(cm);}, true); - option("scrollbarStyle", "native", function(cm) { - initScrollbars(cm); - updateScrollbars(cm); - cm.display.scrollbars.setScrollTop(cm.doc.scrollTop); - cm.display.scrollbars.setScrollLeft(cm.doc.scrollLeft); - }, true); - option("lineNumbers", false, function(cm) { - setGuttersForLineNumbers(cm.options); - guttersChanged(cm); - }, true); - option("firstLineNumber", 1, guttersChanged, true); - option("lineNumberFormatter", function(integer) {return integer;}, guttersChanged, true); - option("showCursorWhenSelecting", false, updateSelection, true); - - option("resetSelectionOnContextMenu", true); - option("lineWiseCopyCut", true); - - option("readOnly", false, function(cm, val) { - if (val == "nocursor") { - onBlur(cm); - cm.display.input.blur(); - cm.display.disabled = true; - } else { - cm.display.disabled = false; - } - cm.display.input.readOnlyChanged(val) - }); - option("disableInput", false, function(cm, val) {if (!val) cm.display.input.reset();}, true); - option("dragDrop", true, dragDropChanged); - option("allowDropFileTypes", null); - - option("cursorBlinkRate", 530); - option("cursorScrollMargin", 0); - option("cursorHeight", 1, updateSelection, true); - option("singleCursorHeightPerLine", true, updateSelection, true); - option("workTime", 100); - option("workDelay", 100); - option("flattenSpans", true, resetModeState, true); - option("addModeClass", false, resetModeState, true); - option("pollInterval", 100); - option("undoDepth", 200, function(cm, val){cm.doc.history.undoDepth = val;}); - option("historyEventDelay", 1250); - option("viewportMargin", 10, function(cm){cm.refresh();}, true); - option("maxHighlightLength", 10000, resetModeState, true); - option("moveInputWithCursor", true, function(cm, val) { - if (!val) cm.display.input.resetPosition(); - }); - - option("tabindex", null, function(cm, val) { - cm.display.input.getField().tabIndex = val || ""; - }); - option("autofocus", null); - - // MODE DEFINITION AND QUERYING - - // Known modes, by name and by MIME - var modes = CodeMirror.modes = {}, mimeModes = CodeMirror.mimeModes = {}; - - // Extra arguments are stored as the mode's dependencies, which is - // used by (legacy) mechanisms like loadmode.js to automatically - // load a mode. (Preferred mechanism is the require/define calls.) - CodeMirror.defineMode = function(name, mode) { - if (!CodeMirror.defaults.mode && name != "null") CodeMirror.defaults.mode = name; - if (arguments.length > 2) - mode.dependencies = Array.prototype.slice.call(arguments, 2); - modes[name] = mode; - }; - - CodeMirror.defineMIME = function(mime, spec) { - mimeModes[mime] = spec; - }; - - // Given a MIME type, a {name, ...options} config object, or a name - // string, return a mode config object. 
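// Editor's note (illustration only, not part of the vendored diff): defineMode and
// defineMIME above, together with resolveMode below, are what let a MIME string
// stand in for a mode spec. A hedged sketch with a purely hypothetical toy mode:
CodeMirror.defineMode("shouty", function (config, modeSpec) {
  return {
    token: function (stream) {
      if (stream.match(/^[A-Z]+/)) { return "keyword"; }  // style runs of uppercase letters
      stream.next();                                      // always advance the stream
      return null;
    }
  };
});
CodeMirror.defineMIME("text/x-shouty", "shouty");
console.log(CodeMirror.resolveMode("text/x-shouty"));   // -> {name: "shouty"}
// e.g. cmSketch.setOption("mode", "text/x-shouty");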
- CodeMirror.resolveMode = function(spec) { - if (typeof spec == "string" && mimeModes.hasOwnProperty(spec)) { - spec = mimeModes[spec]; - } else if (spec && typeof spec.name == "string" && mimeModes.hasOwnProperty(spec.name)) { - var found = mimeModes[spec.name]; - if (typeof found == "string") found = {name: found}; - spec = createObj(found, spec); - spec.name = found.name; - } else if (typeof spec == "string" && /^[\w\-]+\/[\w\-]+\+xml$/.test(spec)) { - return CodeMirror.resolveMode("application/xml"); - } - if (typeof spec == "string") return {name: spec}; - else return spec || {name: "null"}; - }; - - // Given a mode spec (anything that resolveMode accepts), find and - // initialize an actual mode object. - CodeMirror.getMode = function(options, spec) { - var spec = CodeMirror.resolveMode(spec); - var mfactory = modes[spec.name]; - if (!mfactory) return CodeMirror.getMode(options, "text/plain"); - var modeObj = mfactory(options, spec); - if (modeExtensions.hasOwnProperty(spec.name)) { - var exts = modeExtensions[spec.name]; - for (var prop in exts) { - if (!exts.hasOwnProperty(prop)) continue; - if (modeObj.hasOwnProperty(prop)) modeObj["_" + prop] = modeObj[prop]; - modeObj[prop] = exts[prop]; - } - } - modeObj.name = spec.name; - if (spec.helperType) modeObj.helperType = spec.helperType; - if (spec.modeProps) for (var prop in spec.modeProps) - modeObj[prop] = spec.modeProps[prop]; - - return modeObj; - }; - - // Minimal default mode. - CodeMirror.defineMode("null", function() { - return {token: function(stream) {stream.skipToEnd();}}; - }); - CodeMirror.defineMIME("text/plain", "null"); - - // This can be used to attach properties to mode objects from - // outside the actual mode definition. - var modeExtensions = CodeMirror.modeExtensions = {}; - CodeMirror.extendMode = function(mode, properties) { - var exts = modeExtensions.hasOwnProperty(mode) ? modeExtensions[mode] : (modeExtensions[mode] = {}); - copyObj(properties, exts); - }; - - // EXTENSIONS - - CodeMirror.defineExtension = function(name, func) { - CodeMirror.prototype[name] = func; - }; - CodeMirror.defineDocExtension = function(name, func) { - Doc.prototype[name] = func; - }; - CodeMirror.defineOption = option; - - var initHooks = []; - CodeMirror.defineInitHook = function(f) {initHooks.push(f);}; - - var helpers = CodeMirror.helpers = {}; - CodeMirror.registerHelper = function(type, name, value) { - if (!helpers.hasOwnProperty(type)) helpers[type] = CodeMirror[type] = {_global: []}; - helpers[type][name] = value; - }; - CodeMirror.registerGlobalHelper = function(type, name, predicate, value) { - CodeMirror.registerHelper(type, name, value); - helpers[type]._global.push({pred: predicate, val: value}); - }; - - // MODE STATE HANDLING - - // Utility functions for working with state. Exported because nested - // modes need to do this for their inner modes. - - var copyState = CodeMirror.copyState = function(mode, state) { - if (state === true) return state; - if (mode.copyState) return mode.copyState(state); - var nstate = {}; - for (var n in state) { - var val = state[n]; - if (val instanceof Array) val = val.concat([]); - nstate[n] = val; - } - return nstate; - }; - - var startState = CodeMirror.startState = function(mode, a1, a2) { - return mode.startState ? mode.startState(a1, a2) : true; - }; - - // Given a mode and a state (for that mode), find the inner mode and - // state at the position that the state refers to. 
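// Editor's note (illustration only, not part of the vendored diff): innerMode below
// is what getModeAt() (defined earlier in this file) uses to report the nested mode
// under the cursor in mixed-mode documents. A hedged sketch, assuming the htmlmixed
// mode addon is loaded and set on the hypothetical cmSketch editor:
var posSketch = CodeMirror.Pos(0, 10);
var inner = CodeMirror.innerMode(cmSketch.getMode(), cmSketch.getTokenAt(posSketch).state);
console.log(inner.mode.name);   // e.g. "javascript" inside a <script> tag, otherwise "xml"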
- CodeMirror.innerMode = function(mode, state) { - while (mode.innerMode) { - var info = mode.innerMode(state); - if (!info || info.mode == mode) break; - state = info.state; - mode = info.mode; - } - return info || {mode: mode, state: state}; - }; - - // STANDARD COMMANDS - - // Commands are parameter-less actions that can be performed on an - // editor, mostly used for keybindings. - var commands = CodeMirror.commands = { - selectAll: function(cm) {cm.setSelection(Pos(cm.firstLine(), 0), Pos(cm.lastLine()), sel_dontScroll);}, - singleSelection: function(cm) { - cm.setSelection(cm.getCursor("anchor"), cm.getCursor("head"), sel_dontScroll); - }, - killLine: function(cm) { - deleteNearSelection(cm, function(range) { - if (range.empty()) { - var len = getLine(cm.doc, range.head.line).text.length; - if (range.head.ch == len && range.head.line < cm.lastLine()) - return {from: range.head, to: Pos(range.head.line + 1, 0)}; - else - return {from: range.head, to: Pos(range.head.line, len)}; - } else { - return {from: range.from(), to: range.to()}; - } - }); - }, - deleteLine: function(cm) { - deleteNearSelection(cm, function(range) { - return {from: Pos(range.from().line, 0), - to: clipPos(cm.doc, Pos(range.to().line + 1, 0))}; - }); - }, - delLineLeft: function(cm) { - deleteNearSelection(cm, function(range) { - return {from: Pos(range.from().line, 0), to: range.from()}; - }); - }, - delWrappedLineLeft: function(cm) { - deleteNearSelection(cm, function(range) { - var top = cm.charCoords(range.head, "div").top + 5; - var leftPos = cm.coordsChar({left: 0, top: top}, "div"); - return {from: leftPos, to: range.from()}; - }); - }, - delWrappedLineRight: function(cm) { - deleteNearSelection(cm, function(range) { - var top = cm.charCoords(range.head, "div").top + 5; - var rightPos = cm.coordsChar({left: cm.display.lineDiv.offsetWidth + 100, top: top}, "div"); - return {from: range.from(), to: rightPos }; - }); - }, - undo: function(cm) {cm.undo();}, - redo: function(cm) {cm.redo();}, - undoSelection: function(cm) {cm.undoSelection();}, - redoSelection: function(cm) {cm.redoSelection();}, - goDocStart: function(cm) {cm.extendSelection(Pos(cm.firstLine(), 0));}, - goDocEnd: function(cm) {cm.extendSelection(Pos(cm.lastLine()));}, - goLineStart: function(cm) { - cm.extendSelectionsBy(function(range) { return lineStart(cm, range.head.line); }, - {origin: "+move", bias: 1}); - }, - goLineStartSmart: function(cm) { - cm.extendSelectionsBy(function(range) { - return lineStartSmart(cm, range.head); - }, {origin: "+move", bias: 1}); - }, - goLineEnd: function(cm) { - cm.extendSelectionsBy(function(range) { return lineEnd(cm, range.head.line); }, - {origin: "+move", bias: -1}); - }, - goLineRight: function(cm) { - cm.extendSelectionsBy(function(range) { - var top = cm.charCoords(range.head, "div").top + 5; - return cm.coordsChar({left: cm.display.lineDiv.offsetWidth + 100, top: top}, "div"); - }, sel_move); - }, - goLineLeft: function(cm) { - cm.extendSelectionsBy(function(range) { - var top = cm.charCoords(range.head, "div").top + 5; - return cm.coordsChar({left: 0, top: top}, "div"); - }, sel_move); - }, - goLineLeftSmart: function(cm) { - cm.extendSelectionsBy(function(range) { - var top = cm.charCoords(range.head, "div").top + 5; - var pos = cm.coordsChar({left: 0, top: top}, "div"); - if (pos.ch < cm.getLine(pos.line).search(/\S/)) return lineStartSmart(cm, range.head); - return pos; - }, sel_move); - }, - goLineUp: function(cm) {cm.moveV(-1, "line");}, - goLineDown: function(cm) {cm.moveV(1, "line");}, - 
goPageUp: function(cm) {cm.moveV(-1, "page");}, - goPageDown: function(cm) {cm.moveV(1, "page");}, - goCharLeft: function(cm) {cm.moveH(-1, "char");}, - goCharRight: function(cm) {cm.moveH(1, "char");}, - goColumnLeft: function(cm) {cm.moveH(-1, "column");}, - goColumnRight: function(cm) {cm.moveH(1, "column");}, - goWordLeft: function(cm) {cm.moveH(-1, "word");}, - goGroupRight: function(cm) {cm.moveH(1, "group");}, - goGroupLeft: function(cm) {cm.moveH(-1, "group");}, - goWordRight: function(cm) {cm.moveH(1, "word");}, - delCharBefore: function(cm) {cm.deleteH(-1, "char");}, - delCharAfter: function(cm) {cm.deleteH(1, "char");}, - delWordBefore: function(cm) {cm.deleteH(-1, "word");}, - delWordAfter: function(cm) {cm.deleteH(1, "word");}, - delGroupBefore: function(cm) {cm.deleteH(-1, "group");}, - delGroupAfter: function(cm) {cm.deleteH(1, "group");}, - indentAuto: function(cm) {cm.indentSelection("smart");}, - indentMore: function(cm) {cm.indentSelection("add");}, - indentLess: function(cm) {cm.indentSelection("subtract");}, - insertTab: function(cm) {cm.replaceSelection("\t");}, - insertSoftTab: function(cm) { - var spaces = [], ranges = cm.listSelections(), tabSize = cm.options.tabSize; - for (var i = 0; i < ranges.length; i++) { - var pos = ranges[i].from(); - var col = countColumn(cm.getLine(pos.line), pos.ch, tabSize); - spaces.push(new Array(tabSize - col % tabSize + 1).join(" ")); - } - cm.replaceSelections(spaces); - }, - defaultTab: function(cm) { - if (cm.somethingSelected()) cm.indentSelection("add"); - else cm.execCommand("insertTab"); - }, - transposeChars: function(cm) { - runInOp(cm, function() { - var ranges = cm.listSelections(), newSel = []; - for (var i = 0; i < ranges.length; i++) { - var cur = ranges[i].head, line = getLine(cm.doc, cur.line).text; - if (line) { - if (cur.ch == line.length) cur = new Pos(cur.line, cur.ch - 1); - if (cur.ch > 0) { - cur = new Pos(cur.line, cur.ch + 1); - cm.replaceRange(line.charAt(cur.ch - 1) + line.charAt(cur.ch - 2), - Pos(cur.line, cur.ch - 2), cur, "+transpose"); - } else if (cur.line > cm.doc.first) { - var prev = getLine(cm.doc, cur.line - 1).text; - if (prev) - cm.replaceRange(line.charAt(0) + cm.doc.lineSeparator() + - prev.charAt(prev.length - 1), - Pos(cur.line - 1, prev.length - 1), Pos(cur.line, 1), "+transpose"); - } - } - newSel.push(new Range(cur, cur)); - } - cm.setSelections(newSel); - }); - }, - newlineAndIndent: function(cm) { - runInOp(cm, function() { - var len = cm.listSelections().length; - for (var i = 0; i < len; i++) { - var range = cm.listSelections()[i]; - cm.replaceRange(cm.doc.lineSeparator(), range.anchor, range.head, "+input"); - cm.indentLine(range.from().line + 1, null, true); - } - ensureCursorVisible(cm); - }); - }, - toggleOverwrite: function(cm) {cm.toggleOverwrite();} - }; - - - // STANDARD KEYMAPS - - var keyMap = CodeMirror.keyMap = {}; - - keyMap.basic = { - "Left": "goCharLeft", "Right": "goCharRight", "Up": "goLineUp", "Down": "goLineDown", - "End": "goLineEnd", "Home": "goLineStartSmart", "PageUp": "goPageUp", "PageDown": "goPageDown", - "Delete": "delCharAfter", "Backspace": "delCharBefore", "Shift-Backspace": "delCharBefore", - "Tab": "defaultTab", "Shift-Tab": "indentAuto", - "Enter": "newlineAndIndent", "Insert": "toggleOverwrite", - "Esc": "singleSelection" - }; - // Note that the save and find-related commands aren't defined by - // default. User code or addons can define them. Unknown commands - // are simply ignored. 
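  // An illustrative sketch of the hook described in the comment above: "save"
  // is not defined by default, so user code can register it on
  // CodeMirror.commands and trigger it via execCommand or a key binding.
  // The persistence step below is a hypothetical placeholder.
  CodeMirror.commands.save = function(cm) {
    var text = cm.getValue();
    // sendToServer(text);  // hypothetical application-specific persistence
    console.log("would save", text.length, "characters");
  };
  // Commands can also be invoked programmatically:
  // cm.execCommand("selectAll");
  // cm.execCommand("save");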
- keyMap.pcDefault = { - "Ctrl-A": "selectAll", "Ctrl-D": "deleteLine", "Ctrl-Z": "undo", "Shift-Ctrl-Z": "redo", "Ctrl-Y": "redo", - "Ctrl-Home": "goDocStart", "Ctrl-End": "goDocEnd", "Ctrl-Up": "goLineUp", "Ctrl-Down": "goLineDown", - "Ctrl-Left": "goGroupLeft", "Ctrl-Right": "goGroupRight", "Alt-Left": "goLineStart", "Alt-Right": "goLineEnd", - "Ctrl-Backspace": "delGroupBefore", "Ctrl-Delete": "delGroupAfter", "Ctrl-S": "save", "Ctrl-F": "find", - "Ctrl-G": "findNext", "Shift-Ctrl-G": "findPrev", "Shift-Ctrl-F": "replace", "Shift-Ctrl-R": "replaceAll", - "Ctrl-[": "indentLess", "Ctrl-]": "indentMore", - "Ctrl-U": "undoSelection", "Shift-Ctrl-U": "redoSelection", "Alt-U": "redoSelection", - fallthrough: "basic" - }; - // Very basic readline/emacs-style bindings, which are standard on Mac. - keyMap.emacsy = { - "Ctrl-F": "goCharRight", "Ctrl-B": "goCharLeft", "Ctrl-P": "goLineUp", "Ctrl-N": "goLineDown", - "Alt-F": "goWordRight", "Alt-B": "goWordLeft", "Ctrl-A": "goLineStart", "Ctrl-E": "goLineEnd", - "Ctrl-V": "goPageDown", "Shift-Ctrl-V": "goPageUp", "Ctrl-D": "delCharAfter", "Ctrl-H": "delCharBefore", - "Alt-D": "delWordAfter", "Alt-Backspace": "delWordBefore", "Ctrl-K": "killLine", "Ctrl-T": "transposeChars" - }; - keyMap.macDefault = { - "Cmd-A": "selectAll", "Cmd-D": "deleteLine", "Cmd-Z": "undo", "Shift-Cmd-Z": "redo", "Cmd-Y": "redo", - "Cmd-Home": "goDocStart", "Cmd-Up": "goDocStart", "Cmd-End": "goDocEnd", "Cmd-Down": "goDocEnd", "Alt-Left": "goGroupLeft", - "Alt-Right": "goGroupRight", "Cmd-Left": "goLineLeft", "Cmd-Right": "goLineRight", "Alt-Backspace": "delGroupBefore", - "Ctrl-Alt-Backspace": "delGroupAfter", "Alt-Delete": "delGroupAfter", "Cmd-S": "save", "Cmd-F": "find", - "Cmd-G": "findNext", "Shift-Cmd-G": "findPrev", "Cmd-Alt-F": "replace", "Shift-Cmd-Alt-F": "replaceAll", - "Cmd-[": "indentLess", "Cmd-]": "indentMore", "Cmd-Backspace": "delWrappedLineLeft", "Cmd-Delete": "delWrappedLineRight", - "Cmd-U": "undoSelection", "Shift-Cmd-U": "redoSelection", "Ctrl-Up": "goDocStart", "Ctrl-Down": "goDocEnd", - fallthrough: ["basic", "emacsy"] - }; - keyMap["default"] = mac ? keyMap.macDefault : keyMap.pcDefault; - - // KEYMAP DISPATCH - - function normalizeKeyName(name) { - var parts = name.split(/-(?!$)/), name = parts[parts.length - 1]; - var alt, ctrl, shift, cmd; - for (var i = 0; i < parts.length - 1; i++) { - var mod = parts[i]; - if (/^(cmd|meta|m)$/i.test(mod)) cmd = true; - else if (/^a(lt)?$/i.test(mod)) alt = true; - else if (/^(c|ctrl|control)$/i.test(mod)) ctrl = true; - else if (/^s(hift)$/i.test(mod)) shift = true; - else throw new Error("Unrecognized modifier name: " + mod); - } - if (alt) name = "Alt-" + name; - if (ctrl) name = "Ctrl-" + name; - if (cmd) name = "Cmd-" + name; - if (shift) name = "Shift-" + name; - return name; - } - - // This is a kludge to keep keymaps mostly working as raw objects - // (backwards compatibility) while at the same time support features - // like normalization and multi-stroke key bindings. It compiles a - // new normalized keymap, and then updates the old object to reflect - // this. 
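  // A usage sketch for the keymap machinery above: bindings can be supplied
  // per-instance through the "extraKeys" option, and raw objects with
  // multi-stroke bindings should be passed through normalizeKeyMap first.
  // The bindings shown are illustrative; assumes an editor instance `cm`
  // created elsewhere.
  cm.setOption("extraKeys", CodeMirror.normalizeKeyMap({
    "Ctrl-Space": function(cm) { cm.execCommand("indentAuto"); },
    // Two-stroke binding: Ctrl-K followed by Ctrl-D deletes the current line.
    "Ctrl-K Ctrl-D": "deleteLine",
    "Tab": "insertSoftTab"
  }));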
- CodeMirror.normalizeKeyMap = function(keymap) { - var copy = {}; - for (var keyname in keymap) if (keymap.hasOwnProperty(keyname)) { - var value = keymap[keyname]; - if (/^(name|fallthrough|(de|at)tach)$/.test(keyname)) continue; - if (value == "...") { delete keymap[keyname]; continue; } - - var keys = map(keyname.split(" "), normalizeKeyName); - for (var i = 0; i < keys.length; i++) { - var val, name; - if (i == keys.length - 1) { - name = keys.join(" "); - val = value; - } else { - name = keys.slice(0, i + 1).join(" "); - val = "..."; - } - var prev = copy[name]; - if (!prev) copy[name] = val; - else if (prev != val) throw new Error("Inconsistent bindings for " + name); - } - delete keymap[keyname]; - } - for (var prop in copy) keymap[prop] = copy[prop]; - return keymap; - }; - - var lookupKey = CodeMirror.lookupKey = function(key, map, handle, context) { - map = getKeyMap(map); - var found = map.call ? map.call(key, context) : map[key]; - if (found === false) return "nothing"; - if (found === "...") return "multi"; - if (found != null && handle(found)) return "handled"; - - if (map.fallthrough) { - if (Object.prototype.toString.call(map.fallthrough) != "[object Array]") - return lookupKey(key, map.fallthrough, handle, context); - for (var i = 0; i < map.fallthrough.length; i++) { - var result = lookupKey(key, map.fallthrough[i], handle, context); - if (result) return result; - } - } - }; - - // Modifier key presses don't count as 'real' key presses for the - // purpose of keymap fallthrough. - var isModifierKey = CodeMirror.isModifierKey = function(value) { - var name = typeof value == "string" ? value : keyNames[value.keyCode]; - return name == "Ctrl" || name == "Alt" || name == "Shift" || name == "Mod"; - }; - - // Look up the name of a key as indicated by an event object. - var keyName = CodeMirror.keyName = function(event, noShift) { - if (presto && event.keyCode == 34 && event["char"]) return false; - var base = keyNames[event.keyCode], name = base; - if (name == null || event.altGraphKey) return false; - if (event.altKey && base != "Alt") name = "Alt-" + name; - if ((flipCtrlCmd ? event.metaKey : event.ctrlKey) && base != "Ctrl") name = "Ctrl-" + name; - if ((flipCtrlCmd ? event.ctrlKey : event.metaKey) && base != "Cmd") name = "Cmd-" + name; - if (!noShift && event.shiftKey && base != "Shift") name = "Shift-" + name; - return name; - }; - - function getKeyMap(val) { - return typeof val == "string" ? keyMap[val] : val; - } - - // FROMTEXTAREA - - CodeMirror.fromTextArea = function(textarea, options) { - options = options ? copyObj(options) : {}; - options.value = textarea.value; - if (!options.tabindex && textarea.tabIndex) - options.tabindex = textarea.tabIndex; - if (!options.placeholder && textarea.placeholder) - options.placeholder = textarea.placeholder; - // Set autofocus to true if this textarea is focused, or if it has - // autofocus and no other element is focused. - if (options.autofocus == null) { - var hasFocus = activeElt(); - options.autofocus = hasFocus == textarea || - textarea.getAttribute("autofocus") != null && hasFocus == document.body; - } - - function save() {textarea.value = cm.getValue();} - if (textarea.form) { - on(textarea.form, "submit", save); - // Deplorable hack to make the submit method do the right thing. 
- if (!options.leaveSubmitMethodAlone) { - var form = textarea.form, realSubmit = form.submit; - try { - var wrappedSubmit = form.submit = function() { - save(); - form.submit = realSubmit; - form.submit(); - form.submit = wrappedSubmit; - }; - } catch(e) {} - } - } - - options.finishInit = function(cm) { - cm.save = save; - cm.getTextArea = function() { return textarea; }; - cm.toTextArea = function() { - cm.toTextArea = isNaN; // Prevent this from being ran twice - save(); - textarea.parentNode.removeChild(cm.getWrapperElement()); - textarea.style.display = ""; - if (textarea.form) { - off(textarea.form, "submit", save); - if (typeof textarea.form.submit == "function") - textarea.form.submit = realSubmit; - } - }; - }; - - textarea.style.display = "none"; - var cm = CodeMirror(function(node) { - textarea.parentNode.insertBefore(node, textarea.nextSibling); - }, options); - return cm; - }; - - // STRING STREAM - - // Fed to the mode parsers, provides helper functions to make - // parsers more succinct. - - var StringStream = CodeMirror.StringStream = function(string, tabSize) { - this.pos = this.start = 0; - this.string = string; - this.tabSize = tabSize || 8; - this.lastColumnPos = this.lastColumnValue = 0; - this.lineStart = 0; - }; - - StringStream.prototype = { - eol: function() {return this.pos >= this.string.length;}, - sol: function() {return this.pos == this.lineStart;}, - peek: function() {return this.string.charAt(this.pos) || undefined;}, - next: function() { - if (this.pos < this.string.length) - return this.string.charAt(this.pos++); - }, - eat: function(match) { - var ch = this.string.charAt(this.pos); - if (typeof match == "string") var ok = ch == match; - else var ok = ch && (match.test ? match.test(ch) : match(ch)); - if (ok) {++this.pos; return ch;} - }, - eatWhile: function(match) { - var start = this.pos; - while (this.eat(match)){} - return this.pos > start; - }, - eatSpace: function() { - var start = this.pos; - while (/[\s\u00a0]/.test(this.string.charAt(this.pos))) ++this.pos; - return this.pos > start; - }, - skipToEnd: function() {this.pos = this.string.length;}, - skipTo: function(ch) { - var found = this.string.indexOf(ch, this.pos); - if (found > -1) {this.pos = found; return true;} - }, - backUp: function(n) {this.pos -= n;}, - column: function() { - if (this.lastColumnPos < this.start) { - this.lastColumnValue = countColumn(this.string, this.start, this.tabSize, this.lastColumnPos, this.lastColumnValue); - this.lastColumnPos = this.start; - } - return this.lastColumnValue - (this.lineStart ? countColumn(this.string, this.lineStart, this.tabSize) : 0); - }, - indentation: function() { - return countColumn(this.string, null, this.tabSize) - - (this.lineStart ? countColumn(this.string, this.lineStart, this.tabSize) : 0); - }, - match: function(pattern, consume, caseInsensitive) { - if (typeof pattern == "string") { - var cased = function(str) {return caseInsensitive ? 
str.toLowerCase() : str;}; - var substr = this.string.substr(this.pos, pattern.length); - if (cased(substr) == cased(pattern)) { - if (consume !== false) this.pos += pattern.length; - return true; - } - } else { - var match = this.string.slice(this.pos).match(pattern); - if (match && match.index > 0) return null; - if (match && consume !== false) this.pos += match[0].length; - return match; - } - }, - current: function(){return this.string.slice(this.start, this.pos);}, - hideFirstChars: function(n, inner) { - this.lineStart += n; - try { return inner(); } - finally { this.lineStart -= n; } - } - }; - - // TEXTMARKERS - - // Created with markText and setBookmark methods. A TextMarker is a - // handle that can be used to clear or find a marked position in the - // document. Line objects hold arrays (markedSpans) containing - // {from, to, marker} object pointing to such marker objects, and - // indicating that such a marker is present on that line. Multiple - // lines may point to the same marker when it spans across lines. - // The spans will have null for their from/to properties when the - // marker continues beyond the start/end of the line. Markers have - // links back to the lines they currently touch. - - var nextMarkerId = 0; - - var TextMarker = CodeMirror.TextMarker = function(doc, type) { - this.lines = []; - this.type = type; - this.doc = doc; - this.id = ++nextMarkerId; - }; - eventMixin(TextMarker); - - // Clear the marker. - TextMarker.prototype.clear = function() { - if (this.explicitlyCleared) return; - var cm = this.doc.cm, withOp = cm && !cm.curOp; - if (withOp) startOperation(cm); - if (hasHandler(this, "clear")) { - var found = this.find(); - if (found) signalLater(this, "clear", found.from, found.to); - } - var min = null, max = null; - for (var i = 0; i < this.lines.length; ++i) { - var line = this.lines[i]; - var span = getMarkedSpanFor(line.markedSpans, this); - if (cm && !this.collapsed) regLineChange(cm, lineNo(line), "text"); - else if (cm) { - if (span.to != null) max = lineNo(line); - if (span.from != null) min = lineNo(line); - } - line.markedSpans = removeMarkedSpan(line.markedSpans, span); - if (span.from == null && this.collapsed && !lineIsHidden(this.doc, line) && cm) - updateLineHeight(line, textHeight(cm.display)); - } - if (cm && this.collapsed && !cm.options.lineWrapping) for (var i = 0; i < this.lines.length; ++i) { - var visual = visualLine(this.lines[i]), len = lineLength(visual); - if (len > cm.display.maxLineLength) { - cm.display.maxLine = visual; - cm.display.maxLineLength = len; - cm.display.maxLineChanged = true; - } - } - - if (min != null && cm && this.collapsed) regChange(cm, min, max + 1); - this.lines.length = 0; - this.explicitlyCleared = true; - if (this.atomic && this.doc.cantEdit) { - this.doc.cantEdit = false; - if (cm) reCheckSelection(cm.doc); - } - if (cm) signalLater(cm, "markerCleared", cm, this); - if (withOp) endOperation(cm); - if (this.parent) this.parent.clear(); - }; - - // Find the position of the marker in the document. Returns a {from, - // to} object by default. Side can be passed to get a specific side - // -- 0 (both), -1 (left), or 1 (right). When lineObj is true, the - // Pos objects returned contain a line object, rather than a line - // number (used to prevent looking up the same line twice). 
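  // A minimal sketch of the TextMarker API whose internals are removed here:
  // markText returns a handle with find() and clear(), and setBookmark creates
  // a zero-length marker. Positions and class names are illustrative; assumes
  // an editor instance `cm` created elsewhere.
  var marker = cm.markText({line: 0, ch: 0}, {line: 0, ch: 5}, {
    className: "cm-error-highlight",   // assumed CSS class, styled elsewhere
    inclusiveLeft: false,
    inclusiveRight: false
  });
  var where = marker.find();           // -> {from, to}, or undefined once cleared
  marker.clear();                      // removes the marker from the document
  var bookmark = cm.setBookmark({line: 1, ch: 0}, {insertLeft: true});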
- TextMarker.prototype.find = function(side, lineObj) { - if (side == null && this.type == "bookmark") side = 1; - var from, to; - for (var i = 0; i < this.lines.length; ++i) { - var line = this.lines[i]; - var span = getMarkedSpanFor(line.markedSpans, this); - if (span.from != null) { - from = Pos(lineObj ? line : lineNo(line), span.from); - if (side == -1) return from; - } - if (span.to != null) { - to = Pos(lineObj ? line : lineNo(line), span.to); - if (side == 1) return to; - } - } - return from && {from: from, to: to}; - }; - - // Signals that the marker's widget changed, and surrounding layout - // should be recomputed. - TextMarker.prototype.changed = function() { - var pos = this.find(-1, true), widget = this, cm = this.doc.cm; - if (!pos || !cm) return; - runInOp(cm, function() { - var line = pos.line, lineN = lineNo(pos.line); - var view = findViewForLine(cm, lineN); - if (view) { - clearLineMeasurementCacheFor(view); - cm.curOp.selectionChanged = cm.curOp.forceUpdate = true; - } - cm.curOp.updateMaxLine = true; - if (!lineIsHidden(widget.doc, line) && widget.height != null) { - var oldHeight = widget.height; - widget.height = null; - var dHeight = widgetHeight(widget) - oldHeight; - if (dHeight) - updateLineHeight(line, line.height + dHeight); - } - }); - }; - - TextMarker.prototype.attachLine = function(line) { - if (!this.lines.length && this.doc.cm) { - var op = this.doc.cm.curOp; - if (!op.maybeHiddenMarkers || indexOf(op.maybeHiddenMarkers, this) == -1) - (op.maybeUnhiddenMarkers || (op.maybeUnhiddenMarkers = [])).push(this); - } - this.lines.push(line); - }; - TextMarker.prototype.detachLine = function(line) { - this.lines.splice(indexOf(this.lines, line), 1); - if (!this.lines.length && this.doc.cm) { - var op = this.doc.cm.curOp; - (op.maybeHiddenMarkers || (op.maybeHiddenMarkers = [])).push(this); - } - }; - - // Collapsed markers have unique ids, in order to be able to order - // them, which is needed for uniquely determining an outer marker - // when they overlap (they may nest, but not partially overlap). - var nextMarkerId = 0; - - // Create a marker, wire it up to the right lines, and - function markText(doc, from, to, options, type) { - // Shared markers (across linked documents) are handled separately - // (markTextShared will call out to this again, once per - // document). - if (options && options.shared) return markTextShared(doc, from, to, options, type); - // Ensure we are in an operation. 
- if (doc.cm && !doc.cm.curOp) return operation(doc.cm, markText)(doc, from, to, options, type); - - var marker = new TextMarker(doc, type), diff = cmp(from, to); - if (options) copyObj(options, marker, false); - // Don't connect empty markers unless clearWhenEmpty is false - if (diff > 0 || diff == 0 && marker.clearWhenEmpty !== false) - return marker; - if (marker.replacedWith) { - // Showing up as a widget implies collapsed (widget replaces text) - marker.collapsed = true; - marker.widgetNode = elt("span", [marker.replacedWith], "CodeMirror-widget"); - if (!options.handleMouseEvents) marker.widgetNode.setAttribute("cm-ignore-events", "true"); - if (options.insertLeft) marker.widgetNode.insertLeft = true; - } - if (marker.collapsed) { - if (conflictingCollapsedRange(doc, from.line, from, to, marker) || - from.line != to.line && conflictingCollapsedRange(doc, to.line, from, to, marker)) - throw new Error("Inserting collapsed marker partially overlapping an existing one"); - sawCollapsedSpans = true; - } - - if (marker.addToHistory) - addChangeToHistory(doc, {from: from, to: to, origin: "markText"}, doc.sel, NaN); - - var curLine = from.line, cm = doc.cm, updateMaxLine; - doc.iter(curLine, to.line + 1, function(line) { - if (cm && marker.collapsed && !cm.options.lineWrapping && visualLine(line) == cm.display.maxLine) - updateMaxLine = true; - if (marker.collapsed && curLine != from.line) updateLineHeight(line, 0); - addMarkedSpan(line, new MarkedSpan(marker, - curLine == from.line ? from.ch : null, - curLine == to.line ? to.ch : null)); - ++curLine; - }); - // lineIsHidden depends on the presence of the spans, so needs a second pass - if (marker.collapsed) doc.iter(from.line, to.line + 1, function(line) { - if (lineIsHidden(doc, line)) updateLineHeight(line, 0); - }); - - if (marker.clearOnEnter) on(marker, "beforeCursorEnter", function() { marker.clear(); }); - - if (marker.readOnly) { - sawReadOnlySpans = true; - if (doc.history.done.length || doc.history.undone.length) - doc.clearHistory(); - } - if (marker.collapsed) { - marker.id = ++nextMarkerId; - marker.atomic = true; - } - if (cm) { - // Sync editor state - if (updateMaxLine) cm.curOp.updateMaxLine = true; - if (marker.collapsed) - regChange(cm, from.line, to.line + 1); - else if (marker.className || marker.title || marker.startStyle || marker.endStyle || marker.css) - for (var i = from.line; i <= to.line; i++) regLineChange(cm, i, "text"); - if (marker.atomic) reCheckSelection(cm.doc); - signalLater(cm, "markerAdded", cm, marker); - } - return marker; - } - - // SHARED TEXTMARKERS - - // A shared marker spans multiple linked documents. It is - // implemented as a meta-marker-object controlling multiple normal - // markers. 
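  // A sketch of the shared-marker behaviour implemented below: when a document
  // has linked documents (created with linkedDoc), a marker created with
  // {shared: true} is mirrored into each of them. Positions are illustrative;
  // assumes an editor instance `cm` created elsewhere.
  var linked = cm.getDoc().linkedDoc({sharedHist: true});
  var shared = cm.markText({line: 0, ch: 0}, {line: 0, ch: 3}, {
    className: "cm-shared-mark",       // assumed CSS class
    shared: true                       // mirror the marker into linked docs
  });
  // Clearing the shared handle clears the per-document markers as well.
  shared.clear();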
- var SharedTextMarker = CodeMirror.SharedTextMarker = function(markers, primary) { - this.markers = markers; - this.primary = primary; - for (var i = 0; i < markers.length; ++i) - markers[i].parent = this; - }; - eventMixin(SharedTextMarker); - - SharedTextMarker.prototype.clear = function() { - if (this.explicitlyCleared) return; - this.explicitlyCleared = true; - for (var i = 0; i < this.markers.length; ++i) - this.markers[i].clear(); - signalLater(this, "clear"); - }; - SharedTextMarker.prototype.find = function(side, lineObj) { - return this.primary.find(side, lineObj); - }; - - function markTextShared(doc, from, to, options, type) { - options = copyObj(options); - options.shared = false; - var markers = [markText(doc, from, to, options, type)], primary = markers[0]; - var widget = options.widgetNode; - linkedDocs(doc, function(doc) { - if (widget) options.widgetNode = widget.cloneNode(true); - markers.push(markText(doc, clipPos(doc, from), clipPos(doc, to), options, type)); - for (var i = 0; i < doc.linked.length; ++i) - if (doc.linked[i].isParent) return; - primary = lst(markers); - }); - return new SharedTextMarker(markers, primary); - } - - function findSharedMarkers(doc) { - return doc.findMarks(Pos(doc.first, 0), doc.clipPos(Pos(doc.lastLine())), - function(m) { return m.parent; }); - } - - function copySharedMarkers(doc, markers) { - for (var i = 0; i < markers.length; i++) { - var marker = markers[i], pos = marker.find(); - var mFrom = doc.clipPos(pos.from), mTo = doc.clipPos(pos.to); - if (cmp(mFrom, mTo)) { - var subMark = markText(doc, mFrom, mTo, marker.primary, marker.primary.type); - marker.markers.push(subMark); - subMark.parent = marker; - } - } - } - - function detachSharedMarkers(markers) { - for (var i = 0; i < markers.length; i++) { - var marker = markers[i], linked = [marker.primary.doc];; - linkedDocs(marker.primary.doc, function(d) { linked.push(d); }); - for (var j = 0; j < marker.markers.length; j++) { - var subMarker = marker.markers[j]; - if (indexOf(linked, subMarker.doc) == -1) { - subMarker.parent = null; - marker.markers.splice(j--, 1); - } - } - } - } - - // TEXTMARKER SPANS - - function MarkedSpan(marker, from, to) { - this.marker = marker; - this.from = from; this.to = to; - } - - // Search an array of spans for a span matching the given marker. - function getMarkedSpanFor(spans, marker) { - if (spans) for (var i = 0; i < spans.length; ++i) { - var span = spans[i]; - if (span.marker == marker) return span; - } - } - // Remove a span from an array, returning undefined if no spans are - // left (we don't store arrays for lines without spans). - function removeMarkedSpan(spans, span) { - for (var r, i = 0; i < spans.length; ++i) - if (spans[i] != span) (r || (r = [])).push(spans[i]); - return r; - } - // Add a span to a line. - function addMarkedSpan(line, span) { - line.markedSpans = line.markedSpans ? line.markedSpans.concat([span]) : [span]; - span.marker.attachLine(line); - } - - // Used for the algorithm that adjusts markers for a change in the - // document. These functions cut an array of spans at a given - // character position, returning an array of remaining chunks (or - // undefined if nothing remains). - function markedSpansBefore(old, startCh, isInsert) { - if (old) for (var i = 0, nw; i < old.length; ++i) { - var span = old[i], marker = span.marker; - var startsBefore = span.from == null || (marker.inclusiveLeft ? 
span.from <= startCh : span.from < startCh); - if (startsBefore || span.from == startCh && marker.type == "bookmark" && (!isInsert || !span.marker.insertLeft)) { - var endsAfter = span.to == null || (marker.inclusiveRight ? span.to >= startCh : span.to > startCh); - (nw || (nw = [])).push(new MarkedSpan(marker, span.from, endsAfter ? null : span.to)); - } - } - return nw; - } - function markedSpansAfter(old, endCh, isInsert) { - if (old) for (var i = 0, nw; i < old.length; ++i) { - var span = old[i], marker = span.marker; - var endsAfter = span.to == null || (marker.inclusiveRight ? span.to >= endCh : span.to > endCh); - if (endsAfter || span.from == endCh && marker.type == "bookmark" && (!isInsert || span.marker.insertLeft)) { - var startsBefore = span.from == null || (marker.inclusiveLeft ? span.from <= endCh : span.from < endCh); - (nw || (nw = [])).push(new MarkedSpan(marker, startsBefore ? null : span.from - endCh, - span.to == null ? null : span.to - endCh)); - } - } - return nw; - } - - // Given a change object, compute the new set of marker spans that - // cover the line in which the change took place. Removes spans - // entirely within the change, reconnects spans belonging to the - // same marker that appear on both sides of the change, and cuts off - // spans partially within the change. Returns an array of span - // arrays with one element for each line in (after) the change. - function stretchSpansOverChange(doc, change) { - if (change.full) return null; - var oldFirst = isLine(doc, change.from.line) && getLine(doc, change.from.line).markedSpans; - var oldLast = isLine(doc, change.to.line) && getLine(doc, change.to.line).markedSpans; - if (!oldFirst && !oldLast) return null; - - var startCh = change.from.ch, endCh = change.to.ch, isInsert = cmp(change.from, change.to) == 0; - // Get the spans that 'stick out' on both sides - var first = markedSpansBefore(oldFirst, startCh, isInsert); - var last = markedSpansAfter(oldLast, endCh, isInsert); - - // Next, merge those two ends - var sameLine = change.text.length == 1, offset = lst(change.text).length + (sameLine ? startCh : 0); - if (first) { - // Fix up .to properties of first - for (var i = 0; i < first.length; ++i) { - var span = first[i]; - if (span.to == null) { - var found = getMarkedSpanFor(last, span.marker); - if (!found) span.to = startCh; - else if (sameLine) span.to = found.to == null ? 
null : found.to + offset; - } - } - } - if (last) { - // Fix up .from in last (or move them into first in case of sameLine) - for (var i = 0; i < last.length; ++i) { - var span = last[i]; - if (span.to != null) span.to += offset; - if (span.from == null) { - var found = getMarkedSpanFor(first, span.marker); - if (!found) { - span.from = offset; - if (sameLine) (first || (first = [])).push(span); - } - } else { - span.from += offset; - if (sameLine) (first || (first = [])).push(span); - } - } - } - // Make sure we didn't create any zero-length spans - if (first) first = clearEmptySpans(first); - if (last && last != first) last = clearEmptySpans(last); - - var newMarkers = [first]; - if (!sameLine) { - // Fill gap with whole-line-spans - var gap = change.text.length - 2, gapMarkers; - if (gap > 0 && first) - for (var i = 0; i < first.length; ++i) - if (first[i].to == null) - (gapMarkers || (gapMarkers = [])).push(new MarkedSpan(first[i].marker, null, null)); - for (var i = 0; i < gap; ++i) - newMarkers.push(gapMarkers); - newMarkers.push(last); - } - return newMarkers; - } - - // Remove spans that are empty and don't have a clearWhenEmpty - // option of false. - function clearEmptySpans(spans) { - for (var i = 0; i < spans.length; ++i) { - var span = spans[i]; - if (span.from != null && span.from == span.to && span.marker.clearWhenEmpty !== false) - spans.splice(i--, 1); - } - if (!spans.length) return null; - return spans; - } - - // Used for un/re-doing changes from the history. Combines the - // result of computing the existing spans with the set of spans that - // existed in the history (so that deleting around a span and then - // undoing brings back the span). - function mergeOldSpans(doc, change) { - var old = getOldSpans(doc, change); - var stretched = stretchSpansOverChange(doc, change); - if (!old) return stretched; - if (!stretched) return old; - - for (var i = 0; i < old.length; ++i) { - var oldCur = old[i], stretchCur = stretched[i]; - if (oldCur && stretchCur) { - spans: for (var j = 0; j < stretchCur.length; ++j) { - var span = stretchCur[j]; - for (var k = 0; k < oldCur.length; ++k) - if (oldCur[k].marker == span.marker) continue spans; - oldCur.push(span); - } - } else if (stretchCur) { - old[i] = stretchCur; - } - } - return old; - } - - // Used to 'clip' out readOnly ranges when making a change. - function removeReadOnlyRanges(doc, from, to) { - var markers = null; - doc.iter(from.line, to.line + 1, function(line) { - if (line.markedSpans) for (var i = 0; i < line.markedSpans.length; ++i) { - var mark = line.markedSpans[i].marker; - if (mark.readOnly && (!markers || indexOf(markers, mark) == -1)) - (markers || (markers = [])).push(mark); - } - }); - if (!markers) return null; - var parts = [{from: from, to: to}]; - for (var i = 0; i < markers.length; ++i) { - var mk = markers[i], m = mk.find(0); - for (var j = 0; j < parts.length; ++j) { - var p = parts[j]; - if (cmp(p.to, m.from) < 0 || cmp(p.from, m.to) > 0) continue; - var newParts = [j, 1], dfrom = cmp(p.from, m.from), dto = cmp(p.to, m.to); - if (dfrom < 0 || !mk.inclusiveLeft && !dfrom) - newParts.push({from: p.from, to: m.from}); - if (dto > 0 || !mk.inclusiveRight && !dto) - newParts.push({from: m.to, to: p.to}); - parts.splice.apply(parts, newParts); - j += newParts.length - 1; - } - } - return parts; - } - - // Connect or disconnect spans from a line. 
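  // A hedged example of the read-only/atomic marker options that
  // removeReadOnlyRanges above has to honour, plus the query methods for
  // locating markers. Ranges are illustrative; assumes an editor instance `cm`.
  var guard = cm.markText({line: 0, ch: 0}, {line: 2, ch: 0}, {
    readOnly: true,   // edits overlapping this range are clipped out
    atomic: true      // the cursor cannot be placed inside the range
  });
  var here = cm.findMarksAt({line: 1, ch: 0});   // markers touching a position
  var all  = cm.getAllMarks();                   // every marker in the document
  guard.clear();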
- function detachMarkedSpans(line) { - var spans = line.markedSpans; - if (!spans) return; - for (var i = 0; i < spans.length; ++i) - spans[i].marker.detachLine(line); - line.markedSpans = null; - } - function attachMarkedSpans(line, spans) { - if (!spans) return; - for (var i = 0; i < spans.length; ++i) - spans[i].marker.attachLine(line); - line.markedSpans = spans; - } - - // Helpers used when computing which overlapping collapsed span - // counts as the larger one. - function extraLeft(marker) { return marker.inclusiveLeft ? -1 : 0; } - function extraRight(marker) { return marker.inclusiveRight ? 1 : 0; } - - // Returns a number indicating which of two overlapping collapsed - // spans is larger (and thus includes the other). Falls back to - // comparing ids when the spans cover exactly the same range. - function compareCollapsedMarkers(a, b) { - var lenDiff = a.lines.length - b.lines.length; - if (lenDiff != 0) return lenDiff; - var aPos = a.find(), bPos = b.find(); - var fromCmp = cmp(aPos.from, bPos.from) || extraLeft(a) - extraLeft(b); - if (fromCmp) return -fromCmp; - var toCmp = cmp(aPos.to, bPos.to) || extraRight(a) - extraRight(b); - if (toCmp) return toCmp; - return b.id - a.id; - } - - // Find out whether a line ends or starts in a collapsed span. If - // so, return the marker for that span. - function collapsedSpanAtSide(line, start) { - var sps = sawCollapsedSpans && line.markedSpans, found; - if (sps) for (var sp, i = 0; i < sps.length; ++i) { - sp = sps[i]; - if (sp.marker.collapsed && (start ? sp.from : sp.to) == null && - (!found || compareCollapsedMarkers(found, sp.marker) < 0)) - found = sp.marker; - } - return found; - } - function collapsedSpanAtStart(line) { return collapsedSpanAtSide(line, true); } - function collapsedSpanAtEnd(line) { return collapsedSpanAtSide(line, false); } - - // Test whether there exists a collapsed span that partially - // overlaps (covers the start or end, but not both) of a new span. - // Such overlap is not allowed. - function conflictingCollapsedRange(doc, lineNo, from, to, marker) { - var line = getLine(doc, lineNo); - var sps = sawCollapsedSpans && line.markedSpans; - if (sps) for (var i = 0; i < sps.length; ++i) { - var sp = sps[i]; - if (!sp.marker.collapsed) continue; - var found = sp.marker.find(0); - var fromCmp = cmp(found.from, from) || extraLeft(sp.marker) - extraLeft(marker); - var toCmp = cmp(found.to, to) || extraRight(sp.marker) - extraRight(marker); - if (fromCmp >= 0 && toCmp <= 0 || fromCmp <= 0 && toCmp >= 0) continue; - if (fromCmp <= 0 && (cmp(found.to, from) > 0 || (sp.marker.inclusiveRight && marker.inclusiveLeft)) || - fromCmp >= 0 && (cmp(found.from, to) < 0 || (sp.marker.inclusiveLeft && marker.inclusiveRight))) - return true; - } - } - - // A visual line is a line as drawn on the screen. Folding, for - // example, can cause multiple logical lines to appear on the same - // visual line. This finds the start of the visual line that the - // given line is part of (usually that is the line itself). - function visualLine(line) { - var merged; - while (merged = collapsedSpanAtStart(line)) - line = merged.find(-1, true).line; - return line; - } - - // Returns an array of logical lines that continue the visual line - // started by the argument, or undefined if there are no such lines. 
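  // A sketch of a collapsed span as used by folding addons, which is what the
  // visual-line helpers above have to account for. The widget text and range
  // are illustrative; assumes an editor instance `cm` created elsewhere.
  var placeholder = document.createElement("span");
  placeholder.textContent = "\u2194";            // arbitrary fold placeholder
  var fold = cm.markText({line: 2, ch: 0}, {line: 5, ch: 0}, {
    collapsed: true,           // hide the covered text
    replacedWith: placeholder, // show a widget instead (implies collapsed)
    clearOnEnter: true         // clear the fold when the cursor enters it
  });
  // Lines 3-4 are now part of the visual line that starts at line 2.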
- function visualLineContinued(line) { - var merged, lines; - while (merged = collapsedSpanAtEnd(line)) { - line = merged.find(1, true).line; - (lines || (lines = [])).push(line); - } - return lines; - } - - // Get the line number of the start of the visual line that the - // given line number is part of. - function visualLineNo(doc, lineN) { - var line = getLine(doc, lineN), vis = visualLine(line); - if (line == vis) return lineN; - return lineNo(vis); - } - // Get the line number of the start of the next visual line after - // the given line. - function visualLineEndNo(doc, lineN) { - if (lineN > doc.lastLine()) return lineN; - var line = getLine(doc, lineN), merged; - if (!lineIsHidden(doc, line)) return lineN; - while (merged = collapsedSpanAtEnd(line)) - line = merged.find(1, true).line; - return lineNo(line) + 1; - } - - // Compute whether a line is hidden. Lines count as hidden when they - // are part of a visual line that starts with another line, or when - // they are entirely covered by collapsed, non-widget span. - function lineIsHidden(doc, line) { - var sps = sawCollapsedSpans && line.markedSpans; - if (sps) for (var sp, i = 0; i < sps.length; ++i) { - sp = sps[i]; - if (!sp.marker.collapsed) continue; - if (sp.from == null) return true; - if (sp.marker.widgetNode) continue; - if (sp.from == 0 && sp.marker.inclusiveLeft && lineIsHiddenInner(doc, line, sp)) - return true; - } - } - function lineIsHiddenInner(doc, line, span) { - if (span.to == null) { - var end = span.marker.find(1, true); - return lineIsHiddenInner(doc, end.line, getMarkedSpanFor(end.line.markedSpans, span.marker)); - } - if (span.marker.inclusiveRight && span.to == line.text.length) - return true; - for (var sp, i = 0; i < line.markedSpans.length; ++i) { - sp = line.markedSpans[i]; - if (sp.marker.collapsed && !sp.marker.widgetNode && sp.from == span.to && - (sp.to == null || sp.to != span.from) && - (sp.marker.inclusiveLeft || span.marker.inclusiveRight) && - lineIsHiddenInner(doc, line, sp)) return true; - } - } - - // LINE WIDGETS - - // Line widgets are block elements displayed above or below a line. 
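  // A usage sketch for the LineWidget class defined below: addLineWidget
  // attaches a block DOM element under a line and returns a handle with
  // clear() and changed(). The message div is a hypothetical example; assumes
  // an editor instance `cm` created elsewhere.
  var note = document.createElement("div");
  note.className = "lint-message";               // assumed CSS class
  note.textContent = "Unused variable `foo`";    // hypothetical message
  var widget = cm.addLineWidget(3, note, {
    coverGutter: false,   // do not overlap the gutter
    noHScroll: true       // keep the widget fixed during horizontal scroll
  });
  // widget.changed();  // call after resizing the node so heights are re-measured
  widget.clear();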
- - var LineWidget = CodeMirror.LineWidget = function(doc, node, options) { - if (options) for (var opt in options) if (options.hasOwnProperty(opt)) - this[opt] = options[opt]; - this.doc = doc; - this.node = node; - }; - eventMixin(LineWidget); - - function adjustScrollWhenAboveVisible(cm, line, diff) { - if (heightAtLine(line) < ((cm.curOp && cm.curOp.scrollTop) || cm.doc.scrollTop)) - addToScrollPos(cm, null, diff); - } - - LineWidget.prototype.clear = function() { - var cm = this.doc.cm, ws = this.line.widgets, line = this.line, no = lineNo(line); - if (no == null || !ws) return; - for (var i = 0; i < ws.length; ++i) if (ws[i] == this) ws.splice(i--, 1); - if (!ws.length) line.widgets = null; - var height = widgetHeight(this); - updateLineHeight(line, Math.max(0, line.height - height)); - if (cm) runInOp(cm, function() { - adjustScrollWhenAboveVisible(cm, line, -height); - regLineChange(cm, no, "widget"); - }); - }; - LineWidget.prototype.changed = function() { - var oldH = this.height, cm = this.doc.cm, line = this.line; - this.height = null; - var diff = widgetHeight(this) - oldH; - if (!diff) return; - updateLineHeight(line, line.height + diff); - if (cm) runInOp(cm, function() { - cm.curOp.forceUpdate = true; - adjustScrollWhenAboveVisible(cm, line, diff); - }); - }; - - function widgetHeight(widget) { - if (widget.height != null) return widget.height; - var cm = widget.doc.cm; - if (!cm) return 0; - if (!contains(document.body, widget.node)) { - var parentStyle = "position: relative;"; - if (widget.coverGutter) - parentStyle += "margin-left: -" + cm.display.gutters.offsetWidth + "px;"; - if (widget.noHScroll) - parentStyle += "width: " + cm.display.wrapper.clientWidth + "px;"; - removeChildrenAndAdd(cm.display.measure, elt("div", [widget.node], null, parentStyle)); - } - return widget.height = widget.node.parentNode.offsetHeight; - } - - function addLineWidget(doc, handle, node, options) { - var widget = new LineWidget(doc, node, options); - var cm = doc.cm; - if (cm && widget.noHScroll) cm.display.alignWidgets = true; - changeLine(doc, handle, "widget", function(line) { - var widgets = line.widgets || (line.widgets = []); - if (widget.insertAt == null) widgets.push(widget); - else widgets.splice(Math.min(widgets.length - 1, Math.max(0, widget.insertAt)), 0, widget); - widget.line = line; - if (cm && !lineIsHidden(doc, line)) { - var aboveVisible = heightAtLine(line) < doc.scrollTop; - updateLineHeight(line, line.height + widgetHeight(widget)); - if (aboveVisible) addToScrollPos(cm, null, widget.height); - cm.curOp.forceUpdate = true; - } - return true; - }); - return widget; - } - - // LINE DATA STRUCTURE - - // Line objects. These hold state related to a line, including - // highlighting info (the styles array). - var Line = CodeMirror.Line = function(text, markedSpans, estimateHeight) { - this.text = text; - attachMarkedSpans(this, markedSpans); - this.height = estimateHeight ? estimateHeight(this) : 1; - }; - eventMixin(Line); - Line.prototype.lineNo = function() { return lineNo(this); }; - - // Change the content (text, markers) of a line. Automatically - // invalidates cached information and tries to re-estimate the - // line's height. - function updateLine(line, text, markedSpans, estimateHeight) { - line.text = text; - if (line.stateAfter) line.stateAfter = null; - if (line.styles) line.styles = null; - if (line.order != null) line.order = null; - detachMarkedSpans(line); - attachMarkedSpans(line, markedSpans); - var estHeight = estimateHeight ? 
estimateHeight(line) : 1; - if (estHeight != line.height) updateLineHeight(line, estHeight); - } - - // Detach a line from the document tree and its markers. - function cleanUpLine(line) { - line.parent = null; - detachMarkedSpans(line); - } - - function extractLineClasses(type, output) { - if (type) for (;;) { - var lineClass = type.match(/(?:^|\s+)line-(background-)?(\S+)/); - if (!lineClass) break; - type = type.slice(0, lineClass.index) + type.slice(lineClass.index + lineClass[0].length); - var prop = lineClass[1] ? "bgClass" : "textClass"; - if (output[prop] == null) - output[prop] = lineClass[2]; - else if (!(new RegExp("(?:^|\s)" + lineClass[2] + "(?:$|\s)")).test(output[prop])) - output[prop] += " " + lineClass[2]; - } - return type; - } - - function callBlankLine(mode, state) { - if (mode.blankLine) return mode.blankLine(state); - if (!mode.innerMode) return; - var inner = CodeMirror.innerMode(mode, state); - if (inner.mode.blankLine) return inner.mode.blankLine(inner.state); - } - - function readToken(mode, stream, state, inner) { - for (var i = 0; i < 10; i++) { - if (inner) inner[0] = CodeMirror.innerMode(mode, state).mode; - var style = mode.token(stream, state); - if (stream.pos > stream.start) return style; - } - throw new Error("Mode " + mode.name + " failed to advance stream."); - } - - // Utility for getTokenAt and getLineTokens - function takeToken(cm, pos, precise, asArray) { - function getObj(copy) { - return {start: stream.start, end: stream.pos, - string: stream.current(), - type: style || null, - state: copy ? copyState(doc.mode, state) : state}; - } - - var doc = cm.doc, mode = doc.mode, style; - pos = clipPos(doc, pos); - var line = getLine(doc, pos.line), state = getStateBefore(cm, pos.line, precise); - var stream = new StringStream(line.text, cm.options.tabSize), tokens; - if (asArray) tokens = []; - while ((asArray || stream.pos < pos.ch) && !stream.eol()) { - stream.start = stream.pos; - style = readToken(mode, stream, state); - if (asArray) tokens.push(getObj(true)); - } - return asArray ? tokens : getObj(); - } - - // Run the given mode's parser over a line, calling f for each token. - function runMode(cm, text, mode, state, f, lineClasses, forceToEnd) { - var flattenSpans = mode.flattenSpans; - if (flattenSpans == null) flattenSpans = cm.options.flattenSpans; - var curStart = 0, curStyle = null; - var stream = new StringStream(text, cm.options.tabSize), style; - var inner = cm.options.addModeClass && [null]; - if (text == "") extractLineClasses(callBlankLine(mode, state), lineClasses); - while (!stream.eol()) { - if (stream.pos > cm.options.maxHighlightLength) { - flattenSpans = false; - if (forceToEnd) processLine(cm, text, state, stream.pos); - stream.pos = text.length; - style = null; - } else { - style = extractLineClasses(readToken(mode, stream, state, inner), lineClasses); - } - if (inner) { - var mName = inner[0].name; - if (mName) style = "m-" + (style ? 
mName + " " + style : mName); - } - if (!flattenSpans || curStyle != style) { - while (curStart < stream.start) { - curStart = Math.min(stream.start, curStart + 50000); - f(curStart, curStyle); - } - curStyle = style; - } - stream.start = stream.pos; - } - while (curStart < stream.pos) { - // Webkit seems to refuse to render text nodes longer than 57444 characters - var pos = Math.min(stream.pos, curStart + 50000); - f(pos, curStyle); - curStart = pos; - } - } - - // Compute a style array (an array starting with a mode generation - // -- for invalidation -- followed by pairs of end positions and - // style strings), which is used to highlight the tokens on the - // line. - function highlightLine(cm, line, state, forceToEnd) { - // A styles array always starts with a number identifying the - // mode/overlays that it is based on (for easy invalidation). - var st = [cm.state.modeGen], lineClasses = {}; - // Compute the base array of styles - runMode(cm, line.text, cm.doc.mode, state, function(end, style) { - st.push(end, style); - }, lineClasses, forceToEnd); - - // Run overlays, adjust style array. - for (var o = 0; o < cm.state.overlays.length; ++o) { - var overlay = cm.state.overlays[o], i = 1, at = 0; - runMode(cm, line.text, overlay.mode, true, function(end, style) { - var start = i; - // Ensure there's a token end at the current position, and that i points at it - while (at < end) { - var i_end = st[i]; - if (i_end > end) - st.splice(i, 1, end, st[i+1], i_end); - i += 2; - at = Math.min(end, i_end); - } - if (!style) return; - if (overlay.opaque) { - st.splice(start, i - start, end, "cm-overlay " + style); - i = start + 2; - } else { - for (; start < i; start += 2) { - var cur = st[start+1]; - st[start+1] = (cur ? cur + " " : "") + "cm-overlay " + style; - } - } - }, lineClasses); - } - - return {styles: st, classes: lineClasses.bgClass || lineClasses.textClass ? lineClasses : null}; - } - - function getLineStyles(cm, line, updateFrontier) { - if (!line.styles || line.styles[0] != cm.state.modeGen) { - var state = getStateBefore(cm, lineNo(line)); - var result = highlightLine(cm, line, line.text.length > cm.options.maxHighlightLength ? copyState(cm.doc.mode, state) : state); - line.stateAfter = state; - line.styles = result.styles; - if (result.classes) line.styleClasses = result.classes; - else if (line.styleClasses) line.styleClasses = null; - if (updateFrontier === cm.doc.frontier) cm.doc.frontier++; - } - return line.styles; - } - - // Lightweight form of highlight -- proceed over this line and - // update state, but don't save a style array. Used for lines that - // aren't currently visible. - function processLine(cm, text, state, startAt) { - var mode = cm.doc.mode; - var stream = new StringStream(text, cm.options.tabSize); - stream.start = stream.pos = startAt || 0; - if (text == "") callBlankLine(mode, state); - while (!stream.eol()) { - readToken(mode, stream, state); - stream.start = stream.pos; - } - } - - // Convert a style as returned by a mode (either null, or a string - // containing one or more styles) to a CSS style. This is cached, - // and also looks for line-wide styles. - var styleToClassCache = {}, styleToClassCacheWithMode = {}; - function interpretTokenStyle(style, options) { - if (!style || /^\s*$/.test(style)) return null; - var cache = options.addModeClass ? styleToClassCacheWithMode : styleToClassCache; - return cache[style] || - (cache[style] = style.replace(/\S+/g, "cm-$&")); - } - - // Render the DOM representation of the text of a line. 
Also builds - // up a 'line map', which points at the DOM nodes that represent - // specific stretches of text, and is used by the measuring code. - // The returned object contains the DOM node, this map, and - // information about line-wide styles that were set by the mode. - function buildLineContent(cm, lineView) { - // The padding-right forces the element to have a 'border', which - // is needed on Webkit to be able to get line-level bounding - // rectangles for it (in measureChar). - var content = elt("span", null, null, webkit ? "padding-right: .1px" : null); - var builder = {pre: elt("pre", [content], "CodeMirror-line"), content: content, - col: 0, pos: 0, cm: cm, - splitSpaces: (ie || webkit) && cm.getOption("lineWrapping")}; - lineView.measure = {}; - - // Iterate over the logical lines that make up this visual line. - for (var i = 0; i <= (lineView.rest ? lineView.rest.length : 0); i++) { - var line = i ? lineView.rest[i - 1] : lineView.line, order; - builder.pos = 0; - builder.addToken = buildToken; - // Optionally wire in some hacks into the token-rendering - // algorithm, to deal with browser quirks. - if (hasBadBidiRects(cm.display.measure) && (order = getOrder(line))) - builder.addToken = buildTokenBadBidi(builder.addToken, order); - builder.map = []; - var allowFrontierUpdate = lineView != cm.display.externalMeasured && lineNo(line); - insertLineContent(line, builder, getLineStyles(cm, line, allowFrontierUpdate)); - if (line.styleClasses) { - if (line.styleClasses.bgClass) - builder.bgClass = joinClasses(line.styleClasses.bgClass, builder.bgClass || ""); - if (line.styleClasses.textClass) - builder.textClass = joinClasses(line.styleClasses.textClass, builder.textClass || ""); - } - - // Ensure at least a single node is present, for measuring. - if (builder.map.length == 0) - builder.map.push(0, 0, builder.content.appendChild(zeroWidthElement(cm.display.measure))); - - // Store the map and a cache object for the current logical line - if (i == 0) { - lineView.measure.map = builder.map; - lineView.measure.cache = {}; - } else { - (lineView.measure.maps || (lineView.measure.maps = [])).push(builder.map); - (lineView.measure.caches || (lineView.measure.caches = [])).push({}); - } - } - - // See issue #2901 - if (webkit && /\bcm-tab\b/.test(builder.content.lastChild.className)) - builder.content.className = "cm-tab-wrap-hack"; - - signal(cm, "renderLine", cm, lineView.line, builder.pre); - if (builder.pre.className) - builder.textClass = joinClasses(builder.pre.className, builder.textClass || ""); - - return builder; - } - - function defaultSpecialCharPlaceholder(ch) { - var token = elt("span", "\u2022", "cm-invalidchar"); - token.title = "\\u" + ch.charCodeAt(0).toString(16); - token.setAttribute("aria-label", token.title); - return token; - } - - // Build up the DOM representation for a single token, and add it to - // the line map. Takes care to render special characters separately. - function buildToken(builder, text, style, startStyle, endStyle, title, css) { - if (!text) return; - var displayText = builder.splitSpaces ? 
text.replace(/ {3,}/g, splitSpaces) : text; - var special = builder.cm.state.specialChars, mustWrap = false; - if (!special.test(text)) { - builder.col += text.length; - var content = document.createTextNode(displayText); - builder.map.push(builder.pos, builder.pos + text.length, content); - if (ie && ie_version < 9) mustWrap = true; - builder.pos += text.length; - } else { - var content = document.createDocumentFragment(), pos = 0; - while (true) { - special.lastIndex = pos; - var m = special.exec(text); - var skipped = m ? m.index - pos : text.length - pos; - if (skipped) { - var txt = document.createTextNode(displayText.slice(pos, pos + skipped)); - if (ie && ie_version < 9) content.appendChild(elt("span", [txt])); - else content.appendChild(txt); - builder.map.push(builder.pos, builder.pos + skipped, txt); - builder.col += skipped; - builder.pos += skipped; - } - if (!m) break; - pos += skipped + 1; - if (m[0] == "\t") { - var tabSize = builder.cm.options.tabSize, tabWidth = tabSize - builder.col % tabSize; - var txt = content.appendChild(elt("span", spaceStr(tabWidth), "cm-tab")); - txt.setAttribute("role", "presentation"); - txt.setAttribute("cm-text", "\t"); - builder.col += tabWidth; - } else if (m[0] == "\r" || m[0] == "\n") { - var txt = content.appendChild(elt("span", m[0] == "\r" ? "\u240d" : "\u2424", "cm-invalidchar")); - txt.setAttribute("cm-text", m[0]); - builder.col += 1; - } else { - var txt = builder.cm.options.specialCharPlaceholder(m[0]); - txt.setAttribute("cm-text", m[0]); - if (ie && ie_version < 9) content.appendChild(elt("span", [txt])); - else content.appendChild(txt); - builder.col += 1; - } - builder.map.push(builder.pos, builder.pos + 1, txt); - builder.pos++; - } - } - if (style || startStyle || endStyle || mustWrap || css) { - var fullStyle = style || ""; - if (startStyle) fullStyle += startStyle; - if (endStyle) fullStyle += endStyle; - var token = elt("span", [content], fullStyle, css); - if (title) token.title = title; - return builder.content.appendChild(token); - } - builder.content.appendChild(content); - } - - function splitSpaces(old) { - var out = " "; - for (var i = 0; i < old.length - 2; ++i) out += i % 2 ? " " : "\u00a0"; - out += " "; - return out; - } - - // Work around nonsense dimensions being reported for stretches of - // right-to-left text. - function buildTokenBadBidi(inner, order) { - return function(builder, text, style, startStyle, endStyle, title, css) { - style = style ? 
style + " cm-force-border" : "cm-force-border"; - var start = builder.pos, end = start + text.length; - for (;;) { - // Find the part that overlaps with the start of this text - for (var i = 0; i < order.length; i++) { - var part = order[i]; - if (part.to > start && part.from <= start) break; - } - if (part.to >= end) return inner(builder, text, style, startStyle, endStyle, title, css); - inner(builder, text.slice(0, part.to - start), style, startStyle, null, title, css); - startStyle = null; - text = text.slice(part.to - start); - start = part.to; - } - }; - } - - function buildCollapsedSpan(builder, size, marker, ignoreWidget) { - var widget = !ignoreWidget && marker.widgetNode; - if (widget) builder.map.push(builder.pos, builder.pos + size, widget); - if (!ignoreWidget && builder.cm.display.input.needsContentAttribute) { - if (!widget) - widget = builder.content.appendChild(document.createElement("span")); - widget.setAttribute("cm-marker", marker.id); - } - if (widget) { - builder.cm.display.input.setUneditable(widget); - builder.content.appendChild(widget); - } - builder.pos += size; - } - - // Outputs a number of spans to make up a line, taking highlighting - // and marked text into account. - function insertLineContent(line, builder, styles) { - var spans = line.markedSpans, allText = line.text, at = 0; - if (!spans) { - for (var i = 1; i < styles.length; i+=2) - builder.addToken(builder, allText.slice(at, at = styles[i]), interpretTokenStyle(styles[i+1], builder.cm.options)); - return; - } - - var len = allText.length, pos = 0, i = 1, text = "", style, css; - var nextChange = 0, spanStyle, spanEndStyle, spanStartStyle, title, collapsed; - for (;;) { - if (nextChange == pos) { // Update current marker set - spanStyle = spanEndStyle = spanStartStyle = title = css = ""; - collapsed = null; nextChange = Infinity; - var foundBookmarks = [], endStyles - for (var j = 0; j < spans.length; ++j) { - var sp = spans[j], m = sp.marker; - if (m.type == "bookmark" && sp.from == pos && m.widgetNode) { - foundBookmarks.push(m); - } else if (sp.from <= pos && (sp.to == null || sp.to > pos || m.collapsed && sp.to == pos && sp.from == pos)) { - if (sp.to != null && sp.to != pos && nextChange > sp.to) { - nextChange = sp.to; - spanEndStyle = ""; - } - if (m.className) spanStyle += " " + m.className; - if (m.css) css = (css ? css + ";" : "") + m.css; - if (m.startStyle && sp.from == pos) spanStartStyle += " " + m.startStyle; - if (m.endStyle && sp.to == nextChange) (endStyles || (endStyles = [])).push(m.endStyle, sp.to) - if (m.title && !title) title = m.title; - if (m.collapsed && (!collapsed || compareCollapsedMarkers(collapsed.marker, m) < 0)) - collapsed = sp; - } else if (sp.from > pos && nextChange > sp.from) { - nextChange = sp.from; - } - } - if (endStyles) for (var j = 0; j < endStyles.length; j += 2) - if (endStyles[j + 1] == nextChange) spanEndStyle += " " + endStyles[j] - - if (!collapsed || collapsed.from == pos) for (var j = 0; j < foundBookmarks.length; ++j) - buildCollapsedSpan(builder, 0, foundBookmarks[j]); - if (collapsed && (collapsed.from || 0) == pos) { - buildCollapsedSpan(builder, (collapsed.to == null ? len + 1 : collapsed.to) - pos, - collapsed.marker, collapsed.from == null); - if (collapsed.to == null) return; - if (collapsed.to == pos) collapsed = false; - } - } - if (pos >= len) break; - - var upto = Math.min(len, nextChange); - while (true) { - if (text) { - var end = pos + text.length; - if (!collapsed) { - var tokenText = end > upto ? 
text.slice(0, upto - pos) : text; - builder.addToken(builder, tokenText, style ? style + spanStyle : spanStyle, - spanStartStyle, pos + tokenText.length == nextChange ? spanEndStyle : "", title, css); - } - if (end >= upto) {text = text.slice(upto - pos); pos = upto; break;} - pos = end; - spanStartStyle = ""; - } - text = allText.slice(at, at = styles[i++]); - style = interpretTokenStyle(styles[i++], builder.cm.options); - } - } + cm.doc.modeFrontier = cm.doc.highlightFrontier = cm.doc.first; + startWorker(cm, 100); + cm.state.modeGen++; + if (cm.curOp) { regChange(cm); } } // DOCUMENT DATA STRUCTURE @@ -7153,20 +4708,21 @@ // widgets and marker elements with the text behave more intuitive. function isWholeLineUpdate(doc, change) { return change.from.ch == 0 && change.to.ch == 0 && lst(change.text) == "" && - (!doc.cm || doc.cm.options.wholeLineUpdateBefore); + (!doc.cm || doc.cm.options.wholeLineUpdateBefore) } // Perform a change on the document data structure. - function updateDoc(doc, change, markedSpans, estimateHeight) { - function spansFor(n) {return markedSpans ? markedSpans[n] : null;} + function updateDoc(doc, change, markedSpans, estimateHeight$$1) { + function spansFor(n) {return markedSpans ? markedSpans[n] : null} function update(line, text, spans) { - updateLine(line, text, spans, estimateHeight); + updateLine(line, text, spans, estimateHeight$$1); signalLater(line, "change", line, change); } function linesFor(start, end) { - for (var i = start, result = []; i < end; ++i) - result.push(new Line(text[i], spansFor(i), estimateHeight)); - return result; + var result = []; + for (var i = start; i < end; ++i) + { result.push(new Line(text[i], spansFor(i), estimateHeight$$1)); } + return result } var from = change.from, to = change.to, text = change.text; @@ -7182,16 +4738,16 @@ // sure line objects move the way they are supposed to. 
var added = linesFor(0, text.length - 1); update(lastLine, lastLine.text, lastSpans); - if (nlines) doc.remove(from.line, nlines); - if (added.length) doc.insert(from.line, added); + if (nlines) { doc.remove(from.line, nlines); } + if (added.length) { doc.insert(from.line, added); } } else if (firstLine == lastLine) { if (text.length == 1) { update(firstLine, firstLine.text.slice(0, from.ch) + lastText + firstLine.text.slice(to.ch), lastSpans); } else { - var added = linesFor(1, text.length - 1); - added.push(new Line(lastText + firstLine.text.slice(to.ch), lastSpans, estimateHeight)); + var added$1 = linesFor(1, text.length - 1); + added$1.push(new Line(lastText + firstLine.text.slice(to.ch), lastSpans, estimateHeight$$1)); update(firstLine, firstLine.text.slice(0, from.ch) + text[0], spansFor(0)); - doc.insert(from.line + 1, added); + doc.insert(from.line + 1, added$1); } } else if (text.length == 1) { update(firstLine, firstLine.text.slice(0, from.ch) + text[0] + lastLine.text.slice(to.ch), spansFor(0)); @@ -7199,671 +4755,52 @@ } else { update(firstLine, firstLine.text.slice(0, from.ch) + text[0], spansFor(0)); update(lastLine, lastText + lastLine.text.slice(to.ch), lastSpans); - var added = linesFor(1, text.length - 1); - if (nlines > 1) doc.remove(from.line + 1, nlines - 1); - doc.insert(from.line + 1, added); + var added$2 = linesFor(1, text.length - 1); + if (nlines > 1) { doc.remove(from.line + 1, nlines - 1); } + doc.insert(from.line + 1, added$2); } signalLater(doc, "change", doc, change); } - // The document is represented as a BTree consisting of leaves, with - // chunk of lines in them, and branches, with up to ten leaves or - // other branch nodes below them. The top node is always a branch - // node, and is the document object itself (meaning it has - // additional methods and properties). - // - // All nodes have parent links. The tree is used both to go from - // line numbers to line objects, and to go from objects to numbers. - // It also indexes by height, and is used to convert between height - // and line object, and to find the total height of the document. - // - // See also http://marijnhaverbeke.nl/blog/codemirror-line-tree.html - - function LeafChunk(lines) { - this.lines = lines; - this.parent = null; - for (var i = 0, height = 0; i < lines.length; ++i) { - lines[i].parent = this; - height += lines[i].height; - } - this.height = height; - } - - LeafChunk.prototype = { - chunkSize: function() { return this.lines.length; }, - // Remove the n lines at offset 'at'. - removeInner: function(at, n) { - for (var i = at, e = at + n; i < e; ++i) { - var line = this.lines[i]; - this.height -= line.height; - cleanUpLine(line); - signalLater(line, "delete"); - } - this.lines.splice(at, n); - }, - // Helper used to collapse a small branch into a single leaf. - collapse: function(lines) { - lines.push.apply(lines, this.lines); - }, - // Insert the given array of lines at offset 'at', count them as - // having the given height. - insertInner: function(at, lines, height) { - this.height += height; - this.lines = this.lines.slice(0, at).concat(lines).concat(this.lines.slice(at)); - for (var i = 0; i < lines.length; ++i) lines[i].parent = this; - }, - // Used to iterate over a part of the tree. 
- iterN: function(at, n, op) { - for (var e = at + n; at < e; ++at) - if (op(this.lines[at])) return true; - } - }; - - function BranchChunk(children) { - this.children = children; - var size = 0, height = 0; - for (var i = 0; i < children.length; ++i) { - var ch = children[i]; - size += ch.chunkSize(); height += ch.height; - ch.parent = this; - } - this.size = size; - this.height = height; - this.parent = null; - } - - BranchChunk.prototype = { - chunkSize: function() { return this.size; }, - removeInner: function(at, n) { - this.size -= n; - for (var i = 0; i < this.children.length; ++i) { - var child = this.children[i], sz = child.chunkSize(); - if (at < sz) { - var rm = Math.min(n, sz - at), oldHeight = child.height; - child.removeInner(at, rm); - this.height -= oldHeight - child.height; - if (sz == rm) { this.children.splice(i--, 1); child.parent = null; } - if ((n -= rm) == 0) break; - at = 0; - } else at -= sz; - } - // If the result is smaller than 25 lines, ensure that it is a - // single leaf node. - if (this.size - n < 25 && - (this.children.length > 1 || !(this.children[0] instanceof LeafChunk))) { - var lines = []; - this.collapse(lines); - this.children = [new LeafChunk(lines)]; - this.children[0].parent = this; - } - }, - collapse: function(lines) { - for (var i = 0; i < this.children.length; ++i) this.children[i].collapse(lines); - }, - insertInner: function(at, lines, height) { - this.size += lines.length; - this.height += height; - for (var i = 0; i < this.children.length; ++i) { - var child = this.children[i], sz = child.chunkSize(); - if (at <= sz) { - child.insertInner(at, lines, height); - if (child.lines && child.lines.length > 50) { - while (child.lines.length > 50) { - var spilled = child.lines.splice(child.lines.length - 25, 25); - var newleaf = new LeafChunk(spilled); - child.height -= newleaf.height; - this.children.splice(i + 1, 0, newleaf); - newleaf.parent = this; - } - this.maybeSpill(); - } - break; - } - at -= sz; - } - }, - // When a node has grown, check whether it should be split. 
- maybeSpill: function() { - if (this.children.length <= 10) return; - var me = this; - do { - var spilled = me.children.splice(me.children.length - 5, 5); - var sibling = new BranchChunk(spilled); - if (!me.parent) { // Become the parent node - var copy = new BranchChunk(me.children); - copy.parent = me; - me.children = [copy, sibling]; - me = copy; - } else { - me.size -= sibling.size; - me.height -= sibling.height; - var myIndex = indexOf(me.parent.children, me); - me.parent.children.splice(myIndex + 1, 0, sibling); - } - sibling.parent = me.parent; - } while (me.children.length > 10); - me.parent.maybeSpill(); - }, - iterN: function(at, n, op) { - for (var i = 0; i < this.children.length; ++i) { - var child = this.children[i], sz = child.chunkSize(); - if (at < sz) { - var used = Math.min(n, sz - at); - if (child.iterN(at, used, op)) return true; - if ((n -= used) == 0) break; - at = 0; - } else at -= sz; - } - } - }; - - var nextDocId = 0; - var Doc = CodeMirror.Doc = function(text, mode, firstLine, lineSep) { - if (!(this instanceof Doc)) return new Doc(text, mode, firstLine, lineSep); - if (firstLine == null) firstLine = 0; - - BranchChunk.call(this, [new LeafChunk([new Line("", null)])]); - this.first = firstLine; - this.scrollTop = this.scrollLeft = 0; - this.cantEdit = false; - this.cleanGeneration = 1; - this.frontier = firstLine; - var start = Pos(firstLine, 0); - this.sel = simpleSelection(start); - this.history = new History(null); - this.id = ++nextDocId; - this.modeOption = mode; - this.lineSep = lineSep; - this.extend = false; - - if (typeof text == "string") text = this.splitLines(text); - updateDoc(this, {from: start, to: start, text: text}); - setSelection(this, simpleSelection(start), sel_dontScroll); - }; - - Doc.prototype = createObj(BranchChunk.prototype, { - constructor: Doc, - // Iterate over the document. Supports two forms -- with only one - // argument, it calls that for each line in the document. With - // three, it iterates over the range given by the first two (with - // the second being non-inclusive). - iter: function(from, to, op) { - if (op) this.iterN(from - this.first, to - from, op); - else this.iterN(this.first, this.first + this.size, from); - }, - - // Non-public interface for adding and removing lines. - insert: function(at, lines) { - var height = 0; - for (var i = 0; i < lines.length; ++i) height += lines[i].height; - this.insertInner(at - this.first, lines, height); - }, - remove: function(at, n) { this.removeInner(at - this.first, n); }, - - // From here, the methods are part of the public interface. Most - // are also available from CodeMirror (editor) instances. - - getValue: function(lineSep) { - var lines = getLines(this, this.first, this.first + this.size); - if (lineSep === false) return lines; - return lines.join(lineSep || this.lineSeparator()); - }, - setValue: docMethodOp(function(code) { - var top = Pos(this.first, 0), last = this.first + this.size - 1; - makeChange(this, {from: top, to: Pos(last, getLine(this, last).text.length), - text: this.splitLines(code), origin: "setValue", full: true}, true); - setSelection(this, simpleSelection(top)); - }), - replaceRange: function(code, from, to, origin) { - from = clipPos(this, from); - to = to ? 
clipPos(this, to) : from; - replaceRange(this, code, from, to, origin); - }, - getRange: function(from, to, lineSep) { - var lines = getBetween(this, clipPos(this, from), clipPos(this, to)); - if (lineSep === false) return lines; - return lines.join(lineSep || this.lineSeparator()); - }, - - getLine: function(line) {var l = this.getLineHandle(line); return l && l.text;}, - - getLineHandle: function(line) {if (isLine(this, line)) return getLine(this, line);}, - getLineNumber: function(line) {return lineNo(line);}, - - getLineHandleVisualStart: function(line) { - if (typeof line == "number") line = getLine(this, line); - return visualLine(line); - }, - - lineCount: function() {return this.size;}, - firstLine: function() {return this.first;}, - lastLine: function() {return this.first + this.size - 1;}, - - clipPos: function(pos) {return clipPos(this, pos);}, - - getCursor: function(start) { - var range = this.sel.primary(), pos; - if (start == null || start == "head") pos = range.head; - else if (start == "anchor") pos = range.anchor; - else if (start == "end" || start == "to" || start === false) pos = range.to(); - else pos = range.from(); - return pos; - }, - listSelections: function() { return this.sel.ranges; }, - somethingSelected: function() {return this.sel.somethingSelected();}, - - setCursor: docMethodOp(function(line, ch, options) { - setSimpleSelection(this, clipPos(this, typeof line == "number" ? Pos(line, ch || 0) : line), null, options); - }), - setSelection: docMethodOp(function(anchor, head, options) { - setSimpleSelection(this, clipPos(this, anchor), clipPos(this, head || anchor), options); - }), - extendSelection: docMethodOp(function(head, other, options) { - extendSelection(this, clipPos(this, head), other && clipPos(this, other), options); - }), - extendSelections: docMethodOp(function(heads, options) { - extendSelections(this, clipPosArray(this, heads), options); - }), - extendSelectionsBy: docMethodOp(function(f, options) { - var heads = map(this.sel.ranges, f); - extendSelections(this, clipPosArray(this, heads), options); - }), - setSelections: docMethodOp(function(ranges, primary, options) { - if (!ranges.length) return; - for (var i = 0, out = []; i < ranges.length; i++) - out[i] = new Range(clipPos(this, ranges[i].anchor), - clipPos(this, ranges[i].head)); - if (primary == null) primary = Math.min(ranges.length - 1, this.sel.primIndex); - setSelection(this, normalizeSelection(out, primary), options); - }), - addSelection: docMethodOp(function(anchor, head, options) { - var ranges = this.sel.ranges.slice(0); - ranges.push(new Range(clipPos(this, anchor), clipPos(this, head || anchor))); - setSelection(this, normalizeSelection(ranges, ranges.length - 1), options); - }), - - getSelection: function(lineSep) { - var ranges = this.sel.ranges, lines; - for (var i = 0; i < ranges.length; i++) { - var sel = getBetween(this, ranges[i].from(), ranges[i].to()); - lines = lines ? 
lines.concat(sel) : sel; - } - if (lineSep === false) return lines; - else return lines.join(lineSep || this.lineSeparator()); - }, - getSelections: function(lineSep) { - var parts = [], ranges = this.sel.ranges; - for (var i = 0; i < ranges.length; i++) { - var sel = getBetween(this, ranges[i].from(), ranges[i].to()); - if (lineSep !== false) sel = sel.join(lineSep || this.lineSeparator()); - parts[i] = sel; - } - return parts; - }, - replaceSelection: function(code, collapse, origin) { - var dup = []; - for (var i = 0; i < this.sel.ranges.length; i++) - dup[i] = code; - this.replaceSelections(dup, collapse, origin || "+input"); - }, - replaceSelections: docMethodOp(function(code, collapse, origin) { - var changes = [], sel = this.sel; - for (var i = 0; i < sel.ranges.length; i++) { - var range = sel.ranges[i]; - changes[i] = {from: range.from(), to: range.to(), text: this.splitLines(code[i]), origin: origin}; - } - var newSel = collapse && collapse != "end" && computeReplacedSel(this, changes, collapse); - for (var i = changes.length - 1; i >= 0; i--) - makeChange(this, changes[i]); - if (newSel) setSelectionReplaceHistory(this, newSel); - else if (this.cm) ensureCursorVisible(this.cm); - }), - undo: docMethodOp(function() {makeChangeFromHistory(this, "undo");}), - redo: docMethodOp(function() {makeChangeFromHistory(this, "redo");}), - undoSelection: docMethodOp(function() {makeChangeFromHistory(this, "undo", true);}), - redoSelection: docMethodOp(function() {makeChangeFromHistory(this, "redo", true);}), - - setExtending: function(val) {this.extend = val;}, - getExtending: function() {return this.extend;}, - - historySize: function() { - var hist = this.history, done = 0, undone = 0; - for (var i = 0; i < hist.done.length; i++) if (!hist.done[i].ranges) ++done; - for (var i = 0; i < hist.undone.length; i++) if (!hist.undone[i].ranges) ++undone; - return {undo: done, redo: undone}; - }, - clearHistory: function() {this.history = new History(this.history.maxGeneration);}, - - markClean: function() { - this.cleanGeneration = this.changeGeneration(true); - }, - changeGeneration: function(forceSplit) { - if (forceSplit) - this.history.lastOp = this.history.lastSelOp = this.history.lastOrigin = null; - return this.history.generation; - }, - isClean: function (gen) { - return this.history.generation == (gen || this.cleanGeneration); - }, - - getHistory: function() { - return {done: copyHistoryArray(this.history.done), - undone: copyHistoryArray(this.history.undone)}; - }, - setHistory: function(histData) { - var hist = this.history = new History(this.history.maxGeneration); - hist.done = copyHistoryArray(histData.done.slice(0), null, true); - hist.undone = copyHistoryArray(histData.undone.slice(0), null, true); - }, - - addLineClass: docMethodOp(function(handle, where, cls) { - return changeLine(this, handle, where == "gutter" ? "gutter" : "class", function(line) { - var prop = where == "text" ? "textClass" - : where == "background" ? "bgClass" - : where == "gutter" ? "gutterClass" : "wrapClass"; - if (!line[prop]) line[prop] = cls; - else if (classTest(cls).test(line[prop])) return false; - else line[prop] += " " + cls; - return true; - }); - }), - removeLineClass: docMethodOp(function(handle, where, cls) { - return changeLine(this, handle, where == "gutter" ? "gutter" : "class", function(line) { - var prop = where == "text" ? "textClass" - : where == "background" ? "bgClass" - : where == "gutter" ? 
"gutterClass" : "wrapClass"; - var cur = line[prop]; - if (!cur) return false; - else if (cls == null) line[prop] = null; - else { - var found = cur.match(classTest(cls)); - if (!found) return false; - var end = found.index + found[0].length; - line[prop] = cur.slice(0, found.index) + (!found.index || end == cur.length ? "" : " ") + cur.slice(end) || null; - } - return true; - }); - }), - - addLineWidget: docMethodOp(function(handle, node, options) { - return addLineWidget(this, handle, node, options); - }), - removeLineWidget: function(widget) { widget.clear(); }, - - markText: function(from, to, options) { - return markText(this, clipPos(this, from), clipPos(this, to), options, options && options.type || "range"); - }, - setBookmark: function(pos, options) { - var realOpts = {replacedWith: options && (options.nodeType == null ? options.widget : options), - insertLeft: options && options.insertLeft, - clearWhenEmpty: false, shared: options && options.shared, - handleMouseEvents: options && options.handleMouseEvents}; - pos = clipPos(this, pos); - return markText(this, pos, pos, realOpts, "bookmark"); - }, - findMarksAt: function(pos) { - pos = clipPos(this, pos); - var markers = [], spans = getLine(this, pos.line).markedSpans; - if (spans) for (var i = 0; i < spans.length; ++i) { - var span = spans[i]; - if ((span.from == null || span.from <= pos.ch) && - (span.to == null || span.to >= pos.ch)) - markers.push(span.marker.parent || span.marker); - } - return markers; - }, - findMarks: function(from, to, filter) { - from = clipPos(this, from); to = clipPos(this, to); - var found = [], lineNo = from.line; - this.iter(from.line, to.line + 1, function(line) { - var spans = line.markedSpans; - if (spans) for (var i = 0; i < spans.length; i++) { - var span = spans[i]; - if (!(lineNo == from.line && from.ch > span.to || - span.from == null && lineNo != from.line|| - lineNo == to.line && span.from > to.ch) && - (!filter || filter(span.marker))) - found.push(span.marker.parent || span.marker); - } - ++lineNo; - }); - return found; - }, - getAllMarks: function() { - var markers = []; - this.iter(function(line) { - var sps = line.markedSpans; - if (sps) for (var i = 0; i < sps.length; ++i) - if (sps[i].from != null) markers.push(sps[i].marker); - }); - return markers; - }, - - posFromIndex: function(off) { - var ch, lineNo = this.first; - this.iter(function(line) { - var sz = line.text.length + 1; - if (sz > off) { ch = off; return true; } - off -= sz; - ++lineNo; - }); - return clipPos(this, Pos(lineNo, ch)); - }, - indexFromPos: function (coords) { - coords = clipPos(this, coords); - var index = coords.ch; - if (coords.line < this.first || coords.ch < 0) return 0; - this.iter(this.first, coords.line, function (line) { - index += line.text.length + 1; - }); - return index; - }, - - copy: function(copyHistory) { - var doc = new Doc(getLines(this, this.first, this.first + this.size), - this.modeOption, this.first, this.lineSep); - doc.scrollTop = this.scrollTop; doc.scrollLeft = this.scrollLeft; - doc.sel = this.sel; - doc.extend = false; - if (copyHistory) { - doc.history.undoDepth = this.history.undoDepth; - doc.setHistory(this.getHistory()); - } - return doc; - }, - - linkedDoc: function(options) { - if (!options) options = {}; - var from = this.first, to = this.first + this.size; - if (options.from != null && options.from > from) from = options.from; - if (options.to != null && options.to < to) to = options.to; - var copy = new Doc(getLines(this, from, to), options.mode || this.modeOption, from, 
this.lineSep); - if (options.sharedHist) copy.history = this.history; - (this.linked || (this.linked = [])).push({doc: copy, sharedHist: options.sharedHist}); - copy.linked = [{doc: this, isParent: true, sharedHist: options.sharedHist}]; - copySharedMarkers(copy, findSharedMarkers(this)); - return copy; - }, - unlinkDoc: function(other) { - if (other instanceof CodeMirror) other = other.doc; - if (this.linked) for (var i = 0; i < this.linked.length; ++i) { - var link = this.linked[i]; - if (link.doc != other) continue; - this.linked.splice(i, 1); - other.unlinkDoc(this); - detachSharedMarkers(findSharedMarkers(this)); - break; - } - // If the histories were shared, split them again - if (other.history == this.history) { - var splitIds = [other.id]; - linkedDocs(other, function(doc) {splitIds.push(doc.id);}, true); - other.history = new History(null); - other.history.done = copyHistoryArray(this.history.done, splitIds); - other.history.undone = copyHistoryArray(this.history.undone, splitIds); - } - }, - iterLinkedDocs: function(f) {linkedDocs(this, f);}, - - getMode: function() {return this.mode;}, - getEditor: function() {return this.cm;}, - - splitLines: function(str) { - if (this.lineSep) return str.split(this.lineSep); - return splitLinesAuto(str); - }, - lineSeparator: function() { return this.lineSep || "\n"; } - }); - - // Public alias. - Doc.prototype.eachLine = Doc.prototype.iter; - - // Set up methods on CodeMirror's prototype to redirect to the editor's document. - var dontDelegate = "iter insert remove copy getEditor constructor".split(" "); - for (var prop in Doc.prototype) if (Doc.prototype.hasOwnProperty(prop) && indexOf(dontDelegate, prop) < 0) - CodeMirror.prototype[prop] = (function(method) { - return function() {return method.apply(this.doc, arguments);}; - })(Doc.prototype[prop]); - - eventMixin(Doc); - // Call f for all linked documents. function linkedDocs(doc, f, sharedHistOnly) { function propagate(doc, skip, sharedHist) { - if (doc.linked) for (var i = 0; i < doc.linked.length; ++i) { + if (doc.linked) { for (var i = 0; i < doc.linked.length; ++i) { var rel = doc.linked[i]; - if (rel.doc == skip) continue; + if (rel.doc == skip) { continue } var shared = sharedHist && rel.sharedHist; - if (sharedHistOnly && !shared) continue; + if (sharedHistOnly && !shared) { continue } f(rel.doc, shared); propagate(rel.doc, doc, shared); - } + } } } propagate(doc, null, true); } // Attach a document to an editor. function attachDoc(cm, doc) { - if (doc.cm) throw new Error("This document is already in use."); + if (doc.cm) { throw new Error("This document is already in use.") } cm.doc = doc; doc.cm = cm; estimateLineHeights(cm); loadMode(cm); - if (!cm.options.lineWrapping) findMaxLine(cm); + setDirectionClass(cm); + if (!cm.options.lineWrapping) { findMaxLine(cm); } cm.options.mode = doc.modeOption; regChange(cm); } - // LINE UTILITIES - - // Find the line object corresponding to the given line number. - function getLine(doc, n) { - n -= doc.first; - if (n < 0 || n >= doc.size) throw new Error("There is no line " + (n + doc.first) + " in the document."); - for (var chunk = doc; !chunk.lines;) { - for (var i = 0;; ++i) { - var child = chunk.children[i], sz = child.chunkSize(); - if (n < sz) { chunk = child; break; } - n -= sz; - } - } - return chunk.lines[n]; - } - - // Get the part of a document between two positions, as an array of - // strings. 
- function getBetween(doc, start, end) { - var out = [], n = start.line; - doc.iter(start.line, end.line + 1, function(line) { - var text = line.text; - if (n == end.line) text = text.slice(0, end.ch); - if (n == start.line) text = text.slice(start.ch); - out.push(text); - ++n; + function setDirectionClass(cm) { + (cm.doc.direction == "rtl" ? addClass : rmClass)(cm.display.lineDiv, "CodeMirror-rtl"); + } + + function directionChanged(cm) { + runInOp(cm, function () { + setDirectionClass(cm); + regChange(cm); }); - return out; - } - // Get the lines between from and to, as array of strings. - function getLines(doc, from, to) { - var out = []; - doc.iter(from, to, function(line) { out.push(line.text); }); - return out; - } - - // Update the height of a line, propagating the height change - // upwards to parent nodes. - function updateLineHeight(line, height) { - var diff = height - line.height; - if (diff) for (var n = line; n; n = n.parent) n.height += diff; - } - - // Given a line object, find its line number by walking up through - // its parent links. - function lineNo(line) { - if (line.parent == null) return null; - var cur = line.parent, no = indexOf(cur.lines, line); - for (var chunk = cur.parent; chunk; cur = chunk, chunk = chunk.parent) { - for (var i = 0;; ++i) { - if (chunk.children[i] == cur) break; - no += chunk.children[i].chunkSize(); - } - } - return no + cur.first; - } - - // Find the line at the given vertical position, using the height - // information in the document tree. - function lineAtHeight(chunk, h) { - var n = chunk.first; - outer: do { - for (var i = 0; i < chunk.children.length; ++i) { - var child = chunk.children[i], ch = child.height; - if (h < ch) { chunk = child; continue outer; } - h -= ch; - n += child.chunkSize(); - } - return n; - } while (!chunk.lines); - for (var i = 0; i < chunk.lines.length; ++i) { - var line = chunk.lines[i], lh = line.height; - if (h < lh) break; - h -= lh; - } - return n + i; - } - - - // Find the height above the given line. - function heightAtLine(lineObj) { - lineObj = visualLine(lineObj); - - var h = 0, chunk = lineObj.parent; - for (var i = 0; i < chunk.lines.length; ++i) { - var line = chunk.lines[i]; - if (line == lineObj) break; - else h += line.height; - } - for (var p = chunk.parent; p; chunk = p, p = chunk.parent) { - for (var i = 0; i < p.children.length; ++i) { - var cur = p.children[i]; - if (cur == chunk) break; - else h += cur.height; - } - } - return h; - } - - // Get the bidi ordering for the given line (and cache it). Returns - // false for lines that are fully left-to-right, and an array of - // BidiSpan objects otherwise. - function getOrder(line) { - var order = line.order; - if (order == null) order = line.order = bidiOrdering(line.text); - return order; - } - - // HISTORY + } function History(startGen) { // Arrays of change events and selections. Doing something adds an @@ -7885,8 +4822,8 @@ function historyChangeFromChange(doc, change) { var histChange = {from: copyPos(change.from), to: changeEnd(change), text: getBetween(doc, change.from, change.to)}; attachLocalSpans(doc, histChange, change.from.line, change.to.line + 1); - linkedDocs(doc, function(doc) {attachLocalSpans(doc, histChange, change.from.line, change.to.line + 1);}, true); - return histChange; + linkedDocs(doc, function (doc) { return attachLocalSpans(doc, histChange, change.from.line, change.to.line + 1); }, true); + return histChange } // Pop all selection events off the end of a history array. 
Stop at @@ -7894,8 +4831,8 @@ function clearSelectionEvents(array) { while (array.length) { var last = lst(array); - if (last.ranges) array.pop(); - else break; + if (last.ranges) { array.pop(); } + else { break } } } @@ -7904,30 +4841,31 @@ function lastChangeEvent(hist, force) { if (force) { clearSelectionEvents(hist.done); - return lst(hist.done); + return lst(hist.done) } else if (hist.done.length && !lst(hist.done).ranges) { - return lst(hist.done); + return lst(hist.done) } else if (hist.done.length > 1 && !hist.done[hist.done.length - 2].ranges) { hist.done.pop(); - return lst(hist.done); + return lst(hist.done) } } // Register a change in the history. Merges changes that are within - // a single operation, ore are close together with an origin that + // a single operation, or are close together with an origin that // allows merging (starting with "+") into a single event. function addChangeToHistory(doc, change, selAfter, opId) { var hist = doc.history; hist.undone.length = 0; var time = +new Date, cur; + var last; if ((hist.lastOp == opId || hist.lastOrigin == change.origin && change.origin && - ((change.origin.charAt(0) == "+" && doc.cm && hist.lastModTime > time - doc.cm.options.historyEventDelay) || + ((change.origin.charAt(0) == "+" && hist.lastModTime > time - (doc.cm ? doc.cm.options.historyEventDelay : 500)) || change.origin.charAt(0) == "*")) && (cur = lastChangeEvent(hist, hist.lastOp == opId))) { // Merge this change into the last event - var last = lst(cur.changes); + last = lst(cur.changes); if (cmp(change.from, change.to) == 0 && cmp(change.from, last.to) == 0) { // Optimized case for simple insertion -- don't want to add // new changesets for every character typed @@ -7940,13 +4878,13 @@ // Can not be merged, start a new event. var before = lst(hist.done); if (!before || !before.ranges) - pushSelectionToHistory(doc.sel, hist.done); + { pushSelectionToHistory(doc.sel, hist.done); } cur = {changes: [historyChangeFromChange(doc, change)], generation: hist.generation}; hist.done.push(cur); while (hist.done.length > hist.undoDepth) { hist.done.shift(); - if (!hist.done[0].ranges) hist.done.shift(); + if (!hist.done[0].ranges) { hist.done.shift(); } } } hist.done.push(selAfter); @@ -7955,7 +4893,7 @@ hist.lastOp = hist.lastSelOp = opId; hist.lastOrigin = hist.lastSelOrigin = change.origin; - if (!last) signal(doc, "historyAdded"); + if (!last) { signal(doc, "historyAdded"); } } function selectionEventCanBeMerged(doc, origin, prev, sel) { @@ -7964,7 +4902,7 @@ ch == "+" && prev.ranges.length == sel.ranges.length && prev.somethingSelected() == sel.somethingSelected() && - new Date - doc.history.lastSelTime <= (doc.cm ? doc.cm.options.historyEventDelay : 500); + new Date - doc.history.lastSelTime <= (doc.cm ? 
doc.cm.options.historyEventDelay : 500) } // Called whenever the selection changes, sets the new selection as @@ -7982,29 +4920,29 @@ (origin && hist.lastSelOrigin == origin && (hist.lastModTime == hist.lastSelTime && hist.lastOrigin == origin || selectionEventCanBeMerged(doc, origin, lst(hist.done), sel)))) - hist.done[hist.done.length - 1] = sel; + { hist.done[hist.done.length - 1] = sel; } else - pushSelectionToHistory(sel, hist.done); + { pushSelectionToHistory(sel, hist.done); } hist.lastSelTime = +new Date; hist.lastSelOrigin = origin; hist.lastSelOp = opId; if (options && options.clearRedo !== false) - clearSelectionEvents(hist.undone); + { clearSelectionEvents(hist.undone); } } function pushSelectionToHistory(sel, dest) { var top = lst(dest); if (!(top && top.ranges && top.equals(sel))) - dest.push(sel); + { dest.push(sel); } } // Used to store marked span information in the history. function attachLocalSpans(doc, change, from, to) { var existing = change["spans_" + doc.id], n = 0; - doc.iter(Math.max(doc.first, from), Math.min(doc.first + doc.size, to), function(line) { + doc.iter(Math.max(doc.first, from), Math.min(doc.first + doc.size, to), function (line) { if (line.markedSpans) - (existing || (existing = change["spans_" + doc.id] = {}))[n] = line.markedSpans; + { (existing || (existing = change["spans_" + doc.id] = {}))[n] = line.markedSpans; } ++n; }); } @@ -8012,46 +4950,549 @@ // When un/re-doing restores text containing marked spans, those // that have been explicitly cleared should not be restored. function removeClearedSpans(spans) { - if (!spans) return null; - for (var i = 0, out; i < spans.length; ++i) { - if (spans[i].marker.explicitlyCleared) { if (!out) out = spans.slice(0, i); } - else if (out) out.push(spans[i]); - } - return !out ? spans : out.length ? out : null; + if (!spans) { return null } + var out; + for (var i = 0; i < spans.length; ++i) { + if (spans[i].marker.explicitlyCleared) { if (!out) { out = spans.slice(0, i); } } + else if (out) { out.push(spans[i]); } + } + return !out ? spans : out.length ? out : null } // Retrieve and filter the old marked spans stored in a change event. function getOldSpans(doc, change) { var found = change["spans_" + doc.id]; - if (!found) return null; - for (var i = 0, nw = []; i < change.text.length; ++i) - nw.push(removeClearedSpans(found[i])); - return nw; + if (!found) { return null } + var nw = []; + for (var i = 0; i < change.text.length; ++i) + { nw.push(removeClearedSpans(found[i])); } + return nw + } + + // Used for un/re-doing changes from the history. Combines the + // result of computing the existing spans with the set of spans that + // existed in the history (so that deleting around a span and then + // undoing brings back the span). 
+ function mergeOldSpans(doc, change) { + var old = getOldSpans(doc, change); + var stretched = stretchSpansOverChange(doc, change); + if (!old) { return stretched } + if (!stretched) { return old } + + for (var i = 0; i < old.length; ++i) { + var oldCur = old[i], stretchCur = stretched[i]; + if (oldCur && stretchCur) { + spans: for (var j = 0; j < stretchCur.length; ++j) { + var span = stretchCur[j]; + for (var k = 0; k < oldCur.length; ++k) + { if (oldCur[k].marker == span.marker) { continue spans } } + oldCur.push(span); + } + } else if (stretchCur) { + old[i] = stretchCur; + } + } + return old } // Used both to provide a JSON-safe object in .getHistory, and, when // detaching a document, to split the history in two function copyHistoryArray(events, newGroup, instantiateSel) { - for (var i = 0, copy = []; i < events.length; ++i) { + var copy = []; + for (var i = 0; i < events.length; ++i) { var event = events[i]; if (event.ranges) { copy.push(instantiateSel ? Selection.prototype.deepCopy.call(event) : event); - continue; + continue } var changes = event.changes, newChanges = []; copy.push({changes: newChanges}); for (var j = 0; j < changes.length; ++j) { - var change = changes[j], m; + var change = changes[j], m = (void 0); newChanges.push({from: change.from, to: change.to, text: change.text}); - if (newGroup) for (var prop in change) if (m = prop.match(/^spans_(\d+)$/)) { + if (newGroup) { for (var prop in change) { if (m = prop.match(/^spans_(\d+)$/)) { if (indexOf(newGroup, Number(m[1])) > -1) { lst(newChanges)[prop] = change[prop]; delete change[prop]; } + } } } + } + } + return copy + } + + // The 'scroll' parameter given to many of these indicated whether + // the new cursor position should be scrolled into view after + // modifying the selection. + + // If shift is held or the extend flag is set, extends a range to + // include a given position (and optionally a second position). + // Otherwise, simply returns the range between the given positions. + // Used for cursor motion and such. + function extendRange(range, head, other, extend) { + if (extend) { + var anchor = range.anchor; + if (other) { + var posBefore = cmp(head, anchor) < 0; + if (posBefore != (cmp(other, anchor) < 0)) { + anchor = head; + head = other; + } else if (posBefore != (cmp(head, other) < 0)) { + head = other; } } - } - return copy; + return new Range(anchor, head) + } else { + return new Range(other || head, head) + } + } + + // Extend the primary selection range, discard the rest. + function extendSelection(doc, head, other, options, extend) { + if (extend == null) { extend = doc.cm && (doc.cm.display.shift || doc.extend); } + setSelection(doc, new Selection([extendRange(doc.sel.primary(), head, other, extend)], 0), options); + } + + // Extend all selections (pos is an array of selections with length + // equal the number of selections) + function extendSelections(doc, heads, options) { + var out = []; + var extend = doc.cm && (doc.cm.display.shift || doc.extend); + for (var i = 0; i < doc.sel.ranges.length; i++) + { out[i] = extendRange(doc.sel.ranges[i], heads[i], null, extend); } + var newSel = normalizeSelection(doc.cm, out, doc.sel.primIndex); + setSelection(doc, newSel, options); + } + + // Updates a single range in the selection. + function replaceOneSelection(doc, i, range, options) { + var ranges = doc.sel.ranges.slice(0); + ranges[i] = range; + setSelection(doc, normalizeSelection(doc.cm, ranges, doc.sel.primIndex), options); + } + + // Reset the selection to a single range. 
+ function setSimpleSelection(doc, anchor, head, options) { + setSelection(doc, simpleSelection(anchor, head), options); + } + + // Give beforeSelectionChange handlers a change to influence a + // selection update. + function filterSelectionChange(doc, sel, options) { + var obj = { + ranges: sel.ranges, + update: function(ranges) { + var this$1 = this; + + this.ranges = []; + for (var i = 0; i < ranges.length; i++) + { this$1.ranges[i] = new Range(clipPos(doc, ranges[i].anchor), + clipPos(doc, ranges[i].head)); } + }, + origin: options && options.origin + }; + signal(doc, "beforeSelectionChange", doc, obj); + if (doc.cm) { signal(doc.cm, "beforeSelectionChange", doc.cm, obj); } + if (obj.ranges != sel.ranges) { return normalizeSelection(doc.cm, obj.ranges, obj.ranges.length - 1) } + else { return sel } + } + + function setSelectionReplaceHistory(doc, sel, options) { + var done = doc.history.done, last = lst(done); + if (last && last.ranges) { + done[done.length - 1] = sel; + setSelectionNoUndo(doc, sel, options); + } else { + setSelection(doc, sel, options); + } + } + + // Set a new selection. + function setSelection(doc, sel, options) { + setSelectionNoUndo(doc, sel, options); + addSelectionToHistory(doc, doc.sel, doc.cm ? doc.cm.curOp.id : NaN, options); + } + + function setSelectionNoUndo(doc, sel, options) { + if (hasHandler(doc, "beforeSelectionChange") || doc.cm && hasHandler(doc.cm, "beforeSelectionChange")) + { sel = filterSelectionChange(doc, sel, options); } + + var bias = options && options.bias || + (cmp(sel.primary().head, doc.sel.primary().head) < 0 ? -1 : 1); + setSelectionInner(doc, skipAtomicInSelection(doc, sel, bias, true)); + + if (!(options && options.scroll === false) && doc.cm) + { ensureCursorVisible(doc.cm); } + } + + function setSelectionInner(doc, sel) { + if (sel.equals(doc.sel)) { return } + + doc.sel = sel; + + if (doc.cm) { + doc.cm.curOp.updateInput = 1; + doc.cm.curOp.selectionChanged = true; + signalCursorActivity(doc.cm); + } + signalLater(doc, "cursorActivity", doc); + } + + // Verify that the selection does not partially select any atomic + // marked ranges. + function reCheckSelection(doc) { + setSelectionInner(doc, skipAtomicInSelection(doc, doc.sel, null, false)); + } + + // Return a selection that does not partially select any atomic + // ranges. + function skipAtomicInSelection(doc, sel, bias, mayClear) { + var out; + for (var i = 0; i < sel.ranges.length; i++) { + var range = sel.ranges[i]; + var old = sel.ranges.length == doc.sel.ranges.length && doc.sel.ranges[i]; + var newAnchor = skipAtomic(doc, range.anchor, old && old.anchor, bias, mayClear); + var newHead = skipAtomic(doc, range.head, old && old.head, bias, mayClear); + if (out || newAnchor != range.anchor || newHead != range.head) { + if (!out) { out = sel.ranges.slice(0, i); } + out[i] = new Range(newAnchor, newHead); + } + } + return out ? normalizeSelection(doc.cm, out, sel.primIndex) : sel + } + + function skipAtomicInner(doc, pos, oldPos, dir, mayClear) { + var line = getLine(doc, pos.line); + if (line.markedSpans) { for (var i = 0; i < line.markedSpans.length; ++i) { + var sp = line.markedSpans[i], m = sp.marker; + + // Determine if we should prevent the cursor being placed to the left/right of an atomic marker + // Historically this was determined using the inclusiveLeft/Right option, but the new way to control it + // is with selectLeft/Right + var preventCursorLeft = ("selectLeft" in m) ? !m.selectLeft : m.inclusiveLeft; + var preventCursorRight = ("selectRight" in m) ? 
!m.selectRight : m.inclusiveRight; + + if ((sp.from == null || (preventCursorLeft ? sp.from <= pos.ch : sp.from < pos.ch)) && + (sp.to == null || (preventCursorRight ? sp.to >= pos.ch : sp.to > pos.ch))) { + if (mayClear) { + signal(m, "beforeCursorEnter"); + if (m.explicitlyCleared) { + if (!line.markedSpans) { break } + else {--i; continue} + } + } + if (!m.atomic) { continue } + + if (oldPos) { + var near = m.find(dir < 0 ? 1 : -1), diff = (void 0); + if (dir < 0 ? preventCursorRight : preventCursorLeft) + { near = movePos(doc, near, -dir, near && near.line == pos.line ? line : null); } + if (near && near.line == pos.line && (diff = cmp(near, oldPos)) && (dir < 0 ? diff < 0 : diff > 0)) + { return skipAtomicInner(doc, near, pos, dir, mayClear) } + } + + var far = m.find(dir < 0 ? -1 : 1); + if (dir < 0 ? preventCursorLeft : preventCursorRight) + { far = movePos(doc, far, dir, far.line == pos.line ? line : null); } + return far ? skipAtomicInner(doc, far, pos, dir, mayClear) : null + } + } } + return pos + } + + // Ensure a given position is not inside an atomic range. + function skipAtomic(doc, pos, oldPos, bias, mayClear) { + var dir = bias || 1; + var found = skipAtomicInner(doc, pos, oldPos, dir, mayClear) || + (!mayClear && skipAtomicInner(doc, pos, oldPos, dir, true)) || + skipAtomicInner(doc, pos, oldPos, -dir, mayClear) || + (!mayClear && skipAtomicInner(doc, pos, oldPos, -dir, true)); + if (!found) { + doc.cantEdit = true; + return Pos(doc.first, 0) + } + return found + } + + function movePos(doc, pos, dir, line) { + if (dir < 0 && pos.ch == 0) { + if (pos.line > doc.first) { return clipPos(doc, Pos(pos.line - 1)) } + else { return null } + } else if (dir > 0 && pos.ch == (line || getLine(doc, pos.line)).text.length) { + if (pos.line < doc.first + doc.size - 1) { return Pos(pos.line + 1, 0) } + else { return null } + } else { + return new Pos(pos.line, pos.ch + dir) + } + } + + function selectAll(cm) { + cm.setSelection(Pos(cm.firstLine(), 0), Pos(cm.lastLine()), sel_dontScroll); + } + + // UPDATING + + // Allow "beforeChange" event handlers to influence a change + function filterChange(doc, change, update) { + var obj = { + canceled: false, + from: change.from, + to: change.to, + text: change.text, + origin: change.origin, + cancel: function () { return obj.canceled = true; } + }; + if (update) { obj.update = function (from, to, text, origin) { + if (from) { obj.from = clipPos(doc, from); } + if (to) { obj.to = clipPos(doc, to); } + if (text) { obj.text = text; } + if (origin !== undefined) { obj.origin = origin; } + }; } + signal(doc, "beforeChange", doc, obj); + if (doc.cm) { signal(doc.cm, "beforeChange", doc.cm, obj); } + + if (obj.canceled) { + if (doc.cm) { doc.cm.curOp.updateInput = 2; } + return null + } + return {from: obj.from, to: obj.to, text: obj.text, origin: obj.origin} + } + + // Apply a change to a document, and add it to the document's + // history, and propagating it to all linked documents. + function makeChange(doc, change, ignoreReadOnly) { + if (doc.cm) { + if (!doc.cm.curOp) { return operation(doc.cm, makeChange)(doc, change, ignoreReadOnly) } + if (doc.cm.state.suppressEdits) { return } + } + + if (hasHandler(doc, "beforeChange") || doc.cm && hasHandler(doc.cm, "beforeChange")) { + change = filterChange(doc, change, true); + if (!change) { return } + } + + // Possibly split or suppress the update based on the presence + // of read-only spans in its range. 
+ var split = sawReadOnlySpans && !ignoreReadOnly && removeReadOnlyRanges(doc, change.from, change.to); + if (split) { + for (var i = split.length - 1; i >= 0; --i) + { makeChangeInner(doc, {from: split[i].from, to: split[i].to, text: i ? [""] : change.text, origin: change.origin}); } + } else { + makeChangeInner(doc, change); + } + } + + function makeChangeInner(doc, change) { + if (change.text.length == 1 && change.text[0] == "" && cmp(change.from, change.to) == 0) { return } + var selAfter = computeSelAfterChange(doc, change); + addChangeToHistory(doc, change, selAfter, doc.cm ? doc.cm.curOp.id : NaN); + + makeChangeSingleDoc(doc, change, selAfter, stretchSpansOverChange(doc, change)); + var rebased = []; + + linkedDocs(doc, function (doc, sharedHist) { + if (!sharedHist && indexOf(rebased, doc.history) == -1) { + rebaseHist(doc.history, change); + rebased.push(doc.history); + } + makeChangeSingleDoc(doc, change, null, stretchSpansOverChange(doc, change)); + }); + } + + // Revert a change stored in a document's history. + function makeChangeFromHistory(doc, type, allowSelectionOnly) { + var suppress = doc.cm && doc.cm.state.suppressEdits; + if (suppress && !allowSelectionOnly) { return } + + var hist = doc.history, event, selAfter = doc.sel; + var source = type == "undo" ? hist.done : hist.undone, dest = type == "undo" ? hist.undone : hist.done; + + // Verify that there is a useable event (so that ctrl-z won't + // needlessly clear selection events) + var i = 0; + for (; i < source.length; i++) { + event = source[i]; + if (allowSelectionOnly ? event.ranges && !event.equals(doc.sel) : !event.ranges) + { break } + } + if (i == source.length) { return } + hist.lastOrigin = hist.lastSelOrigin = null; + + for (;;) { + event = source.pop(); + if (event.ranges) { + pushSelectionToHistory(event, dest); + if (allowSelectionOnly && !event.equals(doc.sel)) { + setSelection(doc, event, {clearRedo: false}); + return + } + selAfter = event; + } else if (suppress) { + source.push(event); + return + } else { break } + } + + // Build up a reverse change object to add to the opposite history + // stack (redo when undoing, and vice versa). + var antiChanges = []; + pushSelectionToHistory(selAfter, dest); + dest.push({changes: antiChanges, generation: hist.generation}); + hist.generation = event.generation || ++hist.maxGeneration; + + var filter = hasHandler(doc, "beforeChange") || doc.cm && hasHandler(doc.cm, "beforeChange"); + + var loop = function ( i ) { + var change = event.changes[i]; + change.origin = type; + if (filter && !filterChange(doc, change, false)) { + source.length = 0; + return {} + } + + antiChanges.push(historyChangeFromChange(doc, change)); + + var after = i ? computeSelAfterChange(doc, change) : lst(source); + makeChangeSingleDoc(doc, change, after, mergeOldSpans(doc, change)); + if (!i && doc.cm) { doc.cm.scrollIntoView({from: change.from, to: changeEnd(change)}); } + var rebased = []; + + // Propagate to the linked documents + linkedDocs(doc, function (doc, sharedHist) { + if (!sharedHist && indexOf(rebased, doc.history) == -1) { + rebaseHist(doc.history, change); + rebased.push(doc.history); + } + makeChangeSingleDoc(doc, change, null, mergeOldSpans(doc, change)); + }); + }; + + for (var i$1 = event.changes.length - 1; i$1 >= 0; --i$1) { + var returned = loop( i$1 ); + + if ( returned ) return returned.v; + } + } + + // Sub-views need their line numbers shifted when text is added + // above or below them in the parent document. 
+ function shiftDoc(doc, distance) { + if (distance == 0) { return } + doc.first += distance; + doc.sel = new Selection(map(doc.sel.ranges, function (range) { return new Range( + Pos(range.anchor.line + distance, range.anchor.ch), + Pos(range.head.line + distance, range.head.ch) + ); }), doc.sel.primIndex); + if (doc.cm) { + regChange(doc.cm, doc.first, doc.first - distance, distance); + for (var d = doc.cm.display, l = d.viewFrom; l < d.viewTo; l++) + { regLineChange(doc.cm, l, "gutter"); } + } + } + + // More lower-level change function, handling only a single document + // (not linked ones). + function makeChangeSingleDoc(doc, change, selAfter, spans) { + if (doc.cm && !doc.cm.curOp) + { return operation(doc.cm, makeChangeSingleDoc)(doc, change, selAfter, spans) } + + if (change.to.line < doc.first) { + shiftDoc(doc, change.text.length - 1 - (change.to.line - change.from.line)); + return + } + if (change.from.line > doc.lastLine()) { return } + + // Clip the change to the size of this doc + if (change.from.line < doc.first) { + var shift = change.text.length - 1 - (doc.first - change.from.line); + shiftDoc(doc, shift); + change = {from: Pos(doc.first, 0), to: Pos(change.to.line + shift, change.to.ch), + text: [lst(change.text)], origin: change.origin}; + } + var last = doc.lastLine(); + if (change.to.line > last) { + change = {from: change.from, to: Pos(last, getLine(doc, last).text.length), + text: [change.text[0]], origin: change.origin}; + } + + change.removed = getBetween(doc, change.from, change.to); + + if (!selAfter) { selAfter = computeSelAfterChange(doc, change); } + if (doc.cm) { makeChangeSingleDocInEditor(doc.cm, change, spans); } + else { updateDoc(doc, change, spans); } + setSelectionNoUndo(doc, selAfter, sel_dontScroll); + + if (doc.cantEdit && skipAtomic(doc, Pos(doc.firstLine(), 0))) + { doc.cantEdit = false; } + } + + // Handle the interaction of a change to a document with the editor + // that this document is part of. 
+ function makeChangeSingleDocInEditor(cm, change, spans) { + var doc = cm.doc, display = cm.display, from = change.from, to = change.to; + + var recomputeMaxLength = false, checkWidthStart = from.line; + if (!cm.options.lineWrapping) { + checkWidthStart = lineNo(visualLine(getLine(doc, from.line))); + doc.iter(checkWidthStart, to.line + 1, function (line) { + if (line == display.maxLine) { + recomputeMaxLength = true; + return true + } + }); + } + + if (doc.sel.contains(change.from, change.to) > -1) + { signalCursorActivity(cm); } + + updateDoc(doc, change, spans, estimateHeight(cm)); + + if (!cm.options.lineWrapping) { + doc.iter(checkWidthStart, from.line + change.text.length, function (line) { + var len = lineLength(line); + if (len > display.maxLineLength) { + display.maxLine = line; + display.maxLineLength = len; + display.maxLineChanged = true; + recomputeMaxLength = false; + } + }); + if (recomputeMaxLength) { cm.curOp.updateMaxLine = true; } + } + + retreatFrontier(doc, from.line); + startWorker(cm, 400); + + var lendiff = change.text.length - (to.line - from.line) - 1; + // Remember that these lines changed, for updating the display + if (change.full) + { regChange(cm); } + else if (from.line == to.line && change.text.length == 1 && !isWholeLineUpdate(cm.doc, change)) + { regLineChange(cm, from.line, "text"); } + else + { regChange(cm, from.line, to.line + 1, lendiff); } + + var changesHandler = hasHandler(cm, "changes"), changeHandler = hasHandler(cm, "change"); + if (changeHandler || changesHandler) { + var obj = { + from: from, to: to, + text: change.text, + removed: change.removed, + origin: change.origin + }; + if (changeHandler) { signalLater(cm, "change", cm, obj); } + if (changesHandler) { (cm.curOp.changeObjs || (cm.curOp.changeObjs = [])).push(obj); } + } + cm.display.selForContextMenu = null; + } + + function replaceRange(doc, code, from, to, origin) { + var assign; + + if (!to) { to = from; } + if (cmp(to, from) < 0) { (assign = [to, from], from = assign[0], to = assign[1]); } + if (typeof code == "string") { code = doc.splitLines(code); } + makeChange(doc, {from: from, to: to, text: code, origin: origin}); } // Rebasing/resetting history to deal with externally-sourced changes @@ -8081,16 +5522,16 @@ rebaseHistSelSingle(sub.ranges[j].anchor, from, to, diff); rebaseHistSelSingle(sub.ranges[j].head, from, to, diff); } - continue; - } - for (var j = 0; j < sub.changes.length; ++j) { - var cur = sub.changes[j]; + continue + } + for (var j$1 = 0; j$1 < sub.changes.length; ++j$1) { + var cur = sub.changes[j$1]; if (to < cur.from.line) { cur.from = Pos(cur.from.line + diff, cur.from.ch); cur.to = Pos(cur.to.line + diff, cur.to.ch); } else if (from <= cur.to.line) { ok = false; - break; + break } } if (!ok) { @@ -8106,785 +5547,4237 @@ rebaseHistArray(hist.undone, from, to, diff); } - // EVENT UTILITIES - - // Due to the fact that we still support jurassic IE versions, some - // compatibility wrappers are needed. - - var e_preventDefault = CodeMirror.e_preventDefault = function(e) { - if (e.preventDefault) e.preventDefault(); - else e.returnValue = false; - }; - var e_stopPropagation = CodeMirror.e_stopPropagation = function(e) { - if (e.stopPropagation) e.stopPropagation(); - else e.cancelBubble = true; - }; - function e_defaultPrevented(e) { - return e.defaultPrevented != null ? 
e.defaultPrevented : e.returnValue == false; - } - var e_stop = CodeMirror.e_stop = function(e) {e_preventDefault(e); e_stopPropagation(e);}; - - function e_target(e) {return e.target || e.srcElement;} - function e_button(e) { - var b = e.which; - if (b == null) { - if (e.button & 1) b = 1; - else if (e.button & 2) b = 3; - else if (e.button & 4) b = 2; - } - if (mac && e.ctrlKey && b == 1) b = 3; - return b; - } - - // EVENT HANDLING - - // Lightweight event framework. on/off also work on DOM nodes, - // registering native DOM handlers. - - var on = CodeMirror.on = function(emitter, type, f) { - if (emitter.addEventListener) - emitter.addEventListener(type, f, false); - else if (emitter.attachEvent) - emitter.attachEvent("on" + type, f); - else { - var map = emitter._handlers || (emitter._handlers = {}); - var arr = map[type] || (map[type] = []); - arr.push(f); - } - }; - - var noHandlers = [] - function getHandlers(emitter, type, copy) { - var arr = emitter._handlers && emitter._handlers[type] - if (copy) return arr && arr.length > 0 ? arr.slice() : noHandlers - else return arr || noHandlers - } - - var off = CodeMirror.off = function(emitter, type, f) { - if (emitter.removeEventListener) - emitter.removeEventListener(type, f, false); - else if (emitter.detachEvent) - emitter.detachEvent("on" + type, f); - else { - var handlers = getHandlers(emitter, type, false) - for (var i = 0; i < handlers.length; ++i) - if (handlers[i] == f) { handlers.splice(i, 1); break; } - } - }; - - var signal = CodeMirror.signal = function(emitter, type /*, values...*/) { - var handlers = getHandlers(emitter, type, true) - if (!handlers.length) return; - var args = Array.prototype.slice.call(arguments, 2); - for (var i = 0; i < handlers.length; ++i) handlers[i].apply(null, args); - }; - - var orphanDelayedCallbacks = null; - - // Often, we want to signal events at a point where we are in the - // middle of some work, but don't want the handler to start calling - // other methods on the editor, which might be in an inconsistent - // state or simply not expect any other events to happen. - // signalLater looks whether there are any handlers, and schedules - // them to be executed when the last operation ends, or, if no - // operation is active, when a timeout fires. - function signalLater(emitter, type /*, values...*/) { - var arr = getHandlers(emitter, type, false) - if (!arr.length) return; - var args = Array.prototype.slice.call(arguments, 2), list; - if (operationGroup) { - list = operationGroup.delayedCallbacks; - } else if (orphanDelayedCallbacks) { - list = orphanDelayedCallbacks; - } else { - list = orphanDelayedCallbacks = []; - setTimeout(fireOrphanDelayed, 0); - } - function bnd(f) {return function(){f.apply(null, args);};}; - for (var i = 0; i < arr.length; ++i) - list.push(bnd(arr[i])); - } - - function fireOrphanDelayed() { - var delayed = orphanDelayedCallbacks; - orphanDelayedCallbacks = null; - for (var i = 0; i < delayed.length; ++i) delayed[i](); - } - - // The DOM events that CodeMirror handles can be overridden by - // registering a (non-DOM) handler on the editor for the event name, - // and preventDefault-ing the event in that handler. 
- function signalDOMEvent(cm, e, override) { - if (typeof e == "string") - e = {type: e, preventDefault: function() { this.defaultPrevented = true; }}; - signal(cm, override || e.type, cm, e); - return e_defaultPrevented(e) || e.codemirrorIgnore; - } - - function signalCursorActivity(cm) { - var arr = cm._handlers && cm._handlers.cursorActivity; - if (!arr) return; - var set = cm.curOp.cursorActivityHandlers || (cm.curOp.cursorActivityHandlers = []); - for (var i = 0; i < arr.length; ++i) if (indexOf(set, arr[i]) == -1) - set.push(arr[i]); - } - - function hasHandler(emitter, type) { - return getHandlers(emitter, type).length > 0 - } - - // Add on and off methods to a constructor's prototype, to make - // registering events on such objects more convenient. - function eventMixin(ctor) { - ctor.prototype.on = function(type, f) {on(this, type, f);}; - ctor.prototype.off = function(type, f) {off(this, type, f);}; - } - - // MISC UTILITIES - - // Number of pixels added to scroller and sizer to hide scrollbar - var scrollerGap = 30; - - // Returned or thrown by various protocols to signal 'I'm not - // handling this'. - var Pass = CodeMirror.Pass = {toString: function(){return "CodeMirror.Pass";}}; - - // Reused option objects for setSelection & friends - var sel_dontScroll = {scroll: false}, sel_mouse = {origin: "*mouse"}, sel_move = {origin: "+move"}; - - function Delayed() {this.id = null;} - Delayed.prototype.set = function(ms, f) { - clearTimeout(this.id); - this.id = setTimeout(f, ms); - }; - - // Counts the column offset in a string, taking tabs into account. - // Used mostly to find indentation. - var countColumn = CodeMirror.countColumn = function(string, end, tabSize, startIndex, startValue) { - if (end == null) { - end = string.search(/[^\s\u00a0]/); - if (end == -1) end = string.length; - } - for (var i = startIndex || 0, n = startValue || 0;;) { - var nextTab = string.indexOf("\t", i); - if (nextTab < 0 || nextTab >= end) - return n + (end - i); - n += nextTab - i; - n += tabSize - (n % tabSize); - i = nextTab + 1; - } - }; - - // The inverse of countColumn -- find the offset that corresponds to - // a particular column. - var findColumn = CodeMirror.findColumn = function(string, goal, tabSize) { - for (var pos = 0, col = 0;;) { - var nextTab = string.indexOf("\t", pos); - if (nextTab == -1) nextTab = string.length; - var skipped = nextTab - pos; - if (nextTab == string.length || col + skipped >= goal) - return pos + Math.min(skipped, goal - col); - col += nextTab - pos; - col += tabSize - (col % tabSize); - pos = nextTab + 1; - if (col >= goal) return pos; - } - } - - var spaceStrs = [""]; - function spaceStr(n) { - while (spaceStrs.length <= n) - spaceStrs.push(lst(spaceStrs) + " "); - return spaceStrs[n]; - } - - function lst(arr) { return arr[arr.length-1]; } - - var selectInput = function(node) { node.select(); }; - if (ios) // Mobile Safari apparently has a bug where select() is broken. 
- selectInput = function(node) { node.selectionStart = 0; node.selectionEnd = node.value.length; }; - else if (ie) // Suppress mysterious IE10 errors - selectInput = function(node) { try { node.select(); } catch(_e) {} }; - - function indexOf(array, elt) { - for (var i = 0; i < array.length; ++i) - if (array[i] == elt) return i; - return -1; - } - function map(array, f) { - var out = []; - for (var i = 0; i < array.length; i++) out[i] = f(array[i], i); - return out; - } - - function nothing() {} - - function createObj(base, props) { - var inst; - if (Object.create) { - inst = Object.create(base); - } else { - nothing.prototype = base; - inst = new nothing(); - } - if (props) copyObj(props, inst); - return inst; - }; - - function copyObj(obj, target, overwrite) { - if (!target) target = {}; - for (var prop in obj) - if (obj.hasOwnProperty(prop) && (overwrite !== false || !target.hasOwnProperty(prop))) - target[prop] = obj[prop]; - return target; - } - - function bind(f) { - var args = Array.prototype.slice.call(arguments, 1); - return function(){return f.apply(null, args);}; - } - - var nonASCIISingleCaseWordChar = /[\u00df\u0587\u0590-\u05f4\u0600-\u06ff\u3040-\u309f\u30a0-\u30ff\u3400-\u4db5\u4e00-\u9fcc\uac00-\ud7af]/; - var isWordCharBasic = CodeMirror.isWordChar = function(ch) { - return /\w/.test(ch) || ch > "\x80" && - (ch.toUpperCase() != ch.toLowerCase() || nonASCIISingleCaseWordChar.test(ch)); - }; - function isWordChar(ch, helper) { - if (!helper) return isWordCharBasic(ch); - if (helper.source.indexOf("\\w") > -1 && isWordCharBasic(ch)) return true; - return helper.test(ch); - } - - function isEmpty(obj) { - for (var n in obj) if (obj.hasOwnProperty(n) && obj[n]) return false; - return true; - } - - // Extending unicode characters. A series of a non-extending char + - // any number of extending chars is treated as a single unit as far - // as editing and measuring is concerned. This is not fully correct, - // since some scripts/fonts/browsers also treat other configurations - // of code points as a group. 
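// Editor's note: illustrative sketch, not part of the upstream diff. The column helpers
// attached to the CodeMirror object in the removed lines above (and, assuming the newer
// bundle keeps the equivalent exports, still reachable the same way) are tab-aware:
CodeMirror.countColumn("\tfoo", null, 4);   // -> 4: indentation width with tabSize 4
CodeMirror.findColumn("\tfoo", 4, 4);       // -> 1: string offset that reaches column 4
// isWordChar is the basic word-character test used for word motion:
CodeMirror.isWordChar("ä");                 // -> true (single-case non-ASCII letters count)
CodeMirror.isWordChar("-");                 // -> false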
- var extendingChars = /[\u0300-\u036f\u0483-\u0489\u0591-\u05bd\u05bf\u05c1\u05c2\u05c4\u05c5\u05c7\u0610-\u061a\u064b-\u065e\u0670\u06d6-\u06dc\u06de-\u06e4\u06e7\u06e8\u06ea-\u06ed\u0711\u0730-\u074a\u07a6-\u07b0\u07eb-\u07f3\u0816-\u0819\u081b-\u0823\u0825-\u0827\u0829-\u082d\u0900-\u0902\u093c\u0941-\u0948\u094d\u0951-\u0955\u0962\u0963\u0981\u09bc\u09be\u09c1-\u09c4\u09cd\u09d7\u09e2\u09e3\u0a01\u0a02\u0a3c\u0a41\u0a42\u0a47\u0a48\u0a4b-\u0a4d\u0a51\u0a70\u0a71\u0a75\u0a81\u0a82\u0abc\u0ac1-\u0ac5\u0ac7\u0ac8\u0acd\u0ae2\u0ae3\u0b01\u0b3c\u0b3e\u0b3f\u0b41-\u0b44\u0b4d\u0b56\u0b57\u0b62\u0b63\u0b82\u0bbe\u0bc0\u0bcd\u0bd7\u0c3e-\u0c40\u0c46-\u0c48\u0c4a-\u0c4d\u0c55\u0c56\u0c62\u0c63\u0cbc\u0cbf\u0cc2\u0cc6\u0ccc\u0ccd\u0cd5\u0cd6\u0ce2\u0ce3\u0d3e\u0d41-\u0d44\u0d4d\u0d57\u0d62\u0d63\u0dca\u0dcf\u0dd2-\u0dd4\u0dd6\u0ddf\u0e31\u0e34-\u0e3a\u0e47-\u0e4e\u0eb1\u0eb4-\u0eb9\u0ebb\u0ebc\u0ec8-\u0ecd\u0f18\u0f19\u0f35\u0f37\u0f39\u0f71-\u0f7e\u0f80-\u0f84\u0f86\u0f87\u0f90-\u0f97\u0f99-\u0fbc\u0fc6\u102d-\u1030\u1032-\u1037\u1039\u103a\u103d\u103e\u1058\u1059\u105e-\u1060\u1071-\u1074\u1082\u1085\u1086\u108d\u109d\u135f\u1712-\u1714\u1732-\u1734\u1752\u1753\u1772\u1773\u17b7-\u17bd\u17c6\u17c9-\u17d3\u17dd\u180b-\u180d\u18a9\u1920-\u1922\u1927\u1928\u1932\u1939-\u193b\u1a17\u1a18\u1a56\u1a58-\u1a5e\u1a60\u1a62\u1a65-\u1a6c\u1a73-\u1a7c\u1a7f\u1b00-\u1b03\u1b34\u1b36-\u1b3a\u1b3c\u1b42\u1b6b-\u1b73\u1b80\u1b81\u1ba2-\u1ba5\u1ba8\u1ba9\u1c2c-\u1c33\u1c36\u1c37\u1cd0-\u1cd2\u1cd4-\u1ce0\u1ce2-\u1ce8\u1ced\u1dc0-\u1de6\u1dfd-\u1dff\u200c\u200d\u20d0-\u20f0\u2cef-\u2cf1\u2de0-\u2dff\u302a-\u302f\u3099\u309a\ua66f-\ua672\ua67c\ua67d\ua6f0\ua6f1\ua802\ua806\ua80b\ua825\ua826\ua8c4\ua8e0-\ua8f1\ua926-\ua92d\ua947-\ua951\ua980-\ua982\ua9b3\ua9b6-\ua9b9\ua9bc\uaa29-\uaa2e\uaa31\uaa32\uaa35\uaa36\uaa43\uaa4c\uaab0\uaab2-\uaab4\uaab7\uaab8\uaabe\uaabf\uaac1\uabe5\uabe8\uabed\udc00-\udfff\ufb1e\ufe00-\ufe0f\ufe20-\ufe26\uff9e\uff9f]/; - function isExtendingChar(ch) { return ch.charCodeAt(0) >= 768 && extendingChars.test(ch); } - - // DOM UTILITIES - - function elt(tag, content, className, style) { - var e = document.createElement(tag); - if (className) e.className = className; - if (style) e.style.cssText = style; - if (typeof content == "string") e.appendChild(document.createTextNode(content)); - else if (content) for (var i = 0; i < content.length; ++i) e.appendChild(content[i]); - return e; - } - - var range; - if (document.createRange) range = function(node, start, end, endNode) { - var r = document.createRange(); - r.setEnd(endNode || node, end); - r.setStart(node, start); - return r; - }; - else range = function(node, start, end) { - var r = document.body.createTextRange(); - try { r.moveToElementText(node.parentNode); } - catch(e) { return r; } - r.collapse(true); - r.moveEnd("character", end); - r.moveStart("character", start); - return r; - }; - - function removeChildren(e) { - for (var count = e.childNodes.length; count > 0; --count) - e.removeChild(e.firstChild); - return e; - } - - function removeChildrenAndAdd(parent, e) { - return removeChildren(parent).appendChild(e); - } - - var contains = CodeMirror.contains = function(parent, child) { - if (child.nodeType == 3) // Android browser always returns false when child is a textnode - child = child.parentNode; - if (parent.contains) - return parent.contains(child); - do { - if (child.nodeType == 11) child = child.host; - if (child == parent) return true; - } while (child = child.parentNode); - }; - - function activeElt() { - var 
activeElement = document.activeElement; - while (activeElement && activeElement.root && activeElement.root.activeElement) - activeElement = activeElement.root.activeElement; - return activeElement; - } - // Older versions of IE throws unspecified error when touching - // document.activeElement in some cases (during loading, in iframe) - if (ie && ie_version < 11) activeElt = function() { - try { return document.activeElement; } - catch(e) { return document.body; } - }; - - function classTest(cls) { return new RegExp("(^|\\s)" + cls + "(?:$|\\s)\\s*"); } - var rmClass = CodeMirror.rmClass = function(node, cls) { - var current = node.className; - var match = classTest(cls).exec(current); - if (match) { - var after = current.slice(match.index + match[0].length); - node.className = current.slice(0, match.index) + (after ? match[1] + after : ""); - } - }; - var addClass = CodeMirror.addClass = function(node, cls) { - var current = node.className; - if (!classTest(cls).test(current)) node.className += (current ? " " : "") + cls; - }; - function joinClasses(a, b) { - var as = a.split(" "); - for (var i = 0; i < as.length; i++) - if (as[i] && !classTest(as[i]).test(b)) b += " " + as[i]; - return b; - } - - // WINDOW-WIDE EVENTS + // Utility for applying a change to a line by handle or number, + // returning the number and optionally registering the line as + // changed. + function changeLine(doc, handle, changeType, op) { + var no = handle, line = handle; + if (typeof handle == "number") { line = getLine(doc, clipLine(doc, handle)); } + else { no = lineNo(handle); } + if (no == null) { return null } + if (op(line, no) && doc.cm) { regLineChange(doc.cm, no, changeType); } + return line + } + + // The document is represented as a BTree consisting of leaves, with + // chunk of lines in them, and branches, with up to ten leaves or + // other branch nodes below them. The top node is always a branch + // node, and is the document object itself (meaning it has + // additional methods and properties). + // + // All nodes have parent links. The tree is used both to go from + // line numbers to line objects, and to go from objects to numbers. + // It also indexes by height, and is used to convert between height + // and line object, and to find the total height of the document. + // + // See also http://marijnhaverbeke.nl/blog/codemirror-line-tree.html + + function LeafChunk(lines) { + var this$1 = this; + + this.lines = lines; + this.parent = null; + var height = 0; + for (var i = 0; i < lines.length; ++i) { + lines[i].parent = this$1; + height += lines[i].height; + } + this.height = height; + } + + LeafChunk.prototype = { + chunkSize: function() { return this.lines.length }, + + // Remove the n lines at offset 'at'. + removeInner: function(at, n) { + var this$1 = this; + + for (var i = at, e = at + n; i < e; ++i) { + var line = this$1.lines[i]; + this$1.height -= line.height; + cleanUpLine(line); + signalLater(line, "delete"); + } + this.lines.splice(at, n); + }, + + // Helper used to collapse a small branch into a single leaf. + collapse: function(lines) { + lines.push.apply(lines, this.lines); + }, + + // Insert the given array of lines at offset 'at', count them as + // having the given height. + insertInner: function(at, lines, height) { + var this$1 = this; + + this.height += height; + this.lines = this.lines.slice(0, at).concat(lines).concat(this.lines.slice(at)); + for (var i = 0; i < lines.length; ++i) { lines[i].parent = this$1; } + }, + + // Used to iterate over a part of the tree. 
+ iterN: function(at, n, op) { + var this$1 = this; + + for (var e = at + n; at < e; ++at) + { if (op(this$1.lines[at])) { return true } } + } + }; + + function BranchChunk(children) { + var this$1 = this; + + this.children = children; + var size = 0, height = 0; + for (var i = 0; i < children.length; ++i) { + var ch = children[i]; + size += ch.chunkSize(); height += ch.height; + ch.parent = this$1; + } + this.size = size; + this.height = height; + this.parent = null; + } + + BranchChunk.prototype = { + chunkSize: function() { return this.size }, + + removeInner: function(at, n) { + var this$1 = this; + + this.size -= n; + for (var i = 0; i < this.children.length; ++i) { + var child = this$1.children[i], sz = child.chunkSize(); + if (at < sz) { + var rm = Math.min(n, sz - at), oldHeight = child.height; + child.removeInner(at, rm); + this$1.height -= oldHeight - child.height; + if (sz == rm) { this$1.children.splice(i--, 1); child.parent = null; } + if ((n -= rm) == 0) { break } + at = 0; + } else { at -= sz; } + } + // If the result is smaller than 25 lines, ensure that it is a + // single leaf node. + if (this.size - n < 25 && + (this.children.length > 1 || !(this.children[0] instanceof LeafChunk))) { + var lines = []; + this.collapse(lines); + this.children = [new LeafChunk(lines)]; + this.children[0].parent = this; + } + }, + + collapse: function(lines) { + var this$1 = this; + + for (var i = 0; i < this.children.length; ++i) { this$1.children[i].collapse(lines); } + }, + + insertInner: function(at, lines, height) { + var this$1 = this; + + this.size += lines.length; + this.height += height; + for (var i = 0; i < this.children.length; ++i) { + var child = this$1.children[i], sz = child.chunkSize(); + if (at <= sz) { + child.insertInner(at, lines, height); + if (child.lines && child.lines.length > 50) { + // To avoid memory thrashing when child.lines is huge (e.g. first view of a large file), it's never spliced. + // Instead, small slices are taken. They're taken in order because sequential memory accesses are fastest. + var remaining = child.lines.length % 25 + 25; + for (var pos = remaining; pos < child.lines.length;) { + var leaf = new LeafChunk(child.lines.slice(pos, pos += 25)); + child.height -= leaf.height; + this$1.children.splice(++i, 0, leaf); + leaf.parent = this$1; + } + child.lines = child.lines.slice(0, remaining); + this$1.maybeSpill(); + } + break + } + at -= sz; + } + }, + + // When a node has grown, check whether it should be split. + maybeSpill: function() { + if (this.children.length <= 10) { return } + var me = this; + do { + var spilled = me.children.splice(me.children.length - 5, 5); + var sibling = new BranchChunk(spilled); + if (!me.parent) { // Become the parent node + var copy = new BranchChunk(me.children); + copy.parent = me; + me.children = [copy, sibling]; + me = copy; + } else { + me.size -= sibling.size; + me.height -= sibling.height; + var myIndex = indexOf(me.parent.children, me); + me.parent.children.splice(myIndex + 1, 0, sibling); + } + sibling.parent = me.parent; + } while (me.children.length > 10) + me.parent.maybeSpill(); + }, + + iterN: function(at, n, op) { + var this$1 = this; + + for (var i = 0; i < this.children.length; ++i) { + var child = this$1.children[i], sz = child.chunkSize(); + if (at < sz) { + var used = Math.min(n, sz - at); + if (child.iterN(at, used, op)) { return true } + if ((n -= used) == 0) { break } + at = 0; + } else { at -= sz; } + } + } + }; + + // Line widgets are block elements displayed above or below a line. 
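// Editor's note: illustrative sketch, not part of the upstream diff; assumes an existing
// editor instance `cm` whose document has at least 11 lines. It exercises the LineWidget
// machinery defined just below through the public addLineWidget API: a DOM node is shown
// underneath line 10 and later removed again.
var note = document.createElement("div");
note.className = "inline-note";
note.textContent = "Build failed on this line";
var widget = cm.addLineWidget(10, note, {coverGutter: false, noHScroll: true});
// widget.changed() would ask the editor to re-measure the node after its height changes;
// widget.clear() detaches it:
widget.clear();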
+ + var LineWidget = function(doc, node, options) { + var this$1 = this; + + if (options) { for (var opt in options) { if (options.hasOwnProperty(opt)) + { this$1[opt] = options[opt]; } } } + this.doc = doc; + this.node = node; + }; + + LineWidget.prototype.clear = function () { + var this$1 = this; + + var cm = this.doc.cm, ws = this.line.widgets, line = this.line, no = lineNo(line); + if (no == null || !ws) { return } + for (var i = 0; i < ws.length; ++i) { if (ws[i] == this$1) { ws.splice(i--, 1); } } + if (!ws.length) { line.widgets = null; } + var height = widgetHeight(this); + updateLineHeight(line, Math.max(0, line.height - height)); + if (cm) { + runInOp(cm, function () { + adjustScrollWhenAboveVisible(cm, line, -height); + regLineChange(cm, no, "widget"); + }); + signalLater(cm, "lineWidgetCleared", cm, this, no); + } + }; + + LineWidget.prototype.changed = function () { + var this$1 = this; + + var oldH = this.height, cm = this.doc.cm, line = this.line; + this.height = null; + var diff = widgetHeight(this) - oldH; + if (!diff) { return } + if (!lineIsHidden(this.doc, line)) { updateLineHeight(line, line.height + diff); } + if (cm) { + runInOp(cm, function () { + cm.curOp.forceUpdate = true; + adjustScrollWhenAboveVisible(cm, line, diff); + signalLater(cm, "lineWidgetChanged", cm, this$1, lineNo(line)); + }); + } + }; + eventMixin(LineWidget); + + function adjustScrollWhenAboveVisible(cm, line, diff) { + if (heightAtLine(line) < ((cm.curOp && cm.curOp.scrollTop) || cm.doc.scrollTop)) + { addToScrollTop(cm, diff); } + } + + function addLineWidget(doc, handle, node, options) { + var widget = new LineWidget(doc, node, options); + var cm = doc.cm; + if (cm && widget.noHScroll) { cm.display.alignWidgets = true; } + changeLine(doc, handle, "widget", function (line) { + var widgets = line.widgets || (line.widgets = []); + if (widget.insertAt == null) { widgets.push(widget); } + else { widgets.splice(Math.min(widgets.length - 1, Math.max(0, widget.insertAt)), 0, widget); } + widget.line = line; + if (cm && !lineIsHidden(doc, line)) { + var aboveVisible = heightAtLine(line) < doc.scrollTop; + updateLineHeight(line, line.height + widgetHeight(widget)); + if (aboveVisible) { addToScrollTop(cm, widget.height); } + cm.curOp.forceUpdate = true; + } + return true + }); + if (cm) { signalLater(cm, "lineWidgetAdded", cm, widget, typeof handle == "number" ? handle : lineNo(handle)); } + return widget + } + + // TEXTMARKERS + + // Created with markText and setBookmark methods. A TextMarker is a + // handle that can be used to clear or find a marked position in the + // document. Line objects hold arrays (markedSpans) containing + // {from, to, marker} object pointing to such marker objects, and + // indicating that such a marker is present on that line. Multiple + // lines may point to the same marker when it spans across lines. + // The spans will have null for their from/to properties when the + // marker continues beyond the start/end of the line. Markers have + // links back to the lines they currently touch. + + // Collapsed markers have unique ids, in order to be able to order + // them, which is needed for uniquely determining an outer marker + // when they overlap (they may nest, but not partially overlap). + var nextMarkerId = 0; + + var TextMarker = function(doc, type) { + this.lines = []; + this.type = type; + this.doc = doc; + this.id = ++nextMarkerId; + }; + + // Clear the marker. 
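// Editor's note: illustrative sketch, not part of the upstream diff; assumes an editor
// instance `cm`. It shows the TextMarker handle described above through the public API:
// a marked range tracks edits to the document until it is cleared.
var marker = cm.markText({line: 2, ch: 0}, {line: 2, ch: 5},
                         {className: "highlight", inclusiveLeft: false});
marker.clear();   // removes the marked span(s) again
// A bookmark is a zero-length marker kept at a single, edit-tracked position:
var bookmark = cm.setBookmark({line: 4, ch: 0}, {insertLeft: true});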
+ TextMarker.prototype.clear = function () { + var this$1 = this; + + if (this.explicitlyCleared) { return } + var cm = this.doc.cm, withOp = cm && !cm.curOp; + if (withOp) { startOperation(cm); } + if (hasHandler(this, "clear")) { + var found = this.find(); + if (found) { signalLater(this, "clear", found.from, found.to); } + } + var min = null, max = null; + for (var i = 0; i < this.lines.length; ++i) { + var line = this$1.lines[i]; + var span = getMarkedSpanFor(line.markedSpans, this$1); + if (cm && !this$1.collapsed) { regLineChange(cm, lineNo(line), "text"); } + else if (cm) { + if (span.to != null) { max = lineNo(line); } + if (span.from != null) { min = lineNo(line); } + } + line.markedSpans = removeMarkedSpan(line.markedSpans, span); + if (span.from == null && this$1.collapsed && !lineIsHidden(this$1.doc, line) && cm) + { updateLineHeight(line, textHeight(cm.display)); } + } + if (cm && this.collapsed && !cm.options.lineWrapping) { for (var i$1 = 0; i$1 < this.lines.length; ++i$1) { + var visual = visualLine(this$1.lines[i$1]), len = lineLength(visual); + if (len > cm.display.maxLineLength) { + cm.display.maxLine = visual; + cm.display.maxLineLength = len; + cm.display.maxLineChanged = true; + } + } } + + if (min != null && cm && this.collapsed) { regChange(cm, min, max + 1); } + this.lines.length = 0; + this.explicitlyCleared = true; + if (this.atomic && this.doc.cantEdit) { + this.doc.cantEdit = false; + if (cm) { reCheckSelection(cm.doc); } + } + if (cm) { signalLater(cm, "markerCleared", cm, this, min, max); } + if (withOp) { endOperation(cm); } + if (this.parent) { this.parent.clear(); } + }; + + // Find the position of the marker in the document. Returns a {from, + // to} object by default. Side can be passed to get a specific side + // -- 0 (both), -1 (left), or 1 (right). When lineObj is true, the + // Pos objects returned contain a line object, rather than a line + // number (used to prevent looking up the same line twice). + TextMarker.prototype.find = function (side, lineObj) { + var this$1 = this; + + if (side == null && this.type == "bookmark") { side = 1; } + var from, to; + for (var i = 0; i < this.lines.length; ++i) { + var line = this$1.lines[i]; + var span = getMarkedSpanFor(line.markedSpans, this$1); + if (span.from != null) { + from = Pos(lineObj ? line : lineNo(line), span.from); + if (side == -1) { return from } + } + if (span.to != null) { + to = Pos(lineObj ? line : lineNo(line), span.to); + if (side == 1) { return to } + } + } + return from && {from: from, to: to} + }; + + // Signals that the marker's widget changed, and surrounding layout + // should be recomputed. 
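// Editor's note: illustrative aside, not part of the upstream diff; assumes an editor
// instance `cm`. The `side` argument of find() described above selects which end is
// returned: -1 the start, 1 the end, and the default a {from, to} pair.
var mark = cm.markText({line: 0, ch: 0}, {line: 0, ch: 3}, {className: "match"});
var startPos = mark.find(-1);   // Pos of the left edge only
var endPos = mark.find(1);      // Pos of the right edge only
var both = mark.find();         // {from, to}
// mark.changed();  // relevant when the marker uses a replacedWith widget whose size changed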
+ TextMarker.prototype.changed = function () { + var this$1 = this; + + var pos = this.find(-1, true), widget = this, cm = this.doc.cm; + if (!pos || !cm) { return } + runInOp(cm, function () { + var line = pos.line, lineN = lineNo(pos.line); + var view = findViewForLine(cm, lineN); + if (view) { + clearLineMeasurementCacheFor(view); + cm.curOp.selectionChanged = cm.curOp.forceUpdate = true; + } + cm.curOp.updateMaxLine = true; + if (!lineIsHidden(widget.doc, line) && widget.height != null) { + var oldHeight = widget.height; + widget.height = null; + var dHeight = widgetHeight(widget) - oldHeight; + if (dHeight) + { updateLineHeight(line, line.height + dHeight); } + } + signalLater(cm, "markerChanged", cm, this$1); + }); + }; + + TextMarker.prototype.attachLine = function (line) { + if (!this.lines.length && this.doc.cm) { + var op = this.doc.cm.curOp; + if (!op.maybeHiddenMarkers || indexOf(op.maybeHiddenMarkers, this) == -1) + { (op.maybeUnhiddenMarkers || (op.maybeUnhiddenMarkers = [])).push(this); } + } + this.lines.push(line); + }; + + TextMarker.prototype.detachLine = function (line) { + this.lines.splice(indexOf(this.lines, line), 1); + if (!this.lines.length && this.doc.cm) { + var op = this.doc.cm.curOp + ;(op.maybeHiddenMarkers || (op.maybeHiddenMarkers = [])).push(this); + } + }; + eventMixin(TextMarker); + + // Create a marker, wire it up to the right lines, and + function markText(doc, from, to, options, type) { + // Shared markers (across linked documents) are handled separately + // (markTextShared will call out to this again, once per + // document). + if (options && options.shared) { return markTextShared(doc, from, to, options, type) } + // Ensure we are in an operation. + if (doc.cm && !doc.cm.curOp) { return operation(doc.cm, markText)(doc, from, to, options, type) } + + var marker = new TextMarker(doc, type), diff = cmp(from, to); + if (options) { copyObj(options, marker, false); } + // Don't connect empty markers unless clearWhenEmpty is false + if (diff > 0 || diff == 0 && marker.clearWhenEmpty !== false) + { return marker } + if (marker.replacedWith) { + // Showing up as a widget implies collapsed (widget replaces text) + marker.collapsed = true; + marker.widgetNode = eltP("span", [marker.replacedWith], "CodeMirror-widget"); + if (!options.handleMouseEvents) { marker.widgetNode.setAttribute("cm-ignore-events", "true"); } + if (options.insertLeft) { marker.widgetNode.insertLeft = true; } + } + if (marker.collapsed) { + if (conflictingCollapsedRange(doc, from.line, from, to, marker) || + from.line != to.line && conflictingCollapsedRange(doc, to.line, from, to, marker)) + { throw new Error("Inserting collapsed marker partially overlapping an existing one") } + seeCollapsedSpans(); + } + + if (marker.addToHistory) + { addChangeToHistory(doc, {from: from, to: to, origin: "markText"}, doc.sel, NaN); } + + var curLine = from.line, cm = doc.cm, updateMaxLine; + doc.iter(curLine, to.line + 1, function (line) { + if (cm && marker.collapsed && !cm.options.lineWrapping && visualLine(line) == cm.display.maxLine) + { updateMaxLine = true; } + if (marker.collapsed && curLine != from.line) { updateLineHeight(line, 0); } + addMarkedSpan(line, new MarkedSpan(marker, + curLine == from.line ? from.ch : null, + curLine == to.line ? 
to.ch : null)); + ++curLine; + }); + // lineIsHidden depends on the presence of the spans, so needs a second pass + if (marker.collapsed) { doc.iter(from.line, to.line + 1, function (line) { + if (lineIsHidden(doc, line)) { updateLineHeight(line, 0); } + }); } + + if (marker.clearOnEnter) { on(marker, "beforeCursorEnter", function () { return marker.clear(); }); } + + if (marker.readOnly) { + seeReadOnlySpans(); + if (doc.history.done.length || doc.history.undone.length) + { doc.clearHistory(); } + } + if (marker.collapsed) { + marker.id = ++nextMarkerId; + marker.atomic = true; + } + if (cm) { + // Sync editor state + if (updateMaxLine) { cm.curOp.updateMaxLine = true; } + if (marker.collapsed) + { regChange(cm, from.line, to.line + 1); } + else if (marker.className || marker.startStyle || marker.endStyle || marker.css || + marker.attributes || marker.title) + { for (var i = from.line; i <= to.line; i++) { regLineChange(cm, i, "text"); } } + if (marker.atomic) { reCheckSelection(cm.doc); } + signalLater(cm, "markerAdded", cm, marker); + } + return marker + } + + // SHARED TEXTMARKERS + + // A shared marker spans multiple linked documents. It is + // implemented as a meta-marker-object controlling multiple normal + // markers. + var SharedTextMarker = function(markers, primary) { + var this$1 = this; + + this.markers = markers; + this.primary = primary; + for (var i = 0; i < markers.length; ++i) + { markers[i].parent = this$1; } + }; + + SharedTextMarker.prototype.clear = function () { + var this$1 = this; + + if (this.explicitlyCleared) { return } + this.explicitlyCleared = true; + for (var i = 0; i < this.markers.length; ++i) + { this$1.markers[i].clear(); } + signalLater(this, "clear"); + }; + + SharedTextMarker.prototype.find = function (side, lineObj) { + return this.primary.find(side, lineObj) + }; + eventMixin(SharedTextMarker); + + function markTextShared(doc, from, to, options, type) { + options = copyObj(options); + options.shared = false; + var markers = [markText(doc, from, to, options, type)], primary = markers[0]; + var widget = options.widgetNode; + linkedDocs(doc, function (doc) { + if (widget) { options.widgetNode = widget.cloneNode(true); } + markers.push(markText(doc, clipPos(doc, from), clipPos(doc, to), options, type)); + for (var i = 0; i < doc.linked.length; ++i) + { if (doc.linked[i].isParent) { return } } + primary = lst(markers); + }); + return new SharedTextMarker(markers, primary) + } + + function findSharedMarkers(doc) { + return doc.findMarks(Pos(doc.first, 0), doc.clipPos(Pos(doc.lastLine())), function (m) { return m.parent; }) + } + + function copySharedMarkers(doc, markers) { + for (var i = 0; i < markers.length; i++) { + var marker = markers[i], pos = marker.find(); + var mFrom = doc.clipPos(pos.from), mTo = doc.clipPos(pos.to); + if (cmp(mFrom, mTo)) { + var subMark = markText(doc, mFrom, mTo, marker.primary, marker.primary.type); + marker.markers.push(subMark); + subMark.parent = marker; + } + } + } + + function detachSharedMarkers(markers) { + var loop = function ( i ) { + var marker = markers[i], linked = [marker.primary.doc]; + linkedDocs(marker.primary.doc, function (d) { return linked.push(d); }); + for (var j = 0; j < marker.markers.length; j++) { + var subMarker = marker.markers[j]; + if (indexOf(linked, subMarker.doc) == -1) { + subMarker.parent = null; + marker.markers.splice(j--, 1); + } + } + }; + + for (var i = 0; i < markers.length; i++) loop( i ); + } + + var nextDocId = 0; + var Doc = function(text, mode, firstLine, lineSep, direction) { + 
if (!(this instanceof Doc)) { return new Doc(text, mode, firstLine, lineSep, direction) } + if (firstLine == null) { firstLine = 0; } + + BranchChunk.call(this, [new LeafChunk([new Line("", null)])]); + this.first = firstLine; + this.scrollTop = this.scrollLeft = 0; + this.cantEdit = false; + this.cleanGeneration = 1; + this.modeFrontier = this.highlightFrontier = firstLine; + var start = Pos(firstLine, 0); + this.sel = simpleSelection(start); + this.history = new History(null); + this.id = ++nextDocId; + this.modeOption = mode; + this.lineSep = lineSep; + this.direction = (direction == "rtl") ? "rtl" : "ltr"; + this.extend = false; + + if (typeof text == "string") { text = this.splitLines(text); } + updateDoc(this, {from: start, to: start, text: text}); + setSelection(this, simpleSelection(start), sel_dontScroll); + }; + + Doc.prototype = createObj(BranchChunk.prototype, { + constructor: Doc, + // Iterate over the document. Supports two forms -- with only one + // argument, it calls that for each line in the document. With + // three, it iterates over the range given by the first two (with + // the second being non-inclusive). + iter: function(from, to, op) { + if (op) { this.iterN(from - this.first, to - from, op); } + else { this.iterN(this.first, this.first + this.size, from); } + }, + + // Non-public interface for adding and removing lines. + insert: function(at, lines) { + var height = 0; + for (var i = 0; i < lines.length; ++i) { height += lines[i].height; } + this.insertInner(at - this.first, lines, height); + }, + remove: function(at, n) { this.removeInner(at - this.first, n); }, + + // From here, the methods are part of the public interface. Most + // are also available from CodeMirror (editor) instances. + + getValue: function(lineSep) { + var lines = getLines(this, this.first, this.first + this.size); + if (lineSep === false) { return lines } + return lines.join(lineSep || this.lineSeparator()) + }, + setValue: docMethodOp(function(code) { + var top = Pos(this.first, 0), last = this.first + this.size - 1; + makeChange(this, {from: top, to: Pos(last, getLine(this, last).text.length), + text: this.splitLines(code), origin: "setValue", full: true}, true); + if (this.cm) { scrollToCoords(this.cm, 0, 0); } + setSelection(this, simpleSelection(top), sel_dontScroll); + }), + replaceRange: function(code, from, to, origin) { + from = clipPos(this, from); + to = to ? 
clipPos(this, to) : from; + replaceRange(this, code, from, to, origin); + }, + getRange: function(from, to, lineSep) { + var lines = getBetween(this, clipPos(this, from), clipPos(this, to)); + if (lineSep === false) { return lines } + return lines.join(lineSep || this.lineSeparator()) + }, + + getLine: function(line) {var l = this.getLineHandle(line); return l && l.text}, + + getLineHandle: function(line) {if (isLine(this, line)) { return getLine(this, line) }}, + getLineNumber: function(line) {return lineNo(line)}, + + getLineHandleVisualStart: function(line) { + if (typeof line == "number") { line = getLine(this, line); } + return visualLine(line) + }, + + lineCount: function() {return this.size}, + firstLine: function() {return this.first}, + lastLine: function() {return this.first + this.size - 1}, + + clipPos: function(pos) {return clipPos(this, pos)}, + + getCursor: function(start) { + var range$$1 = this.sel.primary(), pos; + if (start == null || start == "head") { pos = range$$1.head; } + else if (start == "anchor") { pos = range$$1.anchor; } + else if (start == "end" || start == "to" || start === false) { pos = range$$1.to(); } + else { pos = range$$1.from(); } + return pos + }, + listSelections: function() { return this.sel.ranges }, + somethingSelected: function() {return this.sel.somethingSelected()}, + + setCursor: docMethodOp(function(line, ch, options) { + setSimpleSelection(this, clipPos(this, typeof line == "number" ? Pos(line, ch || 0) : line), null, options); + }), + setSelection: docMethodOp(function(anchor, head, options) { + setSimpleSelection(this, clipPos(this, anchor), clipPos(this, head || anchor), options); + }), + extendSelection: docMethodOp(function(head, other, options) { + extendSelection(this, clipPos(this, head), other && clipPos(this, other), options); + }), + extendSelections: docMethodOp(function(heads, options) { + extendSelections(this, clipPosArray(this, heads), options); + }), + extendSelectionsBy: docMethodOp(function(f, options) { + var heads = map(this.sel.ranges, f); + extendSelections(this, clipPosArray(this, heads), options); + }), + setSelections: docMethodOp(function(ranges, primary, options) { + var this$1 = this; + + if (!ranges.length) { return } + var out = []; + for (var i = 0; i < ranges.length; i++) + { out[i] = new Range(clipPos(this$1, ranges[i].anchor), + clipPos(this$1, ranges[i].head)); } + if (primary == null) { primary = Math.min(ranges.length - 1, this.sel.primIndex); } + setSelection(this, normalizeSelection(this.cm, out, primary), options); + }), + addSelection: docMethodOp(function(anchor, head, options) { + var ranges = this.sel.ranges.slice(0); + ranges.push(new Range(clipPos(this, anchor), clipPos(this, head || anchor))); + setSelection(this, normalizeSelection(this.cm, ranges, ranges.length - 1), options); + }), + + getSelection: function(lineSep) { + var this$1 = this; + + var ranges = this.sel.ranges, lines; + for (var i = 0; i < ranges.length; i++) { + var sel = getBetween(this$1, ranges[i].from(), ranges[i].to()); + lines = lines ? 
lines.concat(sel) : sel; + } + if (lineSep === false) { return lines } + else { return lines.join(lineSep || this.lineSeparator()) } + }, + getSelections: function(lineSep) { + var this$1 = this; + + var parts = [], ranges = this.sel.ranges; + for (var i = 0; i < ranges.length; i++) { + var sel = getBetween(this$1, ranges[i].from(), ranges[i].to()); + if (lineSep !== false) { sel = sel.join(lineSep || this$1.lineSeparator()); } + parts[i] = sel; + } + return parts + }, + replaceSelection: function(code, collapse, origin) { + var dup = []; + for (var i = 0; i < this.sel.ranges.length; i++) + { dup[i] = code; } + this.replaceSelections(dup, collapse, origin || "+input"); + }, + replaceSelections: docMethodOp(function(code, collapse, origin) { + var this$1 = this; + + var changes = [], sel = this.sel; + for (var i = 0; i < sel.ranges.length; i++) { + var range$$1 = sel.ranges[i]; + changes[i] = {from: range$$1.from(), to: range$$1.to(), text: this$1.splitLines(code[i]), origin: origin}; + } + var newSel = collapse && collapse != "end" && computeReplacedSel(this, changes, collapse); + for (var i$1 = changes.length - 1; i$1 >= 0; i$1--) + { makeChange(this$1, changes[i$1]); } + if (newSel) { setSelectionReplaceHistory(this, newSel); } + else if (this.cm) { ensureCursorVisible(this.cm); } + }), + undo: docMethodOp(function() {makeChangeFromHistory(this, "undo");}), + redo: docMethodOp(function() {makeChangeFromHistory(this, "redo");}), + undoSelection: docMethodOp(function() {makeChangeFromHistory(this, "undo", true);}), + redoSelection: docMethodOp(function() {makeChangeFromHistory(this, "redo", true);}), + + setExtending: function(val) {this.extend = val;}, + getExtending: function() {return this.extend}, + + historySize: function() { + var hist = this.history, done = 0, undone = 0; + for (var i = 0; i < hist.done.length; i++) { if (!hist.done[i].ranges) { ++done; } } + for (var i$1 = 0; i$1 < hist.undone.length; i$1++) { if (!hist.undone[i$1].ranges) { ++undone; } } + return {undo: done, redo: undone} + }, + clearHistory: function() {this.history = new History(this.history.maxGeneration);}, + + markClean: function() { + this.cleanGeneration = this.changeGeneration(true); + }, + changeGeneration: function(forceSplit) { + if (forceSplit) + { this.history.lastOp = this.history.lastSelOp = this.history.lastOrigin = null; } + return this.history.generation + }, + isClean: function (gen) { + return this.history.generation == (gen || this.cleanGeneration) + }, + + getHistory: function() { + return {done: copyHistoryArray(this.history.done), + undone: copyHistoryArray(this.history.undone)} + }, + setHistory: function(histData) { + var hist = this.history = new History(this.history.maxGeneration); + hist.done = copyHistoryArray(histData.done.slice(0), null, true); + hist.undone = copyHistoryArray(histData.undone.slice(0), null, true); + }, + + setGutterMarker: docMethodOp(function(line, gutterID, value) { + return changeLine(this, line, "gutter", function (line) { + var markers = line.gutterMarkers || (line.gutterMarkers = {}); + markers[gutterID] = value; + if (!value && isEmpty(markers)) { line.gutterMarkers = null; } + return true + }) + }), + + clearGutter: docMethodOp(function(gutterID) { + var this$1 = this; + + this.iter(function (line) { + if (line.gutterMarkers && line.gutterMarkers[gutterID]) { + changeLine(this$1, line, "gutter", function () { + line.gutterMarkers[gutterID] = null; + if (isEmpty(line.gutterMarkers)) { line.gutterMarkers = null; } + return true + }); + } + }); + }), + + 
lineInfo: function(line) { + var n; + if (typeof line == "number") { + if (!isLine(this, line)) { return null } + n = line; + line = getLine(this, line); + if (!line) { return null } + } else { + n = lineNo(line); + if (n == null) { return null } + } + return {line: n, handle: line, text: line.text, gutterMarkers: line.gutterMarkers, + textClass: line.textClass, bgClass: line.bgClass, wrapClass: line.wrapClass, + widgets: line.widgets} + }, + + addLineClass: docMethodOp(function(handle, where, cls) { + return changeLine(this, handle, where == "gutter" ? "gutter" : "class", function (line) { + var prop = where == "text" ? "textClass" + : where == "background" ? "bgClass" + : where == "gutter" ? "gutterClass" : "wrapClass"; + if (!line[prop]) { line[prop] = cls; } + else if (classTest(cls).test(line[prop])) { return false } + else { line[prop] += " " + cls; } + return true + }) + }), + removeLineClass: docMethodOp(function(handle, where, cls) { + return changeLine(this, handle, where == "gutter" ? "gutter" : "class", function (line) { + var prop = where == "text" ? "textClass" + : where == "background" ? "bgClass" + : where == "gutter" ? "gutterClass" : "wrapClass"; + var cur = line[prop]; + if (!cur) { return false } + else if (cls == null) { line[prop] = null; } + else { + var found = cur.match(classTest(cls)); + if (!found) { return false } + var end = found.index + found[0].length; + line[prop] = cur.slice(0, found.index) + (!found.index || end == cur.length ? "" : " ") + cur.slice(end) || null; + } + return true + }) + }), + + addLineWidget: docMethodOp(function(handle, node, options) { + return addLineWidget(this, handle, node, options) + }), + removeLineWidget: function(widget) { widget.clear(); }, + + markText: function(from, to, options) { + return markText(this, clipPos(this, from), clipPos(this, to), options, options && options.type || "range") + }, + setBookmark: function(pos, options) { + var realOpts = {replacedWith: options && (options.nodeType == null ? 
options.widget : options), + insertLeft: options && options.insertLeft, + clearWhenEmpty: false, shared: options && options.shared, + handleMouseEvents: options && options.handleMouseEvents}; + pos = clipPos(this, pos); + return markText(this, pos, pos, realOpts, "bookmark") + }, + findMarksAt: function(pos) { + pos = clipPos(this, pos); + var markers = [], spans = getLine(this, pos.line).markedSpans; + if (spans) { for (var i = 0; i < spans.length; ++i) { + var span = spans[i]; + if ((span.from == null || span.from <= pos.ch) && + (span.to == null || span.to >= pos.ch)) + { markers.push(span.marker.parent || span.marker); } + } } + return markers + }, + findMarks: function(from, to, filter) { + from = clipPos(this, from); to = clipPos(this, to); + var found = [], lineNo$$1 = from.line; + this.iter(from.line, to.line + 1, function (line) { + var spans = line.markedSpans; + if (spans) { for (var i = 0; i < spans.length; i++) { + var span = spans[i]; + if (!(span.to != null && lineNo$$1 == from.line && from.ch >= span.to || + span.from == null && lineNo$$1 != from.line || + span.from != null && lineNo$$1 == to.line && span.from >= to.ch) && + (!filter || filter(span.marker))) + { found.push(span.marker.parent || span.marker); } + } } + ++lineNo$$1; + }); + return found + }, + getAllMarks: function() { + var markers = []; + this.iter(function (line) { + var sps = line.markedSpans; + if (sps) { for (var i = 0; i < sps.length; ++i) + { if (sps[i].from != null) { markers.push(sps[i].marker); } } } + }); + return markers + }, + + posFromIndex: function(off) { + var ch, lineNo$$1 = this.first, sepSize = this.lineSeparator().length; + this.iter(function (line) { + var sz = line.text.length + sepSize; + if (sz > off) { ch = off; return true } + off -= sz; + ++lineNo$$1; + }); + return clipPos(this, Pos(lineNo$$1, ch)) + }, + indexFromPos: function (coords) { + coords = clipPos(this, coords); + var index = coords.ch; + if (coords.line < this.first || coords.ch < 0) { return 0 } + var sepSize = this.lineSeparator().length; + this.iter(this.first, coords.line, function (line) { // iter aborts when callback returns a truthy value + index += line.text.length + sepSize; + }); + return index + }, + + copy: function(copyHistory) { + var doc = new Doc(getLines(this, this.first, this.first + this.size), + this.modeOption, this.first, this.lineSep, this.direction); + doc.scrollTop = this.scrollTop; doc.scrollLeft = this.scrollLeft; + doc.sel = this.sel; + doc.extend = false; + if (copyHistory) { + doc.history.undoDepth = this.history.undoDepth; + doc.setHistory(this.getHistory()); + } + return doc + }, + + linkedDoc: function(options) { + if (!options) { options = {}; } + var from = this.first, to = this.first + this.size; + if (options.from != null && options.from > from) { from = options.from; } + if (options.to != null && options.to < to) { to = options.to; } + var copy = new Doc(getLines(this, from, to), options.mode || this.modeOption, from, this.lineSep, this.direction); + if (options.sharedHist) { copy.history = this.history + ; }(this.linked || (this.linked = [])).push({doc: copy, sharedHist: options.sharedHist}); + copy.linked = [{doc: this, isParent: true, sharedHist: options.sharedHist}]; + copySharedMarkers(copy, findSharedMarkers(this)); + return copy + }, + unlinkDoc: function(other) { + var this$1 = this; + + if (other instanceof CodeMirror) { other = other.doc; } + if (this.linked) { for (var i = 0; i < this.linked.length; ++i) { + var link = this$1.linked[i]; + if (link.doc != other) { 
continue } + this$1.linked.splice(i, 1); + other.unlinkDoc(this$1); + detachSharedMarkers(findSharedMarkers(this$1)); + break + } } + // If the histories were shared, split them again + if (other.history == this.history) { + var splitIds = [other.id]; + linkedDocs(other, function (doc) { return splitIds.push(doc.id); }, true); + other.history = new History(null); + other.history.done = copyHistoryArray(this.history.done, splitIds); + other.history.undone = copyHistoryArray(this.history.undone, splitIds); + } + }, + iterLinkedDocs: function(f) {linkedDocs(this, f);}, + + getMode: function() {return this.mode}, + getEditor: function() {return this.cm}, + + splitLines: function(str) { + if (this.lineSep) { return str.split(this.lineSep) } + return splitLinesAuto(str) + }, + lineSeparator: function() { return this.lineSep || "\n" }, + + setDirection: docMethodOp(function (dir) { + if (dir != "rtl") { dir = "ltr"; } + if (dir == this.direction) { return } + this.direction = dir; + this.iter(function (line) { return line.order = null; }); + if (this.cm) { directionChanged(this.cm); } + }) + }); + + // Public alias. + Doc.prototype.eachLine = Doc.prototype.iter; + + // Kludge to work around strange IE behavior where it'll sometimes + // re-fire a series of drag-related events right after the drop (#1551) + var lastDrop = 0; + + function onDrop(e) { + var cm = this; + clearDragCursor(cm); + if (signalDOMEvent(cm, e) || eventInWidget(cm.display, e)) + { return } + e_preventDefault(e); + if (ie) { lastDrop = +new Date; } + var pos = posFromMouse(cm, e, true), files = e.dataTransfer.files; + if (!pos || cm.isReadOnly()) { return } + // Might be a file drop, in which case we simply extract the text + // and insert it. + if (files && files.length && window.FileReader && window.File) { + var n = files.length, text = Array(n), read = 0; + var loadFile = function (file, i) { + if (cm.options.allowDropFileTypes && + indexOf(cm.options.allowDropFileTypes, file.type) == -1) + { return } + + var reader = new FileReader; + reader.onload = operation(cm, function () { + var content = reader.result; + if (/[\x00-\x08\x0e-\x1f]{2}/.test(content)) { content = ""; } + text[i] = content; + if (++read == n) { + pos = clipPos(cm.doc, pos); + var change = {from: pos, to: pos, + text: cm.doc.splitLines(text.join(cm.doc.lineSeparator())), + origin: "paste"}; + makeChange(cm.doc, change); + setSelectionReplaceHistory(cm.doc, simpleSelection(pos, changeEnd(change))); + } + }); + reader.readAsText(file); + }; + for (var i = 0; i < n; ++i) { loadFile(files[i], i); } + } else { // Normal drop + // Don't do a replace if the drop happened inside of the selected text. 
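// Editor's note (illustrative aside, not part of the upstream diff): the linkedDoc /
// unlinkDoc machinery shown above is what backs split views over a single buffer.
// Roughly, with two editors `cm` and `cmOther`:
//   var mirror = cm.getDoc().linkedDoc({sharedHist: true});
//   cmOther.swapDoc(mirror);          // both editors now edit the same text
//   cm.getDoc().unlinkDoc(mirror);    // later: detach, splitting the shared history again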
+ if (cm.state.draggingText && cm.doc.sel.contains(pos) > -1) { + cm.state.draggingText(e); + // Ensure the editor is re-focused + setTimeout(function () { return cm.display.input.focus(); }, 20); + return + } + try { + var text$1 = e.dataTransfer.getData("Text"); + if (text$1) { + var selected; + if (cm.state.draggingText && !cm.state.draggingText.copy) + { selected = cm.listSelections(); } + setSelectionNoUndo(cm.doc, simpleSelection(pos, pos)); + if (selected) { for (var i$1 = 0; i$1 < selected.length; ++i$1) + { replaceRange(cm.doc, "", selected[i$1].anchor, selected[i$1].head, "drag"); } } + cm.replaceSelection(text$1, "around", "paste"); + cm.display.input.focus(); + } + } + catch(e){} + } + } + + function onDragStart(cm, e) { + if (ie && (!cm.state.draggingText || +new Date - lastDrop < 100)) { e_stop(e); return } + if (signalDOMEvent(cm, e) || eventInWidget(cm.display, e)) { return } + + e.dataTransfer.setData("Text", cm.getSelection()); + e.dataTransfer.effectAllowed = "copyMove"; + + // Use dummy image instead of default browsers image. + // Recent Safari (~6.0.2) have a tendency to segfault when this happens, so we don't do it there. + if (e.dataTransfer.setDragImage && !safari) { + var img = elt("img", null, null, "position: fixed; left: 0; top: 0;"); + img.src = "data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw=="; + if (presto) { + img.width = img.height = 1; + cm.display.wrapper.appendChild(img); + // Force a relayout, or Opera won't use our image for some obscure reason + img._top = img.offsetTop; + } + e.dataTransfer.setDragImage(img, 0, 0); + if (presto) { img.parentNode.removeChild(img); } + } + } + + function onDragOver(cm, e) { + var pos = posFromMouse(cm, e); + if (!pos) { return } + var frag = document.createDocumentFragment(); + drawSelectionCursor(cm, pos, frag); + if (!cm.display.dragCursor) { + cm.display.dragCursor = elt("div", null, "CodeMirror-cursors CodeMirror-dragcursors"); + cm.display.lineSpace.insertBefore(cm.display.dragCursor, cm.display.cursorDiv); + } + removeChildrenAndAdd(cm.display.dragCursor, frag); + } + + function clearDragCursor(cm) { + if (cm.display.dragCursor) { + cm.display.lineSpace.removeChild(cm.display.dragCursor); + cm.display.dragCursor = null; + } + } // These must be handled carefully, because naively registering a // handler for each editor will cause the editors to never be // garbage collected. function forEachCodeMirror(f) { - if (!document.body.getElementsByClassName) return; - var byClass = document.body.getElementsByClassName("CodeMirror"); + if (!document.getElementsByClassName) { return } + var byClass = document.getElementsByClassName("CodeMirror"), editors = []; for (var i = 0; i < byClass.length; i++) { var cm = byClass[i].CodeMirror; - if (cm) f(cm); - } + if (cm) { editors.push(cm); } + } + if (editors.length) { editors[0].operation(function () { + for (var i = 0; i < editors.length; i++) { f(editors[i]); } + }); } } var globalsRegistered = false; function ensureGlobalHandlers() { - if (globalsRegistered) return; + if (globalsRegistered) { return } registerGlobalHandlers(); globalsRegistered = true; } function registerGlobalHandlers() { // When the window resizes, we need to refresh active editors. 
var resizeTimer; - on(window, "resize", function() { - if (resizeTimer == null) resizeTimer = setTimeout(function() { + on(window, "resize", function () { + if (resizeTimer == null) { resizeTimer = setTimeout(function () { resizeTimer = null; forEachCodeMirror(onResize); - }, 100); + }, 100); } }); // When the window loses focus, we want to show the editor as blurred - on(window, "blur", function() { - forEachCodeMirror(onBlur); - }); - } - - // FEATURE DETECTION - - // Detect drag-and-drop - var dragAndDrop = function() { - // There is *some* kind of drag-and-drop support in IE6-8, but I - // couldn't get it to work yet. - if (ie && ie_version < 9) return false; - var div = elt('div'); - return "draggable" in div || "dragDrop" in div; - }(); - - var zwspSupported; - function zeroWidthElement(measure) { - if (zwspSupported == null) { - var test = elt("span", "\u200b"); - removeChildrenAndAdd(measure, elt("span", [test, document.createTextNode("x")])); - if (measure.firstChild.offsetHeight != 0) - zwspSupported = test.offsetWidth <= 1 && test.offsetHeight > 2 && !(ie && ie_version < 8); - } - var node = zwspSupported ? elt("span", "\u200b") : - elt("span", "\u00a0", null, "display: inline-block; width: 1px; margin-right: -1px"); - node.setAttribute("cm-text", ""); - return node; - } - - // Feature-detect IE's crummy client rect reporting for bidi text - var badBidiRects; - function hasBadBidiRects(measure) { - if (badBidiRects != null) return badBidiRects; - var txt = removeChildrenAndAdd(measure, document.createTextNode("A\u062eA")); - var r0 = range(txt, 0, 1).getBoundingClientRect(); - if (!r0 || r0.left == r0.right) return false; // Safari returns null in some cases (#2780) - var r1 = range(txt, 1, 2).getBoundingClientRect(); - return badBidiRects = (r1.right - r0.right < 3); - } - - // See if "".split is the broken IE version, if so, provide an - // alternative way to split lines. - var splitLinesAuto = CodeMirror.splitLines = "\n\nb".split(/\n/).length != 3 ? function(string) { - var pos = 0, result = [], l = string.length; - while (pos <= l) { - var nl = string.indexOf("\n", pos); - if (nl == -1) nl = string.length; - var line = string.slice(pos, string.charAt(nl - 1) == "\r" ? nl - 1 : nl); - var rt = line.indexOf("\r"); - if (rt != -1) { - result.push(line.slice(0, rt)); - pos += rt + 1; - } else { - result.push(line); - pos = nl + 1; - } - } - return result; - } : function(string){return string.split(/\r\n?|\n/);}; - - var hasSelection = window.getSelection ? 
function(te) { - try { return te.selectionStart != te.selectionEnd; } - catch(e) { return false; } - } : function(te) { - try {var range = te.ownerDocument.selection.createRange();} - catch(e) {} - if (!range || range.parentElement() != te) return false; - return range.compareEndPoints("StartToEnd", range) != 0; - }; - - var hasCopyEvent = (function() { - var e = elt("div"); - if ("oncopy" in e) return true; - e.setAttribute("oncopy", "return;"); - return typeof e.oncopy == "function"; - })(); - - var badZoomedRects = null; - function hasBadZoomedRects(measure) { - if (badZoomedRects != null) return badZoomedRects; - var node = removeChildrenAndAdd(measure, elt("span", "x")); - var normal = node.getBoundingClientRect(); - var fromRange = range(node, 0, 1).getBoundingClientRect(); - return badZoomedRects = Math.abs(normal.left - fromRange.left) > 1; - } - - // KEY NAMES - - var keyNames = CodeMirror.keyNames = { - 3: "Enter", 8: "Backspace", 9: "Tab", 13: "Enter", 16: "Shift", 17: "Ctrl", 18: "Alt", + on(window, "blur", function () { return forEachCodeMirror(onBlur); }); + } + // Called when the window resizes + function onResize(cm) { + var d = cm.display; + // Might be a text scaling operation, clear size caches. + d.cachedCharWidth = d.cachedTextHeight = d.cachedPaddingH = null; + d.scrollbarsClipped = false; + cm.setSize(); + } + + var keyNames = { + 3: "Pause", 8: "Backspace", 9: "Tab", 13: "Enter", 16: "Shift", 17: "Ctrl", 18: "Alt", 19: "Pause", 20: "CapsLock", 27: "Esc", 32: "Space", 33: "PageUp", 34: "PageDown", 35: "End", 36: "Home", 37: "Left", 38: "Up", 39: "Right", 40: "Down", 44: "PrintScrn", 45: "Insert", 46: "Delete", 59: ";", 61: "=", 91: "Mod", 92: "Mod", 93: "Mod", - 106: "*", 107: "=", 109: "-", 110: ".", 111: "/", 127: "Delete", + 106: "*", 107: "=", 109: "-", 110: ".", 111: "/", 145: "ScrollLock", 173: "-", 186: ";", 187: "=", 188: ",", 189: "-", 190: ".", 191: "/", 192: "`", 219: "[", 220: "\\", 221: "]", 222: "'", 63232: "Up", 63233: "Down", 63234: "Left", 63235: "Right", 63272: "Delete", 63273: "Home", 63275: "End", 63276: "PageUp", 63277: "PageDown", 63302: "Insert" }; - (function() { - // Number keys - for (var i = 0; i < 10; i++) keyNames[i + 48] = keyNames[i + 96] = String(i); - // Alphabetic keys - for (var i = 65; i <= 90; i++) keyNames[i] = String.fromCharCode(i); - // Function keys - for (var i = 1; i <= 12; i++) keyNames[i + 111] = keyNames[i + 63235] = "F" + i; - })(); - - // BIDI HELPERS - - function iterateBidiSections(order, from, to, f) { - if (!order) return f(from, to, "ltr"); - var found = false; - for (var i = 0; i < order.length; ++i) { - var part = order[i]; - if (part.from < to && part.to > from || from == to && part.to == from) { - f(Math.max(part.from, from), Math.min(part.to, to), part.level == 1 ? "rtl" : "ltr"); - found = true; - } - } - if (!found) f(from, to, "ltr"); - } - - function bidiLeft(part) { return part.level % 2 ? part.to : part.from; } - function bidiRight(part) { return part.level % 2 ? part.from : part.to; } - - function lineLeft(line) { var order = getOrder(line); return order ? 
bidiLeft(order[0]) : 0; } - function lineRight(line) { - var order = getOrder(line); - if (!order) return line.text.length; - return bidiRight(lst(order)); - } + + // Number keys + for (var i = 0; i < 10; i++) { keyNames[i + 48] = keyNames[i + 96] = String(i); } + // Alphabetic keys + for (var i$1 = 65; i$1 <= 90; i$1++) { keyNames[i$1] = String.fromCharCode(i$1); } + // Function keys + for (var i$2 = 1; i$2 <= 12; i$2++) { keyNames[i$2 + 111] = keyNames[i$2 + 63235] = "F" + i$2; } + + var keyMap = {}; + + keyMap.basic = { + "Left": "goCharLeft", "Right": "goCharRight", "Up": "goLineUp", "Down": "goLineDown", + "End": "goLineEnd", "Home": "goLineStartSmart", "PageUp": "goPageUp", "PageDown": "goPageDown", + "Delete": "delCharAfter", "Backspace": "delCharBefore", "Shift-Backspace": "delCharBefore", + "Tab": "defaultTab", "Shift-Tab": "indentAuto", + "Enter": "newlineAndIndent", "Insert": "toggleOverwrite", + "Esc": "singleSelection" + }; + // Note that the save and find-related commands aren't defined by + // default. User code or addons can define them. Unknown commands + // are simply ignored. + keyMap.pcDefault = { + "Ctrl-A": "selectAll", "Ctrl-D": "deleteLine", "Ctrl-Z": "undo", "Shift-Ctrl-Z": "redo", "Ctrl-Y": "redo", + "Ctrl-Home": "goDocStart", "Ctrl-End": "goDocEnd", "Ctrl-Up": "goLineUp", "Ctrl-Down": "goLineDown", + "Ctrl-Left": "goGroupLeft", "Ctrl-Right": "goGroupRight", "Alt-Left": "goLineStart", "Alt-Right": "goLineEnd", + "Ctrl-Backspace": "delGroupBefore", "Ctrl-Delete": "delGroupAfter", "Ctrl-S": "save", "Ctrl-F": "find", + "Ctrl-G": "findNext", "Shift-Ctrl-G": "findPrev", "Shift-Ctrl-F": "replace", "Shift-Ctrl-R": "replaceAll", + "Ctrl-[": "indentLess", "Ctrl-]": "indentMore", + "Ctrl-U": "undoSelection", "Shift-Ctrl-U": "redoSelection", "Alt-U": "redoSelection", + "fallthrough": "basic" + }; + // Very basic readline/emacs-style bindings, which are standard on Mac. + keyMap.emacsy = { + "Ctrl-F": "goCharRight", "Ctrl-B": "goCharLeft", "Ctrl-P": "goLineUp", "Ctrl-N": "goLineDown", + "Alt-F": "goWordRight", "Alt-B": "goWordLeft", "Ctrl-A": "goLineStart", "Ctrl-E": "goLineEnd", + "Ctrl-V": "goPageDown", "Shift-Ctrl-V": "goPageUp", "Ctrl-D": "delCharAfter", "Ctrl-H": "delCharBefore", + "Alt-D": "delWordAfter", "Alt-Backspace": "delWordBefore", "Ctrl-K": "killLine", "Ctrl-T": "transposeChars", + "Ctrl-O": "openLine" + }; + keyMap.macDefault = { + "Cmd-A": "selectAll", "Cmd-D": "deleteLine", "Cmd-Z": "undo", "Shift-Cmd-Z": "redo", "Cmd-Y": "redo", + "Cmd-Home": "goDocStart", "Cmd-Up": "goDocStart", "Cmd-End": "goDocEnd", "Cmd-Down": "goDocEnd", "Alt-Left": "goGroupLeft", + "Alt-Right": "goGroupRight", "Cmd-Left": "goLineLeft", "Cmd-Right": "goLineRight", "Alt-Backspace": "delGroupBefore", + "Ctrl-Alt-Backspace": "delGroupAfter", "Alt-Delete": "delGroupAfter", "Cmd-S": "save", "Cmd-F": "find", + "Cmd-G": "findNext", "Shift-Cmd-G": "findPrev", "Cmd-Alt-F": "replace", "Shift-Cmd-Alt-F": "replaceAll", + "Cmd-[": "indentLess", "Cmd-]": "indentMore", "Cmd-Backspace": "delWrappedLineLeft", "Cmd-Delete": "delWrappedLineRight", + "Cmd-U": "undoSelection", "Shift-Cmd-U": "redoSelection", "Ctrl-Up": "goDocStart", "Ctrl-Down": "goDocEnd", + "fallthrough": ["basic", "emacsy"] + }; + keyMap["default"] = mac ? 
keyMap.macDefault : keyMap.pcDefault; + + // KEYMAP DISPATCH + + function normalizeKeyName(name) { + var parts = name.split(/-(?!$)/); + name = parts[parts.length - 1]; + var alt, ctrl, shift, cmd; + for (var i = 0; i < parts.length - 1; i++) { + var mod = parts[i]; + if (/^(cmd|meta|m)$/i.test(mod)) { cmd = true; } + else if (/^a(lt)?$/i.test(mod)) { alt = true; } + else if (/^(c|ctrl|control)$/i.test(mod)) { ctrl = true; } + else if (/^s(hift)?$/i.test(mod)) { shift = true; } + else { throw new Error("Unrecognized modifier name: " + mod) } + } + if (alt) { name = "Alt-" + name; } + if (ctrl) { name = "Ctrl-" + name; } + if (cmd) { name = "Cmd-" + name; } + if (shift) { name = "Shift-" + name; } + return name + } + + // This is a kludge to keep keymaps mostly working as raw objects + // (backwards compatibility) while at the same time support features + // like normalization and multi-stroke key bindings. It compiles a + // new normalized keymap, and then updates the old object to reflect + // this. + function normalizeKeyMap(keymap) { + var copy = {}; + for (var keyname in keymap) { if (keymap.hasOwnProperty(keyname)) { + var value = keymap[keyname]; + if (/^(name|fallthrough|(de|at)tach)$/.test(keyname)) { continue } + if (value == "...") { delete keymap[keyname]; continue } + + var keys = map(keyname.split(" "), normalizeKeyName); + for (var i = 0; i < keys.length; i++) { + var val = (void 0), name = (void 0); + if (i == keys.length - 1) { + name = keys.join(" "); + val = value; + } else { + name = keys.slice(0, i + 1).join(" "); + val = "..."; + } + var prev = copy[name]; + if (!prev) { copy[name] = val; } + else if (prev != val) { throw new Error("Inconsistent bindings for " + name) } + } + delete keymap[keyname]; + } } + for (var prop in copy) { keymap[prop] = copy[prop]; } + return keymap + } + + function lookupKey(key, map$$1, handle, context) { + map$$1 = getKeyMap(map$$1); + var found = map$$1.call ? map$$1.call(key, context) : map$$1[key]; + if (found === false) { return "nothing" } + if (found === "...") { return "multi" } + if (found != null && handle(found)) { return "handled" } + + if (map$$1.fallthrough) { + if (Object.prototype.toString.call(map$$1.fallthrough) != "[object Array]") + { return lookupKey(key, map$$1.fallthrough, handle, context) } + for (var i = 0; i < map$$1.fallthrough.length; i++) { + var result = lookupKey(key, map$$1.fallthrough[i], handle, context); + if (result) { return result } + } + } + } + + // Modifier key presses don't count as 'real' key presses for the + // purpose of keymap fallthrough. + function isModifierKey(value) { + var name = typeof value == "string" ? value : keyNames[value.keyCode]; + return name == "Ctrl" || name == "Alt" || name == "Shift" || name == "Mod" + } + + function addModifierNames(name, event, noShift) { + var base = name; + if (event.altKey && base != "Alt") { name = "Alt-" + name; } + if ((flipCtrlCmd ? event.metaKey : event.ctrlKey) && base != "Ctrl") { name = "Ctrl-" + name; } + if ((flipCtrlCmd ? event.ctrlKey : event.metaKey) && base != "Cmd") { name = "Cmd-" + name; } + if (!noShift && event.shiftKey && base != "Shift") { name = "Shift-" + name; } + return name + } + + // Look up the name of a key as indicated by an event object. 
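// Editor's note: a small sketch, not part of the upstream diff; assumes an editor
// instance `cm`. Custom bindings go through the same keymap tables defined above;
// running the map through normalizeKeyMap canonicalizes modifier order and enables
// multi-stroke bindings written as space-separated key names.
cm.setOption("extraKeys", CodeMirror.normalizeKeyMap({
  "Ctrl-Space": "indentAuto",                                         // bind a built-in command by name
  "Ctrl-X Ctrl-S": function(cm) { console.log("save requested"); },   // multi-stroke binding
  "Shift-Tab": "indentLess"
}));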
+ function keyName(event, noShift) { + if (presto && event.keyCode == 34 && event["char"]) { return false } + var name = keyNames[event.keyCode]; + if (name == null || event.altGraphKey) { return false } + // Ctrl-ScrollLock has keyCode 3, same as Ctrl-Pause, + // so we'll use event.code when available (Chrome 48+, FF 38+, Safari 10.1+) + if (event.keyCode == 3 && event.code) { name = event.code; } + return addModifierNames(name, event, noShift) + } + + function getKeyMap(val) { + return typeof val == "string" ? keyMap[val] : val + } + + // Helper for deleting text near the selection(s), used to implement + // backspace, delete, and similar functionality. + function deleteNearSelection(cm, compute) { + var ranges = cm.doc.sel.ranges, kill = []; + // Build up a set of ranges to kill first, merging overlapping + // ranges. + for (var i = 0; i < ranges.length; i++) { + var toKill = compute(ranges[i]); + while (kill.length && cmp(toKill.from, lst(kill).to) <= 0) { + var replaced = kill.pop(); + if (cmp(replaced.from, toKill.from) < 0) { + toKill.from = replaced.from; + break + } + } + kill.push(toKill); + } + // Next, remove those actual ranges. + runInOp(cm, function () { + for (var i = kill.length - 1; i >= 0; i--) + { replaceRange(cm.doc, "", kill[i].from, kill[i].to, "+delete"); } + ensureCursorVisible(cm); + }); + } + + function moveCharLogically(line, ch, dir) { + var target = skipExtendingChars(line.text, ch + dir, dir); + return target < 0 || target > line.text.length ? null : target + } + + function moveLogically(line, start, dir) { + var ch = moveCharLogically(line, start.ch, dir); + return ch == null ? null : new Pos(start.line, ch, dir < 0 ? "after" : "before") + } + + function endOfLine(visually, cm, lineObj, lineNo, dir) { + if (visually) { + var order = getOrder(lineObj, cm.doc.direction); + if (order) { + var part = dir < 0 ? lst(order) : order[0]; + var moveInStorageOrder = (dir < 0) == (part.level == 1); + var sticky = moveInStorageOrder ? "after" : "before"; + var ch; + // With a wrapped rtl chunk (possibly spanning multiple bidi parts), + // it could be that the last bidi part is not on the last visual line, + // since visual lines contain content order-consecutive chunks. + // Thus, in rtl, we are looking for the first (content-order) character + // in the rtl chunk that is on the last line (that is, the same line + // as the last (content-order) character). + if (part.level > 0 || cm.doc.direction == "rtl") { + var prep = prepareMeasureForLine(cm, lineObj); + ch = dir < 0 ? lineObj.text.length - 1 : 0; + var targetTop = measureCharPrepared(cm, prep, ch).top; + ch = findFirst(function (ch) { return measureCharPrepared(cm, prep, ch).top == targetTop; }, (dir < 0) == (part.level == 1) ? part.from : part.to - 1, ch); + if (sticky == "before") { ch = moveCharLogically(lineObj, ch, 1); } + } else { ch = dir < 0 ? part.to : part.from; } + return new Pos(lineNo, ch, sticky) + } + } + return new Pos(lineNo, dir < 0 ? lineObj.text.length : 0, dir < 0 ? "before" : "after") + } + + function moveVisually(cm, line, start, dir) { + var bidi = getOrder(line, cm.doc.direction); + if (!bidi) { return moveLogically(line, start, dir) } + if (start.ch >= line.text.length) { + start.ch = line.text.length; + start.sticky = "before"; + } else if (start.ch <= 0) { + start.ch = 0; + start.sticky = "after"; + } + var partPos = getBidiPartAt(bidi, start.ch, start.sticky), part = bidi[partPos]; + if (cm.doc.direction == "ltr" && part.level % 2 == 0 && (dir > 0 ? 
part.to > start.ch : part.from < start.ch)) { + // Case 1: We move within an ltr part in an ltr editor. Even with wrapped lines, + // nothing interesting happens. + return moveLogically(line, start, dir) + } + + var mv = function (pos, dir) { return moveCharLogically(line, pos instanceof Pos ? pos.ch : pos, dir); }; + var prep; + var getWrappedLineExtent = function (ch) { + if (!cm.options.lineWrapping) { return {begin: 0, end: line.text.length} } + prep = prep || prepareMeasureForLine(cm, line); + return wrappedLineExtentChar(cm, line, prep, ch) + }; + var wrappedLineExtent = getWrappedLineExtent(start.sticky == "before" ? mv(start, -1) : start.ch); + + if (cm.doc.direction == "rtl" || part.level == 1) { + var moveInStorageOrder = (part.level == 1) == (dir < 0); + var ch = mv(start, moveInStorageOrder ? 1 : -1); + if (ch != null && (!moveInStorageOrder ? ch >= part.from && ch >= wrappedLineExtent.begin : ch <= part.to && ch <= wrappedLineExtent.end)) { + // Case 2: We move within an rtl part or in an rtl editor on the same visual line + var sticky = moveInStorageOrder ? "before" : "after"; + return new Pos(start.line, ch, sticky) + } + } + + // Case 3: Could not move within this bidi part in this visual line, so leave + // the current bidi part + + var searchInVisualLine = function (partPos, dir, wrappedLineExtent) { + var getRes = function (ch, moveInStorageOrder) { return moveInStorageOrder + ? new Pos(start.line, mv(ch, 1), "before") + : new Pos(start.line, ch, "after"); }; + + for (; partPos >= 0 && partPos < bidi.length; partPos += dir) { + var part = bidi[partPos]; + var moveInStorageOrder = (dir > 0) == (part.level != 1); + var ch = moveInStorageOrder ? wrappedLineExtent.begin : mv(wrappedLineExtent.end, -1); + if (part.from <= ch && ch < part.to) { return getRes(ch, moveInStorageOrder) } + ch = moveInStorageOrder ? part.from : mv(part.to, -1); + if (wrappedLineExtent.begin <= ch && ch < wrappedLineExtent.end) { return getRes(ch, moveInStorageOrder) } + } + }; + + // Case 3a: Look for other bidi parts on the same visual line + var res = searchInVisualLine(partPos + dir, dir, wrappedLineExtent); + if (res) { return res } + + // Case 3b: Look for other bidi parts on the next visual line + var nextCh = dir > 0 ? wrappedLineExtent.end : mv(wrappedLineExtent.begin, -1); + if (nextCh != null && !(dir > 0 && nextCh == line.text.length)) { + res = searchInVisualLine(dir > 0 ? 0 : bidi.length - 1, dir, getWrappedLineExtent(nextCh)); + if (res) { return res } + } + + // Case 4: Nowhere to move + return null + } + + // Commands are parameter-less actions that can be performed on an + // editor, mostly used for keybindings. 
+ var commands = { + selectAll: selectAll, + singleSelection: function (cm) { return cm.setSelection(cm.getCursor("anchor"), cm.getCursor("head"), sel_dontScroll); }, + killLine: function (cm) { return deleteNearSelection(cm, function (range) { + if (range.empty()) { + var len = getLine(cm.doc, range.head.line).text.length; + if (range.head.ch == len && range.head.line < cm.lastLine()) + { return {from: range.head, to: Pos(range.head.line + 1, 0)} } + else + { return {from: range.head, to: Pos(range.head.line, len)} } + } else { + return {from: range.from(), to: range.to()} + } + }); }, + deleteLine: function (cm) { return deleteNearSelection(cm, function (range) { return ({ + from: Pos(range.from().line, 0), + to: clipPos(cm.doc, Pos(range.to().line + 1, 0)) + }); }); }, + delLineLeft: function (cm) { return deleteNearSelection(cm, function (range) { return ({ + from: Pos(range.from().line, 0), to: range.from() + }); }); }, + delWrappedLineLeft: function (cm) { return deleteNearSelection(cm, function (range) { + var top = cm.charCoords(range.head, "div").top + 5; + var leftPos = cm.coordsChar({left: 0, top: top}, "div"); + return {from: leftPos, to: range.from()} + }); }, + delWrappedLineRight: function (cm) { return deleteNearSelection(cm, function (range) { + var top = cm.charCoords(range.head, "div").top + 5; + var rightPos = cm.coordsChar({left: cm.display.lineDiv.offsetWidth + 100, top: top}, "div"); + return {from: range.from(), to: rightPos } + }); }, + undo: function (cm) { return cm.undo(); }, + redo: function (cm) { return cm.redo(); }, + undoSelection: function (cm) { return cm.undoSelection(); }, + redoSelection: function (cm) { return cm.redoSelection(); }, + goDocStart: function (cm) { return cm.extendSelection(Pos(cm.firstLine(), 0)); }, + goDocEnd: function (cm) { return cm.extendSelection(Pos(cm.lastLine())); }, + goLineStart: function (cm) { return cm.extendSelectionsBy(function (range) { return lineStart(cm, range.head.line); }, + {origin: "+move", bias: 1} + ); }, + goLineStartSmart: function (cm) { return cm.extendSelectionsBy(function (range) { return lineStartSmart(cm, range.head); }, + {origin: "+move", bias: 1} + ); }, + goLineEnd: function (cm) { return cm.extendSelectionsBy(function (range) { return lineEnd(cm, range.head.line); }, + {origin: "+move", bias: -1} + ); }, + goLineRight: function (cm) { return cm.extendSelectionsBy(function (range) { + var top = cm.cursorCoords(range.head, "div").top + 5; + return cm.coordsChar({left: cm.display.lineDiv.offsetWidth + 100, top: top}, "div") + }, sel_move); }, + goLineLeft: function (cm) { return cm.extendSelectionsBy(function (range) { + var top = cm.cursorCoords(range.head, "div").top + 5; + return cm.coordsChar({left: 0, top: top}, "div") + }, sel_move); }, + goLineLeftSmart: function (cm) { return cm.extendSelectionsBy(function (range) { + var top = cm.cursorCoords(range.head, "div").top + 5; + var pos = cm.coordsChar({left: 0, top: top}, "div"); + if (pos.ch < cm.getLine(pos.line).search(/\S/)) { return lineStartSmart(cm, range.head) } + return pos + }, sel_move); }, + goLineUp: function (cm) { return cm.moveV(-1, "line"); }, + goLineDown: function (cm) { return cm.moveV(1, "line"); }, + goPageUp: function (cm) { return cm.moveV(-1, "page"); }, + goPageDown: function (cm) { return cm.moveV(1, "page"); }, + goCharLeft: function (cm) { return cm.moveH(-1, "char"); }, + goCharRight: function (cm) { return cm.moveH(1, "char"); }, + goColumnLeft: function (cm) { return cm.moveH(-1, "column"); }, + goColumnRight: 
function (cm) { return cm.moveH(1, "column"); }, + goWordLeft: function (cm) { return cm.moveH(-1, "word"); }, + goGroupRight: function (cm) { return cm.moveH(1, "group"); }, + goGroupLeft: function (cm) { return cm.moveH(-1, "group"); }, + goWordRight: function (cm) { return cm.moveH(1, "word"); }, + delCharBefore: function (cm) { return cm.deleteH(-1, "char"); }, + delCharAfter: function (cm) { return cm.deleteH(1, "char"); }, + delWordBefore: function (cm) { return cm.deleteH(-1, "word"); }, + delWordAfter: function (cm) { return cm.deleteH(1, "word"); }, + delGroupBefore: function (cm) { return cm.deleteH(-1, "group"); }, + delGroupAfter: function (cm) { return cm.deleteH(1, "group"); }, + indentAuto: function (cm) { return cm.indentSelection("smart"); }, + indentMore: function (cm) { return cm.indentSelection("add"); }, + indentLess: function (cm) { return cm.indentSelection("subtract"); }, + insertTab: function (cm) { return cm.replaceSelection("\t"); }, + insertSoftTab: function (cm) { + var spaces = [], ranges = cm.listSelections(), tabSize = cm.options.tabSize; + for (var i = 0; i < ranges.length; i++) { + var pos = ranges[i].from(); + var col = countColumn(cm.getLine(pos.line), pos.ch, tabSize); + spaces.push(spaceStr(tabSize - col % tabSize)); + } + cm.replaceSelections(spaces); + }, + defaultTab: function (cm) { + if (cm.somethingSelected()) { cm.indentSelection("add"); } + else { cm.execCommand("insertTab"); } + }, + // Swap the two chars left and right of each selection's head. + // Move cursor behind the two swapped characters afterwards. + // + // Doesn't consider line feeds a character. + // Doesn't scan more than one line above to find a character. + // Doesn't do anything on an empty line. + // Doesn't do anything with non-empty selections. + transposeChars: function (cm) { return runInOp(cm, function () { + var ranges = cm.listSelections(), newSel = []; + for (var i = 0; i < ranges.length; i++) { + if (!ranges[i].empty()) { continue } + var cur = ranges[i].head, line = getLine(cm.doc, cur.line).text; + if (line) { + if (cur.ch == line.length) { cur = new Pos(cur.line, cur.ch - 1); } + if (cur.ch > 0) { + cur = new Pos(cur.line, cur.ch + 1); + cm.replaceRange(line.charAt(cur.ch - 1) + line.charAt(cur.ch - 2), + Pos(cur.line, cur.ch - 2), cur, "+transpose"); + } else if (cur.line > cm.doc.first) { + var prev = getLine(cm.doc, cur.line - 1).text; + if (prev) { + cur = new Pos(cur.line, 1); + cm.replaceRange(line.charAt(0) + cm.doc.lineSeparator() + + prev.charAt(prev.length - 1), + Pos(cur.line - 1, prev.length - 1), cur, "+transpose"); + } + } + } + newSel.push(new Range(cur, cur)); + } + cm.setSelections(newSel); + }); }, + newlineAndIndent: function (cm) { return runInOp(cm, function () { + var sels = cm.listSelections(); + for (var i = sels.length - 1; i >= 0; i--) + { cm.replaceRange(cm.doc.lineSeparator(), sels[i].anchor, sels[i].head, "+input"); } + sels = cm.listSelections(); + for (var i$1 = 0; i$1 < sels.length; i$1++) + { cm.indentLine(sels[i$1].from().line, null, true); } + ensureCursorVisible(cm); + }); }, + openLine: function (cm) { return cm.replaceSelection("\n", "start"); }, + toggleOverwrite: function (cm) { return cm.toggleOverwrite(); } + }; + function lineStart(cm, lineN) { var line = getLine(cm.doc, lineN); var visual = visualLine(line); - if (visual != line) lineN = lineNo(visual); - var order = getOrder(visual); - var ch = !order ? 0 : order[0].level % 2 ? 
lineRight(visual) : lineLeft(visual); - return Pos(lineN, ch); + if (visual != line) { lineN = lineNo(visual); } + return endOfLine(true, cm, visual, lineN, 1) } function lineEnd(cm, lineN) { - var merged, line = getLine(cm.doc, lineN); - while (merged = collapsedSpanAtEnd(line)) { - line = merged.find(1, true).line; - lineN = null; - } - var order = getOrder(line); - var ch = !order ? line.text.length : order[0].level % 2 ? lineLeft(line) : lineRight(line); - return Pos(lineN == null ? lineNo(line) : lineN, ch); + var line = getLine(cm.doc, lineN); + var visual = visualLineEnd(line); + if (visual != line) { lineN = lineNo(visual); } + return endOfLine(true, cm, line, lineN, -1) } function lineStartSmart(cm, pos) { var start = lineStart(cm, pos.line); var line = getLine(cm.doc, start.line); - var order = getOrder(line); + var order = getOrder(line, cm.doc.direction); if (!order || order[0].level == 0) { var firstNonWS = Math.max(0, line.text.search(/\S/)); var inWS = pos.line == start.line && pos.ch <= firstNonWS && pos.ch; - return Pos(start.line, inWS ? 0 : firstNonWS); - } - return start; - } - - function compareBidiLevel(order, a, b) { - var linedir = order[0].level; - if (a == linedir) return true; - if (b == linedir) return false; - return a < b; - } - var bidiOther; - function getBidiPartAt(order, pos) { - bidiOther = null; - for (var i = 0, found; i < order.length; ++i) { - var cur = order[i]; - if (cur.from < pos && cur.to > pos) return i; - if ((cur.from == pos || cur.to == pos)) { - if (found == null) { - found = i; - } else if (compareBidiLevel(order, cur.level, order[found].level)) { - if (cur.from != cur.to) bidiOther = found; - return i; - } else { - if (cur.from != cur.to) bidiOther = i; - return found; - } - } - } - return found; - } - - function moveInLine(line, pos, dir, byUnit) { - if (!byUnit) return pos + dir; - do pos += dir; - while (pos > 0 && isExtendingChar(line.text.charAt(pos))); - return pos; - } - - // This is needed in order to move 'visually' through bi-directional - // text -- i.e., pressing left should make the cursor go left, even - // when in RTL text. The tricky part is the 'jumps', where RTL and - // LTR text touch each other. This often requires the cursor offset - // to move more than one unit, in order to visually move one unit. - function moveVisually(line, start, dir, byUnit) { - var bidi = getOrder(line); - if (!bidi) return moveLogically(line, start, dir, byUnit); - var pos = getBidiPartAt(bidi, start), part = bidi[pos]; - var target = moveInLine(line, start, part.level % 2 ? -dir : dir, byUnit); - - for (;;) { - if (target > part.from && target < part.to) return target; - if (target == part.from || target == part.to) { - if (getBidiPartAt(bidi, target) == pos) return target; - part = bidi[pos += dir]; - return (dir > 0) == part.level % 2 ? part.to : part.from; - } else { - part = bidi[pos += dir]; - if (!part) return null; - if ((dir > 0) == part.level % 2) - target = moveInLine(line, part.to, -1, byUnit); + return Pos(start.line, inWS ? 0 : firstNonWS, start.sticky) + } + return start + } + + // Run a handler that was bound to a key. 
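  // Illustrative sketch: entries in the commands table above are parameter-less
  // actions taking the editor instance. Stock CodeMirror 5 exposes that table as
  // CodeMirror.commands (assumed here; the export itself sits outside this hunk),
  // so a custom command can be registered and then invoked by name.
  CodeMirror.commands.duplicateLine = function (cm) {
    var cur = cm.getCursor();
    var text = cm.getLine(cur.line);
    cm.replaceRange(text + cm.doc.lineSeparator(), CodeMirror.Pos(cur.line, 0));
  };
  // sketchEditor.execCommand("duplicateLine");  // or bind it via extraKeys / a keymap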
+ function doHandleBinding(cm, bound, dropShift) { + if (typeof bound == "string") { + bound = commands[bound]; + if (!bound) { return false } + } + // Ensure previous input has been read, so that the handler sees a + // consistent view of the document + cm.display.input.ensurePolled(); + var prevShift = cm.display.shift, done = false; + try { + if (cm.isReadOnly()) { cm.state.suppressEdits = true; } + if (dropShift) { cm.display.shift = false; } + done = bound(cm) != Pass; + } finally { + cm.display.shift = prevShift; + cm.state.suppressEdits = false; + } + return done + } + + function lookupKeyForEditor(cm, name, handle) { + for (var i = 0; i < cm.state.keyMaps.length; i++) { + var result = lookupKey(name, cm.state.keyMaps[i], handle, cm); + if (result) { return result } + } + return (cm.options.extraKeys && lookupKey(name, cm.options.extraKeys, handle, cm)) + || lookupKey(name, cm.options.keyMap, handle, cm) + } + + // Note that, despite the name, this function is also used to check + // for bound mouse clicks. + + var stopSeq = new Delayed; + + function dispatchKey(cm, name, e, handle) { + var seq = cm.state.keySeq; + if (seq) { + if (isModifierKey(name)) { return "handled" } + if (/\'$/.test(name)) + { cm.state.keySeq = null; } + else + { stopSeq.set(50, function () { + if (cm.state.keySeq == seq) { + cm.state.keySeq = null; + cm.display.input.reset(); + } + }); } + if (dispatchKeyInner(cm, seq + " " + name, e, handle)) { return true } + } + return dispatchKeyInner(cm, name, e, handle) + } + + function dispatchKeyInner(cm, name, e, handle) { + var result = lookupKeyForEditor(cm, name, handle); + + if (result == "multi") + { cm.state.keySeq = name; } + if (result == "handled") + { signalLater(cm, "keyHandled", cm, name, e); } + + if (result == "handled" || result == "multi") { + e_preventDefault(e); + restartBlink(cm); + } + + return !!result + } + + // Handle a key from the keydown event. + function handleKeyBinding(cm, e) { + var name = keyName(e, true); + if (!name) { return false } + + if (e.shiftKey && !cm.state.keySeq) { + // First try to resolve full name (including 'Shift-'). Failing + // that, see if there is a cursor-motion command (starting with + // 'go') bound to the keyname without 'Shift-'. + return dispatchKey(cm, "Shift-" + name, e, function (b) { return doHandleBinding(cm, b, true); }) + || dispatchKey(cm, name, e, function (b) { + if (typeof b == "string" ? /^go[A-Z]/.test(b) : b.motion) + { return doHandleBinding(cm, b) } + }) + } else { + return dispatchKey(cm, name, e, function (b) { return doHandleBinding(cm, b); }) + } + } + + // Handle a key from the keypress event + function handleCharBinding(cm, e, ch) { + return dispatchKey(cm, "'" + ch + "'", e, function (b) { return doHandleBinding(cm, b, true); }) + } + + var lastStoppedKey = null; + function onKeyDown(e) { + var cm = this; + cm.curOp.focus = activeElt(); + if (signalDOMEvent(cm, e)) { return } + // IE does strange things with escape. + if (ie && ie_version < 11 && e.keyCode == 27) { e.returnValue = false; } + var code = e.keyCode; + cm.display.shift = code == 16 || e.shiftKey; + var handled = handleKeyBinding(cm, e); + if (presto) { + lastStoppedKey = handled ? code : null; + // Opera has no cut event... we try to at least catch the key combo + if (!handled && code == 88 && !hasCopyEvent && (mac ? e.metaKey : e.ctrlKey)) + { cm.replaceSelection("", null, "cut"); } + } + + // Turn mouse into crosshair when Alt is held on Mac. 
+ if (code == 18 && !/\bCodeMirror-crosshair\b/.test(cm.display.lineDiv.className)) + { showCrossHair(cm); } + } + + function showCrossHair(cm) { + var lineDiv = cm.display.lineDiv; + addClass(lineDiv, "CodeMirror-crosshair"); + + function up(e) { + if (e.keyCode == 18 || !e.altKey) { + rmClass(lineDiv, "CodeMirror-crosshair"); + off(document, "keyup", up); + off(document, "mouseover", up); + } + } + on(document, "keyup", up); + on(document, "mouseover", up); + } + + function onKeyUp(e) { + if (e.keyCode == 16) { this.doc.sel.shift = false; } + signalDOMEvent(this, e); + } + + function onKeyPress(e) { + var cm = this; + if (eventInWidget(cm.display, e) || signalDOMEvent(cm, e) || e.ctrlKey && !e.altKey || mac && e.metaKey) { return } + var keyCode = e.keyCode, charCode = e.charCode; + if (presto && keyCode == lastStoppedKey) {lastStoppedKey = null; e_preventDefault(e); return} + if ((presto && (!e.which || e.which < 10)) && handleKeyBinding(cm, e)) { return } + var ch = String.fromCharCode(charCode == null ? keyCode : charCode); + // Some browsers fire keypress events for backspace + if (ch == "\x08") { return } + if (handleCharBinding(cm, e, ch)) { return } + cm.display.input.onKeyPress(e); + } + + var DOUBLECLICK_DELAY = 400; + + var PastClick = function(time, pos, button) { + this.time = time; + this.pos = pos; + this.button = button; + }; + + PastClick.prototype.compare = function (time, pos, button) { + return this.time + DOUBLECLICK_DELAY > time && + cmp(pos, this.pos) == 0 && button == this.button + }; + + var lastClick, lastDoubleClick; + function clickRepeat(pos, button) { + var now = +new Date; + if (lastDoubleClick && lastDoubleClick.compare(now, pos, button)) { + lastClick = lastDoubleClick = null; + return "triple" + } else if (lastClick && lastClick.compare(now, pos, button)) { + lastDoubleClick = new PastClick(now, pos, button); + lastClick = null; + return "double" + } else { + lastClick = new PastClick(now, pos, button); + lastDoubleClick = null; + return "single" + } + } + + // A mouse down can be a single click, double click, triple click, + // start of selection drag, start of text drag, new cursor + // (ctrl-click), rectangle drag (alt-drag), or xwin + // middle-click-paste. Or it might be a click on something we should + // not interfere with, such as a scrollbar or widget. + function onMouseDown(e) { + var cm = this, display = cm.display; + if (signalDOMEvent(cm, e) || display.activeTouch && display.input.supportsTouch()) { return } + display.input.ensurePolled(); + display.shift = e.shiftKey; + + if (eventInWidget(display, e)) { + if (!webkit) { + // Briefly turn off draggability, to allow widgets to do + // normal dragging things. + display.scroller.draggable = false; + setTimeout(function () { return display.scroller.draggable = true; }, 100); + } + return + } + if (clickInGutter(cm, e)) { return } + var pos = posFromMouse(cm, e), button = e_button(e), repeat = pos ? 
clickRepeat(pos, button) : "single"; + window.focus(); + + // #3261: make sure, that we're not starting a second selection + if (button == 1 && cm.state.selectingText) + { cm.state.selectingText(e); } + + if (pos && handleMappedButton(cm, button, pos, repeat, e)) { return } + + if (button == 1) { + if (pos) { leftButtonDown(cm, pos, repeat, e); } + else if (e_target(e) == display.scroller) { e_preventDefault(e); } + } else if (button == 2) { + if (pos) { extendSelection(cm.doc, pos); } + setTimeout(function () { return display.input.focus(); }, 20); + } else if (button == 3) { + if (captureRightClick) { cm.display.input.onContextMenu(e); } + else { delayBlurEvent(cm); } + } + } + + function handleMappedButton(cm, button, pos, repeat, event) { + var name = "Click"; + if (repeat == "double") { name = "Double" + name; } + else if (repeat == "triple") { name = "Triple" + name; } + name = (button == 1 ? "Left" : button == 2 ? "Middle" : "Right") + name; + + return dispatchKey(cm, addModifierNames(name, event), event, function (bound) { + if (typeof bound == "string") { bound = commands[bound]; } + if (!bound) { return false } + var done = false; + try { + if (cm.isReadOnly()) { cm.state.suppressEdits = true; } + done = bound(cm, pos) != Pass; + } finally { + cm.state.suppressEdits = false; + } + return done + }) + } + + function configureMouse(cm, repeat, event) { + var option = cm.getOption("configureMouse"); + var value = option ? option(cm, repeat, event) : {}; + if (value.unit == null) { + var rect = chromeOS ? event.shiftKey && event.metaKey : event.altKey; + value.unit = rect ? "rectangle" : repeat == "single" ? "char" : repeat == "double" ? "word" : "line"; + } + if (value.extend == null || cm.doc.extend) { value.extend = cm.doc.extend || event.shiftKey; } + if (value.addNew == null) { value.addNew = mac ? event.metaKey : event.ctrlKey; } + if (value.moveOnDrag == null) { value.moveOnDrag = !(mac ? event.altKey : event.ctrlKey); } + return value + } + + function leftButtonDown(cm, pos, repeat, event) { + if (ie) { setTimeout(bind(ensureFocus, cm), 0); } + else { cm.curOp.focus = activeElt(); } + + var behavior = configureMouse(cm, repeat, event); + + var sel = cm.doc.sel, contained; + if (cm.options.dragDrop && dragAndDrop && !cm.isReadOnly() && + repeat == "single" && (contained = sel.contains(pos)) > -1 && + (cmp((contained = sel.ranges[contained]).from(), pos) < 0 || pos.xRel > 0) && + (cmp(contained.to(), pos) > 0 || pos.xRel < 0)) + { leftButtonStartDrag(cm, event, pos, behavior); } + else + { leftButtonSelect(cm, event, pos, behavior); } + } + + // Start a text drag. When it ends, see if any dragging actually + // happen, and treat as a click if it didn't. 
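  // Illustrative sketch of the configureMouse hook read by configureMouse() above:
  // the option (declared with the other options further down) may return any of the
  // fields consumed there, i.e. unit, extend, addNew and moveOnDrag. The instance
  // name and the chosen behaviour are hypothetical.
  var mouseSketch = CodeMirror(document.body, {
    configureMouse: function (cm, repeat, event) {
      // Make Alt-click select by word instead of starting a rectangle selection.
      return event.altKey ? {unit: "word", addNew: false} : {};
    }
  });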
+ function leftButtonStartDrag(cm, event, pos, behavior) { + var display = cm.display, moved = false; + var dragEnd = operation(cm, function (e) { + if (webkit) { display.scroller.draggable = false; } + cm.state.draggingText = false; + off(display.wrapper.ownerDocument, "mouseup", dragEnd); + off(display.wrapper.ownerDocument, "mousemove", mouseMove); + off(display.scroller, "dragstart", dragStart); + off(display.scroller, "drop", dragEnd); + if (!moved) { + e_preventDefault(e); + if (!behavior.addNew) + { extendSelection(cm.doc, pos, null, null, behavior.extend); } + // Work around unexplainable focus problem in IE9 (#2127) and Chrome (#3081) + if (webkit || ie && ie_version == 9) + { setTimeout(function () {display.wrapper.ownerDocument.body.focus(); display.input.focus();}, 20); } else - target = moveInLine(line, part.from, 1, byUnit); - } - } - } - - function moveLogically(line, start, dir, byUnit) { - var target = start + dir; - if (byUnit) while (target > 0 && isExtendingChar(line.text.charAt(target))) target += dir; - return target < 0 || target > line.text.length ? null : target; - } - - // Bidirectional ordering algorithm - // See http://unicode.org/reports/tr9/tr9-13.html for the algorithm - // that this (partially) implements. - - // One-char codes used for character types: - // L (L): Left-to-Right - // R (R): Right-to-Left - // r (AL): Right-to-Left Arabic - // 1 (EN): European Number - // + (ES): European Number Separator - // % (ET): European Number Terminator - // n (AN): Arabic Number - // , (CS): Common Number Separator - // m (NSM): Non-Spacing Mark - // b (BN): Boundary Neutral - // s (B): Paragraph Separator - // t (S): Segment Separator - // w (WS): Whitespace - // N (ON): Other Neutrals - - // Returns null if characters are ordered as they appear - // (left-to-right), or an array of sections ({from, to, level} - // objects) in the order in which they occur visually. - var bidiOrdering = (function() { - // Character types for codepoints 0 to 0xff - var lowTypes = "bbbbbbbbbtstwsbbbbbbbbbbbbbbssstwNN%%%NNNNNN,N,N1111111111NNNNNNNLLLLLLLLLLLLLLLLLLLLLLLLLLNNNNNNLLLLLLLLLLLLLLLLLLLLLLLLLLNNNNbbbbbbsbbbbbbbbbbbbbbbbbbbbbbbbbb,N%%%%NNNNLNNNNN%%11NLNNN1LNNNNNLLLLLLLLLLLLLLLLLLLLLLLNLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLN"; - // Character types for codepoints 0x600 to 0x6ff - var arabicTypes = "rrrrrrrrrrrr,rNNmmmmmmrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrmmmmmmmmmmmmmmrrrrrrrnnnnnnnnnn%nnrrrmrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrmmmmmmmmmmmmmmmmmmmNmmmm"; - function charType(code) { - if (code <= 0xf7) return lowTypes.charAt(code); - else if (0x590 <= code && code <= 0x5f4) return "R"; - else if (0x600 <= code && code <= 0x6ed) return arabicTypes.charAt(code - 0x600); - else if (0x6ee <= code && code <= 0x8ac) return "r"; - else if (0x2000 <= code && code <= 0x200b) return "w"; - else if (code == 0x200c) return "b"; - else return "L"; - } - - var bidiRE = /[\u0590-\u05f4\u0600-\u06ff\u0700-\u08ac]/; - var isNeutral = /[stwN]/, isStrong = /[LRr]/, countsAsLeft = /[Lb1n]/, countsAsNum = /[1n]/; - // Browsers seem to always treat the boundaries of block elements as being L. - var outerType = "L"; - - function BidiSpan(level, from, to) { - this.level = level; - this.from = from; this.to = to; - } - - return function(str) { - if (!bidiRE.test(str)) return false; - var len = str.length, types = []; - for (var i = 0, type; i < len; ++i) - types.push(type = charType(str.charCodeAt(i))); - - // W1. 
Examine each non-spacing mark (NSM) in the level run, and - // change the type of the NSM to the type of the previous - // character. If the NSM is at the start of the level run, it will - // get the type of sor. - for (var i = 0, prev = outerType; i < len; ++i) { - var type = types[i]; - if (type == "m") types[i] = prev; - else prev = type; - } - - // W2. Search backwards from each instance of a European number - // until the first strong type (R, L, AL, or sor) is found. If an - // AL is found, change the type of the European number to Arabic - // number. - // W3. Change all ALs to R. - for (var i = 0, cur = outerType; i < len; ++i) { - var type = types[i]; - if (type == "1" && cur == "r") types[i] = "n"; - else if (isStrong.test(type)) { cur = type; if (type == "r") types[i] = "R"; } - } - - // W4. A single European separator between two European numbers - // changes to a European number. A single common separator between - // two numbers of the same type changes to that type. - for (var i = 1, prev = types[0]; i < len - 1; ++i) { - var type = types[i]; - if (type == "+" && prev == "1" && types[i+1] == "1") types[i] = "1"; - else if (type == "," && prev == types[i+1] && - (prev == "1" || prev == "n")) types[i] = prev; - prev = type; - } - - // W5. A sequence of European terminators adjacent to European - // numbers changes to all European numbers. - // W6. Otherwise, separators and terminators change to Other - // Neutral. - for (var i = 0; i < len; ++i) { - var type = types[i]; - if (type == ",") types[i] = "N"; - else if (type == "%") { - for (var end = i + 1; end < len && types[end] == "%"; ++end) {} - var replace = (i && types[i-1] == "!") || (end < len && types[end] == "1") ? "1" : "N"; - for (var j = i; j < end; ++j) types[j] = replace; - i = end - 1; + { display.input.focus(); } + } + }); + var mouseMove = function(e2) { + moved = moved || Math.abs(event.clientX - e2.clientX) + Math.abs(event.clientY - e2.clientY) >= 10; + }; + var dragStart = function () { return moved = true; }; + // Let the drag handler handle this. + if (webkit) { display.scroller.draggable = true; } + cm.state.draggingText = dragEnd; + dragEnd.copy = !behavior.moveOnDrag; + // IE's approach to draggable + if (display.scroller.dragDrop) { display.scroller.dragDrop(); } + on(display.wrapper.ownerDocument, "mouseup", dragEnd); + on(display.wrapper.ownerDocument, "mousemove", mouseMove); + on(display.scroller, "dragstart", dragStart); + on(display.scroller, "drop", dragEnd); + + delayBlurEvent(cm); + setTimeout(function () { return display.input.focus(); }, 20); + } + + function rangeForUnit(cm, pos, unit) { + if (unit == "char") { return new Range(pos, pos) } + if (unit == "word") { return cm.findWordAt(pos) } + if (unit == "line") { return new Range(Pos(pos.line, 0), clipPos(cm.doc, Pos(pos.line + 1, 0))) } + var result = unit(cm, pos); + return new Range(result.from, result.to) + } + + // Normal selection, as opposed to text dragging. 
+ function leftButtonSelect(cm, event, start, behavior) { + var display = cm.display, doc = cm.doc; + e_preventDefault(event); + + var ourRange, ourIndex, startSel = doc.sel, ranges = startSel.ranges; + if (behavior.addNew && !behavior.extend) { + ourIndex = doc.sel.contains(start); + if (ourIndex > -1) + { ourRange = ranges[ourIndex]; } + else + { ourRange = new Range(start, start); } + } else { + ourRange = doc.sel.primary(); + ourIndex = doc.sel.primIndex; + } + + if (behavior.unit == "rectangle") { + if (!behavior.addNew) { ourRange = new Range(start, start); } + start = posFromMouse(cm, event, true, true); + ourIndex = -1; + } else { + var range$$1 = rangeForUnit(cm, start, behavior.unit); + if (behavior.extend) + { ourRange = extendRange(ourRange, range$$1.anchor, range$$1.head, behavior.extend); } + else + { ourRange = range$$1; } + } + + if (!behavior.addNew) { + ourIndex = 0; + setSelection(doc, new Selection([ourRange], 0), sel_mouse); + startSel = doc.sel; + } else if (ourIndex == -1) { + ourIndex = ranges.length; + setSelection(doc, normalizeSelection(cm, ranges.concat([ourRange]), ourIndex), + {scroll: false, origin: "*mouse"}); + } else if (ranges.length > 1 && ranges[ourIndex].empty() && behavior.unit == "char" && !behavior.extend) { + setSelection(doc, normalizeSelection(cm, ranges.slice(0, ourIndex).concat(ranges.slice(ourIndex + 1)), 0), + {scroll: false, origin: "*mouse"}); + startSel = doc.sel; + } else { + replaceOneSelection(doc, ourIndex, ourRange, sel_mouse); + } + + var lastPos = start; + function extendTo(pos) { + if (cmp(lastPos, pos) == 0) { return } + lastPos = pos; + + if (behavior.unit == "rectangle") { + var ranges = [], tabSize = cm.options.tabSize; + var startCol = countColumn(getLine(doc, start.line).text, start.ch, tabSize); + var posCol = countColumn(getLine(doc, pos.line).text, pos.ch, tabSize); + var left = Math.min(startCol, posCol), right = Math.max(startCol, posCol); + for (var line = Math.min(start.line, pos.line), end = Math.min(cm.lastLine(), Math.max(start.line, pos.line)); + line <= end; line++) { + var text = getLine(doc, line).text, leftPos = findColumn(text, left, tabSize); + if (left == right) + { ranges.push(new Range(Pos(line, leftPos), Pos(line, leftPos))); } + else if (text.length > leftPos) + { ranges.push(new Range(Pos(line, leftPos), Pos(line, findColumn(text, right, tabSize)))); } + } + if (!ranges.length) { ranges.push(new Range(start, start)); } + setSelection(doc, normalizeSelection(cm, startSel.ranges.slice(0, ourIndex).concat(ranges), ourIndex), + {origin: "*mouse", scroll: false}); + cm.scrollIntoView(pos); + } else { + var oldRange = ourRange; + var range$$1 = rangeForUnit(cm, pos, behavior.unit); + var anchor = oldRange.anchor, head; + if (cmp(range$$1.anchor, anchor) > 0) { + head = range$$1.head; + anchor = minPos(oldRange.from(), range$$1.anchor); + } else { + head = range$$1.anchor; + anchor = maxPos(oldRange.to(), range$$1.head); + } + var ranges$1 = startSel.ranges.slice(0); + ranges$1[ourIndex] = bidiSimplify(cm, new Range(clipPos(doc, anchor), head)); + setSelection(doc, normalizeSelection(cm, ranges$1, ourIndex), sel_mouse); + } + } + + var editorSize = display.wrapper.getBoundingClientRect(); + // Used to ensure timeout re-tries don't fire when another extend + // happened in the meantime (clearTimeout isn't reliable -- at + // least on Chrome, the timeouts still happen even when cleared, + // if the clear happens after their scheduled firing time). 
+ var counter = 0; + + function extend(e) { + var curCount = ++counter; + var cur = posFromMouse(cm, e, true, behavior.unit == "rectangle"); + if (!cur) { return } + if (cmp(cur, lastPos) != 0) { + cm.curOp.focus = activeElt(); + extendTo(cur); + var visible = visibleLines(display, doc); + if (cur.line >= visible.to || cur.line < visible.from) + { setTimeout(operation(cm, function () {if (counter == curCount) { extend(e); }}), 150); } + } else { + var outside = e.clientY < editorSize.top ? -20 : e.clientY > editorSize.bottom ? 20 : 0; + if (outside) { setTimeout(operation(cm, function () { + if (counter != curCount) { return } + display.scroller.scrollTop += outside; + extend(e); + }), 50); } + } + } + + function done(e) { + cm.state.selectingText = false; + counter = Infinity; + // If e is null or undefined we interpret this as someone trying + // to explicitly cancel the selection rather than the user + // letting go of the mouse button. + if (e) { + e_preventDefault(e); + display.input.focus(); + } + off(display.wrapper.ownerDocument, "mousemove", move); + off(display.wrapper.ownerDocument, "mouseup", up); + doc.history.lastSelOrigin = null; + } + + var move = operation(cm, function (e) { + if (e.buttons === 0 || !e_button(e)) { done(e); } + else { extend(e); } + }); + var up = operation(cm, done); + cm.state.selectingText = up; + on(display.wrapper.ownerDocument, "mousemove", move); + on(display.wrapper.ownerDocument, "mouseup", up); + } + + // Used when mouse-selecting to adjust the anchor to the proper side + // of a bidi jump depending on the visual position of the head. + function bidiSimplify(cm, range$$1) { + var anchor = range$$1.anchor; + var head = range$$1.head; + var anchorLine = getLine(cm.doc, anchor.line); + if (cmp(anchor, head) == 0 && anchor.sticky == head.sticky) { return range$$1 } + var order = getOrder(anchorLine); + if (!order) { return range$$1 } + var index = getBidiPartAt(order, anchor.ch, anchor.sticky), part = order[index]; + if (part.from != anchor.ch && part.to != anchor.ch) { return range$$1 } + var boundary = index + ((part.from == anchor.ch) == (part.level != 1) ? 0 : 1); + if (boundary == 0 || boundary == order.length) { return range$$1 } + + // Compute the relative visual position of the head compared to the + // anchor (<0 is to the left, >0 to the right) + var leftSide; + if (head.line != anchor.line) { + leftSide = (head.line - anchor.line) * (cm.doc.direction == "ltr" ? 1 : -1) > 0; + } else { + var headIndex = getBidiPartAt(order, head.ch, head.sticky); + var dir = headIndex - index || (head.ch - anchor.ch) * (part.level == 1 ? -1 : 1); + if (headIndex == boundary - 1 || headIndex == boundary) + { leftSide = dir < 0; } + else + { leftSide = dir > 0; } + } + + var usePart = order[boundary + (leftSide ? -1 : 0)]; + var from = leftSide == (usePart.level == 1); + var ch = from ? usePart.from : usePart.to, sticky = from ? "after" : "before"; + return anchor.ch == ch && anchor.sticky == sticky ? range$$1 : new Range(new Pos(anchor.line, ch, sticky), head) + } + + + // Determines whether an event happened in the gutter, and fires the + // handlers for the corresponding event. 
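  // Illustrative sketch: gutterEvent() below signals handlers with
  // (cm, line, gutterClass, e), so client code subscribes like this. Assumes
  // sketchEditor is an instance created as in the earlier sketches.
  sketchEditor.on("gutterClick", function (cm, line, gutterClass, e) {
    console.log("clicked gutter", gutterClass, "on line", line);
  });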
+ function gutterEvent(cm, e, type, prevent) { + var mX, mY; + if (e.touches) { + mX = e.touches[0].clientX; + mY = e.touches[0].clientY; + } else { + try { mX = e.clientX; mY = e.clientY; } + catch(e) { return false } + } + if (mX >= Math.floor(cm.display.gutters.getBoundingClientRect().right)) { return false } + if (prevent) { e_preventDefault(e); } + + var display = cm.display; + var lineBox = display.lineDiv.getBoundingClientRect(); + + if (mY > lineBox.bottom || !hasHandler(cm, type)) { return e_defaultPrevented(e) } + mY -= lineBox.top - display.viewOffset; + + for (var i = 0; i < cm.display.gutterSpecs.length; ++i) { + var g = display.gutters.childNodes[i]; + if (g && g.getBoundingClientRect().right >= mX) { + var line = lineAtHeight(cm.doc, mY); + var gutter = cm.display.gutterSpecs[i]; + signal(cm, type, cm, line, gutter.className, e); + return e_defaultPrevented(e) + } + } + } + + function clickInGutter(cm, e) { + return gutterEvent(cm, e, "gutterClick", true) + } + + // CONTEXT MENU HANDLING + + // To make the context menu work, we need to briefly unhide the + // textarea (making it as unobtrusive as possible) to let the + // right-click take effect on it. + function onContextMenu(cm, e) { + if (eventInWidget(cm.display, e) || contextMenuInGutter(cm, e)) { return } + if (signalDOMEvent(cm, e, "contextmenu")) { return } + if (!captureRightClick) { cm.display.input.onContextMenu(e); } + } + + function contextMenuInGutter(cm, e) { + if (!hasHandler(cm, "gutterContextMenu")) { return false } + return gutterEvent(cm, e, "gutterContextMenu", false) + } + + function themeChanged(cm) { + cm.display.wrapper.className = cm.display.wrapper.className.replace(/\s*cm-s-\S+/g, "") + + cm.options.theme.replace(/(^|\s)\s*/g, " cm-s-"); + clearCaches(cm); + } + + var Init = {toString: function(){return "CodeMirror.Init"}}; + + var defaults = {}; + var optionHandlers = {}; + + function defineOptions(CodeMirror) { + var optionHandlers = CodeMirror.optionHandlers; + + function option(name, deflt, handle, notOnInit) { + CodeMirror.defaults[name] = deflt; + if (handle) { optionHandlers[name] = + notOnInit ? function (cm, val, old) {if (old != Init) { handle(cm, val, old); }} : handle; } + } + + CodeMirror.defineOption = option; + + // Passed to option handlers when there is no old value. + CodeMirror.Init = Init; + + // These two are, on init, called from the constructor because they + // have to be initialized before the editor can start at all. 
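  // Illustrative sketch of the extension point wired up just above
  // (CodeMirror.defineOption / CodeMirror.Init): addons declare options this way,
  // and the handler runs on init and on every later setOption() call. The option
  // name below is hypothetical.
  CodeMirror.defineOption("logOptionChanges", false, function (cm, val, old) {
    if (old == CodeMirror.Init) { return }  // first call happens during construction
    console.log("logOptionChanges changed from", old, "to", val);
  });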
+ option("value", "", function (cm, val) { return cm.setValue(val); }, true); + option("mode", null, function (cm, val) { + cm.doc.modeOption = val; + loadMode(cm); + }, true); + + option("indentUnit", 2, loadMode, true); + option("indentWithTabs", false); + option("smartIndent", true); + option("tabSize", 4, function (cm) { + resetModeState(cm); + clearCaches(cm); + regChange(cm); + }, true); + + option("lineSeparator", null, function (cm, val) { + cm.doc.lineSep = val; + if (!val) { return } + var newBreaks = [], lineNo = cm.doc.first; + cm.doc.iter(function (line) { + for (var pos = 0;;) { + var found = line.text.indexOf(val, pos); + if (found == -1) { break } + pos = found + val.length; + newBreaks.push(Pos(lineNo, found)); + } + lineNo++; + }); + for (var i = newBreaks.length - 1; i >= 0; i--) + { replaceRange(cm.doc, val, newBreaks[i], Pos(newBreaks[i].line, newBreaks[i].ch + val.length)); } + }); + option("specialChars", /[\u0000-\u001f\u007f-\u009f\u00ad\u061c\u200b-\u200f\u2028\u2029\ufeff\ufff9-\ufffc]/g, function (cm, val, old) { + cm.state.specialChars = new RegExp(val.source + (val.test("\t") ? "" : "|\t"), "g"); + if (old != Init) { cm.refresh(); } + }); + option("specialCharPlaceholder", defaultSpecialCharPlaceholder, function (cm) { return cm.refresh(); }, true); + option("electricChars", true); + option("inputStyle", mobile ? "contenteditable" : "textarea", function () { + throw new Error("inputStyle can not (yet) be changed in a running editor") // FIXME + }, true); + option("spellcheck", false, function (cm, val) { return cm.getInputField().spellcheck = val; }, true); + option("autocorrect", false, function (cm, val) { return cm.getInputField().autocorrect = val; }, true); + option("autocapitalize", false, function (cm, val) { return cm.getInputField().autocapitalize = val; }, true); + option("rtlMoveVisually", !windows); + option("wholeLineUpdateBefore", true); + + option("theme", "default", function (cm) { + themeChanged(cm); + updateGutters(cm); + }, true); + option("keyMap", "default", function (cm, val, old) { + var next = getKeyMap(val); + var prev = old != Init && getKeyMap(old); + if (prev && prev.detach) { prev.detach(cm, next); } + if (next.attach) { next.attach(cm, prev || null); } + }); + option("extraKeys", null); + option("configureMouse", null); + + option("lineWrapping", false, wrappingChanged, true); + option("gutters", [], function (cm, val) { + cm.display.gutterSpecs = getGutters(val, cm.options.lineNumbers); + updateGutters(cm); + }, true); + option("fixedGutter", true, function (cm, val) { + cm.display.gutters.style.left = val ? 
compensateForHScroll(cm.display) + "px" : "0"; + cm.refresh(); + }, true); + option("coverGutterNextToScrollbar", false, function (cm) { return updateScrollbars(cm); }, true); + option("scrollbarStyle", "native", function (cm) { + initScrollbars(cm); + updateScrollbars(cm); + cm.display.scrollbars.setScrollTop(cm.doc.scrollTop); + cm.display.scrollbars.setScrollLeft(cm.doc.scrollLeft); + }, true); + option("lineNumbers", false, function (cm, val) { + cm.display.gutterSpecs = getGutters(cm.options.gutters, val); + updateGutters(cm); + }, true); + option("firstLineNumber", 1, updateGutters, true); + option("lineNumberFormatter", function (integer) { return integer; }, updateGutters, true); + option("showCursorWhenSelecting", false, updateSelection, true); + + option("resetSelectionOnContextMenu", true); + option("lineWiseCopyCut", true); + option("pasteLinesPerSelection", true); + option("selectionsMayTouch", false); + + option("readOnly", false, function (cm, val) { + if (val == "nocursor") { + onBlur(cm); + cm.display.input.blur(); + } + cm.display.input.readOnlyChanged(val); + }); + option("disableInput", false, function (cm, val) {if (!val) { cm.display.input.reset(); }}, true); + option("dragDrop", true, dragDropChanged); + option("allowDropFileTypes", null); + + option("cursorBlinkRate", 530); + option("cursorScrollMargin", 0); + option("cursorHeight", 1, updateSelection, true); + option("singleCursorHeightPerLine", true, updateSelection, true); + option("workTime", 100); + option("workDelay", 100); + option("flattenSpans", true, resetModeState, true); + option("addModeClass", false, resetModeState, true); + option("pollInterval", 100); + option("undoDepth", 200, function (cm, val) { return cm.doc.history.undoDepth = val; }); + option("historyEventDelay", 1250); + option("viewportMargin", 10, function (cm) { return cm.refresh(); }, true); + option("maxHighlightLength", 10000, resetModeState, true); + option("moveInputWithCursor", true, function (cm, val) { + if (!val) { cm.display.input.resetPosition(); } + }); + + option("tabindex", null, function (cm, val) { return cm.display.input.getField().tabIndex = val || ""; }); + option("autofocus", null); + option("direction", "ltr", function (cm, val) { return cm.doc.setDirection(val); }, true); + option("phrases", null); + } + + function dragDropChanged(cm, value, old) { + var wasOn = old && old != Init; + if (!value != !wasOn) { + var funcs = cm.display.dragFunctions; + var toggle = value ? on : off; + toggle(cm.display.scroller, "dragstart", funcs.start); + toggle(cm.display.scroller, "dragenter", funcs.enter); + toggle(cm.display.scroller, "dragover", funcs.over); + toggle(cm.display.scroller, "dragleave", funcs.leave); + toggle(cm.display.scroller, "drop", funcs.drop); + } + } + + function wrappingChanged(cm) { + if (cm.options.lineWrapping) { + addClass(cm.display.wrapper, "CodeMirror-wrap"); + cm.display.sizer.style.minWidth = ""; + cm.display.sizerWidth = null; + } else { + rmClass(cm.display.wrapper, "CodeMirror-wrap"); + findMaxLine(cm); + } + estimateLineHeights(cm); + regChange(cm); + clearCaches(cm); + setTimeout(function () { return updateScrollbars(cm); }, 100); + } + + // A CodeMirror instance represents an editor. This is the object + // that user code is usually dealing with. + + function CodeMirror(place, options) { + var this$1 = this; + + if (!(this instanceof CodeMirror)) { return new CodeMirror(place, options) } + + this.options = options = options ? 
copyObj(options) : {}; + // Determine effective options based on given values and defaults. + copyObj(defaults, options, false); + + var doc = options.value; + if (typeof doc == "string") { doc = new Doc(doc, options.mode, null, options.lineSeparator, options.direction); } + else if (options.mode) { doc.modeOption = options.mode; } + this.doc = doc; + + var input = new CodeMirror.inputStyles[options.inputStyle](this); + var display = this.display = new Display(place, doc, input, options); + display.wrapper.CodeMirror = this; + themeChanged(this); + if (options.lineWrapping) + { this.display.wrapper.className += " CodeMirror-wrap"; } + initScrollbars(this); + + this.state = { + keyMaps: [], // stores maps added by addKeyMap + overlays: [], // highlighting overlays, as added by addOverlay + modeGen: 0, // bumped when mode/overlay changes, used to invalidate highlighting info + overwrite: false, + delayingBlurEvent: false, + focused: false, + suppressEdits: false, // used to disable editing during key handlers when in readOnly mode + pasteIncoming: -1, cutIncoming: -1, // help recognize paste/cut edits in input.poll + selectingText: false, + draggingText: false, + highlight: new Delayed(), // stores highlight worker timeout + keySeq: null, // Unfinished key sequence + specialChars: null + }; + + if (options.autofocus && !mobile) { display.input.focus(); } + + // Override magic textarea content restore that IE sometimes does + // on our hidden textarea on reload + if (ie && ie_version < 11) { setTimeout(function () { return this$1.display.input.reset(true); }, 20); } + + registerEventHandlers(this); + ensureGlobalHandlers(); + + startOperation(this); + this.curOp.forceUpdate = true; + attachDoc(this, doc); + + if ((options.autofocus && !mobile) || this.hasFocus()) + { setTimeout(bind(onFocus, this), 20); } + else + { onBlur(this); } + + for (var opt in optionHandlers) { if (optionHandlers.hasOwnProperty(opt)) + { optionHandlers[opt](this$1, options[opt], Init); } } + maybeUpdateLineNumberWidth(this); + if (options.finishInit) { options.finishInit(this); } + for (var i = 0; i < initHooks.length; ++i) { initHooks[i](this$1); } + endOperation(this); + // Suppress optimizelegibility in Webkit, since it breaks text + // measuring on line wrapping boundaries. + if (webkit && options.lineWrapping && + getComputedStyle(display.lineDiv).textRendering == "optimizelegibility") + { display.lineDiv.style.textRendering = "auto"; } + } + + // The default configuration options. + CodeMirror.defaults = defaults; + // Functions to run when options are changed. + CodeMirror.optionHandlers = optionHandlers; + + // Attach the necessary event handlers when initializing the editor + function registerEventHandlers(cm) { + var d = cm.display; + on(d.scroller, "mousedown", operation(cm, onMouseDown)); + // Older IE's will not fire a second mousedown for a double click + if (ie && ie_version < 11) + { on(d.scroller, "dblclick", operation(cm, function (e) { + if (signalDOMEvent(cm, e)) { return } + var pos = posFromMouse(cm, e); + if (!pos || clickInGutter(cm, e) || eventInWidget(cm.display, e)) { return } + e_preventDefault(e); + var word = cm.findWordAt(pos); + extendSelection(cm.doc, word.anchor, word.head); + })); } + else + { on(d.scroller, "dblclick", function (e) { return signalDOMEvent(cm, e) || e_preventDefault(e); }); } + // Some browsers fire contextmenu *after* opening the menu, at + // which point we can't mess with it anymore. Context menu is + // handled in onMouseDown for these browsers. 
+ on(d.scroller, "contextmenu", function (e) { return onContextMenu(cm, e); }); + + // Used to suppress mouse event handling when a touch happens + var touchFinished, prevTouch = {end: 0}; + function finishTouch() { + if (d.activeTouch) { + touchFinished = setTimeout(function () { return d.activeTouch = null; }, 1000); + prevTouch = d.activeTouch; + prevTouch.end = +new Date; + } + } + function isMouseLikeTouchEvent(e) { + if (e.touches.length != 1) { return false } + var touch = e.touches[0]; + return touch.radiusX <= 1 && touch.radiusY <= 1 + } + function farAway(touch, other) { + if (other.left == null) { return true } + var dx = other.left - touch.left, dy = other.top - touch.top; + return dx * dx + dy * dy > 20 * 20 + } + on(d.scroller, "touchstart", function (e) { + if (!signalDOMEvent(cm, e) && !isMouseLikeTouchEvent(e) && !clickInGutter(cm, e)) { + d.input.ensurePolled(); + clearTimeout(touchFinished); + var now = +new Date; + d.activeTouch = {start: now, moved: false, + prev: now - prevTouch.end <= 300 ? prevTouch : null}; + if (e.touches.length == 1) { + d.activeTouch.left = e.touches[0].pageX; + d.activeTouch.top = e.touches[0].pageY; + } + } + }); + on(d.scroller, "touchmove", function () { + if (d.activeTouch) { d.activeTouch.moved = true; } + }); + on(d.scroller, "touchend", function (e) { + var touch = d.activeTouch; + if (touch && !eventInWidget(d, e) && touch.left != null && + !touch.moved && new Date - touch.start < 300) { + var pos = cm.coordsChar(d.activeTouch, "page"), range; + if (!touch.prev || farAway(touch, touch.prev)) // Single tap + { range = new Range(pos, pos); } + else if (!touch.prev.prev || farAway(touch, touch.prev.prev)) // Double tap + { range = cm.findWordAt(pos); } + else // Triple tap + { range = new Range(Pos(pos.line, 0), clipPos(cm.doc, Pos(pos.line + 1, 0))); } + cm.setSelection(range.anchor, range.head); + cm.focus(); + e_preventDefault(e); + } + finishTouch(); + }); + on(d.scroller, "touchcancel", finishTouch); + + // Sync scrolling between fake scrollbars and real scrollable + // area, ensure viewport is updated when scrolling. + on(d.scroller, "scroll", function () { + if (d.scroller.clientHeight) { + updateScrollTop(cm, d.scroller.scrollTop); + setScrollLeft(cm, d.scroller.scrollLeft, true); + signal(cm, "scroll", cm); + } + }); + + // Listen to wheel events in order to try and update the viewport on time. + on(d.scroller, "mousewheel", function (e) { return onScrollWheel(cm, e); }); + on(d.scroller, "DOMMouseScroll", function (e) { return onScrollWheel(cm, e); }); + + // Prevent wrapper from ever scrolling + on(d.wrapper, "scroll", function () { return d.wrapper.scrollTop = d.wrapper.scrollLeft = 0; }); + + d.dragFunctions = { + enter: function (e) {if (!signalDOMEvent(cm, e)) { e_stop(e); }}, + over: function (e) {if (!signalDOMEvent(cm, e)) { onDragOver(cm, e); e_stop(e); }}, + start: function (e) { return onDragStart(cm, e); }, + drop: operation(cm, onDrop), + leave: function (e) {if (!signalDOMEvent(cm, e)) { clearDragCursor(cm); }} + }; + + var inp = d.input.getField(); + on(inp, "keyup", function (e) { return onKeyUp.call(cm, e); }); + on(inp, "keydown", operation(cm, onKeyDown)); + on(inp, "keypress", operation(cm, onKeyPress)); + on(inp, "focus", function (e) { return onFocus(cm, e); }); + on(inp, "blur", function (e) { return onBlur(cm, e); }); + } + + var initHooks = []; + CodeMirror.defineInitHook = function (f) { return initHooks.push(f); }; + + // Indent the given line. 
The how parameter can be "smart", + // "add"/null, "subtract", or "prev". When aggressive is false + // (typically set to true for forced single-line indents), empty + // lines are not indented, and places where the mode returns Pass + // are left alone. + function indentLine(cm, n, how, aggressive) { + var doc = cm.doc, state; + if (how == null) { how = "add"; } + if (how == "smart") { + // Fall back to "prev" when the mode doesn't have an indentation + // method. + if (!doc.mode.indent) { how = "prev"; } + else { state = getContextBefore(cm, n).state; } + } + + var tabSize = cm.options.tabSize; + var line = getLine(doc, n), curSpace = countColumn(line.text, null, tabSize); + if (line.stateAfter) { line.stateAfter = null; } + var curSpaceString = line.text.match(/^\s*/)[0], indentation; + if (!aggressive && !/\S/.test(line.text)) { + indentation = 0; + how = "not"; + } else if (how == "smart") { + indentation = doc.mode.indent(state, line.text.slice(curSpaceString.length), line.text); + if (indentation == Pass || indentation > 150) { + if (!aggressive) { return } + how = "prev"; + } + } + if (how == "prev") { + if (n > doc.first) { indentation = countColumn(getLine(doc, n-1).text, null, tabSize); } + else { indentation = 0; } + } else if (how == "add") { + indentation = curSpace + cm.options.indentUnit; + } else if (how == "subtract") { + indentation = curSpace - cm.options.indentUnit; + } else if (typeof how == "number") { + indentation = curSpace + how; + } + indentation = Math.max(0, indentation); + + var indentString = "", pos = 0; + if (cm.options.indentWithTabs) + { for (var i = Math.floor(indentation / tabSize); i; --i) {pos += tabSize; indentString += "\t";} } + if (pos < indentation) { indentString += spaceStr(indentation - pos); } + + if (indentString != curSpaceString) { + replaceRange(doc, indentString, Pos(n, 0), Pos(n, curSpaceString.length), "+input"); + line.stateAfter = null; + return true + } else { + // Ensure that, if the cursor was in the whitespace at the start + // of the line, it is moved to the end of that space. + for (var i$1 = 0; i$1 < doc.sel.ranges.length; i$1++) { + var range = doc.sel.ranges[i$1]; + if (range.head.line == n && range.head.ch < curSpaceString.length) { + var pos$1 = Pos(n, curSpaceString.length); + replaceOneSelection(doc, i$1, new Range(pos$1, pos$1)); + break + } + } + } + } + + // This will be set to a {lineWise: bool, text: [string]} object, so + // that, when pasting, we know what kind of selections the copied + // text was made out of. 
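  // Illustrative sketch of the "how" values handled by indentLine() above, driven
  // through the public wrappers (indentLine / indentSelection) defined further down;
  // sketchEditor is the hypothetical instance from the earlier sketches.
  sketchEditor.indentLine(0, "smart");       // ask the active mode for the indentation
  sketchEditor.indentLine(1, "add");         // indent by one indentUnit
  sketchEditor.indentSelection("subtract");  // dedent every line touched by a selection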
+ var lastCopied = null; + + function setLastCopied(newLastCopied) { + lastCopied = newLastCopied; + } + + function applyTextInput(cm, inserted, deleted, sel, origin) { + var doc = cm.doc; + cm.display.shift = false; + if (!sel) { sel = doc.sel; } + + var recent = +new Date - 200; + var paste = origin == "paste" || cm.state.pasteIncoming > recent; + var textLines = splitLinesAuto(inserted), multiPaste = null; + // When pasting N lines into N selections, insert one line per selection + if (paste && sel.ranges.length > 1) { + if (lastCopied && lastCopied.text.join("\n") == inserted) { + if (sel.ranges.length % lastCopied.text.length == 0) { + multiPaste = []; + for (var i = 0; i < lastCopied.text.length; i++) + { multiPaste.push(doc.splitLines(lastCopied.text[i])); } + } + } else if (textLines.length == sel.ranges.length && cm.options.pasteLinesPerSelection) { + multiPaste = map(textLines, function (l) { return [l]; }); + } + } + + var updateInput = cm.curOp.updateInput; + // Normal behavior is to insert the new text into every selection + for (var i$1 = sel.ranges.length - 1; i$1 >= 0; i$1--) { + var range$$1 = sel.ranges[i$1]; + var from = range$$1.from(), to = range$$1.to(); + if (range$$1.empty()) { + if (deleted && deleted > 0) // Handle deletion + { from = Pos(from.line, from.ch - deleted); } + else if (cm.state.overwrite && !paste) // Handle overwrite + { to = Pos(to.line, Math.min(getLine(doc, to.line).text.length, to.ch + lst(textLines).length)); } + else if (paste && lastCopied && lastCopied.lineWise && lastCopied.text.join("\n") == inserted) + { from = to = Pos(from.line, 0); } + } + var changeEvent = {from: from, to: to, text: multiPaste ? multiPaste[i$1 % multiPaste.length] : textLines, + origin: origin || (paste ? "paste" : cm.state.cutIncoming > recent ? 
"cut" : "+input")}; + makeChange(cm.doc, changeEvent); + signalLater(cm, "inputRead", cm, changeEvent); + } + if (inserted && !paste) + { triggerElectric(cm, inserted); } + + ensureCursorVisible(cm); + if (cm.curOp.updateInput < 2) { cm.curOp.updateInput = updateInput; } + cm.curOp.typing = true; + cm.state.pasteIncoming = cm.state.cutIncoming = -1; + } + + function handlePaste(e, cm) { + var pasted = e.clipboardData && e.clipboardData.getData("Text"); + if (pasted) { + e.preventDefault(); + if (!cm.isReadOnly() && !cm.options.disableInput) + { runInOp(cm, function () { return applyTextInput(cm, pasted, 0, null, "paste"); }); } + return true + } + } + + function triggerElectric(cm, inserted) { + // When an 'electric' character is inserted, immediately trigger a reindent + if (!cm.options.electricChars || !cm.options.smartIndent) { return } + var sel = cm.doc.sel; + + for (var i = sel.ranges.length - 1; i >= 0; i--) { + var range$$1 = sel.ranges[i]; + if (range$$1.head.ch > 100 || (i && sel.ranges[i - 1].head.line == range$$1.head.line)) { continue } + var mode = cm.getModeAt(range$$1.head); + var indented = false; + if (mode.electricChars) { + for (var j = 0; j < mode.electricChars.length; j++) + { if (inserted.indexOf(mode.electricChars.charAt(j)) > -1) { + indented = indentLine(cm, range$$1.head.line, "smart"); + break + } } + } else if (mode.electricInput) { + if (mode.electricInput.test(getLine(cm.doc, range$$1.head.line).text.slice(0, range$$1.head.ch))) + { indented = indentLine(cm, range$$1.head.line, "smart"); } + } + if (indented) { signalLater(cm, "electricInput", cm, range$$1.head.line); } + } + } + + function copyableRanges(cm) { + var text = [], ranges = []; + for (var i = 0; i < cm.doc.sel.ranges.length; i++) { + var line = cm.doc.sel.ranges[i].head.line; + var lineRange = {anchor: Pos(line, 0), head: Pos(line + 1, 0)}; + ranges.push(lineRange); + text.push(cm.getRange(lineRange.anchor, lineRange.head)); + } + return {text: text, ranges: ranges} + } + + function disableBrowserMagic(field, spellcheck, autocorrect, autocapitalize) { + field.setAttribute("autocorrect", autocorrect ? "" : "off"); + field.setAttribute("autocapitalize", autocapitalize ? "" : "off"); + field.setAttribute("spellcheck", !!spellcheck); + } + + function hiddenTextarea() { + var te = elt("textarea", null, null, "position: absolute; bottom: -1em; padding: 0; width: 1px; height: 1em; outline: none"); + var div = elt("div", [te], null, "overflow: hidden; position: relative; width: 3px; height: 0px;"); + // The textarea is kept positioned near the cursor to prevent the + // fact that it'll be scrolled into view on input from scrolling + // our fake cursor out of view. On webkit, when wrap=off, paste is + // very slow. So make the area wide instead. + if (webkit) { te.style.width = "1000px"; } + else { te.setAttribute("wrap", "off"); } + // If border: 0; -- iOS fails to open keyboard (issue #1287) + if (ios) { te.style.border = "1px solid black"; } + disableBrowserMagic(te); + return div + } + + // The publicly visible API. Note that methodOp(f) means + // 'wrap f in an operation, performed on its `this` parameter'. + + // This is not the complete set of editor methods. Most of the + // methods defined on the Doc type are also injected into + // CodeMirror.prototype, for backwards compatibility and + // convenience. 
+ + function addEditorMethods(CodeMirror) { + var optionHandlers = CodeMirror.optionHandlers; + + var helpers = CodeMirror.helpers = {}; + + CodeMirror.prototype = { + constructor: CodeMirror, + focus: function(){window.focus(); this.display.input.focus();}, + + setOption: function(option, value) { + var options = this.options, old = options[option]; + if (options[option] == value && option != "mode") { return } + options[option] = value; + if (optionHandlers.hasOwnProperty(option)) + { operation(this, optionHandlers[option])(this, value, old); } + signal(this, "optionChange", this, option); + }, + + getOption: function(option) {return this.options[option]}, + getDoc: function() {return this.doc}, + + addKeyMap: function(map$$1, bottom) { + this.state.keyMaps[bottom ? "push" : "unshift"](getKeyMap(map$$1)); + }, + removeKeyMap: function(map$$1) { + var maps = this.state.keyMaps; + for (var i = 0; i < maps.length; ++i) + { if (maps[i] == map$$1 || maps[i].name == map$$1) { + maps.splice(i, 1); + return true + } } + }, + + addOverlay: methodOp(function(spec, options) { + var mode = spec.token ? spec : CodeMirror.getMode(this.options, spec); + if (mode.startState) { throw new Error("Overlays may not be stateful.") } + insertSorted(this.state.overlays, + {mode: mode, modeSpec: spec, opaque: options && options.opaque, + priority: (options && options.priority) || 0}, + function (overlay) { return overlay.priority; }); + this.state.modeGen++; + regChange(this); + }), + removeOverlay: methodOp(function(spec) { + var this$1 = this; + + var overlays = this.state.overlays; + for (var i = 0; i < overlays.length; ++i) { + var cur = overlays[i].modeSpec; + if (cur == spec || typeof spec == "string" && cur.name == spec) { + overlays.splice(i, 1); + this$1.state.modeGen++; + regChange(this$1); + return + } + } + }), + + indentLine: methodOp(function(n, dir, aggressive) { + if (typeof dir != "string" && typeof dir != "number") { + if (dir == null) { dir = this.options.smartIndent ? "smart" : "prev"; } + else { dir = dir ? "add" : "subtract"; } + } + if (isLine(this.doc, n)) { indentLine(this, n, dir, aggressive); } + }), + indentSelection: methodOp(function(how) { + var this$1 = this; + + var ranges = this.doc.sel.ranges, end = -1; + for (var i = 0; i < ranges.length; i++) { + var range$$1 = ranges[i]; + if (!range$$1.empty()) { + var from = range$$1.from(), to = range$$1.to(); + var start = Math.max(end, from.line); + end = Math.min(this$1.lastLine(), to.line - (to.ch ? 0 : 1)) + 1; + for (var j = start; j < end; ++j) + { indentLine(this$1, j, how); } + var newRanges = this$1.doc.sel.ranges; + if (from.ch == 0 && ranges.length == newRanges.length && newRanges[i].from().ch > 0) + { replaceOneSelection(this$1.doc, i, new Range(from, newRanges[i].to()), sel_dontScroll); } + } else if (range$$1.head.line > end) { + indentLine(this$1, range$$1.head.line, how, true); + end = range$$1.head.line; + if (i == this$1.doc.sel.primIndex) { ensureCursorVisible(this$1); } + } + } + }), + + // Fetch the parser token for a given character. Useful for hacks + // that want to inspect the mode state (say, for completion). 
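// --- Editorial aside: illustrative only, not part of the upstream CodeMirror source or of this patch ---
// A small sketch of the token-inspection call documented above, assuming `cm` is
// an existing editor instance whose first line reads "var x = 1;":
//
//   var tok = cm.getTokenAt(CodeMirror.Pos(0, 2));
//   // tok has the shape {start, end, string, type, state}; here tok.string
//   // would be "var" and tok.type whatever style the active mode assigns
//   // (e.g. "keyword" for the javascript mode).
// --- end editorial aside ---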
+ getTokenAt: function(pos, precise) { + return takeToken(this, pos, precise) + }, + + getLineTokens: function(line, precise) { + return takeToken(this, Pos(line), precise, true) + }, + + getTokenTypeAt: function(pos) { + pos = clipPos(this.doc, pos); + var styles = getLineStyles(this, getLine(this.doc, pos.line)); + var before = 0, after = (styles.length - 1) / 2, ch = pos.ch; + var type; + if (ch == 0) { type = styles[2]; } + else { for (;;) { + var mid = (before + after) >> 1; + if ((mid ? styles[mid * 2 - 1] : 0) >= ch) { after = mid; } + else if (styles[mid * 2 + 1] < ch) { before = mid + 1; } + else { type = styles[mid * 2 + 2]; break } + } } + var cut = type ? type.indexOf("overlay ") : -1; + return cut < 0 ? type : cut == 0 ? null : type.slice(0, cut - 1) + }, + + getModeAt: function(pos) { + var mode = this.doc.mode; + if (!mode.innerMode) { return mode } + return CodeMirror.innerMode(mode, this.getTokenAt(pos).state).mode + }, + + getHelper: function(pos, type) { + return this.getHelpers(pos, type)[0] + }, + + getHelpers: function(pos, type) { + var this$1 = this; + + var found = []; + if (!helpers.hasOwnProperty(type)) { return found } + var help = helpers[type], mode = this.getModeAt(pos); + if (typeof mode[type] == "string") { + if (help[mode[type]]) { found.push(help[mode[type]]); } + } else if (mode[type]) { + for (var i = 0; i < mode[type].length; i++) { + var val = help[mode[type][i]]; + if (val) { found.push(val); } + } + } else if (mode.helperType && help[mode.helperType]) { + found.push(help[mode.helperType]); + } else if (help[mode.name]) { + found.push(help[mode.name]); + } + for (var i$1 = 0; i$1 < help._global.length; i$1++) { + var cur = help._global[i$1]; + if (cur.pred(mode, this$1) && indexOf(found, cur.val) == -1) + { found.push(cur.val); } + } + return found + }, + + getStateAfter: function(line, precise) { + var doc = this.doc; + line = clipLine(doc, line == null ? doc.first + doc.size - 1: line); + return getContextBefore(this, line + 1, precise).state + }, + + cursorCoords: function(start, mode) { + var pos, range$$1 = this.doc.sel.primary(); + if (start == null) { pos = range$$1.head; } + else if (typeof start == "object") { pos = clipPos(this.doc, start); } + else { pos = start ? range$$1.from() : range$$1.to(); } + return cursorCoords(this, pos, mode || "page") + }, + + charCoords: function(pos, mode) { + return charCoords(this, clipPos(this.doc, pos), mode || "page") + }, + + coordsChar: function(coords, mode) { + coords = fromCoordSystem(this, coords, mode || "page"); + return coordsChar(this, coords.left, coords.top) + }, + + lineAtHeight: function(height, mode) { + height = fromCoordSystem(this, {top: height, left: 0}, mode || "page").top; + return lineAtHeight(this.doc, height + this.display.viewOffset) + }, + heightAtLine: function(line, mode, includeWidgets) { + var end = false, lineObj; + if (typeof line == "number") { + var last = this.doc.first + this.doc.size - 1; + if (line < this.doc.first) { line = this.doc.first; } + else if (line > last) { line = last; end = true; } + lineObj = getLine(this.doc, line); + } else { + lineObj = line; + } + return intoCoordSystem(this, lineObj, {top: 0, left: 0}, mode || "page", includeWidgets || end).top + + (end ? 
this.doc.height - heightAtLine(lineObj) : 0) + }, + + defaultTextHeight: function() { return textHeight(this.display) }, + defaultCharWidth: function() { return charWidth(this.display) }, + + getViewport: function() { return {from: this.display.viewFrom, to: this.display.viewTo}}, + + addWidget: function(pos, node, scroll, vert, horiz) { + var display = this.display; + pos = cursorCoords(this, clipPos(this.doc, pos)); + var top = pos.bottom, left = pos.left; + node.style.position = "absolute"; + node.setAttribute("cm-ignore-events", "true"); + this.display.input.setUneditable(node); + display.sizer.appendChild(node); + if (vert == "over") { + top = pos.top; + } else if (vert == "above" || vert == "near") { + var vspace = Math.max(display.wrapper.clientHeight, this.doc.height), + hspace = Math.max(display.sizer.clientWidth, display.lineSpace.clientWidth); + // Default to positioning above (if specified and possible); otherwise default to positioning below + if ((vert == 'above' || pos.bottom + node.offsetHeight > vspace) && pos.top > node.offsetHeight) + { top = pos.top - node.offsetHeight; } + else if (pos.bottom + node.offsetHeight <= vspace) + { top = pos.bottom; } + if (left + node.offsetWidth > hspace) + { left = hspace - node.offsetWidth; } + } + node.style.top = top + "px"; + node.style.left = node.style.right = ""; + if (horiz == "right") { + left = display.sizer.clientWidth - node.offsetWidth; + node.style.right = "0px"; + } else { + if (horiz == "left") { left = 0; } + else if (horiz == "middle") { left = (display.sizer.clientWidth - node.offsetWidth) / 2; } + node.style.left = left + "px"; + } + if (scroll) + { scrollIntoView(this, {left: left, top: top, right: left + node.offsetWidth, bottom: top + node.offsetHeight}); } + }, + + triggerOnKeyDown: methodOp(onKeyDown), + triggerOnKeyPress: methodOp(onKeyPress), + triggerOnKeyUp: onKeyUp, + triggerOnMouseDown: methodOp(onMouseDown), + + execCommand: function(cmd) { + if (commands.hasOwnProperty(cmd)) + { return commands[cmd].call(null, this) } + }, + + triggerElectric: methodOp(function(text) { triggerElectric(this, text); }), + + findPosH: function(from, amount, unit, visually) { + var this$1 = this; + + var dir = 1; + if (amount < 0) { dir = -1; amount = -amount; } + var cur = clipPos(this.doc, from); + for (var i = 0; i < amount; ++i) { + cur = findPosH(this$1.doc, cur, dir, unit, visually); + if (cur.hitSide) { break } + } + return cur + }, + + moveH: methodOp(function(dir, unit) { + var this$1 = this; + + this.extendSelectionsBy(function (range$$1) { + if (this$1.display.shift || this$1.doc.extend || range$$1.empty()) + { return findPosH(this$1.doc, range$$1.head, dir, unit, this$1.options.rtlMoveVisually) } + else + { return dir < 0 ? range$$1.from() : range$$1.to() } + }, sel_move); + }), + + deleteH: methodOp(function(dir, unit) { + var sel = this.doc.sel, doc = this.doc; + if (sel.somethingSelected()) + { doc.replaceSelection("", null, "+delete"); } + else + { deleteNearSelection(this, function (range$$1) { + var other = findPosH(doc, range$$1.head, dir, unit, false); + return dir < 0 ? 
{from: other, to: range$$1.head} : {from: range$$1.head, to: other} + }); } + }), + + findPosV: function(from, amount, unit, goalColumn) { + var this$1 = this; + + var dir = 1, x = goalColumn; + if (amount < 0) { dir = -1; amount = -amount; } + var cur = clipPos(this.doc, from); + for (var i = 0; i < amount; ++i) { + var coords = cursorCoords(this$1, cur, "div"); + if (x == null) { x = coords.left; } + else { coords.left = x; } + cur = findPosV(this$1, coords, dir, unit); + if (cur.hitSide) { break } + } + return cur + }, + + moveV: methodOp(function(dir, unit) { + var this$1 = this; + + var doc = this.doc, goals = []; + var collapse = !this.display.shift && !doc.extend && doc.sel.somethingSelected(); + doc.extendSelectionsBy(function (range$$1) { + if (collapse) + { return dir < 0 ? range$$1.from() : range$$1.to() } + var headPos = cursorCoords(this$1, range$$1.head, "div"); + if (range$$1.goalColumn != null) { headPos.left = range$$1.goalColumn; } + goals.push(headPos.left); + var pos = findPosV(this$1, headPos, dir, unit); + if (unit == "page" && range$$1 == doc.sel.primary()) + { addToScrollTop(this$1, charCoords(this$1, pos, "div").top - headPos.top); } + return pos + }, sel_move); + if (goals.length) { for (var i = 0; i < doc.sel.ranges.length; i++) + { doc.sel.ranges[i].goalColumn = goals[i]; } } + }), + + // Find the word at the given position (as returned by coordsChar). + findWordAt: function(pos) { + var doc = this.doc, line = getLine(doc, pos.line).text; + var start = pos.ch, end = pos.ch; + if (line) { + var helper = this.getHelper(pos, "wordChars"); + if ((pos.sticky == "before" || end == line.length) && start) { --start; } else { ++end; } + var startChar = line.charAt(start); + var check = isWordChar(startChar, helper) + ? function (ch) { return isWordChar(ch, helper); } + : /\s/.test(startChar) ? function (ch) { return /\s/.test(ch); } + : function (ch) { return (!/\s/.test(ch) && !isWordChar(ch)); }; + while (start > 0 && check(line.charAt(start - 1))) { --start; } + while (end < line.length && check(line.charAt(end))) { ++end; } + } + return new Range(Pos(pos.line, start), Pos(pos.line, end)) + }, + + toggleOverwrite: function(value) { + if (value != null && value == this.state.overwrite) { return } + if (this.state.overwrite = !this.state.overwrite) + { addClass(this.display.cursorDiv, "CodeMirror-overwrite"); } + else + { rmClass(this.display.cursorDiv, "CodeMirror-overwrite"); } + + signal(this, "overwriteToggle", this, this.state.overwrite); + }, + hasFocus: function() { return this.display.input.getField() == activeElt() }, + isReadOnly: function() { return !!(this.options.readOnly || this.doc.cantEdit) }, + + scrollTo: methodOp(function (x, y) { scrollToCoords(this, x, y); }), + getScrollInfo: function() { + var scroller = this.display.scroller; + return {left: scroller.scrollLeft, top: scroller.scrollTop, + height: scroller.scrollHeight - scrollGap(this) - this.display.barHeight, + width: scroller.scrollWidth - scrollGap(this) - this.display.barWidth, + clientHeight: displayHeight(this), clientWidth: displayWidth(this)} + }, + + scrollIntoView: methodOp(function(range$$1, margin) { + if (range$$1 == null) { + range$$1 = {from: this.doc.sel.primary().head, to: null}; + if (margin == null) { margin = this.options.cursorScrollMargin; } + } else if (typeof range$$1 == "number") { + range$$1 = {from: Pos(range$$1, 0), to: null}; + } else if (range$$1.from == null) { + range$$1 = {from: range$$1, to: null}; } - } - - // W7. 
Search backwards from each instance of a European number - // until the first strong type (R, L, or sor) is found. If an L is - // found, then change the type of the European number to L. - for (var i = 0, cur = outerType; i < len; ++i) { - var type = types[i]; - if (cur == "L" && type == "1") types[i] = "L"; - else if (isStrong.test(type)) cur = type; - } - - // N1. A sequence of neutrals takes the direction of the - // surrounding strong text if the text on both sides has the same - // direction. European and Arabic numbers act as if they were R in - // terms of their influence on neutrals. Start-of-level-run (sor) - // and end-of-level-run (eor) are used at level run boundaries. - // N2. Any remaining neutrals take the embedding direction. - for (var i = 0; i < len; ++i) { - if (isNeutral.test(types[i])) { - for (var end = i + 1; end < len && isNeutral.test(types[end]); ++end) {} - var before = (i ? types[i-1] : outerType) == "L"; - var after = (end < len ? types[end] : outerType) == "L"; - var replace = before || after ? "L" : "R"; - for (var j = i; j < end; ++j) types[j] = replace; - i = end - 1; + if (!range$$1.to) { range$$1.to = range$$1.from; } + range$$1.margin = margin || 0; + + if (range$$1.from.line != null) { + scrollToRange(this, range$$1); + } else { + scrollToCoordsRange(this, range$$1.from, range$$1.to, range$$1.margin); + } + }), + + setSize: methodOp(function(width, height) { + var this$1 = this; + + var interpret = function (val) { return typeof val == "number" || /^\d+$/.test(String(val)) ? val + "px" : val; }; + if (width != null) { this.display.wrapper.style.width = interpret(width); } + if (height != null) { this.display.wrapper.style.height = interpret(height); } + if (this.options.lineWrapping) { clearLineMeasurementCache(this); } + var lineNo$$1 = this.display.viewFrom; + this.doc.iter(lineNo$$1, this.display.viewTo, function (line) { + if (line.widgets) { for (var i = 0; i < line.widgets.length; i++) + { if (line.widgets[i].noHScroll) { regLineChange(this$1, lineNo$$1, "widget"); break } } } + ++lineNo$$1; + }); + this.curOp.forceUpdate = true; + signal(this, "refresh", this); + }), + + operation: function(f){return runInOp(this, f)}, + startOperation: function(){return startOperation(this)}, + endOperation: function(){return endOperation(this)}, + + refresh: methodOp(function() { + var oldHeight = this.display.cachedTextHeight; + regChange(this); + this.curOp.forceUpdate = true; + clearCaches(this); + scrollToCoords(this, this.doc.scrollLeft, this.doc.scrollTop); + updateGutterSpace(this.display); + if (oldHeight == null || Math.abs(oldHeight - textHeight(this.display)) > .5) + { estimateLineHeights(this); } + signal(this, "refresh", this); + }), + + swapDoc: methodOp(function(doc) { + var old = this.doc; + old.cm = null; + // Cancel the current text selection if any (#5821) + if (this.state.selectingText) { this.state.selectingText(); } + attachDoc(this, doc); + clearCaches(this); + this.display.input.reset(); + scrollToCoords(this, doc.scrollLeft, doc.scrollTop); + this.curOp.forceScroll = true; + signalLater(this, "swapDoc", this, old); + return old + }), + + phrase: function(phraseText) { + var phrases = this.options.phrases; + return phrases && Object.prototype.hasOwnProperty.call(phrases, phraseText) ? 
phrases[phraseText] : phraseText + }, + + getInputField: function(){return this.display.input.getField()}, + getWrapperElement: function(){return this.display.wrapper}, + getScrollerElement: function(){return this.display.scroller}, + getGutterElement: function(){return this.display.gutters} + }; + eventMixin(CodeMirror); + + CodeMirror.registerHelper = function(type, name, value) { + if (!helpers.hasOwnProperty(type)) { helpers[type] = CodeMirror[type] = {_global: []}; } + helpers[type][name] = value; + }; + CodeMirror.registerGlobalHelper = function(type, name, predicate, value) { + CodeMirror.registerHelper(type, name, value); + helpers[type]._global.push({pred: predicate, val: value}); + }; + } + + // Used for horizontal relative motion. Dir is -1 or 1 (left or + // right), unit can be "char", "column" (like char, but doesn't + // cross line boundaries), "word" (across next word), or "group" (to + // the start of next group of word or non-word-non-whitespace + // chars). The visually param controls whether, in right-to-left + // text, direction 1 means to move towards the next index in the + // string, or towards the character to the right of the current + // position. The resulting position will have a hitSide=true + // property if it reached the end of the document. + function findPosH(doc, pos, dir, unit, visually) { + var oldPos = pos; + var origDir = dir; + var lineObj = getLine(doc, pos.line); + function findNextLine() { + var l = pos.line + dir; + if (l < doc.first || l >= doc.first + doc.size) { return false } + pos = new Pos(l, pos.ch, pos.sticky); + return lineObj = getLine(doc, l) + } + function moveOnce(boundToLine) { + var next; + if (visually) { + next = moveVisually(doc.cm, lineObj, pos, dir); + } else { + next = moveLogically(lineObj, pos, dir); + } + if (next == null) { + if (!boundToLine && findNextLine()) + { pos = endOfLine(visually, doc.cm, lineObj, pos.line, dir); } + else + { return false } + } else { + pos = next; + } + return true + } + + if (unit == "char") { + moveOnce(); + } else if (unit == "column") { + moveOnce(true); + } else if (unit == "word" || unit == "group") { + var sawType = null, group = unit == "group"; + var helper = doc.cm && doc.cm.getHelper(pos, "wordChars"); + for (var first = true;; first = false) { + if (dir < 0 && !moveOnce(!first)) { break } + var cur = lineObj.text.charAt(pos.ch) || "\n"; + var type = isWordChar(cur, helper) ? "w" + : group && cur == "\n" ? "n" + : !group || /\s/.test(cur) ? null + : "p"; + if (group && !first && !type) { type = "s"; } + if (sawType && sawType != type) { + if (dir < 0) {dir = 1; moveOnce(); pos.sticky = "after";} + break + } + + if (type) { sawType = type; } + if (dir > 0 && !moveOnce(!first)) { break } + } + } + var result = skipAtomic(doc, pos, oldPos, origDir, true); + if (equalCursorPos(oldPos, result)) { result.hitSide = true; } + return result + } + + // For relative vertical movement. Dir may be -1 or 1. Unit can be + // "page" or "line". The resulting position will have a hitSide=true + // property if it reached the end of the document. + function findPosV(cm, pos, dir, unit) { + var doc = cm.doc, x = pos.left, y; + if (unit == "page") { + var pageSize = Math.min(cm.display.wrapper.clientHeight, window.innerHeight || document.documentElement.clientHeight); + var moveAmount = Math.max(pageSize - .5 * textHeight(cm.display), 3); + y = (dir > 0 ? pos.bottom : pos.top) + dir * moveAmount; + + } else if (unit == "line") { + y = dir > 0 ? 
pos.bottom + 3 : pos.top - 3; + } + var target; + for (;;) { + target = coordsChar(cm, x, y); + if (!target.outside) { break } + if (dir < 0 ? y <= 0 : y >= doc.height) { target.hitSide = true; break } + y += dir * 5; + } + return target + } + + // CONTENTEDITABLE INPUT STYLE + + var ContentEditableInput = function(cm) { + this.cm = cm; + this.lastAnchorNode = this.lastAnchorOffset = this.lastFocusNode = this.lastFocusOffset = null; + this.polling = new Delayed(); + this.composing = null; + this.gracePeriod = false; + this.readDOMTimeout = null; + }; + + ContentEditableInput.prototype.init = function (display) { + var this$1 = this; + + var input = this, cm = input.cm; + var div = input.div = display.lineDiv; + disableBrowserMagic(div, cm.options.spellcheck, cm.options.autocorrect, cm.options.autocapitalize); + + on(div, "paste", function (e) { + if (signalDOMEvent(cm, e) || handlePaste(e, cm)) { return } + // IE doesn't fire input events, so we schedule a read for the pasted content in this way + if (ie_version <= 11) { setTimeout(operation(cm, function () { return this$1.updateFromDOM(); }), 20); } + }); + + on(div, "compositionstart", function (e) { + this$1.composing = {data: e.data, done: false}; + }); + on(div, "compositionupdate", function (e) { + if (!this$1.composing) { this$1.composing = {data: e.data, done: false}; } + }); + on(div, "compositionend", function (e) { + if (this$1.composing) { + if (e.data != this$1.composing.data) { this$1.readFromDOMSoon(); } + this$1.composing.done = true; + } + }); + + on(div, "touchstart", function () { return input.forceCompositionEnd(); }); + + on(div, "input", function () { + if (!this$1.composing) { this$1.readFromDOMSoon(); } + }); + + function onCopyCut(e) { + if (signalDOMEvent(cm, e)) { return } + if (cm.somethingSelected()) { + setLastCopied({lineWise: false, text: cm.getSelections()}); + if (e.type == "cut") { cm.replaceSelection("", null, "cut"); } + } else if (!cm.options.lineWiseCopyCut) { + return + } else { + var ranges = copyableRanges(cm); + setLastCopied({lineWise: true, text: ranges.text}); + if (e.type == "cut") { + cm.operation(function () { + cm.setSelections(ranges.ranges, 0, sel_dontScroll); + cm.replaceSelection("", null, "cut"); + }); + } + } + if (e.clipboardData) { + e.clipboardData.clearData(); + var content = lastCopied.text.join("\n"); + // iOS exposes the clipboard API, but seems to discard content inserted into it + e.clipboardData.setData("Text", content); + if (e.clipboardData.getData("Text") == content) { + e.preventDefault(); + return + } + } + // Old-fashioned briefly-focus-a-textarea hack + var kludge = hiddenTextarea(), te = kludge.firstChild; + cm.display.lineSpace.insertBefore(kludge, cm.display.lineSpace.firstChild); + te.value = lastCopied.text.join("\n"); + var hadFocus = document.activeElement; + selectInput(te); + setTimeout(function () { + cm.display.lineSpace.removeChild(kludge); + hadFocus.focus(); + if (hadFocus == div) { input.showPrimarySelection(); } + }, 50); + } + on(div, "copy", onCopyCut); + on(div, "cut", onCopyCut); + }; + + ContentEditableInput.prototype.prepareSelection = function () { + var result = prepareSelection(this.cm, false); + result.focus = this.cm.state.focused; + return result + }; + + ContentEditableInput.prototype.showSelection = function (info, takeFocus) { + if (!info || !this.cm.display.view.length) { return } + if (info.focus || takeFocus) { this.showPrimarySelection(); } + this.showMultipleSelections(info); + }; + + ContentEditableInput.prototype.getSelection = 
function () { + return this.cm.display.wrapper.ownerDocument.getSelection() + }; + + ContentEditableInput.prototype.showPrimarySelection = function () { + var sel = this.getSelection(), cm = this.cm, prim = cm.doc.sel.primary(); + var from = prim.from(), to = prim.to(); + + if (cm.display.viewTo == cm.display.viewFrom || from.line >= cm.display.viewTo || to.line < cm.display.viewFrom) { + sel.removeAllRanges(); + return + } + + var curAnchor = domToPos(cm, sel.anchorNode, sel.anchorOffset); + var curFocus = domToPos(cm, sel.focusNode, sel.focusOffset); + if (curAnchor && !curAnchor.bad && curFocus && !curFocus.bad && + cmp(minPos(curAnchor, curFocus), from) == 0 && + cmp(maxPos(curAnchor, curFocus), to) == 0) + { return } + + var view = cm.display.view; + var start = (from.line >= cm.display.viewFrom && posToDOM(cm, from)) || + {node: view[0].measure.map[2], offset: 0}; + var end = to.line < cm.display.viewTo && posToDOM(cm, to); + if (!end) { + var measure = view[view.length - 1].measure; + var map$$1 = measure.maps ? measure.maps[measure.maps.length - 1] : measure.map; + end = {node: map$$1[map$$1.length - 1], offset: map$$1[map$$1.length - 2] - map$$1[map$$1.length - 3]}; + } + + if (!start || !end) { + sel.removeAllRanges(); + return + } + + var old = sel.rangeCount && sel.getRangeAt(0), rng; + try { rng = range(start.node, start.offset, end.offset, end.node); } + catch(e) {} // Our model of the DOM might be outdated, in which case the range we try to set can be impossible + if (rng) { + if (!gecko && cm.state.focused) { + sel.collapse(start.node, start.offset); + if (!rng.collapsed) { + sel.removeAllRanges(); + sel.addRange(rng); + } + } else { + sel.removeAllRanges(); + sel.addRange(rng); + } + if (old && sel.anchorNode == null) { sel.addRange(old); } + else if (gecko) { this.startGracePeriod(); } + } + this.rememberSelection(); + }; + + ContentEditableInput.prototype.startGracePeriod = function () { + var this$1 = this; + + clearTimeout(this.gracePeriod); + this.gracePeriod = setTimeout(function () { + this$1.gracePeriod = false; + if (this$1.selectionChanged()) + { this$1.cm.operation(function () { return this$1.cm.curOp.selectionChanged = true; }); } + }, 20); + }; + + ContentEditableInput.prototype.showMultipleSelections = function (info) { + removeChildrenAndAdd(this.cm.display.cursorDiv, info.cursors); + removeChildrenAndAdd(this.cm.display.selectionDiv, info.selection); + }; + + ContentEditableInput.prototype.rememberSelection = function () { + var sel = this.getSelection(); + this.lastAnchorNode = sel.anchorNode; this.lastAnchorOffset = sel.anchorOffset; + this.lastFocusNode = sel.focusNode; this.lastFocusOffset = sel.focusOffset; + }; + + ContentEditableInput.prototype.selectionInEditor = function () { + var sel = this.getSelection(); + if (!sel.rangeCount) { return false } + var node = sel.getRangeAt(0).commonAncestorContainer; + return contains(this.div, node) + }; + + ContentEditableInput.prototype.focus = function () { + if (this.cm.options.readOnly != "nocursor") { + if (!this.selectionInEditor()) + { this.showSelection(this.prepareSelection(), true); } + this.div.focus(); + } + }; + ContentEditableInput.prototype.blur = function () { this.div.blur(); }; + ContentEditableInput.prototype.getField = function () { return this.div }; + + ContentEditableInput.prototype.supportsTouch = function () { return true }; + + ContentEditableInput.prototype.receivedFocus = function () { + var input = this; + if (this.selectionInEditor()) + { this.pollSelection(); } + else + { 
runInOp(this.cm, function () { return input.cm.curOp.selectionChanged = true; }); } + + function poll() { + if (input.cm.state.focused) { + input.pollSelection(); + input.polling.set(input.cm.options.pollInterval, poll); + } + } + this.polling.set(this.cm.options.pollInterval, poll); + }; + + ContentEditableInput.prototype.selectionChanged = function () { + var sel = this.getSelection(); + return sel.anchorNode != this.lastAnchorNode || sel.anchorOffset != this.lastAnchorOffset || + sel.focusNode != this.lastFocusNode || sel.focusOffset != this.lastFocusOffset + }; + + ContentEditableInput.prototype.pollSelection = function () { + if (this.readDOMTimeout != null || this.gracePeriod || !this.selectionChanged()) { return } + var sel = this.getSelection(), cm = this.cm; + // On Android Chrome (version 56, at least), backspacing into an + // uneditable block element will put the cursor in that element, + // and then, because it's not editable, hide the virtual keyboard. + // Because Android doesn't allow us to actually detect backspace + // presses in a sane way, this code checks for when that happens + // and simulates a backspace press in this case. + if (android && chrome && this.cm.display.gutterSpecs.length && isInGutter(sel.anchorNode)) { + this.cm.triggerOnKeyDown({type: "keydown", keyCode: 8, preventDefault: Math.abs}); + this.blur(); + this.focus(); + return + } + if (this.composing) { return } + this.rememberSelection(); + var anchor = domToPos(cm, sel.anchorNode, sel.anchorOffset); + var head = domToPos(cm, sel.focusNode, sel.focusOffset); + if (anchor && head) { runInOp(cm, function () { + setSelection(cm.doc, simpleSelection(anchor, head), sel_dontScroll); + if (anchor.bad || head.bad) { cm.curOp.selectionChanged = true; } + }); } + }; + + ContentEditableInput.prototype.pollContent = function () { + if (this.readDOMTimeout != null) { + clearTimeout(this.readDOMTimeout); + this.readDOMTimeout = null; + } + + var cm = this.cm, display = cm.display, sel = cm.doc.sel.primary(); + var from = sel.from(), to = sel.to(); + if (from.ch == 0 && from.line > cm.firstLine()) + { from = Pos(from.line - 1, getLine(cm.doc, from.line - 1).length); } + if (to.ch == getLine(cm.doc, to.line).text.length && to.line < cm.lastLine()) + { to = Pos(to.line + 1, 0); } + if (from.line < display.viewFrom || to.line > display.viewTo - 1) { return false } + + var fromIndex, fromLine, fromNode; + if (from.line == display.viewFrom || (fromIndex = findViewIndex(cm, from.line)) == 0) { + fromLine = lineNo(display.view[0].line); + fromNode = display.view[0].node; + } else { + fromLine = lineNo(display.view[fromIndex].line); + fromNode = display.view[fromIndex - 1].node.nextSibling; + } + var toIndex = findViewIndex(cm, to.line); + var toLine, toNode; + if (toIndex == display.view.length - 1) { + toLine = display.viewTo - 1; + toNode = display.lineDiv.lastChild; + } else { + toLine = lineNo(display.view[toIndex + 1].line) - 1; + toNode = display.view[toIndex + 1].node.previousSibling; + } + + if (!fromNode) { return false } + var newText = cm.doc.splitLines(domTextBetween(cm, fromNode, toNode, fromLine, toLine)); + var oldText = getBetween(cm.doc, Pos(fromLine, 0), Pos(toLine, getLine(cm.doc, toLine).text.length)); + while (newText.length > 1 && oldText.length > 1) { + if (lst(newText) == lst(oldText)) { newText.pop(); oldText.pop(); toLine--; } + else if (newText[0] == oldText[0]) { newText.shift(); oldText.shift(); fromLine++; } + else { break } + } + + var cutFront = 0, cutEnd = 0; + var newTop = newText[0], 
oldTop = oldText[0], maxCutFront = Math.min(newTop.length, oldTop.length); + while (cutFront < maxCutFront && newTop.charCodeAt(cutFront) == oldTop.charCodeAt(cutFront)) + { ++cutFront; } + var newBot = lst(newText), oldBot = lst(oldText); + var maxCutEnd = Math.min(newBot.length - (newText.length == 1 ? cutFront : 0), + oldBot.length - (oldText.length == 1 ? cutFront : 0)); + while (cutEnd < maxCutEnd && + newBot.charCodeAt(newBot.length - cutEnd - 1) == oldBot.charCodeAt(oldBot.length - cutEnd - 1)) + { ++cutEnd; } + // Try to move start of change to start of selection if ambiguous + if (newText.length == 1 && oldText.length == 1 && fromLine == from.line) { + while (cutFront && cutFront > from.ch && + newBot.charCodeAt(newBot.length - cutEnd - 1) == oldBot.charCodeAt(oldBot.length - cutEnd - 1)) { + cutFront--; + cutEnd++; + } + } + + newText[newText.length - 1] = newBot.slice(0, newBot.length - cutEnd).replace(/^\u200b+/, ""); + newText[0] = newText[0].slice(cutFront).replace(/\u200b+$/, ""); + + var chFrom = Pos(fromLine, cutFront); + var chTo = Pos(toLine, oldText.length ? lst(oldText).length - cutEnd : 0); + if (newText.length > 1 || newText[0] || cmp(chFrom, chTo)) { + replaceRange(cm.doc, newText, chFrom, chTo, "+input"); + return true + } + }; + + ContentEditableInput.prototype.ensurePolled = function () { + this.forceCompositionEnd(); + }; + ContentEditableInput.prototype.reset = function () { + this.forceCompositionEnd(); + }; + ContentEditableInput.prototype.forceCompositionEnd = function () { + if (!this.composing) { return } + clearTimeout(this.readDOMTimeout); + this.composing = null; + this.updateFromDOM(); + this.div.blur(); + this.div.focus(); + }; + ContentEditableInput.prototype.readFromDOMSoon = function () { + var this$1 = this; + + if (this.readDOMTimeout != null) { return } + this.readDOMTimeout = setTimeout(function () { + this$1.readDOMTimeout = null; + if (this$1.composing) { + if (this$1.composing.done) { this$1.composing = null; } + else { return } + } + this$1.updateFromDOM(); + }, 80); + }; + + ContentEditableInput.prototype.updateFromDOM = function () { + var this$1 = this; + + if (this.cm.isReadOnly() || !this.pollContent()) + { runInOp(this.cm, function () { return regChange(this$1.cm); }); } + }; + + ContentEditableInput.prototype.setUneditable = function (node) { + node.contentEditable = "false"; + }; + + ContentEditableInput.prototype.onKeyPress = function (e) { + if (e.charCode == 0 || this.composing) { return } + e.preventDefault(); + if (!this.cm.isReadOnly()) + { operation(this.cm, applyTextInput)(this.cm, String.fromCharCode(e.charCode == null ? e.keyCode : e.charCode), 0); } + }; + + ContentEditableInput.prototype.readOnlyChanged = function (val) { + this.div.contentEditable = String(val != "nocursor"); + }; + + ContentEditableInput.prototype.onContextMenu = function () {}; + ContentEditableInput.prototype.resetPosition = function () {}; + + ContentEditableInput.prototype.needsContentAttribute = true; + + function posToDOM(cm, pos) { + var view = findViewForLine(cm, pos.line); + if (!view || view.hidden) { return null } + var line = getLine(cm.doc, pos.line); + var info = mapFromLineView(view, line, pos.line); + + var order = getOrder(line, cm.doc.direction), side = "left"; + if (order) { + var partPos = getBidiPartAt(order, pos.ch); + side = partPos % 2 ? "right" : "left"; + } + var result = nodeAndOffsetInLineMap(info.map, pos.ch, side); + result.offset = result.collapse == "right" ? 
result.end : result.start; + return result + } + + function isInGutter(node) { + for (var scan = node; scan; scan = scan.parentNode) + { if (/CodeMirror-gutter-wrapper/.test(scan.className)) { return true } } + return false + } + + function badPos(pos, bad) { if (bad) { pos.bad = true; } return pos } + + function domTextBetween(cm, from, to, fromLine, toLine) { + var text = "", closing = false, lineSep = cm.doc.lineSeparator(), extraLinebreak = false; + function recognizeMarker(id) { return function (marker) { return marker.id == id; } } + function close() { + if (closing) { + text += lineSep; + if (extraLinebreak) { text += lineSep; } + closing = extraLinebreak = false; + } + } + function addText(str) { + if (str) { + close(); + text += str; + } + } + function walk(node) { + if (node.nodeType == 1) { + var cmText = node.getAttribute("cm-text"); + if (cmText) { + addText(cmText); + return + } + var markerID = node.getAttribute("cm-marker"), range$$1; + if (markerID) { + var found = cm.findMarks(Pos(fromLine, 0), Pos(toLine + 1, 0), recognizeMarker(+markerID)); + if (found.length && (range$$1 = found[0].find(0))) + { addText(getBetween(cm.doc, range$$1.from, range$$1.to).join(lineSep)); } + return } - } - - // Here we depart from the documented algorithm, in order to avoid - // building up an actual levels array. Since there are only three - // levels (0, 1, 2) in an implementation that doesn't take - // explicit embedding into account, we can build up the order on - // the fly, without following the level-based algorithm. - var order = [], m; - for (var i = 0; i < len;) { - if (countsAsLeft.test(types[i])) { - var start = i; - for (++i; i < len && countsAsLeft.test(types[i]); ++i) {} - order.push(new BidiSpan(0, start, i)); + if (node.getAttribute("contenteditable") == "false") { return } + var isBlock = /^(pre|div|p|li|table|br)$/i.test(node.nodeName); + if (!/^br$/i.test(node.nodeName) && node.textContent.length == 0) { return } + + if (isBlock) { close(); } + for (var i = 0; i < node.childNodes.length; i++) + { walk(node.childNodes[i]); } + + if (/^(pre|p)$/i.test(node.nodeName)) { extraLinebreak = true; } + if (isBlock) { closing = true; } + } else if (node.nodeType == 3) { + addText(node.nodeValue.replace(/\u200b/g, "").replace(/\u00a0/g, " ")); + } + } + for (;;) { + walk(from); + if (from == to) { break } + from = from.nextSibling; + extraLinebreak = false; + } + return text + } + + function domToPos(cm, node, offset) { + var lineNode; + if (node == cm.display.lineDiv) { + lineNode = cm.display.lineDiv.childNodes[offset]; + if (!lineNode) { return badPos(cm.clipPos(Pos(cm.display.viewTo - 1)), true) } + node = null; offset = 0; + } else { + for (lineNode = node;; lineNode = lineNode.parentNode) { + if (!lineNode || lineNode == cm.display.lineDiv) { return null } + if (lineNode.parentNode && lineNode.parentNode == cm.display.lineDiv) { break } + } + } + for (var i = 0; i < cm.display.view.length; i++) { + var lineView = cm.display.view[i]; + if (lineView.node == lineNode) + { return locateNodeInLineView(lineView, node, offset) } + } + } + + function locateNodeInLineView(lineView, node, offset) { + var wrapper = lineView.text.firstChild, bad = false; + if (!node || !contains(wrapper, node)) { return badPos(Pos(lineNo(lineView.line), 0), true) } + if (node == wrapper) { + bad = true; + node = wrapper.childNodes[offset]; + offset = 0; + if (!node) { + var line = lineView.rest ? 
lst(lineView.rest) : lineView.line; + return badPos(Pos(lineNo(line), line.text.length), bad) + } + } + + var textNode = node.nodeType == 3 ? node : null, topNode = node; + if (!textNode && node.childNodes.length == 1 && node.firstChild.nodeType == 3) { + textNode = node.firstChild; + if (offset) { offset = textNode.nodeValue.length; } + } + while (topNode.parentNode != wrapper) { topNode = topNode.parentNode; } + var measure = lineView.measure, maps = measure.maps; + + function find(textNode, topNode, offset) { + for (var i = -1; i < (maps ? maps.length : 0); i++) { + var map$$1 = i < 0 ? measure.map : maps[i]; + for (var j = 0; j < map$$1.length; j += 3) { + var curNode = map$$1[j + 2]; + if (curNode == textNode || curNode == topNode) { + var line = lineNo(i < 0 ? lineView.line : lineView.rest[i]); + var ch = map$$1[j] + offset; + if (offset < 0 || curNode != textNode) { ch = map$$1[j + (offset ? 1 : 0)]; } + return Pos(line, ch) + } + } + } + } + var found = find(textNode, topNode, offset); + if (found) { return badPos(found, bad) } + + // FIXME this is all really shaky. might handle the few cases it needs to handle, but likely to cause problems + for (var after = topNode.nextSibling, dist = textNode ? textNode.nodeValue.length - offset : 0; after; after = after.nextSibling) { + found = find(after, after.firstChild, 0); + if (found) + { return badPos(Pos(found.line, found.ch - dist), bad) } + else + { dist += after.textContent.length; } + } + for (var before = topNode.previousSibling, dist$1 = offset; before; before = before.previousSibling) { + found = find(before, before.firstChild, -1); + if (found) + { return badPos(Pos(found.line, found.ch + dist$1), bad) } + else + { dist$1 += before.textContent.length; } + } + } + + // TEXTAREA INPUT STYLE + + var TextareaInput = function(cm) { + this.cm = cm; + // See input.poll and input.reset + this.prevInput = ""; + + // Flag that indicates whether we expect input to appear real soon + // now (after some event like 'keypress' or 'input') and are + // polling intensively. 
+ this.pollingFast = false; + // Self-resetting timeout for the poller + this.polling = new Delayed(); + // Used to work around IE issue with selection being forgotten when focus moves away from textarea + this.hasSelection = false; + this.composing = null; + }; + + TextareaInput.prototype.init = function (display) { + var this$1 = this; + + var input = this, cm = this.cm; + this.createField(display); + var te = this.textarea; + + display.wrapper.insertBefore(this.wrapper, display.wrapper.firstChild); + + // Needed to hide big blue blinking cursor on Mobile Safari (doesn't seem to work in iOS 8 anymore) + if (ios) { te.style.width = "0px"; } + + on(te, "input", function () { + if (ie && ie_version >= 9 && this$1.hasSelection) { this$1.hasSelection = null; } + input.poll(); + }); + + on(te, "paste", function (e) { + if (signalDOMEvent(cm, e) || handlePaste(e, cm)) { return } + + cm.state.pasteIncoming = +new Date; + input.fastPoll(); + }); + + function prepareCopyCut(e) { + if (signalDOMEvent(cm, e)) { return } + if (cm.somethingSelected()) { + setLastCopied({lineWise: false, text: cm.getSelections()}); + } else if (!cm.options.lineWiseCopyCut) { + return + } else { + var ranges = copyableRanges(cm); + setLastCopied({lineWise: true, text: ranges.text}); + if (e.type == "cut") { + cm.setSelections(ranges.ranges, null, sel_dontScroll); } else { - var pos = i, at = order.length; - for (++i; i < len && types[i] != "L"; ++i) {} - for (var j = pos; j < i;) { - if (countsAsNum.test(types[j])) { - if (pos < j) order.splice(at, 0, new BidiSpan(1, pos, j)); - var nstart = j; - for (++j; j < i && countsAsNum.test(types[j]); ++j) {} - order.splice(at, 0, new BidiSpan(2, nstart, j)); - pos = j; - } else ++j; - } - if (pos < i) order.splice(at, 0, new BidiSpan(1, pos, i)); + input.prevInput = ""; + te.value = ranges.text.join("\n"); + selectInput(te); } } - if (order[0].level == 1 && (m = str.match(/^\s+/))) { - order[0].from = m[0].length; - order.unshift(new BidiSpan(0, 0, m[0].length)); - } - if (lst(order).level == 1 && (m = str.match(/\s+$/))) { - lst(order).to -= m[0].length; - order.push(new BidiSpan(0, len - m[0].length, len)); - } - if (order[0].level == 2) - order.unshift(new BidiSpan(1, order[0].to, order[0].to)); - if (order[0].level != lst(order).level) - order.push(new BidiSpan(order[0].level, len, len)); - - return order; + if (e.type == "cut") { cm.state.cutIncoming = +new Date; } + } + on(te, "cut", prepareCopyCut); + on(te, "copy", prepareCopyCut); + + on(display.scroller, "paste", function (e) { + if (eventInWidget(display, e) || signalDOMEvent(cm, e)) { return } + if (!te.dispatchEvent) { + cm.state.pasteIncoming = +new Date; + input.focus(); + return + } + + // Pass the `paste` event to the textarea so it's handled by its event listener. 
+ var event = new Event("paste"); + event.clipboardData = e.clipboardData; + te.dispatchEvent(event); + }); + + // Prevent normal selection in the editor (we handle our own) + on(display.lineSpace, "selectstart", function (e) { + if (!eventInWidget(display, e)) { e_preventDefault(e); } + }); + + on(te, "compositionstart", function () { + var start = cm.getCursor("from"); + if (input.composing) { input.composing.range.clear(); } + input.composing = { + start: start, + range: cm.markText(start, cm.getCursor("to"), {className: "CodeMirror-composing"}) + }; + }); + on(te, "compositionend", function () { + if (input.composing) { + input.poll(); + input.composing.range.clear(); + input.composing = null; + } + }); + }; + + TextareaInput.prototype.createField = function (_display) { + // Wraps and hides input textarea + this.wrapper = hiddenTextarea(); + // The semihidden textarea that is focused when the editor is + // focused, and receives input. + this.textarea = this.wrapper.firstChild; + }; + + TextareaInput.prototype.prepareSelection = function () { + // Redraw the selection and/or cursor + var cm = this.cm, display = cm.display, doc = cm.doc; + var result = prepareSelection(cm); + + // Move the hidden textarea near the cursor to prevent scrolling artifacts + if (cm.options.moveInputWithCursor) { + var headPos = cursorCoords(cm, doc.sel.primary().head, "div"); + var wrapOff = display.wrapper.getBoundingClientRect(), lineOff = display.lineDiv.getBoundingClientRect(); + result.teTop = Math.max(0, Math.min(display.wrapper.clientHeight - 10, + headPos.top + lineOff.top - wrapOff.top)); + result.teLeft = Math.max(0, Math.min(display.wrapper.clientWidth - 10, + headPos.left + lineOff.left - wrapOff.left)); + } + + return result + }; + + TextareaInput.prototype.showSelection = function (drawn) { + var cm = this.cm, display = cm.display; + removeChildrenAndAdd(display.cursorDiv, drawn.cursors); + removeChildrenAndAdd(display.selectionDiv, drawn.selection); + if (drawn.teTop != null) { + this.wrapper.style.top = drawn.teTop + "px"; + this.wrapper.style.left = drawn.teLeft + "px"; + } + }; + + // Reset the input to correspond to the selection (or to be empty, + // when not typing and nothing is selected) + TextareaInput.prototype.reset = function (typing) { + if (this.contextMenuPending || this.composing) { return } + var cm = this.cm; + if (cm.somethingSelected()) { + this.prevInput = ""; + var content = cm.getSelection(); + this.textarea.value = content; + if (cm.state.focused) { selectInput(this.textarea); } + if (ie && ie_version >= 9) { this.hasSelection = content; } + } else if (!typing) { + this.prevInput = this.textarea.value = ""; + if (ie && ie_version >= 9) { this.hasSelection = null; } + } + }; + + TextareaInput.prototype.getField = function () { return this.textarea }; + + TextareaInput.prototype.supportsTouch = function () { return false }; + + TextareaInput.prototype.focus = function () { + if (this.cm.options.readOnly != "nocursor" && (!mobile || activeElt() != this.textarea)) { + try { this.textarea.focus(); } + catch (e) {} // IE8 will throw if the textarea is display: none or not in DOM + } + }; + + TextareaInput.prototype.blur = function () { this.textarea.blur(); }; + + TextareaInput.prototype.resetPosition = function () { + this.wrapper.style.top = this.wrapper.style.left = 0; + }; + + TextareaInput.prototype.receivedFocus = function () { this.slowPoll(); }; + + // Poll for input changes, using the normal rate of polling. This + // runs as long as the editor is focused. 
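// --- Editorial aside: illustrative only, not part of the upstream CodeMirror source or of this patch ---
// The "normal rate of polling" referred to above is the pollInterval option
// (milliseconds, default 100). A sketch of setting it explicitly, assuming a
// <textarea id="src"> exists on the page (the id is made up):
//
//   var cm = CodeMirror.fromTextArea(document.getElementById("src"),
//                                    {inputStyle: "textarea", pollInterval: 100});
// --- end editorial aside ---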
+ TextareaInput.prototype.slowPoll = function () { + var this$1 = this; + + if (this.pollingFast) { return } + this.polling.set(this.cm.options.pollInterval, function () { + this$1.poll(); + if (this$1.cm.state.focused) { this$1.slowPoll(); } + }); + }; + + // When an event has just come in that is likely to add or change + // something in the input textarea, we poll faster, to ensure that + // the change appears on the screen quickly. + TextareaInput.prototype.fastPoll = function () { + var missed = false, input = this; + input.pollingFast = true; + function p() { + var changed = input.poll(); + if (!changed && !missed) {missed = true; input.polling.set(60, p);} + else {input.pollingFast = false; input.slowPoll();} + } + input.polling.set(20, p); + }; + + // Read input from the textarea, and update the document to match. + // When something is selected, it is present in the textarea, and + // selected (unless it is huge, in which case a placeholder is + // used). When nothing is selected, the cursor sits after previously + // seen text (can be empty), which is stored in prevInput (we must + // not reset the textarea when typing, because that breaks IME). + TextareaInput.prototype.poll = function () { + var this$1 = this; + + var cm = this.cm, input = this.textarea, prevInput = this.prevInput; + // Since this is called a *lot*, try to bail out as cheaply as + // possible when it is clear that nothing happened. hasSelection + // will be the case when there is a lot of text in the textarea, + // in which case reading its value would be expensive. + if (this.contextMenuPending || !cm.state.focused || + (hasSelection(input) && !prevInput && !this.composing) || + cm.isReadOnly() || cm.options.disableInput || cm.state.keySeq) + { return false } + + var text = input.value; + // If nothing changed, bail. + if (text == prevInput && !cm.somethingSelected()) { return false } + // Work around nonsensical selection resetting in IE9/10, and + // inexplicable appearance of private area unicode characters on + // some key combos in Mac (#2689). + if (ie && ie_version >= 9 && this.hasSelection === text || + mac && /[\uf700-\uf7ff]/.test(text)) { + cm.display.input.reset(); + return false + } + + if (cm.doc.sel == cm.display.selForContextMenu) { + var first = text.charCodeAt(0); + if (first == 0x200b && !prevInput) { prevInput = "\u200b"; } + if (first == 0x21da) { this.reset(); return this.cm.execCommand("undo") } + } + // Find the part of the input that is actually new + var same = 0, l = Math.min(prevInput.length, text.length); + while (same < l && prevInput.charCodeAt(same) == text.charCodeAt(same)) { ++same; } + + runInOp(cm, function () { + applyTextInput(cm, text.slice(same), prevInput.length - same, + null, this$1.composing ? 
"*compose" : null); + + // Don't leave long text in the textarea, since it makes further polling slow + if (text.length > 1000 || text.indexOf("\n") > -1) { input.value = this$1.prevInput = ""; } + else { this$1.prevInput = text; } + + if (this$1.composing) { + this$1.composing.range.clear(); + this$1.composing.range = cm.markText(this$1.composing.start, cm.getCursor("to"), + {className: "CodeMirror-composing"}); + } + }); + return true + }; + + TextareaInput.prototype.ensurePolled = function () { + if (this.pollingFast && this.poll()) { this.pollingFast = false; } + }; + + TextareaInput.prototype.onKeyPress = function () { + if (ie && ie_version >= 9) { this.hasSelection = null; } + this.fastPoll(); + }; + + TextareaInput.prototype.onContextMenu = function (e) { + var input = this, cm = input.cm, display = cm.display, te = input.textarea; + if (input.contextMenuPending) { input.contextMenuPending(); } + var pos = posFromMouse(cm, e), scrollPos = display.scroller.scrollTop; + if (!pos || presto) { return } // Opera is difficult. + + // Reset the current text selection only if the click is done outside of the selection + // and 'resetSelectionOnContextMenu' option is true. + var reset = cm.options.resetSelectionOnContextMenu; + if (reset && cm.doc.sel.contains(pos) == -1) + { operation(cm, setSelection)(cm.doc, simpleSelection(pos), sel_dontScroll); } + + var oldCSS = te.style.cssText, oldWrapperCSS = input.wrapper.style.cssText; + var wrapperBox = input.wrapper.offsetParent.getBoundingClientRect(); + input.wrapper.style.cssText = "position: static"; + te.style.cssText = "position: absolute; width: 30px; height: 30px;\n top: " + (e.clientY - wrapperBox.top - 5) + "px; left: " + (e.clientX - wrapperBox.left - 5) + "px;\n z-index: 1000; background: " + (ie ? "rgba(255, 255, 255, .05)" : "transparent") + ";\n outline: none; border-width: 0; outline: none; overflow: hidden; opacity: .05; filter: alpha(opacity=5);"; + var oldScrollY; + if (webkit) { oldScrollY = window.scrollY; } // Work around Chrome issue (#2712) + display.input.focus(); + if (webkit) { window.scrollTo(null, oldScrollY); } + display.input.reset(); + // Adds "Select all" to context menu in FF + if (!cm.somethingSelected()) { te.value = input.prevInput = " "; } + input.contextMenuPending = rehide; + display.selForContextMenu = cm.doc.sel; + clearTimeout(display.detectingSelectAll); + + // Select-all will be greyed out if there's nothing to select, so + // this adds a zero-width space so that we can later check whether + // it got selected. + function prepareSelectAllHack() { + if (te.selectionStart != null) { + var selected = cm.somethingSelected(); + var extval = "\u200b" + (selected ? te.value : ""); + te.value = "\u21da"; // Used to catch context-menu undo + te.value = extval; + input.prevInput = selected ? "" : "\u200b"; + te.selectionStart = 1; te.selectionEnd = extval.length; + // Re-set this, in case some other handler touched the + // selection in the meantime. 
+ display.selForContextMenu = cm.doc.sel; + } + } + function rehide() { + if (input.contextMenuPending != rehide) { return } + input.contextMenuPending = false; + input.wrapper.style.cssText = oldWrapperCSS; + te.style.cssText = oldCSS; + if (ie && ie_version < 9) { display.scrollbars.setScrollTop(display.scroller.scrollTop = scrollPos); } + + // Try to detect the user choosing select-all + if (te.selectionStart != null) { + if (!ie || (ie && ie_version < 9)) { prepareSelectAllHack(); } + var i = 0, poll = function () { + if (display.selForContextMenu == cm.doc.sel && te.selectionStart == 0 && + te.selectionEnd > 0 && input.prevInput == "\u200b") { + operation(cm, selectAll)(cm); + } else if (i++ < 10) { + display.detectingSelectAll = setTimeout(poll, 500); + } else { + display.selForContextMenu = null; + display.input.reset(); + } + }; + display.detectingSelectAll = setTimeout(poll, 200); + } + } + + if (ie && ie_version >= 9) { prepareSelectAllHack(); } + if (captureRightClick) { + e_stop(e); + var mouseup = function () { + off(window, "mouseup", mouseup); + setTimeout(rehide, 20); + }; + on(window, "mouseup", mouseup); + } else { + setTimeout(rehide, 50); + } + }; + + TextareaInput.prototype.readOnlyChanged = function (val) { + if (!val) { this.reset(); } + this.textarea.disabled = val == "nocursor"; + }; + + TextareaInput.prototype.setUneditable = function () {}; + + TextareaInput.prototype.needsContentAttribute = false; + + function fromTextArea(textarea, options) { + options = options ? copyObj(options) : {}; + options.value = textarea.value; + if (!options.tabindex && textarea.tabIndex) + { options.tabindex = textarea.tabIndex; } + if (!options.placeholder && textarea.placeholder) + { options.placeholder = textarea.placeholder; } + // Set autofocus to true if this textarea is focused, or if it has + // autofocus and no other element is focused. + if (options.autofocus == null) { + var hasFocus = activeElt(); + options.autofocus = hasFocus == textarea || + textarea.getAttribute("autofocus") != null && hasFocus == document.body; + } + + function save() {textarea.value = cm.getValue();} + + var realSubmit; + if (textarea.form) { + on(textarea.form, "submit", save); + // Deplorable hack to make the submit method do the right thing. 
+ if (!options.leaveSubmitMethodAlone) { + var form = textarea.form; + realSubmit = form.submit; + try { + var wrappedSubmit = form.submit = function () { + save(); + form.submit = realSubmit; + form.submit(); + form.submit = wrappedSubmit; + }; + } catch(e) {} + } + } + + options.finishInit = function (cm) { + cm.save = save; + cm.getTextArea = function () { return textarea; }; + cm.toTextArea = function () { + cm.toTextArea = isNaN; // Prevent this from being ran twice + save(); + textarea.parentNode.removeChild(cm.getWrapperElement()); + textarea.style.display = ""; + if (textarea.form) { + off(textarea.form, "submit", save); + if (!options.leaveSubmitMethodAlone && typeof textarea.form.submit == "function") + { textarea.form.submit = realSubmit; } + } + }; }; - })(); - - // THE END - - CodeMirror.version = "5.11.0"; + + textarea.style.display = "none"; + var cm = CodeMirror(function (node) { return textarea.parentNode.insertBefore(node, textarea.nextSibling); }, + options); + return cm + } + + function addLegacyProps(CodeMirror) { + CodeMirror.off = off; + CodeMirror.on = on; + CodeMirror.wheelEventPixels = wheelEventPixels; + CodeMirror.Doc = Doc; + CodeMirror.splitLines = splitLinesAuto; + CodeMirror.countColumn = countColumn; + CodeMirror.findColumn = findColumn; + CodeMirror.isWordChar = isWordCharBasic; + CodeMirror.Pass = Pass; + CodeMirror.signal = signal; + CodeMirror.Line = Line; + CodeMirror.changeEnd = changeEnd; + CodeMirror.scrollbarModel = scrollbarModel; + CodeMirror.Pos = Pos; + CodeMirror.cmpPos = cmp; + CodeMirror.modes = modes; + CodeMirror.mimeModes = mimeModes; + CodeMirror.resolveMode = resolveMode; + CodeMirror.getMode = getMode; + CodeMirror.modeExtensions = modeExtensions; + CodeMirror.extendMode = extendMode; + CodeMirror.copyState = copyState; + CodeMirror.startState = startState; + CodeMirror.innerMode = innerMode; + CodeMirror.commands = commands; + CodeMirror.keyMap = keyMap; + CodeMirror.keyName = keyName; + CodeMirror.isModifierKey = isModifierKey; + CodeMirror.lookupKey = lookupKey; + CodeMirror.normalizeKeyMap = normalizeKeyMap; + CodeMirror.StringStream = StringStream; + CodeMirror.SharedTextMarker = SharedTextMarker; + CodeMirror.TextMarker = TextMarker; + CodeMirror.LineWidget = LineWidget; + CodeMirror.e_preventDefault = e_preventDefault; + CodeMirror.e_stopPropagation = e_stopPropagation; + CodeMirror.e_stop = e_stop; + CodeMirror.addClass = addClass; + CodeMirror.contains = contains; + CodeMirror.rmClass = rmClass; + CodeMirror.keyNames = keyNames; + } + + // EDITOR CONSTRUCTOR + + defineOptions(CodeMirror); + + addEditorMethods(CodeMirror); + + // Set up methods on CodeMirror's prototype to redirect to the editor's document. + var dontDelegate = "iter insert remove copy getEditor constructor".split(" "); + for (var prop in Doc.prototype) { if (Doc.prototype.hasOwnProperty(prop) && indexOf(dontDelegate, prop) < 0) + { CodeMirror.prototype[prop] = (function(method) { + return function() {return method.apply(this.doc, arguments)} + })(Doc.prototype[prop]); } } + + eventMixin(Doc); + CodeMirror.inputStyles = {"textarea": TextareaInput, "contenteditable": ContentEditableInput}; + + // Extra arguments are stored as the mode's dependencies, which is + // used by (legacy) mechanisms like loadmode.js to automatically + // load a mode. (Preferred mechanism is the require/define calls.) 
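  // (Editor's note) Illustrative sketch only, not part of the upstream patch: how a mode
  // could be registered against the API wired up here. The mode name "caps", the MIME type
  // "text/x-caps" and the token logic are invented for this example.
  //
  //   CodeMirror.defineMode("caps", function () {
  //     return {
  //       token: function (stream) {
  //         var ch = stream.next();                      // consume one character
  //         return /[A-Z]/.test(ch) ? "keyword" : null;  // style upper-case letters
  //       }
  //     };
  //   });
  //   CodeMirror.defineMIME("text/x-caps", "caps");
  //
  //   // an editor can then be created with {mode: "caps"} or {mode: "text/x-caps"}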
+ CodeMirror.defineMode = function(name/*, mode, …*/) { + if (!CodeMirror.defaults.mode && name != "null") { CodeMirror.defaults.mode = name; } + defineMode.apply(this, arguments); + }; + + CodeMirror.defineMIME = defineMIME; + + // Minimal default mode. + CodeMirror.defineMode("null", function () { return ({token: function (stream) { return stream.skipToEnd(); }}); }); + CodeMirror.defineMIME("text/plain", "null"); + + // EXTENSIONS + + CodeMirror.defineExtension = function (name, func) { + CodeMirror.prototype[name] = func; + }; + CodeMirror.defineDocExtension = function (name, func) { + Doc.prototype[name] = func; + }; + + CodeMirror.fromTextArea = fromTextArea; + + addLegacyProps(CodeMirror); + + CodeMirror.version = "5.49.2"; return CodeMirror; -}); + +}))); diff --git a/rhodecode/public/js/src/codemirror/codemirror_hint.js b/rhodecode/public/js/src/codemirror/codemirror_hint.js --- a/rhodecode/public/js/src/codemirror/codemirror_hint.js +++ b/rhodecode/public/js/src/codemirror/codemirror_hint.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS @@ -46,6 +46,10 @@ completion.update(true); }); + CodeMirror.defineExtension("closeHint", function() { + if (this.state.completionActive) this.state.completionActive.close() + }) + function Completion(cm, options) { this.cm = cm; this.options = options; @@ -98,7 +102,7 @@ var pos = this.cm.getCursor(), line = this.cm.getLine(pos.line); if (pos.line != this.startPos.line || line.length - pos.ch != this.startLen - this.startPos.ch || pos.ch < this.startPos.ch || this.cm.somethingSelected() || - (pos.ch && this.options.closeCharacters.test(line.charAt(pos.ch - 1)))) { + (!pos.ch || this.options.closeCharacters.test(line.charAt(pos.ch - 1)))) { this.close(); } else { var self = this; @@ -108,24 +112,21 @@ }, update: function(first) { - if (this.tick == null) return; - if (!this.options.hint.async) { - this.finishUpdate(this.options.hint(this.cm, this.options), first); - } else { - var myTick = ++this.tick, self = this; - this.options.hint(this.cm, function(data) { - if (self.tick == myTick) self.finishUpdate(data, first); - }, this.options); - } + if (this.tick == null) return + var self = this, myTick = ++this.tick + fetchHints(this.options.hint, this.cm, this.options, function(data) { + if (self.tick == myTick) self.finishUpdate(data, first) + }) }, finishUpdate: function(data, first) { if (this.data) CodeMirror.signal(this.data, "update"); - if (data && this.data && CodeMirror.cmpPos(data.from, this.data.from)) data = null; - this.data = data; var picked = (this.widget && this.widget.picked) || (first && this.options.completeSingle); if (this.widget) this.widget.close(); + + this.data = data; + if (data && data.list.length) { if (picked && data.list.length == 1) { this.pick(data, 0); @@ -166,6 +167,14 @@ Tab: handle.pick, Esc: handle.close }; + + var mac = /Mac/.test(navigator.platform); + + if (mac) { + baseMap["Ctrl-P"] = function() {handle.moveFocus(-1);}; + baseMap["Ctrl-N"] = function() {handle.moveFocus(1);}; + } + var custom = completion.options.customKeys; var ourMap = custom ? 
{} : baseMap; function addBinding(key, val) { @@ -201,43 +210,61 @@ this.data = data; this.picked = false; var widget = this, cm = completion.cm; + var ownerDocument = cm.getInputField().ownerDocument; + var parentWindow = ownerDocument.defaultView || ownerDocument.parentWindow; - var hints = this.hints = document.createElement("ul"); - hints.className = "CodeMirror-hints"; + var hints = this.hints = ownerDocument.createElement("ul"); + var theme = completion.cm.options.theme; + hints.className = "CodeMirror-hints " + theme; this.selectedHint = data.selectedHint || 0; var completions = data.list; for (var i = 0; i < completions.length; ++i) { - var elt = hints.appendChild(document.createElement("li")), cur = completions[i]; + var elt = hints.appendChild(ownerDocument.createElement("li")), cur = completions[i]; var className = HINT_ELEMENT_CLASS + (i != this.selectedHint ? "" : " " + ACTIVE_HINT_ELEMENT_CLASS); if (cur.className != null) className = cur.className + " " + className; elt.className = className; if (cur.render) cur.render(elt, data, cur); - else elt.appendChild(document.createTextNode(cur.displayText || getText(cur))); + else elt.appendChild(ownerDocument.createTextNode(cur.displayText || getText(cur))); elt.hintId = i; } + var container = completion.options.container || ownerDocument.body; var pos = cm.cursorCoords(completion.options.alignWithWord ? data.from : null); var left = pos.left, top = pos.bottom, below = true; - hints.style.left = left + "px"; - hints.style.top = top + "px"; + var offsetLeft = 0, offsetTop = 0; + if (container !== ownerDocument.body) { + // We offset the cursor position because left and top are relative to the offsetParent's top left corner. + var isContainerPositioned = ['absolute', 'relative', 'fixed'].indexOf(parentWindow.getComputedStyle(container).position) !== -1; + var offsetParent = isContainerPositioned ? container : container.offsetParent; + var offsetParentPosition = offsetParent.getBoundingClientRect(); + var bodyPosition = ownerDocument.body.getBoundingClientRect(); + offsetLeft = (offsetParentPosition.left - bodyPosition.left - offsetParent.scrollLeft); + offsetTop = (offsetParentPosition.top - bodyPosition.top - offsetParent.scrollTop); + } + hints.style.left = (left - offsetLeft) + "px"; + hints.style.top = (top - offsetTop) + "px"; + // If we're at the edge of the screen, then we want the menu to appear on the left of the cursor. 
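  // (Editor's note) Illustrative sketch only, not part of the upstream patch: the widget
  // built here is normally reached through cm.showHint() with a hint function returning
  // {list, from, to}. The candidate words below are dummy data for the example.
  //
  //   cm.showHint({
  //     completeSingle: false,
  //     hint: function (editor) {
  //       var cur = editor.getCursor();
  //       var token = editor.getTokenAt(cur);            // word fragment under the cursor
  //       return {
  //         list: ["status", "stable", "staging"],       // invented completion candidates
  //         from: CodeMirror.Pos(cur.line, token.start),
  //         to: CodeMirror.Pos(cur.line, token.end)
  //       };
  //     }
  //   });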
- var winW = window.innerWidth || Math.max(document.body.offsetWidth, document.documentElement.offsetWidth); - var winH = window.innerHeight || Math.max(document.body.offsetHeight, document.documentElement.offsetHeight); - (completion.options.container || document.body).appendChild(hints); + var winW = parentWindow.innerWidth || Math.max(ownerDocument.body.offsetWidth, ownerDocument.documentElement.offsetWidth); + var winH = parentWindow.innerHeight || Math.max(ownerDocument.body.offsetHeight, ownerDocument.documentElement.offsetHeight); + container.appendChild(hints); var box = hints.getBoundingClientRect(), overlapY = box.bottom - winH; + var scrolls = hints.scrollHeight > hints.clientHeight + 1 + var startScroll = cm.getScrollInfo(); + if (overlapY > 0) { var height = box.bottom - box.top, curTop = pos.top - (pos.bottom - box.top); if (curTop - height > 0) { // Fits above cursor - hints.style.top = (top = pos.top - height) + "px"; + hints.style.top = (top = pos.top - height - offsetTop) + "px"; below = false; } else if (height > winH) { hints.style.height = (winH - 5) + "px"; - hints.style.top = (top = pos.bottom - box.top) + "px"; + hints.style.top = (top = pos.bottom - box.top - offsetTop) + "px"; var cursor = cm.getCursor(); if (data.from.ch != cursor.ch) { pos = cm.cursorCoords(cursor); - hints.style.left = (left = pos.left) + "px"; + hints.style.left = (left = pos.left - offsetLeft) + "px"; box = hints.getBoundingClientRect(); } } @@ -248,8 +275,10 @@ hints.style.width = (winW - 5) + "px"; overlapX -= (box.right - box.left) - winW; } - hints.style.left = (left = pos.left - overlapX) + "px"; + hints.style.left = (left = pos.left - overlapX - offsetLeft) + "px"; } + if (scrolls) for (var node = hints.firstChild; node; node = node.nextSibling) + node.style.paddingRight = cm.display.nativeBarWidth + "px" cm.addKeyMap(this.keyMap = buildKeyMap(completion, { moveFocus: function(n, avoidWrap) { widget.changeActive(widget.selectedHint + n, avoidWrap); }, @@ -267,11 +296,10 @@ cm.on("focus", this.onFocus = function() { clearTimeout(closingOnBlur); }); } - var startScroll = cm.getScrollInfo(); cm.on("scroll", this.onScroll = function() { var curScroll = cm.getScrollInfo(), editor = cm.getWrapperElement().getBoundingClientRect(); var newTop = top + startScroll.top - curScroll.top; - var point = newTop - (window.pageYOffset || (document.documentElement || document.body).scrollTop); + var point = newTop - (parentWindow.pageYOffset || (ownerDocument.documentElement || ownerDocument.body).scrollTop); if (!below) point += hints.offsetHeight; if (point <= editor.top || point >= editor.bottom) return completion.close(); hints.style.top = newTop + "px"; @@ -295,7 +323,7 @@ setTimeout(function(){cm.focus();}, 20); }); - CodeMirror.signal(data, "select", completions[0], hints.firstChild); + CodeMirror.signal(data, "select", completions[this.selectedHint], hints.childNodes[this.selectedHint]); return true; } @@ -332,7 +360,7 @@ i = avoidWrap ? 
0 : this.data.list.length - 1; if (this.selectedHint == i) return; var node = this.hints.childNodes[this.selectedHint]; - node.className = node.className.replace(" " + ACTIVE_HINT_ELEMENT_CLASS, ""); + if (node) node.className = node.className.replace(" " + ACTIVE_HINT_ELEMENT_CLASS, ""); node = this.hints.childNodes[this.selectedHint = i]; node.className += " " + ACTIVE_HINT_ELEMENT_CLASS; if (node.offsetTop < this.hints.scrollTop) @@ -355,40 +383,31 @@ return result } + function fetchHints(hint, cm, options, callback) { + if (hint.async) { + hint(cm, callback, options) + } else { + var result = hint(cm, options) + if (result && result.then) result.then(callback) + else callback(result) + } + } + function resolveAutoHints(cm, pos) { var helpers = cm.getHelpers(pos, "hint"), words if (helpers.length) { - var async = false, resolved - for (var i = 0; i < helpers.length; i++) if (helpers[i].async) async = true - if (async) { - resolved = function(cm, callback, options) { - var app = applicableHelpers(cm, helpers) - function run(i, result) { - if (i == app.length) return callback(null) - var helper = app[i] - if (helper.async) { - helper(cm, function(result) { - if (result) callback(result) - else run(i + 1) - }, options) - } else { - var result = helper(cm, options) - if (result) callback(result) - else run(i + 1) - } - } - run(0) + var resolved = function(cm, callback, options) { + var app = applicableHelpers(cm, helpers); + function run(i) { + if (i == app.length) return callback(null) + fetchHints(app[i], cm, options, function(result) { + if (result && result.list.length > 0) callback(result) + else run(i + 1) + }) } - resolved.async = true - } else { - resolved = function(cm, options) { - var app = applicableHelpers(cm, helpers) - for (var i = 0; i < app.length; i++) { - var cur = app[i](cm, options) - if (cur && cur.list.length) return cur - } - } + run(0) } + resolved.async = true resolved.supportsSelection = true return resolved } else if (words = cm.getHelper(cm.getCursor(), "hintWords")) { @@ -405,12 +424,13 @@ }); CodeMirror.registerHelper("hint", "fromList", function(cm, options) { - var cur = cm.getCursor(), token = cm.getTokenAt(cur); - var to = CodeMirror.Pos(cur.line, token.end); - if (token.string && /\w/.test(token.string[token.string.length - 1])) { - var term = token.string, from = CodeMirror.Pos(cur.line, token.start); + var cur = cm.getCursor(), token = cm.getTokenAt(cur) + var term, from = CodeMirror.Pos(cur.line, token.start), to = cur + if (token.start < cur.ch && /\w/.test(token.string.charAt(cur.ch - token.start - 1))) { + term = token.string.substr(0, cur.ch - token.start) } else { - var term = "", from = to; + term = "" + from = cur } var found = []; for (var i = 0; i < options.words.length; i++) { diff --git a/rhodecode/public/js/src/codemirror/codemirror_loadmode.js b/rhodecode/public/js/src/codemirror/codemirror_loadmode.js --- a/rhodecode/public/js/src/codemirror/codemirror_loadmode.js +++ b/rhodecode/public/js/src/codemirror/codemirror_loadmode.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS diff --git a/rhodecode/public/js/src/codemirror/codemirror_overlay.js b/rhodecode/public/js/src/codemirror/codemirror_overlay.js --- a/rhodecode/public/js/src/codemirror/codemirror_overlay.js +++ 
b/rhodecode/public/js/src/codemirror/codemirror_overlay.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE // Utility function that allows modes to be combined. The mode given // as the base argument takes care of most of the normal mode @@ -68,16 +68,21 @@ CodeMirror.overlayMode = function(base, else return state.overlayCur; }, - indent: base.indent && function(state, textAfter) { - return base.indent(state.base, textAfter); + indent: base.indent && function(state, textAfter, line) { + return base.indent(state.base, textAfter, line); }, electricChars: base.electricChars, innerMode: function(state) { return {state: state.base, mode: base}; }, blankLine: function(state) { - if (base.blankLine) base.blankLine(state.base); - if (overlay.blankLine) overlay.blankLine(state.overlay); + var baseToken, overlayToken; + if (base.blankLine) baseToken = base.blankLine(state.base); + if (overlay.blankLine) overlayToken = overlay.blankLine(state.overlay); + + return overlayToken == null ? + baseToken : + (combine && baseToken != null ? baseToken + " " + overlayToken : overlayToken); } }; }; diff --git a/rhodecode/public/js/src/codemirror/codemirror_placeholder.js b/rhodecode/public/js/src/codemirror/codemirror_placeholder.js --- a/rhodecode/public/js/src/codemirror/codemirror_placeholder.js +++ b/rhodecode/public/js/src/codemirror/codemirror_placeholder.js @@ -1,5 +1,5 @@ // CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE +// Distributed under an MIT license: https://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS @@ -14,10 +14,12 @@ if (val && !prev) { cm.on("blur", onBlur); cm.on("change", onChange); + cm.on("swapDoc", onChange); onChange(cm); } else if (!val && prev) { cm.off("blur", onBlur); cm.off("change", onChange); + cm.off("swapDoc", onChange); clearPlaceholder(cm); var wrapper = cm.getWrapperElement(); wrapper.className = wrapper.className.replace(" CodeMirror-empty", ""); @@ -36,7 +38,8 @@ clearPlaceholder(cm); var elt = cm.state.placeholder = document.createElement("pre"); elt.style.cssText = "height: 0; overflow: visible"; - elt.className = "CodeMirror-placeholder"; + elt.style.direction = cm.getOption("direction"); + elt.className = "CodeMirror-placeholder CodeMirror-line-like"; var placeHolder = cm.getOption("placeholder") if (typeof placeHolder == "string") placeHolder = document.createTextNode(placeHolder) elt.appendChild(placeHolder) diff --git a/rhodecode/public/js/src/components/index.js b/rhodecode/public/js/src/components/index.js --- a/rhodecode/public/js/src/components/index.js +++ b/rhodecode/public/js/src/components/index.js @@ -2,6 +2,5 @@ import '@polymer/iron-ajax/iron-ajax.js' import './shared-styles.js'; import './channelstream-connection/channelstream-connection.js'; import './rhodecode-toast/rhodecode-toast.js'; -import './rhodecode-toggle/rhodecode-toggle.js'; import './rhodecode-unsafe-html/rhodecode-unsafe-html.js'; import './rhodecode-app/rhodecode-app.js'; diff --git a/rhodecode/public/js/src/components/rhodecode-toggle/rhodecode-toggle.js b/rhodecode/public/js/src/components/rhodecode-toggle/rhodecode-toggle.js deleted file mode 100644 --- a/rhodecode/public/js/src/components/rhodecode-toggle/rhodecode-toggle.js +++ /dev/null @@ -1,57 +0,0 @@ -import 
{PolymerElement, html} from '@polymer/polymer/polymer-element.js'; -import '@polymer/paper-toggle-button/paper-toggle-button.js'; -import '@polymer/paper-spinner/paper-spinner.js'; -import '@polymer/paper-tooltip/paper-tooltip.js'; - -export class RhodecodeToggle extends PolymerElement { - - static get is() { - return 'rhodecode-toggle'; - } - - static get template() { - return html` - -
    - [[labelStatus(checked)]] - - [[tooltipText]] - -
    - `; - } - - static get properties() { - return { - noSpinner: {type: Boolean, value: false, reflectToAttribute: true}, - tooltipText: {type: String, value: "Click to toggle", reflectToAttribute: true}, - checked: {type: Boolean, value: false, reflectToAttribute: true}, - active: {type: Boolean, value: false, reflectToAttribute: true, notify: true} - } - } - - shouldShow() { - return !this.noSpinner - } - - labelStatus(isActive) { - return this.checked ? 'Enabled' : "Disabled" - } -} - -customElements.define(RhodecodeToggle.is, RhodecodeToggle); diff --git a/rhodecode/public/js/src/components/rhodecode-toggle/rhodecode-toggle.less b/rhodecode/public/js/src/components/rhodecode-toggle/rhodecode-toggle.less deleted file mode 100644 --- a/rhodecode/public/js/src/components/rhodecode-toggle/rhodecode-toggle.less +++ /dev/null @@ -1,14 +0,0 @@ -@import '../../../../css/variables'; - -.rc-toggle { - float: left; - position: relative; - - paper-spinner { - position: absolute; - top: 0; - left: -30px; - width: 20px; - height: 20px; - } -} \ No newline at end of file diff --git a/rhodecode/public/js/src/rhodecode.js b/rhodecode/public/js/src/rhodecode.js --- a/rhodecode/public/js/src/rhodecode.js +++ b/rhodecode/public/js/src/rhodecode.js @@ -221,6 +221,103 @@ var clipboardActivate = function() { }); }; +var tooltipActivate = function () { + var delay = 50; + var animation = 'fade'; + var theme = 'tooltipster-shadow'; + var debug = false; + + $('.tooltip').tooltipster({ + debug: debug, + theme: theme, + animation: animation, + delay: delay, + contentCloning: true, + contentAsHTML: true, + + functionBefore: function (instance, helper) { + var $origin = $(helper.origin); + var data = '
    {0}
    '.format(instance.content()); + instance.content(data); + } + }); + var hovercardCache = {}; + + var loadHoverCard = function (url, altHovercard, callback) { + var id = url; + + if (hovercardCache[id] !== undefined) { + callback(hovercardCache[id]); + return true; + } + + hovercardCache[id] = undefined; + $.get(url, function (data) { + hovercardCache[id] = data; + callback(hovercardCache[id]); + return true; + }).fail(function (data, textStatus, errorThrown) { + + if (parseInt(data.status) === 404) { + var msg = "

    {0}

    ".format(altHovercard || "No Data exists for this hovercard"); + } else { + var msg = "

    Error while fetching hovercard.\nError code {0} ({1}).

    ".format(data.status,data.statusText); + } + callback(msg); + return false + }); + }; + + $('.tooltip-hovercard').tooltipster({ + debug: debug, + theme: theme, + animation: animation, + delay: delay, + interactive: true, + contentCloning: true, + + trigger: 'custom', + triggerOpen: { + mouseenter: true, + }, + triggerClose: { + mouseleave: true, + originClick: true, + touchleave: true + }, + content: _gettext('Loading...'), + contentAsHTML: true, + updateAnimation: null, + + functionBefore: function (instance, helper) { + + var $origin = $(helper.origin); + + // we set a variable so the data is only loaded once via Ajax, not every time the tooltip opens + if ($origin.data('loaded') !== true) { + var hovercardUrl = $origin.data('hovercardUrl'); + var altHovercard =$origin.data('hovercardAlt'); + + if (hovercardUrl !== undefined && hovercardUrl !== "") { + var loaded = loadHoverCard(hovercardUrl, altHovercard, function (data) { + instance.content(data); + }) + } else { + if ($origin.data('hovercardAltHtml')) { + var data = atob($origin.data('hovercardAltHtml')); + } else { + var data = '
    {0}
    '.format(altHovercard) + } + var loaded = true; + instance.content(data); + } + + // to remember that the data has been loaded + $origin.data('loaded', loaded); + } + } + }) +}; // Formatting values in a Select2 dropdown of commit references var formatSelect2SelectionRefs = function(commit_ref){ @@ -437,15 +534,17 @@ function scrollToElement(element, percen if (location.hash) { var result = splitDelimitedHash(location.hash); - var loc = result.loc; + + var loc = result.loc; + if (loc.length > 1) { var highlightable_line_tds = []; // source code line format - var page_highlights = loc.substring( - loc.indexOf('#') + 1).split('L'); + var page_highlights = loc.substring(loc.indexOf('#') + 1).split('L'); + // multi-line HL, for files if (page_highlights.length > 1) { var highlight_ranges = page_highlights[1].split(","); var h_lines = []; @@ -459,8 +558,7 @@ function scrollToElement(element, percen h_lines.push(i); } } - } - else { + } else { h_lines.push(parseInt(highlight_ranges[pos])); } } @@ -472,24 +570,45 @@ function scrollToElement(element, percen } } - // now check a direct id reference (diff page) - if ($(loc).length && $(loc).hasClass('cb-lineno')) { + // now check a direct id reference of line in diff / pull-request page) + if ($(loc).length > 0 && $(loc).hasClass('cb-lineno')) { highlightable_line_tds.push($(loc)); } + + // mark diff lines as selected $.each(highlightable_line_tds, function (i, $td) { $td.addClass('cb-line-selected'); // line number td $td.prev().addClass('cb-line-selected'); // line data $td.next().addClass('cb-line-selected'); // line content }); - if (highlightable_line_tds.length) { + if (highlightable_line_tds.length > 0) { var $first_line_td = highlightable_line_tds[0]; scrollToElement($first_line_td); $.Topic('/ui/plugins/code/anchor_focus').prepareOrPublish({ td: $first_line_td, remainder: result.remainder }); + } else { + // case for direct anchor to comments + var $line = $(loc); + + if ($line.hasClass('comment-general')) { + $line.show(); + } else if ($line.hasClass('comment-inline')) { + $line.show(); + var $cb = $line.closest('.cb'); + $cb.removeClass('cb-collapsed') + } + if ($line.length > 0) { + $line.addClass('comment-selected-hl'); + offsetScroll($line, 70); + } + if (!$line.hasClass('comment-outdated') && result.remainder === '/ReplyToComment') { + $line.nextAll('.cb-comment-add-button').trigger('click'); + } } + } } collapsableContent(); diff --git a/rhodecode/public/js/src/rhodecode/changelog.js b/rhodecode/public/js/src/rhodecode/changelog.js --- a/rhodecode/public/js/src/rhodecode/changelog.js +++ b/rhodecode/public/js/src/rhodecode/changelog.js @@ -145,7 +145,7 @@ var CommitsController = function () { this.loadNext = function (node, page, branch, commit_id, f_path) { var loadUrl = this.getChunkUrl(page, 'next', branch, commit_id, f_path); var postData = {'graph': JSON.stringify(this.getCurrentGraphData())}; - + $(node).html('loading...').addClass('disabled').css({'cursor':'default'}); $.post(loadUrl, postData, function (data) { $(node).closest('tbody').append(data); $(node).closest('td').remove(); @@ -156,7 +156,7 @@ var CommitsController = function () { this.loadPrev = function (node, page, branch, commit_id, f_path) { var loadUrl = this.getChunkUrl(page, 'prev', branch, commit_id, f_path); var postData = {'graph': JSON.stringify(this.getCurrentGraphData())}; - + $(node).html('loading...').addClass('disabled').css({'cursor':'default'}); $.post(loadUrl, postData, function (data) { $(node).closest('tbody').prepend(data); 
$(node).closest('td').remove(); diff --git a/rhodecode/public/js/src/rhodecode/codemirror.js b/rhodecode/public/js/src/rhodecode/codemirror.js --- a/rhodecode/public/js/src/rhodecode/codemirror.js +++ b/rhodecode/public/js/src/rhodecode/codemirror.js @@ -269,14 +269,13 @@ var initMarkupCodeMirror = function(text cmLog.debug('Loading codemirror mode', DEFAULT_RENDERER); // start listening on changes to make auto-expanded editor - cm.on("change", function(instance, changeObj) { + cm.on("change", function (instance, changeObj) { var height = initialHeight; var lines = instance.lineCount(); - if ( lines > 6 && lines < 20) { + if (lines > 6 && lines < 20) { height = "auto"; - } - else if (lines >= 20){ - zheight = 20*15; + } else if (lines >= 20) { + height = 20 * 15; } instance.setSize(null, height); @@ -368,8 +367,9 @@ var initCommentBoxCodeMirror = function( $(CommentForm.statusChange).select2("val", 'approved').trigger('change'); }, render: function(elt, data, completion) { - var el = document.createElement('div'); - el.className = "flag_status flag_status_comment_box approved pull-left"; + var el = document.createElement('i'); + + el.className = "icon-circle review-status-approved"; elt.appendChild(el); el = document.createElement('span'); @@ -387,8 +387,8 @@ var initCommentBoxCodeMirror = function( $(CommentForm.statusChange).select2("val", 'rejected').trigger('change'); }, render: function(elt, data, completion) { - var el = document.createElement('div'); - el.className = "flag_status flag_status_comment_box rejected pull-left"; + var el = document.createElement('i'); + el.className = "icon-circle review-status-rejected"; elt.appendChild(el); el = document.createElement('span'); @@ -500,15 +500,15 @@ var initCommentBoxCodeMirror = function( cm.setOption("mode", DEFAULT_RENDERER); CodeMirror.autoLoadMode(cm, DEFAULT_RENDERER); // load rst or markdown mode cmLog.debug('Loading codemirror mode', DEFAULT_RENDERER); + // start listening on changes to make auto-expanded editor - cm.on("change", function(self) { + cm.on("change", function (self) { var height = initialHeight; var lines = self.lineCount(); - if ( lines > 6 && lines < 20) { + if (lines > 6 && lines < 20) { height = "auto"; - } - else if (lines >= 20){ - zheight = 20*15; + } else if (lines >= 20) { + height = 20 * 15; } self.setSize(null, height); }); @@ -708,8 +708,8 @@ var fillCodeMirrorOptions = function(tar this.initStatusChangeSelector = function(){ var formatChangeStatus = function(state, escapeMarkup) { var originalOption = state.element; - return '
    ' + - '' + escapeMarkup(state.text) + ''; + var tmpl = '{1}'.format($(originalOption).data('status'), escapeMarkup(state.text)); + return tmpl }; var formatResult = function(result, container, query, escapeMarkup) { return formatChangeStatus(result, escapeMarkup); diff --git a/rhodecode/public/js/src/rhodecode/comments.js b/rhodecode/public/js/src/rhodecode/comments.js --- a/rhodecode/public/js/src/rhodecode/comments.js +++ b/rhodecode/public/js/src/rhodecode/comments.js @@ -218,8 +218,8 @@ var _submitAjaxPOST = function(url, post this.initStatusChangeSelector = function(){ var formatChangeStatus = function(state, escapeMarkup) { var originalOption = state.element; - return '
    ' + - '' + escapeMarkup(state.text) + ''; + var tmpl = '{1}'.format($(originalOption).data('status'), escapeMarkup(state.text)); + return tmpl }; var formatResult = function(result, container, query, escapeMarkup) { return formatChangeStatus(result, escapeMarkup); @@ -335,6 +335,7 @@ var _submitAjaxPOST = function(url, post $('#injected_page_comments').append(o.rendered_text); self.resetCommentFormState(); timeagoActivate(); + tooltipActivate(); // mark visually which comment was resolved if (resolvesCommentId) { @@ -510,15 +511,17 @@ var CommentsController = function() { node = $('comment-current') } } + $wrapper = $(node).closest('div.comment'); - $comment = $(node).closest(klass); - $comments = $(klass); // show hidden comment when referenced. if (!$wrapper.is(':visible')){ $wrapper.show(); } + $comment = $(node).closest(klass); + $comments = $(klass); + $('.comment-selected').removeClass('comment-selected'); var nextIdx = $(klass).index($comment) + offset; @@ -628,7 +631,7 @@ var CommentsController = function() { var cm = commentForm.getCmInstance(); if (resolvesCommentId){ - var placeholderText = _gettext('Leave a comment, or click resolve button to resolve TODO comment #{0}').format(resolvesCommentId); + var placeholderText = _gettext('Leave a resolution comment, or click resolve button to resolve TODO comment #{0}').format(resolvesCommentId); } setTimeout(function() { @@ -654,6 +657,95 @@ var CommentsController = function() { }, 100); } + // add dropzone support + var insertAttachmentText = function (cm, attachmentName, attachmentStoreUrl, isRendered) { + var renderer = templateContext.visual.default_renderer; + if (renderer == 'rst') { + var attachmentUrl = '`#{0} <{1}>`_'.format(attachmentName, attachmentStoreUrl); + if (isRendered){ + attachmentUrl = '\n.. image:: {0}'.format(attachmentStoreUrl); + } + } else if (renderer == 'markdown') { + var attachmentUrl = '[{0}]({1})'.format(attachmentName, attachmentStoreUrl); + if (isRendered){ + attachmentUrl = '!' + attachmentUrl; + } + } else { + var attachmentUrl = '{}'.format(attachmentStoreUrl); + } + cm.replaceRange(attachmentUrl+'\n', CodeMirror.Pos(cm.lastLine())); + + return false; + }; + + //see: https://www.dropzonejs.com/#configuration + var storeUrl = pyroutes.url('repo_commit_comment_attachment_upload', + {'repo_name': templateContext.repo_name, + 'commit_id': templateContext.commit_data.commit_id}) + + var previewTmpl = $(formElement).find('.comment-attachment-uploader-template').get(0); + if (previewTmpl !== undefined){ + var selectLink = $(formElement).find('.pick-attachment').get(0); + $(formElement).find('.comment-attachment-uploader').dropzone({ + url: storeUrl, + headers: {"X-CSRF-Token": CSRF_TOKEN}, + paramName: function () { + return "attachment" + }, // The name that will be used to transfer the file + clickable: selectLink, + parallelUploads: 1, + maxFiles: 10, + maxFilesize: templateContext.attachment_store.max_file_size_mb, + uploadMultiple: false, + autoProcessQueue: true, // if false queue will not be processed automatically. 
+ createImageThumbnails: false, + previewTemplate: previewTmpl.innerHTML, + + accept: function (file, done) { + done(); + }, + init: function () { + + this.on("sending", function (file, xhr, formData) { + $(formElement).find('.comment-attachment-uploader').find('.dropzone-text').hide(); + $(formElement).find('.comment-attachment-uploader').find('.dropzone-upload').show(); + }); + + this.on("success", function (file, response) { + $(formElement).find('.comment-attachment-uploader').find('.dropzone-text').show(); + $(formElement).find('.comment-attachment-uploader').find('.dropzone-upload').hide(); + + var isRendered = false; + var ext = file.name.split('.').pop(); + var imageExts = templateContext.attachment_store.image_ext; + if (imageExts.indexOf(ext) !== -1){ + isRendered = true; + } + + insertAttachmentText(cm, file.name, response.repo_fqn_access_path, isRendered) + }); + + this.on("error", function (file, errorMessage, xhr) { + $(formElement).find('.comment-attachment-uploader').find('.dropzone-upload').hide(); + + var error = null; + + if (xhr !== undefined){ + var httpStatus = xhr.status + " " + xhr.statusText; + if (xhr !== undefined && xhr.status >= 500) { + error = httpStatus; + } + } + + if (error === null) { + error = errorMessage.error || errorMessage || httpStatus; + } + $(file.previewElement).find('.dz-error-message').html('ERROR: {0}'.format(error)); + + }); + } + }); + } return commentForm; }; @@ -776,6 +868,7 @@ var CommentsController = function() { // re trigger the linkification of next/prev navigation linkifyComments($('.inline-comment-injected')); timeagoActivate(); + tooltipActivate(); if (window.updateSticky !== undefined) { // potentially our comments change the active window size, so we diff --git a/rhodecode/public/js/src/rhodecode/files.js b/rhodecode/public/js/src/rhodecode/files.js --- a/rhodecode/public/js/src/rhodecode/files.js +++ b/rhodecode/public/js/src/rhodecode/files.js @@ -382,6 +382,7 @@ var getFilesMetadata = function() { metadataRequest.done(function(data) { $('#file-tree').html(data); timeagoActivate(); + tooltipActivate(); }); metadataRequest.fail(function (data, textStatus, errorThrown) { if (data.status != 0) { @@ -406,7 +407,8 @@ var showAuthors = function(elem, annotat timeout: 5000 }).complete(function(){ $(elem).hide(); - $('#file_authors_title').html(_gettext('All Authors')) + $('#file_authors_title').html(_gettext('All Authors')); + tooltipActivate(); }) }; diff --git a/rhodecode/public/js/src/rhodecode/pullrequests.js b/rhodecode/public/js/src/rhodecode/pullrequests.js --- a/rhodecode/public/js/src/rhodecode/pullrequests.js +++ b/rhodecode/public/js/src/rhodecode/pullrequests.js @@ -75,12 +75,13 @@ var getTitleAndDescription = function(so var desc = ''; $.each($(elements).get().reverse().slice(0, limit), function(idx, value) { - var rawMessage = $(value).find('td.td-description .message').data('messageRaw'); + var rawMessage = $(value).find('td.td-description .message').data('messageRaw').toString(); desc += '- ' + rawMessage.split('\n')[0].replace(/\n+$/, "") + '\n'; }); // only 1 commit, use commit message as title if (elements.length === 1) { - title = $(elements[0]).find('td.td-description .message').data('messageRaw').split('\n')[0]; + var rawMessage = $(elements[0]).find('td.td-description .message').data('messageRaw').toString(); + title = rawMessage.split('\n')[0]; } else { // use reference name @@ -320,6 +321,7 @@ ReviewersController = function () { 'reasons': reasons, 'create': true }); + tooltipActivate(); } } @@ -342,18 
+344,29 @@ var _updatePullRequest = function(repo_n } else { postData.csrf_token = CSRF_TOKEN; } + var success = function(o) { - window.location.reload(); + var redirectUrl = o['redirect_url']; + if (redirectUrl !== undefined && redirectUrl !== null && redirectUrl !== '') { + window.location = redirectUrl; + } else { + window.location.reload(); + } }; + ajaxPOST(url, postData, success); }; /** * PULL REQUEST update commits */ -var updateCommits = function(repo_name, pull_request_id) { +var updateCommits = function(repo_name, pull_request_id, force) { var postData = { - 'update_commits': true}; + 'update_commits': true + }; + if (force !== undefined && force === true) { + postData['force_refresh'] = true + } _updatePullRequest(repo_name, pull_request_id, postData); }; @@ -546,6 +559,66 @@ VersionController = function () { } return false + }; + + this.toggleElement = function (elem, target) { + var $elem = $(elem); + var $target = $(target); + + if ($target.is(':visible')) { + $target.hide(); + $elem.html($elem.data('toggleOn')) + } else { + $target.show(); + $elem.html($elem.data('toggleOff')) + } + + return false } +}; + + +UpdatePrController = function () { + var self = this; + this.$updateCommits = $('#update_commits'); + this.$updateCommitsSwitcher = $('#update_commits_switcher'); + + this.lockUpdateButton = function (label) { + self.$updateCommits.attr('disabled', 'disabled'); + self.$updateCommitsSwitcher.attr('disabled', 'disabled'); + + self.$updateCommits.addClass('disabled'); + self.$updateCommitsSwitcher.addClass('disabled'); + + self.$updateCommits.removeClass('btn-primary'); + self.$updateCommitsSwitcher.removeClass('btn-primary'); + + self.$updateCommits.text(_gettext(label)); + }; + + this.isUpdateLocked = function () { + return self.$updateCommits.attr('disabled') !== undefined; + }; + + this.updateCommits = function (curNode) { + if (self.isUpdateLocked()) { + return + } + self.lockUpdateButton(_gettext('Updating...')); + updateCommits( + templateContext.repo_name, + templateContext.pull_request_data.pull_request_id); + }; + + this.forceUpdateCommits = function () { + if (self.isUpdateLocked()) { + return + } + self.lockUpdateButton(_gettext('Force updating...')); + var force = true; + updateCommits( + templateContext.repo_name, + templateContext.pull_request_data.pull_request_id, force); + }; }; \ No newline at end of file diff --git a/rhodecode/public/js/src/rhodecode/tooltips.js b/rhodecode/public/js/src/rhodecode/tooltips.js --- a/rhodecode/public/js/src/rhodecode/tooltips.js +++ b/rhodecode/public/js/src/rhodecode/tooltips.js @@ -1,94 +1,4283 @@ -// # Copyright (C) 2010-2019 RhodeCode GmbH -// # -// # This program is free software: you can redistribute it and/or modify -// # it under the terms of the GNU Affero General Public License, version 3 -// # (only), as published by the Free Software Foundation. -// # -// # This program is distributed in the hope that it will be useful, -// # but WITHOUT ANY WARRANTY; without even the implied warranty of -// # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// # GNU General Public License for more details. -// # -// # You should have received a copy of the GNU Affero General Public License -// # along with this program. If not, see . -// # -// # This program is dual-licensed. 
If you wish to learn more about the -// # RhodeCode Enterprise Edition, including its added features, Support services, -// # and proprietary license terms, please see https://rhodecode.com/licenses/ +/** + * tooltipster http://iamceege.github.io/tooltipster/ + * A rockin' custom tooltip jQuery plugin + * Developed by Caleb Jacob and Louis Ameline + * MIT license + */ +(function (root, factory) { + if (typeof define === 'function' && define.amd) { + // AMD. Register as an anonymous module unless amdModuleId is set + define(["jquery"], function (a0) { + return (factory(a0)); + }); + } else if (typeof exports === 'object') { + // Node. Does not work with strict CommonJS, but + // only CommonJS-like environments that support module.exports, + // like Node. + module.exports = factory(require("jquery")); + } else { + factory(jQuery); + } +}(this, function ($) { + +// This file will be UMDified by a build task. + +var defaults = { + animation: 'fade', + animationDuration: 350, + content: null, + contentAsHTML: false, + contentCloning: false, + debug: true, + delay: 300, + delayTouch: [300, 500], + functionInit: null, + functionBefore: null, + functionReady: null, + functionAfter: null, + functionFormat: null, + IEmin: 6, + interactive: false, + multiple: false, + // will default to document.body, or must be an element positioned at (0, 0) + // in the document, typically like the very top views of an app. + parent: null, + plugins: ['sideTip'], + repositionOnScroll: false, + restoration: 'none', + selfDestruction: true, + theme: [], + timer: 0, + trackerInterval: 500, + trackOrigin: false, + trackTooltip: false, + trigger: 'hover', + triggerClose: { + click: false, + mouseleave: false, + originClick: false, + scroll: false, + tap: false, + touchleave: false + }, + triggerOpen: { + click: false, + mouseenter: false, + tap: false, + touchstart: false + }, + updateAnimation: 'rotate', + zIndex: 9999999 + }, + // we'll avoid using the 'window' global as a good practice but npm's + // jquery@<2.1.0 package actually requires a 'window' global, so not sure + // it's useful at all + win = (typeof window != 'undefined') ? window : null, + // env will be proxied by the core for plugins to have access its properties + env = { + // detect if this device can trigger touch events. Better have a false + // positive (unused listeners, that's ok) than a false negative. 
+ // https://github.com/Modernizr/Modernizr/blob/master/feature-detects/touchevents.js + // http://stackoverflow.com/questions/4817029/whats-the-best-way-to-detect-a-touch-screen-device-using-javascript + hasTouchCapability: !!( + win + && ( 'ontouchstart' in win + || (win.DocumentTouch && win.document instanceof win.DocumentTouch) + || win.navigator.maxTouchPoints + ) + ), + hasTransitions: transitionSupport(), + IE: false, + // don't set manually, it will be updated by a build task after the manifest + semVer: '4.2.7', + window: win + }, + core = function() { + + // core variables + + // the core emitters + this.__$emitterPrivate = $({}); + this.__$emitterPublic = $({}); + this.__instancesLatestArr = []; + // collects plugin constructors + this.__plugins = {}; + // proxy env variables for plugins who might use them + this._env = env; + }; + +// core methods +core.prototype = { + + /** + * A function to proxy the public methods of an object onto another + * + * @param {object} constructor The constructor to bridge + * @param {object} obj The object that will get new methods (an instance or the core) + * @param {string} pluginName A plugin name for the console log message + * @return {core} + * @private + */ + __bridge: function(constructor, obj, pluginName) { + + // if it's not already bridged + if (!obj[pluginName]) { + + var fn = function() {}; + fn.prototype = constructor; + + var pluginInstance = new fn(); + + // the _init method has to exist in instance constructors but might be missing + // in core constructors + if (pluginInstance.__init) { + pluginInstance.__init(obj); + } + + $.each(constructor, function(methodName, fn) { + + // don't proxy "private" methods, only "protected" and public ones + if (methodName.indexOf('__') != 0) { + + // if the method does not exist yet + if (!obj[methodName]) { + + obj[methodName] = function() { + return pluginInstance[methodName].apply(pluginInstance, Array.prototype.slice.apply(arguments)); + }; + + // remember to which plugin this method corresponds (several plugins may + // have methods of the same name, we need to be sure) + obj[methodName].bridged = pluginInstance; + } + else if (defaults.debug) { + + console.log('The '+ methodName +' method of the '+ pluginName + +' plugin conflicts with another plugin or native methods'); + } + } + }); + + obj[pluginName] = pluginInstance; + } + + return this; + }, + + /** + * For mockup in Node env if need be, for testing purposes + * + * @return {core} + * @private + */ + __setWindow: function(window) { + env.window = window; + return this; + }, + + /** + * Returns a ruler, a tool to help measure the size of a tooltip under + * various settings. 
Meant for plugins + * + * @see Ruler + * @return {object} A Ruler instance + * @protected + */ + _getRuler: function($tooltip) { + return new Ruler($tooltip); + }, + + /** + * For internal use by plugins, if needed + * + * @return {core} + * @protected + */ + _off: function() { + this.__$emitterPrivate.off.apply(this.__$emitterPrivate, Array.prototype.slice.apply(arguments)); + return this; + }, + + /** + * For internal use by plugins, if needed + * + * @return {core} + * @protected + */ + _on: function() { + this.__$emitterPrivate.on.apply(this.__$emitterPrivate, Array.prototype.slice.apply(arguments)); + return this; + }, + + /** + * For internal use by plugins, if needed + * + * @return {core} + * @protected + */ + _one: function() { + this.__$emitterPrivate.one.apply(this.__$emitterPrivate, Array.prototype.slice.apply(arguments)); + return this; + }, + + /** + * Returns (getter) or adds (setter) a plugin + * + * @param {string|object} plugin Provide a string (in the full form + * "namespace.name") to use as as getter, an object to use as a setter + * @return {object|core} + * @protected + */ + _plugin: function(plugin) { + + var self = this; + + // getter + if (typeof plugin == 'string') { + + var pluginName = plugin, + p = null; + + // if the namespace is provided, it's easy to search + if (pluginName.indexOf('.') > 0) { + p = self.__plugins[pluginName]; + } + // otherwise, return the first name that matches + else { + $.each(self.__plugins, function(i, plugin) { + + if (plugin.name.substring(plugin.name.length - pluginName.length - 1) == '.'+ pluginName) { + p = plugin; + return false; + } + }); + } + + return p; + } + // setter + else { + + // force namespaces + if (plugin.name.indexOf('.') < 0) { + throw new Error('Plugins must be namespaced'); + } + + self.__plugins[plugin.name] = plugin; + + // if the plugin has core features + if (plugin.core) { + + // bridge non-private methods onto the core to allow new core methods + self.__bridge(plugin.core, self, plugin.name); + } + + return this; + } + }, + + /** + * Trigger events on the core emitters + * + * @returns {core} + * @protected + */ + _trigger: function() { + + var args = Array.prototype.slice.apply(arguments); + + if (typeof args[0] == 'string') { + args[0] = { type: args[0] }; + } + + // note: the order of emitters matters + this.__$emitterPrivate.trigger.apply(this.__$emitterPrivate, args); + this.__$emitterPublic.trigger.apply(this.__$emitterPublic, args); + + return this; + }, + + /** + * Returns instances of all tooltips in the page or an a given element + * + * @param {string|HTML object collection} selector optional Use this + * parameter to restrict the set of objects that will be inspected + * for the retrieval of instances. By default, all instances in the + * page are returned. 
+ * @return {array} An array of instance objects + * @public + */ + instances: function(selector) { + + var instances = [], + sel = selector || '.tooltipstered'; + + $(sel).each(function() { + + var $this = $(this), + ns = $this.data('tooltipster-ns'); + + if (ns) { + + $.each(ns, function(i, namespace) { + instances.push($this.data(namespace)); + }); + } + }); + + return instances; + }, + + /** + * Returns the Tooltipster objects generated by the last initializing call + * + * @return {array} An array of instance objects + * @public + */ + instancesLatest: function() { + return this.__instancesLatestArr; + }, + + /** + * For public use only, not to be used by plugins (use ::_off() instead) + * + * @return {core} + * @public + */ + off: function() { + this.__$emitterPublic.off.apply(this.__$emitterPublic, Array.prototype.slice.apply(arguments)); + return this; + }, + + /** + * For public use only, not to be used by plugins (use ::_on() instead) + * + * @return {core} + * @public + */ + on: function() { + this.__$emitterPublic.on.apply(this.__$emitterPublic, Array.prototype.slice.apply(arguments)); + return this; + }, + + /** + * For public use only, not to be used by plugins (use ::_one() instead) + * + * @return {core} + * @public + */ + one: function() { + this.__$emitterPublic.one.apply(this.__$emitterPublic, Array.prototype.slice.apply(arguments)); + return this; + }, + + /** + * Returns all HTML elements which have one or more tooltips + * + * @param {string} selector optional Use this to restrict the results + * to the descendants of an element + * @return {array} An array of HTML elements + * @public + */ + origins: function(selector) { + + var sel = selector ? + selector +' ' : + ''; + + return $(sel +'.tooltipstered').toArray(); + }, + + /** + * Change default options for all future instances + * + * @param {object} d The options that should be made defaults + * @return {core} + * @public + */ + setDefaults: function(d) { + $.extend(defaults, d); + return this; + }, + + /** + * For users to trigger their handlers on the public emitter + * + * @returns {core} + * @public + */ + triggerHandler: function() { + this.__$emitterPublic.triggerHandler.apply(this.__$emitterPublic, Array.prototype.slice.apply(arguments)); + return this; + } +}; + +// $.tooltipster will be used to call core methods +$.tooltipster = new core(); + +// the Tooltipster instance class (mind the capital T) +$.Tooltipster = function(element, options) { + + // list of instance variables + + // stack of custom callbacks provided as parameters to API methods + this.__callbacks = { + close: [], + open: [] + }; + // the schedule time of DOM removal + this.__closingTime; + // this will be the user content shown in the tooltip. A capital "C" is used + // because there is also a method called content() + this.__Content; + // for the size tracker + this.__contentBcr; + // to disable the tooltip after destruction + this.__destroyed = false; + // we can't emit directly on the instance because if a method with the same + // name as the event exists, it will be called by jQuery. Se we use a plain + // object as emitter. This emitter is for internal use by plugins, + // if needed. 
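  // (Editor's note) Illustrative sketch only, not part of the upstream patch: typical use
  // of the public core API defined above. The selector and option values are invented;
  // rhodecode.js itself calls $('.tooltip').tooltipster({...}) in the same way.
  //
  //   // change defaults for every tooltip created afterwards
  //   $.tooltipster.setDefaults({ theme: 'tooltipster-shadow', delay: 50 });
  //
  //   // initialize tooltips on matching elements
  //   $('.tooltip').tooltipster({ contentAsHTML: true });
  //
  //   // later, inspect or close every live instance on the page
  //   $.each($.tooltipster.instances(), function (i, instance) { instance.close(); });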
+ this.__$emitterPrivate = $({}); + // this emitter is for the user to listen to events without risking to mess + // with our internal listeners + this.__$emitterPublic = $({}); + this.__enabled = true; + // the reference to the gc interval + this.__garbageCollector; + // various position and size data recomputed before each repositioning + this.__Geometry; + // the tooltip position, saved after each repositioning by a plugin + this.__lastPosition; + // a unique namespace per instance + this.__namespace = 'tooltipster-'+ Math.round(Math.random()*1000000); + this.__options; + // will be used to support origins in scrollable areas + this.__$originParents; + this.__pointerIsOverOrigin = false; + // to remove themes if needed + this.__previousThemes = []; + // the state can be either: appearing, stable, disappearing, closed + this.__state = 'closed'; + // timeout references + this.__timeouts = { + close: [], + open: null + }; + // store touch events to be able to detect emulated mouse events + this.__touchEvents = []; + // the reference to the tracker interval + this.__tracker = null; + // the element to which this tooltip is associated + this._$origin; + // this will be the tooltip element (jQuery wrapped HTML element). + // It's the job of a plugin to create it and append it to the DOM + this._$tooltip; + + // launch + this.__init(element, options); +}; + +$.Tooltipster.prototype = { + + /** + * @param origin + * @param options + * @private + */ + __init: function(origin, options) { + + var self = this; + + self._$origin = $(origin); + self.__options = $.extend(true, {}, defaults, options); + + // some options may need to be reformatted + self.__optionsFormat(); + + // don't run on old IE if asked no to + if ( !env.IE + || env.IE >= self.__options.IEmin + ) { + + // note: the content is null (empty) by default and can stay that + // way if the plugin remains initialized but not fed any content. The + // tooltip will just not appear. + + // let's save the initial value of the title attribute for later + // restoration if need be. + var initialTitle = null; + + // it will already have been saved in case of multiple tooltips + if (self._$origin.data('tooltipster-initialTitle') === undefined) { + + initialTitle = self._$origin.attr('title'); + + // we do not want initialTitle to be "undefined" because + // of how jQuery's .data() method works + if (initialTitle === undefined) initialTitle = null; + + self._$origin.data('tooltipster-initialTitle', initialTitle); + } + + // If content is provided in the options, it has precedence over the + // title attribute. + // Note: an empty string is considered content, only 'null' represents + // the absence of content. 
+ // Also, an existing title="" attribute will result in an empty string + // content + if (self.__options.content !== null) { + self.__contentSet(self.__options.content); + } + else { + + var selector = self._$origin.attr('data-tooltip-content'), + $el; + + if (selector){ + $el = $(selector); + } + + if ($el && $el[0]) { + self.__contentSet($el.first()); + } + else { + self.__contentSet(initialTitle); + } + } + + self._$origin + // strip the title off of the element to prevent the default tooltips + // from popping up + .removeAttr('title') + // to be able to find all instances on the page later (upon window + // events in particular) + .addClass('tooltipstered'); + + // set listeners on the origin + self.__prepareOrigin(); + + // set the garbage collector + self.__prepareGC(); + + // init plugins + $.each(self.__options.plugins, function(i, pluginName) { + self._plug(pluginName); + }); + + // to detect swiping + if (env.hasTouchCapability) { + $(env.window.document.body).on('touchmove.'+ self.__namespace +'-triggerOpen', function(event) { + self._touchRecordEvent(event); + }); + } + + self + // prepare the tooltip when it gets created. This event must + // be fired by a plugin + ._on('created', function() { + self.__prepareTooltip(); + }) + // save position information when it's sent by a plugin + ._on('repositioned', function(e) { + self.__lastPosition = e.position; + }); + } + else { + self.__options.disabled = true; + } + }, + + /** + * Insert the content into the appropriate HTML element of the tooltip + * + * @returns {self} + * @private + */ + __contentInsert: function() { + + var self = this, + $el = self._$tooltip.find('.tooltipster-content'), + formattedContent = self.__Content, + format = function(content) { + formattedContent = content; + }; + + self._trigger({ + type: 'format', + content: self.__Content, + format: format + }); + + if (self.__options.functionFormat) { + + formattedContent = self.__options.functionFormat.call( + self, + self, + { origin: self._$origin[0] }, + self.__Content + ); + } + + if (typeof formattedContent === 'string' && !self.__options.contentAsHTML) { + $el.text(formattedContent); + } + else { + $el + .empty() + .append(formattedContent); + } + + return self; + }, + + /** + * Save the content, cloning it beforehand if need be + * + * @param content + * @returns {self} + * @private + */ + __contentSet: function(content) { + + // clone if asked. 
Cloning the object makes sure that each instance has its + // own version of the content (in case a same object were provided for several + // instances) + // reminder: typeof null === object + if (content instanceof $ && this.__options.contentCloning) { + content = content.clone(true); + } + + this.__Content = content; + + this._trigger({ + type: 'updated', + content: content + }); + + return this; + }, + + /** + * Error message about a method call made after destruction + * + * @private + */ + __destroyError: function() { + throw new Error('This tooltip has been destroyed and cannot execute your method call.'); + }, + + /** + * Gather all information about dimensions and available space, + * called before every repositioning + * + * @private + * @returns {object} + */ + __geometry: function() { + + var self = this, + $target = self._$origin, + originIsArea = self._$origin.is('area'); + + // if this._$origin is a map area, the target we'll need + // the dimensions of is actually the image using the map, + // not the area itself + if (originIsArea) { + + var mapName = self._$origin.parent().attr('name'); + + $target = $('img[usemap="#'+ mapName +'"]'); + } + + var bcr = $target[0].getBoundingClientRect(), + $document = $(env.window.document), + $window = $(env.window), + $parent = $target, + // some useful properties of important elements + geo = { + // available space for the tooltip, see down below + available: { + document: null, + window: null + }, + document: { + size: { + height: $document.height(), + width: $document.width() + } + }, + window: { + scroll: { + // the second ones are for IE compatibility + left: env.window.scrollX || env.window.document.documentElement.scrollLeft, + top: env.window.scrollY || env.window.document.documentElement.scrollTop + }, + size: { + height: $window.height(), + width: $window.width() + } + }, + origin: { + // the origin has a fixed lineage if itself or one of its + // ancestors has a fixed position + fixedLineage: false, + // relative to the document + offset: {}, + size: { + height: bcr.bottom - bcr.top, + width: bcr.right - bcr.left + }, + usemapImage: originIsArea ? 
$target[0] : null, + // relative to the window + windowOffset: { + bottom: bcr.bottom, + left: bcr.left, + right: bcr.right, + top: bcr.top + } + } + }, + geoFixed = false; + + // if the element is a map area, some properties may need + // to be recalculated + if (originIsArea) { + + var shape = self._$origin.attr('shape'), + coords = self._$origin.attr('coords'); + + if (coords) { + + coords = coords.split(','); + + $.map(coords, function(val, i) { + coords[i] = parseInt(val); + }); + } + + // if the image itself is the area, nothing more to do + if (shape != 'default') { + + switch(shape) { + + case 'circle': + + var circleCenterLeft = coords[0], + circleCenterTop = coords[1], + circleRadius = coords[2], + areaTopOffset = circleCenterTop - circleRadius, + areaLeftOffset = circleCenterLeft - circleRadius; + + geo.origin.size.height = circleRadius * 2; + geo.origin.size.width = geo.origin.size.height; + + geo.origin.windowOffset.left += areaLeftOffset; + geo.origin.windowOffset.top += areaTopOffset; + + break; + + case 'rect': + + var areaLeft = coords[0], + areaTop = coords[1], + areaRight = coords[2], + areaBottom = coords[3]; + + geo.origin.size.height = areaBottom - areaTop; + geo.origin.size.width = areaRight - areaLeft; + + geo.origin.windowOffset.left += areaLeft; + geo.origin.windowOffset.top += areaTop; + + break; + + case 'poly': + + var areaSmallestX = 0, + areaSmallestY = 0, + areaGreatestX = 0, + areaGreatestY = 0, + arrayAlternate = 'even'; + + for (var i = 0; i < coords.length; i++) { + + var areaNumber = coords[i]; + + if (arrayAlternate == 'even') { + + if (areaNumber > areaGreatestX) { + + areaGreatestX = areaNumber; + + if (i === 0) { + areaSmallestX = areaGreatestX; + } + } + + if (areaNumber < areaSmallestX) { + areaSmallestX = areaNumber; + } + + arrayAlternate = 'odd'; + } + else { + if (areaNumber > areaGreatestY) { + + areaGreatestY = areaNumber; + + if (i == 1) { + areaSmallestY = areaGreatestY; + } + } + + if (areaNumber < areaSmallestY) { + areaSmallestY = areaNumber; + } + + arrayAlternate = 'even'; + } + } + + geo.origin.size.height = areaGreatestY - areaSmallestY; + geo.origin.size.width = areaGreatestX - areaSmallestX; + + geo.origin.windowOffset.left += areaSmallestX; + geo.origin.windowOffset.top += areaSmallestY; + + break; + } + } + } + + // user callback through an event + var edit = function(r) { + geo.origin.size.height = r.height, + geo.origin.windowOffset.left = r.left, + geo.origin.windowOffset.top = r.top, + geo.origin.size.width = r.width + }; + + self._trigger({ + type: 'geometry', + edit: edit, + geometry: { + height: geo.origin.size.height, + left: geo.origin.windowOffset.left, + top: geo.origin.windowOffset.top, + width: geo.origin.size.width + } + }); + + // calculate the remaining properties with what we got + + geo.origin.windowOffset.right = geo.origin.windowOffset.left + geo.origin.size.width; + geo.origin.windowOffset.bottom = geo.origin.windowOffset.top + geo.origin.size.height; + + geo.origin.offset.left = geo.origin.windowOffset.left + geo.window.scroll.left; + geo.origin.offset.top = geo.origin.windowOffset.top + geo.window.scroll.top; + geo.origin.offset.bottom = geo.origin.offset.top + geo.origin.size.height; + geo.origin.offset.right = geo.origin.offset.left + geo.origin.size.width; + + // the space that is available to display the tooltip relatively to the document + geo.available.document = { + bottom: { + height: geo.document.size.height - geo.origin.offset.bottom, + width: geo.document.size.width + }, + left: { + height: 
geo.document.size.height, + width: geo.origin.offset.left + }, + right: { + height: geo.document.size.height, + width: geo.document.size.width - geo.origin.offset.right + }, + top: { + height: geo.origin.offset.top, + width: geo.document.size.width + } + }; + + // the space that is available to display the tooltip relatively to the viewport + // (the resulting values may be negative if the origin overflows the viewport) + geo.available.window = { + bottom: { + // the inner max is here to make sure the available height is no bigger + // than the viewport height (when the origin is off screen at the top). + // The outer max just makes sure that the height is not negative (when + // the origin overflows at the bottom). + height: Math.max(geo.window.size.height - Math.max(geo.origin.windowOffset.bottom, 0), 0), + width: geo.window.size.width + }, + left: { + height: geo.window.size.height, + width: Math.max(geo.origin.windowOffset.left, 0) + }, + right: { + height: geo.window.size.height, + width: Math.max(geo.window.size.width - Math.max(geo.origin.windowOffset.right, 0), 0) + }, + top: { + height: Math.max(geo.origin.windowOffset.top, 0), + width: geo.window.size.width + } + }; + + while ($parent[0].tagName.toLowerCase() != 'html') { + + if ($parent.css('position') == 'fixed') { + geo.origin.fixedLineage = true; + break; + } + + $parent = $parent.parent(); + } + + return geo; + }, + + /** + * Some options may need to be formated before being used + * + * @returns {self} + * @private + */ + __optionsFormat: function() { + + if (typeof this.__options.animationDuration == 'number') { + this.__options.animationDuration = [this.__options.animationDuration, this.__options.animationDuration]; + } + + if (typeof this.__options.delay == 'number') { + this.__options.delay = [this.__options.delay, this.__options.delay]; + } + + if (typeof this.__options.delayTouch == 'number') { + this.__options.delayTouch = [this.__options.delayTouch, this.__options.delayTouch]; + } + + if (typeof this.__options.theme == 'string') { + this.__options.theme = [this.__options.theme]; + } + + // determine the future parent + if (this.__options.parent === null) { + this.__options.parent = $(env.window.document.body); + } + else if (typeof this.__options.parent == 'string') { + this.__options.parent = $(this.__options.parent); + } + + if (this.__options.trigger == 'hover') { + + this.__options.triggerOpen = { + mouseenter: true, + touchstart: true + }; + + this.__options.triggerClose = { + mouseleave: true, + originClick: true, + touchleave: true + }; + } + else if (this.__options.trigger == 'click') { + + this.__options.triggerOpen = { + click: true, + tap: true + }; + + this.__options.triggerClose = { + click: true, + tap: true + }; + } + + // for the plugins + this._trigger('options'); + + return this; + }, + + /** + * Schedules or cancels the garbage collector task + * + * @returns {self} + * @private + */ + __prepareGC: function() { + + var self = this; + + // in case the selfDestruction option has been changed by a method call + if (self.__options.selfDestruction) { + + // the GC task + self.__garbageCollector = setInterval(function() { + + var now = new Date().getTime(); + + // forget the old events + self.__touchEvents = $.grep(self.__touchEvents, function(event, i) { + // 1 minute + return now - event.time > 60000; + }); + + // auto-destruct if the origin is gone + if (!bodyContains(self._$origin)) { + + self.close(function(){ + self.destroy(); + }); + } + }, 20000); + } + else { + 
clearInterval(self.__garbageCollector); + } + + return self; + }, + + /** + * Sets listeners on the origin if the open triggers require them. + * Unlike the listeners set at opening time, these ones + * remain even when the tooltip is closed. It has been made a + * separate method so it can be called when the triggers are + * changed in the options. Closing is handled in _open() + * because of the bindings that may be needed on the tooltip + * itself + * + * @returns {self} + * @private + */ + __prepareOrigin: function() { + + var self = this; + + // in case we're resetting the triggers + self._$origin.off('.'+ self.__namespace +'-triggerOpen'); + + // if the device is touch capable, even if only mouse triggers + // are asked, we need to listen to touch events to know if the mouse + // events are actually emulated (so we can ignore them) + if (env.hasTouchCapability) { + + self._$origin.on( + 'touchstart.'+ self.__namespace +'-triggerOpen ' + + 'touchend.'+ self.__namespace +'-triggerOpen ' + + 'touchcancel.'+ self.__namespace +'-triggerOpen', + function(event){ + self._touchRecordEvent(event); + } + ); + } + + // mouse click and touch tap work the same way + if ( self.__options.triggerOpen.click + || (self.__options.triggerOpen.tap && env.hasTouchCapability) + ) { + + var eventNames = ''; + if (self.__options.triggerOpen.click) { + eventNames += 'click.'+ self.__namespace +'-triggerOpen '; + } + if (self.__options.triggerOpen.tap && env.hasTouchCapability) { + eventNames += 'touchend.'+ self.__namespace +'-triggerOpen'; + } + + self._$origin.on(eventNames, function(event) { + if (self._touchIsMeaningfulEvent(event)) { + self._open(event); + } + }); + } + + // mouseenter and touch start work the same way + if ( self.__options.triggerOpen.mouseenter + || (self.__options.triggerOpen.touchstart && env.hasTouchCapability) + ) { + + var eventNames = ''; + if (self.__options.triggerOpen.mouseenter) { + eventNames += 'mouseenter.'+ self.__namespace +'-triggerOpen '; + } + if (self.__options.triggerOpen.touchstart && env.hasTouchCapability) { + eventNames += 'touchstart.'+ self.__namespace +'-triggerOpen'; + } + + self._$origin.on(eventNames, function(event) { + if ( self._touchIsTouchEvent(event) + || !self._touchIsEmulatedEvent(event) + ) { + self.__pointerIsOverOrigin = true; + self._openShortly(event); + } + }); + } + + // info for the mouseleave/touchleave close triggers when they use a delay + if ( self.__options.triggerClose.mouseleave + || (self.__options.triggerClose.touchleave && env.hasTouchCapability) + ) { + + var eventNames = ''; + if (self.__options.triggerClose.mouseleave) { + eventNames += 'mouseleave.'+ self.__namespace +'-triggerOpen '; + } + if (self.__options.triggerClose.touchleave && env.hasTouchCapability) { + eventNames += 'touchend.'+ self.__namespace +'-triggerOpen touchcancel.'+ self.__namespace +'-triggerOpen'; + } + + self._$origin.on(eventNames, function(event) { + + if (self._touchIsMeaningfulEvent(event)) { + self.__pointerIsOverOrigin = false; + } + }); + } + + return self; + }, + + /** + * Do the things that need to be done only once after the tooltip + * HTML element it has been created. It has been made a separate + * method so it can be called when options are changed. Remember + * that the tooltip may actually exist in the DOM before it is + * opened, and present after it has been closed: it's the display + * plugin that takes care of handling it. 
+ * + * @returns {self} + * @private + */ + __prepareTooltip: function() { + + var self = this, + p = self.__options.interactive ? 'auto' : ''; + + // this will be useful to know quickly if the tooltip is in + // the DOM or not + self._$tooltip + .attr('id', self.__namespace) + .css({ + // pointer events + 'pointer-events': p, + zIndex: self.__options.zIndex + }); + + // themes + // remove the old ones and add the new ones + $.each(self.__previousThemes, function(i, theme) { + self._$tooltip.removeClass(theme); + }); + $.each(self.__options.theme, function(i, theme) { + self._$tooltip.addClass(theme); + }); + + self.__previousThemes = $.merge([], self.__options.theme); + + return self; + }, + + /** + * Handles the scroll on any of the parents of the origin (when the + * tooltip is open) + * + * @param {object} event + * @returns {self} + * @private + */ + __scrollHandler: function(event) { + + var self = this; + + if (self.__options.triggerClose.scroll) { + self._close(event); + } + else { + + // if the origin or tooltip have been removed: do nothing, the tracker will + // take care of it later + if (bodyContains(self._$origin) && bodyContains(self._$tooltip)) { + + var geo = null; + + // if the scroll happened on the window + if (event.target === env.window.document) { + + // if the origin has a fixed lineage, window scroll will have no + // effect on its position nor on the position of the tooltip + if (!self.__Geometry.origin.fixedLineage) { + + // we don't need to do anything unless repositionOnScroll is true + // because the tooltip will already have moved with the window + // (and of course with the origin) + if (self.__options.repositionOnScroll) { + self.reposition(event); + } + } + } + // if the scroll happened on another parent of the tooltip, it means + // that it's in a scrollable area and now needs to have its position + // adjusted or recomputed, depending ont the repositionOnScroll + // option. Also, if the origin is partly hidden due to a parent that + // hides its overflow, we'll just hide (not close) the tooltip. 
+ else { + + geo = self.__geometry(); + + var overflows = false; + + // a fixed position origin is not affected by the overflow hiding + // of a parent + if (self._$origin.css('position') != 'fixed') { + + self.__$originParents.each(function(i, el) { + + var $el = $(el), + overflowX = $el.css('overflow-x'), + overflowY = $el.css('overflow-y'); + + if (overflowX != 'visible' || overflowY != 'visible') { + + var bcr = el.getBoundingClientRect(); + + if (overflowX != 'visible') { + + if ( geo.origin.windowOffset.left < bcr.left + || geo.origin.windowOffset.right > bcr.right + ) { + overflows = true; + return false; + } + } + + if (overflowY != 'visible') { + + if ( geo.origin.windowOffset.top < bcr.top + || geo.origin.windowOffset.bottom > bcr.bottom + ) { + overflows = true; + return false; + } + } + } + + // no need to go further if fixed, for the same reason as above + if ($el.css('position') == 'fixed') { + return false; + } + }); + } + + if (overflows) { + self._$tooltip.css('visibility', 'hidden'); + } + else { + + self._$tooltip.css('visibility', 'visible'); + + // reposition + if (self.__options.repositionOnScroll) { + self.reposition(event); + } + // or just adjust offset + else { + + // we have to use offset and not windowOffset because this way, + // only the scroll distance of the scrollable areas are taken into + // account (the scrolltop value of the main window must be + // ignored since the tooltip already moves with it) + var offsetLeft = geo.origin.offset.left - self.__Geometry.origin.offset.left, + offsetTop = geo.origin.offset.top - self.__Geometry.origin.offset.top; + + // add the offset to the position initially computed by the display plugin + self._$tooltip.css({ + left: self.__lastPosition.coord.left + offsetLeft, + top: self.__lastPosition.coord.top + offsetTop + }); + } + } + } + + self._trigger({ + type: 'scroll', + event: event, + geo: geo + }); + } + } + + return self; + }, + + /** + * Changes the state of the tooltip + * + * @param {string} state + * @returns {self} + * @private + */ + __stateSet: function(state) { + + this.__state = state; + + this._trigger({ + type: 'state', + state: state + }); + + return this; + }, + + /** + * Clear appearance timeouts + * + * @returns {self} + * @private + */ + __timeoutsClear: function() { + + // there is only one possible open timeout: the delayed opening + // when the mouseenter/touchstart open triggers are used + clearTimeout(this.__timeouts.open); + this.__timeouts.open = null; + + // ... but several close timeouts: the delayed closing when the + // mouseleave close trigger is used and the timer option + $.each(this.__timeouts.close, function(i, timeout) { + clearTimeout(timeout); + }); + this.__timeouts.close = []; + + return this; + }, + + /** + * Start the tracker that will make checks at regular intervals + * + * @returns {self} + * @private + */ + __trackerStart: function() { + + var self = this, + $content = self._$tooltip.find('.tooltipster-content'); + + // get the initial content size + if (self.__options.trackTooltip) { + self.__contentBcr = $content[0].getBoundingClientRect(); + } + + self.__tracker = setInterval(function() { + + // if the origin or tooltip elements have been removed. 
+ // Note: we could destroy the instance now if the origin has + // been removed but we'll leave that task to our garbage collector + if (!bodyContains(self._$origin) || !bodyContains(self._$tooltip)) { + self._close(); + } + // if everything is alright + else { + + // compare the former and current positions of the origin to reposition + // the tooltip if need be + if (self.__options.trackOrigin) { + + var g = self.__geometry(), + identical = false; + + // compare size first (a change requires repositioning too) + if (areEqual(g.origin.size, self.__Geometry.origin.size)) { + + // for elements that have a fixed lineage (see __geometry()), we track the + // top and left properties (relative to window) + if (self.__Geometry.origin.fixedLineage) { + if (areEqual(g.origin.windowOffset, self.__Geometry.origin.windowOffset)) { + identical = true; + } + } + // otherwise, track total offset (relative to document) + else { + if (areEqual(g.origin.offset, self.__Geometry.origin.offset)) { + identical = true; + } + } + } + + if (!identical) { + + // close the tooltip when using the mouseleave close trigger + // (see https://github.com/iamceege/tooltipster/pull/253) + if (self.__options.triggerClose.mouseleave) { + self._close(); + } + else { + self.reposition(); + } + } + } + + if (self.__options.trackTooltip) { + + var currentBcr = $content[0].getBoundingClientRect(); + + if ( currentBcr.height !== self.__contentBcr.height + || currentBcr.width !== self.__contentBcr.width + ) { + self.reposition(); + self.__contentBcr = currentBcr; + } + } + } + }, self.__options.trackerInterval); + + return self; + }, + + /** + * Closes the tooltip (after the closing delay) + * + * @param event + * @param callback + * @param force Set to true to override a potential refusal of the user's function + * @returns {self} + * @protected + */ + _close: function(event, callback, force) { + + var self = this, + ok = true; + + self._trigger({ + type: 'close', + event: event, + stop: function() { + ok = false; + } + }); + + // a destroying tooltip (force == true) may not refuse to close + if (ok || force) { + + // save the method custom callback and cancel any open method custom callbacks + if (callback) self.__callbacks.close.push(callback); + self.__callbacks.open = []; + + // clear open/close timeouts + self.__timeoutsClear(); + + var finishCallbacks = function() { + + // trigger any close method custom callbacks and reset them + $.each(self.__callbacks.close, function(i,c) { + c.call(self, self, { + event: event, + origin: self._$origin[0] + }); + }); + + self.__callbacks.close = []; + }; + + if (self.__state != 'closed') { + + var necessary = true, + d = new Date(), + now = d.getTime(), + newClosingTime = now + self.__options.animationDuration[1]; + + // the tooltip may already already be disappearing, but if a new + // call to close() is made after the animationDuration was changed + // to 0 (for example), we ought to actually close it sooner than + // previously scheduled. In that case it should be noted that the + // browser will not adapt the animation duration to the new + // animationDuration that was set after the start of the closing + // animation. + // Note: the same thing could be considered at opening, but is not + // really useful since the tooltip is actually opened immediately + // upon a call to _open(). 
Since it would not make the opening + // animation finish sooner, its sole impact would be to trigger the + // state event and the open callbacks sooner than the actual end of + // the opening animation, which is not great. + if (self.__state == 'disappearing') { + + if ( newClosingTime > self.__closingTime + // in case closing is actually overdue because the script + // execution was suspended. See #679 + && self.__options.animationDuration[1] > 0 + ) { + necessary = false; + } + } + + if (necessary) { + + self.__closingTime = newClosingTime; + + if (self.__state != 'disappearing') { + self.__stateSet('disappearing'); + } + + var finish = function() { + + // stop the tracker + clearInterval(self.__tracker); + + // a "beforeClose" option has been asked several times but would + // probably useless since the content element is still accessible + // via ::content(), and because people can always use listeners + // inside their content to track what's going on. For the sake of + // simplicity, this has been denied. Bur for the rare people who + // really need the option (for old browsers or for the case where + // detaching the content is actually destructive, for file or + // password inputs for example), this event will do the work. + self._trigger({ + type: 'closing', + event: event + }); + + // unbind listeners which are no longer needed + + self._$tooltip + .off('.'+ self.__namespace +'-triggerClose') + .removeClass('tooltipster-dying'); + + // orientationchange, scroll and resize listeners + $(env.window).off('.'+ self.__namespace +'-triggerClose'); + + // scroll listeners + self.__$originParents.each(function(i, el) { + $(el).off('scroll.'+ self.__namespace +'-triggerClose'); + }); + // clear the array to prevent memory leaks + self.__$originParents = null; + + $(env.window.document.body).off('.'+ self.__namespace +'-triggerClose'); + + self._$origin.off('.'+ self.__namespace +'-triggerClose'); + + self._off('dismissable'); + + // a plugin that would like to remove the tooltip from the + // DOM when closed should bind on this + self.__stateSet('closed'); + + // trigger event + self._trigger({ + type: 'after', + event: event + }); + + // call our constructor custom callback function + if (self.__options.functionAfter) { + self.__options.functionAfter.call(self, self, { + event: event, + origin: self._$origin[0] + }); + } + + // call our method custom callbacks functions + finishCallbacks(); + }; + + if (env.hasTransitions) { + + self._$tooltip.css({ + '-moz-animation-duration': self.__options.animationDuration[1] + 'ms', + '-ms-animation-duration': self.__options.animationDuration[1] + 'ms', + '-o-animation-duration': self.__options.animationDuration[1] + 'ms', + '-webkit-animation-duration': self.__options.animationDuration[1] + 'ms', + 'animation-duration': self.__options.animationDuration[1] + 'ms', + 'transition-duration': self.__options.animationDuration[1] + 'ms' + }); + + self._$tooltip + // clear both potential open and close tasks + .clearQueue() + .removeClass('tooltipster-show') + // for transitions only + .addClass('tooltipster-dying'); + + if (self.__options.animationDuration[1] > 0) { + self._$tooltip.delay(self.__options.animationDuration[1]); + } + + self._$tooltip.queue(finish); + } + else { + + self._$tooltip + .stop() + .fadeOut(self.__options.animationDuration[1], finish); + } + } + } + // if the tooltip is already closed, we still need to trigger + // the method custom callbacks + else { + finishCallbacks(); + } + } + + return self; + }, + + /** + * For internal use 
by plugins, if needed + * + * @returns {self} + * @protected + */ + _off: function() { + this.__$emitterPrivate.off.apply(this.__$emitterPrivate, Array.prototype.slice.apply(arguments)); + return this; + }, + + /** + * For internal use by plugins, if needed + * + * @returns {self} + * @protected + */ + _on: function() { + this.__$emitterPrivate.on.apply(this.__$emitterPrivate, Array.prototype.slice.apply(arguments)); + return this; + }, + + /** + * For internal use by plugins, if needed + * + * @returns {self} + * @protected + */ + _one: function() { + this.__$emitterPrivate.one.apply(this.__$emitterPrivate, Array.prototype.slice.apply(arguments)); + return this; + }, + + /** + * Opens the tooltip right away. + * + * @param event + * @param callback Will be called when the opening animation is over + * @returns {self} + * @protected + */ + _open: function(event, callback) { + + var self = this; + + // if the destruction process has not begun and if this was not + // triggered by an unwanted emulated click event + if (!self.__destroying) { + + // check that the origin is still in the DOM + if ( bodyContains(self._$origin) + // if the tooltip is enabled + && self.__enabled + ) { + + var ok = true; + + // if the tooltip is not open yet, we need to call functionBefore. + // otherwise we can jst go on + if (self.__state == 'closed') { + + // trigger an event. The event.stop function allows the callback + // to prevent the opening of the tooltip + self._trigger({ + type: 'before', + event: event, + stop: function() { + ok = false; + } + }); + + if (ok && self.__options.functionBefore) { + + // call our custom function before continuing + ok = self.__options.functionBefore.call(self, self, { + event: event, + origin: self._$origin[0] + }); + } + } + + if (ok !== false) { + + // if there is some content + if (self.__Content !== null) { + + // save the method callback and cancel close method callbacks + if (callback) { + self.__callbacks.open.push(callback); + } + self.__callbacks.close = []; + + // get rid of any appearance timeouts + self.__timeoutsClear(); + + var extraTime, + finish = function() { + + if (self.__state != 'stable') { + self.__stateSet('stable'); + } + + // trigger any open method custom callbacks and reset them + $.each(self.__callbacks.open, function(i,c) { + c.call(self, self, { + origin: self._$origin[0], + tooltip: self._$tooltip[0] + }); + }); + + self.__callbacks.open = []; + }; + + // if the tooltip is already open + if (self.__state !== 'closed') { + + // the timer (if any) will start (or restart) right now + extraTime = 0; + + // if it was disappearing, cancel that + if (self.__state === 'disappearing') { + + self.__stateSet('appearing'); + + if (env.hasTransitions) { + + self._$tooltip + .clearQueue() + .removeClass('tooltipster-dying') + .addClass('tooltipster-show'); + + if (self.__options.animationDuration[0] > 0) { + self._$tooltip.delay(self.__options.animationDuration[0]); + } + + self._$tooltip.queue(finish); + } + else { + // in case the tooltip was currently fading out, bring it back + // to life + self._$tooltip + .stop() + .fadeIn(finish); + } + } + // if the tooltip is already open, we still need to trigger the method + // custom callback + else if (self.__state == 'stable') { + finish(); + } + } + // if the tooltip isn't already open, open it + else { + + // a plugin must bind on this and store the tooltip in this._$tooltip + self.__stateSet('appearing'); + + // the timer (if any) will start when the tooltip has fully appeared + // after its transition + 
extraTime = self.__options.animationDuration[0]; + + // insert the content inside the tooltip + self.__contentInsert(); + + // reposition the tooltip and attach to the DOM + self.reposition(event, true); + + // animate in the tooltip. If the display plugin wants no css + // animations, it may override the animation option with a + // dummy value that will produce no effect + if (env.hasTransitions) { + + // note: there seems to be an issue with start animations which + // are randomly not played on fast devices in both Chrome and FF, + // couldn't find a way to solve it yet. It seems that applying + // the classes before appending to the DOM helps a little, but + // it messes up some CSS transitions. The issue almost never + // happens when delay[0]==0 though + self._$tooltip + .addClass('tooltipster-'+ self.__options.animation) + .addClass('tooltipster-initial') + .css({ + '-moz-animation-duration': self.__options.animationDuration[0] + 'ms', + '-ms-animation-duration': self.__options.animationDuration[0] + 'ms', + '-o-animation-duration': self.__options.animationDuration[0] + 'ms', + '-webkit-animation-duration': self.__options.animationDuration[0] + 'ms', + 'animation-duration': self.__options.animationDuration[0] + 'ms', + 'transition-duration': self.__options.animationDuration[0] + 'ms' + }); + + setTimeout( + function() { + + // a quick hover may have already triggered a mouseleave + if (self.__state != 'closed') { + + self._$tooltip + .addClass('tooltipster-show') + .removeClass('tooltipster-initial'); + + if (self.__options.animationDuration[0] > 0) { + self._$tooltip.delay(self.__options.animationDuration[0]); + } + + self._$tooltip.queue(finish); + } + }, + 0 + ); + } + else { + + // old browsers will have to live with this + self._$tooltip + .css('display', 'none') + .fadeIn(self.__options.animationDuration[0], finish); + } + + // checks if the origin is removed while the tooltip is open + self.__trackerStart(); + + // NOTE: the listeners below have a '-triggerClose' namespace + // because we'll remove them when the tooltip closes (unlike + // the '-triggerOpen' listeners). So some of them are actually + // not about close triggers, rather about positioning. + + $(env.window) + // reposition on resize + .on('resize.'+ self.__namespace +'-triggerClose', function(e) { + + var $ae = $(document.activeElement); + + // reposition only if the resize event was not triggered upon the opening + // of a virtual keyboard due to an input field being focused within the tooltip + // (otherwise the repositioning would lose the focus) + if ( (!$ae.is('input') && !$ae.is('textarea')) + || !$.contains(self._$tooltip[0], $ae[0]) + ) { + self.reposition(e); + } + }) + // same as below for parents + .on('scroll.'+ self.__namespace +'-triggerClose', function(e) { + self.__scrollHandler(e); + }); + + self.__$originParents = self._$origin.parents(); + + // scrolling may require the tooltip to be moved or even + // repositioned in some cases + self.__$originParents.each(function(i, parent) { + + $(parent).on('scroll.'+ self.__namespace +'-triggerClose', function(e) { + self.__scrollHandler(e); + }); + }); + + if ( self.__options.triggerClose.mouseleave + || (self.__options.triggerClose.touchleave && env.hasTouchCapability) + ) { + + // we use an event to allow users/plugins to control when the mouseleave/touchleave + // close triggers will come to action. 
It allows to have more triggering elements + // than just the origin and the tooltip for example, or to cancel/delay the closing, + // or to make the tooltip interactive even if it wasn't when it was open, etc. + self._on('dismissable', function(event) { + + if (event.dismissable) { + + if (event.delay) { + + timeout = setTimeout(function() { + // event.event may be undefined + self._close(event.event); + }, event.delay); + + self.__timeouts.close.push(timeout); + } + else { + self._close(event); + } + } + else { + clearTimeout(timeout); + } + }); + + // now set the listeners that will trigger 'dismissable' events + var $elements = self._$origin, + eventNamesIn = '', + eventNamesOut = '', + timeout = null; + + // if we have to allow interaction, bind on the tooltip too + if (self.__options.interactive) { + $elements = $elements.add(self._$tooltip); + } + + if (self.__options.triggerClose.mouseleave) { + eventNamesIn += 'mouseenter.'+ self.__namespace +'-triggerClose '; + eventNamesOut += 'mouseleave.'+ self.__namespace +'-triggerClose '; + } + if (self.__options.triggerClose.touchleave && env.hasTouchCapability) { + eventNamesIn += 'touchstart.'+ self.__namespace +'-triggerClose'; + eventNamesOut += 'touchend.'+ self.__namespace +'-triggerClose touchcancel.'+ self.__namespace +'-triggerClose'; + } + + $elements + // close after some time spent outside of the elements + .on(eventNamesOut, function(event) { + + // it's ok if the touch gesture ended up to be a swipe, + // it's still a "touch leave" situation + if ( self._touchIsTouchEvent(event) + || !self._touchIsEmulatedEvent(event) + ) { + + var delay = (event.type == 'mouseleave') ? + self.__options.delay : + self.__options.delayTouch; + + self._trigger({ + delay: delay[1], + dismissable: true, + event: event, + type: 'dismissable' + }); + } + }) + // suspend the mouseleave timeout when the pointer comes back + // over the elements + .on(eventNamesIn, function(event) { + + // it's also ok if the touch event is a swipe gesture + if ( self._touchIsTouchEvent(event) + || !self._touchIsEmulatedEvent(event) + ) { + self._trigger({ + dismissable: false, + event: event, + type: 'dismissable' + }); + } + }); + } + + // close the tooltip when the origin gets a mouse click (common behavior of + // native tooltips) + if (self.__options.triggerClose.originClick) { + + self._$origin.on('click.'+ self.__namespace + '-triggerClose', function(event) { + + // we could actually let a tap trigger this but this feature just + // does not make sense on touch devices + if ( !self._touchIsTouchEvent(event) + && !self._touchIsEmulatedEvent(event) + ) { + self._close(event); + } + }); + } + + // set the same bindings for click and touch on the body to close the tooltip + if ( self.__options.triggerClose.click + || (self.__options.triggerClose.tap && env.hasTouchCapability) + ) { + + // don't set right away since the click/tap event which triggered this method + // (if it was a click/tap) is going to bubble up to the body, we don't want it + // to close the tooltip immediately after it opened + setTimeout(function() { + + if (self.__state != 'closed') { + + var eventNames = '', + $body = $(env.window.document.body); + + if (self.__options.triggerClose.click) { + eventNames += 'click.'+ self.__namespace +'-triggerClose '; + } + if (self.__options.triggerClose.tap && env.hasTouchCapability) { + eventNames += 'touchend.'+ self.__namespace +'-triggerClose'; + } + + $body.on(eventNames, function(event) { + + if (self._touchIsMeaningfulEvent(event)) { + + 
self._touchRecordEvent(event); + + if (!self.__options.interactive || !$.contains(self._$tooltip[0], event.target)) { + self._close(event); + } + } + }); + + // needed to detect and ignore swiping + if (self.__options.triggerClose.tap && env.hasTouchCapability) { + + $body.on('touchstart.'+ self.__namespace +'-triggerClose', function(event) { + self._touchRecordEvent(event); + }); + } + } + }, 0); + } + + self._trigger('ready'); + + // call our custom callback + if (self.__options.functionReady) { + self.__options.functionReady.call(self, self, { + origin: self._$origin[0], + tooltip: self._$tooltip[0] + }); + } + } + + // if we have a timer set, let the countdown begin + if (self.__options.timer > 0) { + + var timeout = setTimeout(function() { + self._close(); + }, self.__options.timer + extraTime); + + self.__timeouts.close.push(timeout); + } + } + } + } + } + + return self; + }, + + /** + * When using the mouseenter/touchstart open triggers, this function will + * schedule the opening of the tooltip after the delay, if there is one + * + * @param event + * @returns {self} + * @protected + */ + _openShortly: function(event) { + + var self = this, + ok = true; + + if (self.__state != 'stable' && self.__state != 'appearing') { + + // if a timeout is not already running + if (!self.__timeouts.open) { + + self._trigger({ + type: 'start', + event: event, + stop: function() { + ok = false; + } + }); + + if (ok) { + + var delay = (event.type.indexOf('touch') == 0) ? + self.__options.delayTouch : + self.__options.delay; + + if (delay[0]) { + + self.__timeouts.open = setTimeout(function() { + + self.__timeouts.open = null; + + // open only if the pointer (mouse or touch) is still over the origin. + // The check on the "meaningful event" can only be made here, after some + // time has passed (to know if the touch was a swipe or not) + if (self.__pointerIsOverOrigin && self._touchIsMeaningfulEvent(event)) { + + // signal that we go on + self._trigger('startend'); + + self._open(event); + } + else { + // signal that we cancel + self._trigger('startcancel'); + } + }, delay[0]); + } + else { + // signal that we go on + self._trigger('startend'); + + self._open(event); + } + } + } + } + + return self; + }, + + /** + * Meant for plugins to get their options + * + * @param {string} pluginName The name of the plugin that asks for its options + * @param {object} defaultOptions The default options of the plugin + * @returns {object} The options + * @protected + */ + _optionsExtract: function(pluginName, defaultOptions) { + + var self = this, + options = $.extend(true, {}, defaultOptions); + + // if the plugin options were isolated in a property named after the + // plugin, use them (prevents conflicts with other plugins) + var pluginOptions = self.__options[pluginName]; + + // if not, try to get them as regular options + if (!pluginOptions){ + + pluginOptions = {}; + + $.each(defaultOptions, function(optionName, value) { + + var o = self.__options[optionName]; + + if (o !== undefined) { + pluginOptions[optionName] = o; + } + }); + } + + // let's merge the default options and the ones that were provided. 
We'd want + // to do a deep copy but not let jQuery merge arrays, so we'll do a shallow + // extend on two levels, that will be enough if options are not more than 1 + // level deep + $.each(options, function(optionName, value) { + + if (pluginOptions[optionName] !== undefined) { + + if (( typeof value == 'object' + && !(value instanceof Array) + && value != null + ) + && + ( typeof pluginOptions[optionName] == 'object' + && !(pluginOptions[optionName] instanceof Array) + && pluginOptions[optionName] != null + ) + ) { + $.extend(options[optionName], pluginOptions[optionName]); + } + else { + options[optionName] = pluginOptions[optionName]; + } + } + }); + + return options; + }, + + /** + * Used at instantiation of the plugin, or afterwards by plugins that activate themselves + * on existing instances + * + * @param {object} pluginName + * @returns {self} + * @protected + */ + _plug: function(pluginName) { + + var plugin = $.tooltipster._plugin(pluginName); + + if (plugin) { + + // if there is a constructor for instances + if (plugin.instance) { + + // proxy non-private methods on the instance to allow new instance methods + $.tooltipster.__bridge(plugin.instance, this, plugin.name); + } + } + else { + throw new Error('The "'+ pluginName +'" plugin is not defined'); + } + + return this; + }, + + /** + * This will return true if the event is a mouse event which was + * emulated by the browser after a touch event. This allows us to + * really dissociate mouse and touch triggers. + * + * There is a margin of error if a real mouse event is fired right + * after (within the delay shown below) a touch event on the same + * element, but hopefully it should not happen often. + * + * @returns {boolean} + * @protected + */ + _touchIsEmulatedEvent: function(event) { + + var isEmulated = false, + now = new Date().getTime(); + + for (var i = this.__touchEvents.length - 1; i >= 0; i--) { + + var e = this.__touchEvents[i]; + + // delay, in milliseconds. It's supposed to be 300ms in + // most browsers (350ms on iOS) to allow a double tap but + // can be less (check out FastClick for more info) + if (now - e.time < 500) { + + if (e.target === event.target) { + isEmulated = true; + } + } + else { + break; + } + } + + return isEmulated; + }, + + /** + * Returns false if the event was an emulated mouse event or + * a touch event involved in a swipe gesture. + * + * @param {object} event + * @returns {boolean} + * @protected + */ + _touchIsMeaningfulEvent: function(event) { + return ( + (this._touchIsTouchEvent(event) && !this._touchSwiped(event.target)) + || (!this._touchIsTouchEvent(event) && !this._touchIsEmulatedEvent(event)) + ); + }, + + /** + * Checks if an event is a touch event + * + * @param {object} event + * @returns {boolean} + * @protected + */ + _touchIsTouchEvent: function(event){ + return event.type.indexOf('touch') == 0; + }, + + /** + * Store touch events for a while to detect swiping and emulated mouse events + * + * @param {object} event + * @returns {self} + * @protected + */ + _touchRecordEvent: function(event) { + + if (this._touchIsTouchEvent(event)) { + event.time = new Date().getTime(); + this.__touchEvents.push(event); + } + + return this; + }, + + /** + * Returns true if a swipe happened after the last touchstart event fired on + * event.target. + * + * We need to differentiate a swipe from a tap before we let the event open + * or close the tooltip. A swipe is when a touchmove (scroll) event happens + * on the body between the touchstart and the touchend events of an element. 
+ * + * @param {object} target The HTML element that may have triggered the swipe + * @returns {boolean} + * @protected + */ + _touchSwiped: function(target) { + + var swiped = false; + + for (var i = this.__touchEvents.length - 1; i >= 0; i--) { + + var e = this.__touchEvents[i]; + + if (e.type == 'touchmove') { + swiped = true; + break; + } + else if ( + e.type == 'touchstart' + && target === e.target + ) { + break; + } + } + + return swiped; + }, + + /** + * Triggers an event on the instance emitters + * + * @returns {self} + * @protected + */ + _trigger: function() { + + var args = Array.prototype.slice.apply(arguments); + + if (typeof args[0] == 'string') { + args[0] = { type: args[0] }; + } + + // add properties to the event + args[0].instance = this; + args[0].origin = this._$origin ? this._$origin[0] : null; + args[0].tooltip = this._$tooltip ? this._$tooltip[0] : null; + + // note: the order of emitters matters + this.__$emitterPrivate.trigger.apply(this.__$emitterPrivate, args); + $.tooltipster._trigger.apply($.tooltipster, args); + this.__$emitterPublic.trigger.apply(this.__$emitterPublic, args); + + return this; + }, + + /** + * Deactivate a plugin on this instance + * + * @returns {self} + * @protected + */ + _unplug: function(pluginName) { + + var self = this; + + // if the plugin has been activated on this instance + if (self[pluginName]) { + + var plugin = $.tooltipster._plugin(pluginName); + + // if there is a constructor for instances + if (plugin.instance) { + + // unbridge + $.each(plugin.instance, function(methodName, fn) { + + // if the method exists (privates methods do not) and comes indeed from + // this plugin (may be missing or come from a conflicting plugin). + if ( self[methodName] + && self[methodName].bridged === self[pluginName] + ) { + delete self[methodName]; + } + }); + } + + // destroy the plugin + if (self[pluginName].__destroy) { + self[pluginName].__destroy(); + } + + // remove the reference to the plugin instance + delete self[pluginName]; + } + + return self; + }, + + /** + * @see self::_close + * @returns {self} + * @public + */ + close: function(callback) { + + if (!this.__destroyed) { + this._close(null, callback); + } + else { + this.__destroyError(); + } + + return this; + }, + + /** + * Sets or gets the content of the tooltip + * + * @returns {mixed|self} + * @public + */ + content: function(content) { + + var self = this; + + // getter method + if (content === undefined) { + return self.__Content; + } + // setter method + else { + + if (!self.__destroyed) { + + // change the content + self.__contentSet(content); + + if (self.__Content !== null) { + + // update the tooltip if it is open + if (self.__state !== 'closed') { + + // reset the content in the tooltip + self.__contentInsert(); + + // reposition and resize the tooltip + self.reposition(); + + // if we want to play a little animation showing the content changed + if (self.__options.updateAnimation) { + + if (env.hasTransitions) { + + // keep the reference in the local scope + var animation = self.__options.updateAnimation; + + self._$tooltip.addClass('tooltipster-update-'+ animation); + + // remove the class after a while. 
The actual duration of the + // update animation may be shorter, it's set in the CSS rules + setTimeout(function() { + + if (self.__state != 'closed') { + + self._$tooltip.removeClass('tooltipster-update-'+ animation); + } + }, 1000); + } + else { + self._$tooltip.fadeTo(200, 0.5, function() { + if (self.__state != 'closed') { + self._$tooltip.fadeTo(200, 1); + } + }); + } + } + } + } + else { + self._close(); + } + } + else { + self.__destroyError(); + } + + return self; + } + }, + + /** + * Destroys the tooltip + * + * @returns {self} + * @public + */ + destroy: function() { + + var self = this; + + if (!self.__destroyed) { + + if(self.__state != 'closed'){ + + // no closing delay + self.option('animationDuration', 0) + // force closing + ._close(null, null, true); + } + else { + // there might be an open timeout still running + self.__timeoutsClear(); + } + + // send event + self._trigger('destroy'); + + self.__destroyed = true; + + self._$origin + .removeData(self.__namespace) + // remove the open trigger listeners + .off('.'+ self.__namespace +'-triggerOpen'); + + // remove the touch listener + $(env.window.document.body).off('.' + self.__namespace +'-triggerOpen'); + + var ns = self._$origin.data('tooltipster-ns'); + + // if the origin has been removed from DOM, its data may + // well have been destroyed in the process and there would + // be nothing to clean up or restore + if (ns) { + + // if there are no more tooltips on this element + if (ns.length === 1) { + + // optional restoration of a title attribute + var title = null; + if (self.__options.restoration == 'previous') { + title = self._$origin.data('tooltipster-initialTitle'); + } + else if (self.__options.restoration == 'current') { + + // old school technique to stringify when outerHTML is not supported + title = (typeof self.__Content == 'string') ? + self.__Content : + $('
<div></div>').append(self.__Content).html();
+ }
+
+ if (title) {
+ self._$origin.attr('title', title);
+ }
+
+ // final cleaning
+
+ self._$origin.removeClass('tooltipstered');
+
+ self._$origin
+ .removeData('tooltipster-ns')
+ .removeData('tooltipster-initialTitle');
+ }
+ else {
+ // remove the instance namespace from the list of namespaces of
+ // tooltips present on the element
+ ns = $.grep(ns, function(el, i) {
+ return el !== self.__namespace;
+ });
+ self._$origin.data('tooltipster-ns', ns);
+ }
+ }
+
+ // last event
+ self._trigger('destroyed');
+
+ // unbind private and public event listeners
+ self._off();
+ self.off();
+
+ // remove external references, just in case
+ self.__Content = null;
+ self.__$emitterPrivate = null;
+ self.__$emitterPublic = null;
+ self.__options.parent = null;
+ self._$origin = null;
+ self._$tooltip = null;
+
+ // make sure the object is no longer referenced in there to prevent
+ // memory leaks
+ $.tooltipster.__instancesLatestArr = $.grep($.tooltipster.__instancesLatestArr, function(el, i) {
+ return self !== el;
+ });
+
+ clearInterval(self.__garbageCollector);
+ }
+ else {
+ self.__destroyError();
+ }
+
+ // we return the scope rather than true so that the call to
+ // .tooltipster('destroy') actually returns the matched elements
+ // and applies to all of them
+ return self;
+ },
+
+ /**
+ * Disables the tooltip
+ *
+ * @returns {self}
+ * @public
+ */
+ disable: function() {
+
+ if (!this.__destroyed) {
+
+ // close first, in case the tooltip would not disappear on
+ // its own (no close trigger)
+ this._close();
+ this.__enabled = false;
+
+ return this;
+ }
+ else {
+ this.__destroyError();
+ }
+
+ return this;
+ },
+
+ /**
+ * Returns the HTML element of the origin
+ *
+ * @returns {self}
+ * @public
+ */
+ elementOrigin: function() {
+
+ if (!this.__destroyed) {
+ return this._$origin[0];
+ }
+ else {
+ this.__destroyError();
+ }
+ },
+
+ /**
+ * Returns the HTML element of the tooltip
+ *
+ * @returns {self}
+ * @public
+ */
+ elementTooltip: function() {
+ return this._$tooltip ? 
this._$tooltip[0] : null; + }, + + /** + * Enables the tooltip + * + * @returns {self} + * @public + */ + enable: function() { + this.__enabled = true; + return this; + }, + + /** + * Alias, deprecated in 4.0.0 + * + * @param {function} callback + * @returns {self} + * @public + */ + hide: function(callback) { + return this.close(callback); + }, + + /** + * Returns the instance + * + * @returns {self} + * @public + */ + instance: function() { + return this; + }, + + /** + * For public use only, not to be used by plugins (use ::_off() instead) + * + * @returns {self} + * @public + */ + off: function() { + + if (!this.__destroyed) { + this.__$emitterPublic.off.apply(this.__$emitterPublic, Array.prototype.slice.apply(arguments)); + } + + return this; + }, + + /** + * For public use only, not to be used by plugins (use ::_on() instead) + * + * @returns {self} + * @public + */ + on: function() { + + if (!this.__destroyed) { + this.__$emitterPublic.on.apply(this.__$emitterPublic, Array.prototype.slice.apply(arguments)); + } + else { + this.__destroyError(); + } + + return this; + }, + + /** + * For public use only, not to be used by plugins + * + * @returns {self} + * @public + */ + one: function() { + + if (!this.__destroyed) { + this.__$emitterPublic.one.apply(this.__$emitterPublic, Array.prototype.slice.apply(arguments)); + } + else { + this.__destroyError(); + } + + return this; + }, + + /** + * @see self::_open + * @returns {self} + * @public + */ + open: function(callback) { + + if (!this.__destroyed) { + this._open(null, callback); + } + else { + this.__destroyError(); + } + + return this; + }, + + /** + * Get or set options. For internal use and advanced users only. + * + * @param {string} o Option name + * @param {mixed} val optional A new value for the option + * @return {mixed|self} If val is omitted, the value of the option + * is returned, otherwise the instance itself is returned + * @public + */ + option: function(o, val) { + + // getter + if (val === undefined) { + return this.__options[o]; + } + // setter + else { + + if (!this.__destroyed) { + + // change value + this.__options[o] = val; + + // format + this.__optionsFormat(); + + // re-prepare the triggers if needed + if ($.inArray(o, ['trigger', 'triggerClose', 'triggerOpen']) >= 0) { + this.__prepareOrigin(); + } + + if (o === 'selfDestruction') { + this.__prepareGC(); + } + } + else { + this.__destroyError(); + } + + return this; + } + }, + + /** + * This method is in charge of setting the position and size properties of the tooltip. + * All the hard work is delegated to the display plugin. + * Note: The tooltip may be detached from the DOM at the moment the method is called + * but must be attached by the end of the method call. + * + * @param {object} event For internal use only. Defined if an event such as + * window resizing triggered the repositioning + * @param {boolean} tooltipIsDetached For internal use only. Set this to true if you + * know that the tooltip not being in the DOM is not an issue (typically when the + * tooltip element has just been created but has not been added to the DOM yet). 
+ * @returns {self} + * @public + */ + reposition: function(event, tooltipIsDetached) { + + var self = this; + + if (!self.__destroyed) { + + // if the tooltip is still open and the origin is still in the DOM + if (self.__state != 'closed' && bodyContains(self._$origin)) { + + // if the tooltip has not been removed from DOM manually (or if it + // has been detached on purpose) + if (tooltipIsDetached || bodyContains(self._$tooltip)) { + + if (!tooltipIsDetached) { + // detach in case the tooltip overflows the window and adds + // scrollbars to it, so __geometry can be accurate + self._$tooltip.detach(); + } + + // refresh the geometry object before passing it as a helper + self.__Geometry = self.__geometry(); + + // let a plugin fo the rest + self._trigger({ + type: 'reposition', + event: event, + helper: { + geo: self.__Geometry + } + }); + } + } + } + else { + self.__destroyError(); + } + + return self; + }, + + /** + * Alias, deprecated in 4.0.0 + * + * @param callback + * @returns {self} + * @public + */ + show: function(callback) { + return this.open(callback); + }, + + /** + * Returns some properties about the instance + * + * @returns {object} + * @public + */ + status: function() { + + return { + destroyed: this.__destroyed, + enabled: this.__enabled, + open: this.__state !== 'closed', + state: this.__state + }; + }, + + /** + * For public use only, not to be used by plugins + * + * @returns {self} + * @public + */ + triggerHandler: function() { + + if (!this.__destroyed) { + this.__$emitterPublic.triggerHandler.apply(this.__$emitterPublic, Array.prototype.slice.apply(arguments)); + } + else { + this.__destroyError(); + } + + return this; + } +}; + +$.fn.tooltipster = function() { + + // for using in closures + var args = Array.prototype.slice.apply(arguments), + // common mistake: an HTML element can't be in several tooltips at the same time + contentCloningWarning = 'You are using a single HTML element as content for several tooltips. You probably want to set the contentCloning option to TRUE.'; + + // this happens with $(sel).tooltipster(...) when $(sel) does not match anything + if (this.length === 0) { + + // still chainable + return this; + } + // this happens when calling $(sel).tooltipster('methodName or options') + // where $(sel) matches one or more elements + else { + + // method calls + if (typeof args[0] === 'string') { + + var v = '#*$~&'; + + this.each(function() { + + // retrieve the namepaces of the tooltip(s) that exist on that element. + // We will interact with the first tooltip only. + var ns = $(this).data('tooltipster-ns'), + // self represents the instance of the first tooltipster plugin + // associated to the current HTML object of the loop + self = ns ? 
$(this).data(ns[0]) : null; + + // if the current element holds a tooltipster instance + if (self) { + + if (typeof self[args[0]] === 'function') { + + if ( this.length > 1 + && args[0] == 'content' + && ( args[1] instanceof $ + || (typeof args[1] == 'object' && args[1] != null && args[1].tagName) + ) + && !self.__options.contentCloning + && self.__options.debug + ) { + console.log(contentCloningWarning); + } + + // note : args[1] and args[2] may not be defined + var resp = self[args[0]](args[1], args[2]); + } + else { + throw new Error('Unknown method "'+ args[0] +'"'); + } + + // if the function returned anything other than the instance + // itself (which implies chaining, except for the `instance` method) + if (resp !== self || args[0] === 'instance') { + + v = resp; + + // return false to stop .each iteration on the first element + // matched by the selector + return false; + } + } + else { + throw new Error('You called Tooltipster\'s "'+ args[0] +'" method on an uninitialized element'); + } + }); + + return (v !== '#*$~&') ? v : this; + } + // first argument is undefined or an object: the tooltip is initializing + else { + + // reset the array of last initialized objects + $.tooltipster.__instancesLatestArr = []; + + // is there a defined value for the multiple option in the options object ? + var multipleIsSet = args[0] && args[0].multiple !== undefined, + // if the multiple option is set to true, or if it's not defined but + // set to true in the defaults + multiple = (multipleIsSet && args[0].multiple) || (!multipleIsSet && defaults.multiple), + // same for content + contentIsSet = args[0] && args[0].content !== undefined, + content = (contentIsSet && args[0].content) || (!contentIsSet && defaults.content), + // same for contentCloning + contentCloningIsSet = args[0] && args[0].contentCloning !== undefined, + contentCloning = + (contentCloningIsSet && args[0].contentCloning) + || (!contentCloningIsSet && defaults.contentCloning), + // same for debug + debugIsSet = args[0] && args[0].debug !== undefined, + debug = (debugIsSet && args[0].debug) || (!debugIsSet && defaults.debug); + + if ( this.length > 1 + && ( content instanceof $ + || (typeof content == 'object' && content != null && content.tagName) + ) + && !contentCloning + && debug + ) { + console.log(contentCloningWarning); + } + + // create a tooltipster instance for each element if it doesn't + // already have one or if the multiple option is set, and attach the + // object to it + this.each(function() { + + var go = false, + $this = $(this), + ns = $this.data('tooltipster-ns'), + obj = null; + + if (!ns) { + go = true; + } + else if (multiple) { + go = true; + } + else if (debug) { + console.log('Tooltipster: one or more tooltips are already attached to the element below. Ignoring.'); + console.log(this); + } + + if (go) { + obj = new $.Tooltipster(this, args[0]); + + // save the reference of the new instance + if (!ns) ns = []; + ns.push(obj.__namespace); + $this.data('tooltipster-ns', ns); + + // save the instance itself + $this.data(obj.__namespace, obj); + + // call our constructor custom function. 
+ // we do this here and not in ::init() because we wanted
+ // the object to be saved in $this.data before triggering
+ // it
+ if (obj.__options.functionInit) {
+ obj.__options.functionInit.call(obj, obj, {
+ origin: this
+ });
+ }
+
+ // and now the event, for the plugins and core emitter
+ obj._trigger('init');
+ }
+
+ $.tooltipster.__instancesLatestArr.push(obj);
+ });
+
+ return this;
+ }
+ }
+};
+
+// Utilities

 /**
- * TOOLTIP IMPL.
+ * A class to check if a tooltip can fit in given dimensions
+ *
+ * @param {object} $tooltip The jQuery wrapped tooltip element, or a clone of it
+ */
+function Ruler($tooltip) {
+
+ // list of instance variables
+
+ this.$container;
+ this.constraints = null;
+ this.__$tooltip;
+
+ this.__init($tooltip);
+}
+
+Ruler.prototype = {
+
+ /**
+ * Move the tooltip into an invisible div that does not allow overflow to make
+ * size tests. Note: the tooltip may or may not be attached to the DOM at the
+ * moment this method is called, it does not matter.
+ *
+ * @param {object} $tooltip The object to test. May be just a clone of the
+ * actual tooltip.
+ * @private
+ */
+ __init: function($tooltip) {
+
+ this.__$tooltip = $tooltip;
+
+ this.__$tooltip
+ .css({
+ // for some reason we have to specify top and left 0
+ left: 0,
+ // any overflow will be ignored while measuring
+ overflow: 'hidden',
+ // positions at (0,0) without the div using 100% of the available width
+ position: 'absolute',
+ top: 0
+ })
+ // overflow must be auto during the test. We re-set this in case
+ // it were modified by the user
+ .find('.tooltipster-content')
+ .css('overflow', 'auto');
+
+ this.$container = $('<div class="tooltipster-ruler"></div>
    ') + .append(this.__$tooltip) + .appendTo(env.window.document.body); + }, + + /** + * Force the browser to redraw (re-render) the tooltip immediately. This is required + * when you changed some CSS properties and need to make something with it + * immediately, without waiting for the browser to redraw at the end of instructions. + * + * @see http://stackoverflow.com/questions/3485365/how-can-i-force-webkit-to-redraw-repaint-to-propagate-style-changes + * @private + */ + __forceRedraw: function() { + + // note: this would work but for Webkit only + //this.__$tooltip.close(); + //this.__$tooltip[0].offsetHeight; + //this.__$tooltip.open(); + + // works in FF too + var $p = this.__$tooltip.parent(); + this.__$tooltip.detach(); + this.__$tooltip.appendTo($p); + }, + + /** + * Set maximum dimensions for the tooltip. A call to ::measure afterwards + * will tell us if the content overflows or if it's ok + * + * @param {int} width + * @param {int} height + * @return {Ruler} + * @public + */ + constrain: function(width, height) { + + this.constraints = { + width: width, + height: height + }; + + this.__$tooltip.css({ + // we disable display:flex, otherwise the content would overflow without + // creating horizontal scrolling (which we need to detect). + display: 'block', + // reset any previous height + height: '', + // we'll check if horizontal scrolling occurs + overflow: 'auto', + // we'll set the width and see what height is generated and if there + // is horizontal overflow + width: width + }); + + return this; + }, + + /** + * Reset the tooltip content overflow and remove the test container + * + * @returns {Ruler} + * @public + */ + destroy: function() { + + // in case the element was not a clone + this.__$tooltip + .detach() + .find('.tooltipster-content') + .css({ + // reset to CSS value + display: '', + overflow: '' + }); + + this.$container.remove(); + }, + + /** + * Removes any constraints + * + * @returns {Ruler} + * @public + */ + free: function() { + + this.constraints = null; + + // reset to natural size + this.__$tooltip.css({ + display: '', + height: '', + overflow: 'visible', + width: '' + }); + + return this; + }, + + /** + * Returns the size of the tooltip. When constraints are applied, also returns + * whether the tooltip fits in the provided dimensions. + * The idea is to see if the new height is small enough and if the content does + * not overflow horizontally. + * + * @param {int} width + * @param {int} height + * @returns {object} An object with a bool `fits` property and a `size` property + * @public + */ + measure: function() { + + this.__forceRedraw(); + + var tooltipBcr = this.__$tooltip[0].getBoundingClientRect(), + result = { size: { + // bcr.width/height are not defined in IE8- but in this + // case, bcr.right/bottom will have the same value + // except in iOS 8+ where tooltipBcr.bottom/right are wrong + // after scrolling for reasons yet to be determined. + // tooltipBcr.top/left might not be 0, see issue #514 + height: tooltipBcr.height || (tooltipBcr.bottom - tooltipBcr.top), + width: tooltipBcr.width || (tooltipBcr.right - tooltipBcr.left) + }}; + + if (this.constraints) { + + // note: we used to use offsetWidth instead of boundingRectClient but + // it returned rounded values, causing issues with sub-pixel layouts. + + // note2: noticed that the bcrWidth of text content of a div was once + // greater than the bcrWidth of its container by 1px, causing the final + // tooltip box to be too small for its content. 
However, evaluating + // their widths one against the other (below) surprisingly returned + // equality. Happened only once in Chrome 48, was not able to reproduce + // => just having fun with float position values... + + var $content = this.__$tooltip.find('.tooltipster-content'), + height = this.__$tooltip.outerHeight(), + contentBcr = $content[0].getBoundingClientRect(), + fits = { + height: height <= this.constraints.height, + width: ( + // this condition accounts for min-width property that + // may apply + tooltipBcr.width <= this.constraints.width + // the -1 is here because scrollWidth actually returns + // a rounded value, and may be greater than bcr.width if + // it was rounded up. This may cause an issue for contents + // which actually really overflow by 1px or so, but that + // should be rare. Not sure how to solve this efficiently. + // See http://blogs.msdn.com/b/ie/archive/2012/02/17/sub-pixel-rendering-and-the-css-object-model.aspx + && contentBcr.width >= $content[0].scrollWidth - 1 + ) + }; + + result.fits = fits.height && fits.width; + } + + // old versions of IE get the width wrong for some reason and it causes + // the text to be broken to a new line, so we round it up. If the width + // is the width of the screen though, we can assume it is accurate. + if ( env.IE + && env.IE <= 11 + && result.size.width !== env.window.document.documentElement.clientWidth + ) { + result.size.width = Math.ceil(result.size.width) + 1; + } + + return result; + } +}; + +// quick & dirty compare function, not bijective nor multidimensional +function areEqual(a,b) { + var same = true; + $.each(a, function(i, _) { + if (b[i] === undefined || a[i] !== b[i]) { + same = false; + return false; + } + }); + return same; +} + +/** + * A fast function to check if an element is still in the DOM. It + * tries to use an id as ids are indexed by the browser, or falls + * back to jQuery's `contains` method. May fail if two elements + * have the same id, but so be it + * + * @param {object} $obj A jQuery-wrapped HTML element + * @return {boolean} */ - -var TTIP = {}; - -TTIP.main = { - offset: [15,15], - maxWidth: 600, - - setDeferredListeners: function(){ - $('body').on('mouseover', '.tooltip', yt.show_tip); - $('body').on('mousemove', '.tooltip', yt.move_tip); - $('body').on('mouseout', '.tooltip', yt.close_tip); - }, - - init: function(){ - $('#tip-box').remove(); - yt.tipBox = document.createElement('div'); - document.body.appendChild(yt.tipBox); - yt.tipBox.id = 'tip-box'; - - $(yt.tipBox).hide(); - $(yt.tipBox).css('position', 'absolute'); - if(yt.maxWidth !== null){ - $(yt.tipBox).css('max-width', yt.maxWidth+'px'); - } - yt.setDeferredListeners(); - }, - - show_tip: function(e, el){ - e.stopPropagation(); - e.preventDefault(); - var el = e.data || e.currentTarget || el; - if(el.tagName.toLowerCase() === 'img'){ - yt.tipText = el.alt ? el.alt : ''; - } else { - yt.tipText = el.title ? 
el.title : ''; - } - - if(yt.tipText !== ''){ - // save org title - $(el).attr('tt_title', yt.tipText); - // reset title to not show org tooltips - $(el).attr('title', ''); - - yt.tipBox.innerHTML = yt.tipText; - $(yt.tipBox).show(); - } - }, - - move_tip: function(e, el){ - e.stopPropagation(); - e.preventDefault(); - var el = e.data || e.currentTarget || el; - var movePos = [e.pageX, e.pageY]; - $(yt.tipBox).css('top', (movePos[1] + yt.offset[1]) + 'px') - $(yt.tipBox).css('left', (movePos[0] + yt.offset[0]) + 'px') - }, - - close_tip: function(e, el){ - e.stopPropagation(); - e.preventDefault(); - var el = e.data || e.currentTarget || el; - $(yt.tipBox).hide(); - $(el).attr('title', $(el).attr('tt_title')); - $('#tip-box').hide(); - } -}; - -// activate tooltips -yt = TTIP.main; -if ($(document).data('activated-tooltips') !== '1'){ - $(document).ready(yt.init); - $(document).data('activated-tooltips', '1'); +function bodyContains($obj) { + var id = $obj.attr('id'), + el = id ? env.window.document.getElementById(id) : null; + // must also check that the element with the id is the one we want + return el ? el === $obj[0] : $.contains(env.window.document.body, $obj[0]); +} + +// detect IE versions for dirty fixes +var uA = navigator.userAgent.toLowerCase(); +if (uA.indexOf('msie') != -1) env.IE = parseInt(uA.split('msie')[1]); +else if (uA.toLowerCase().indexOf('trident') !== -1 && uA.indexOf(' rv:11') !== -1) env.IE = 11; +else if (uA.toLowerCase().indexOf('edge/') != -1) env.IE = parseInt(uA.toLowerCase().split('edge/')[1]); + +// detecting support for CSS transitions +function transitionSupport() { + + // env.window is not defined yet when this is called + if (!win) return false; + + var b = win.document.body || win.document.documentElement, + s = b.style, + p = 'transition', + v = ['Moz', 'Webkit', 'Khtml', 'O', 'ms']; + + if (typeof s[p] == 'string') { return true; } + + p = p.charAt(0).toUpperCase() + p.substr(1); + for (var i=0; i' + + '
<div class="tooltipster-box">' +
+            '<div class="tooltipster-content"></div>' +
+        '</div>' +
+        '<div class="tooltipster-arrow">' +
+            '<div class="tooltipster-arrow-uncropped">' +
+                '<div class="tooltipster-arrow-border"></div>' +
+                '<div class="tooltipster-arrow-background"></div>' +
+            '</div>' +
+        '</div>
    ' + + '' + ); + + // hide arrow if asked + if (!this.__options.arrow) { + $html + .find('.tooltipster-box') + .css('margin', 0) + .end() + .find('.tooltipster-arrow') + .hide(); + } + + // apply min/max width if asked + if (this.__options.minWidth) { + $html.css('min-width', this.__options.minWidth + 'px'); + } + if (this.__options.maxWidth) { + $html.css('max-width', this.__options.maxWidth + 'px'); + } + + this.__instance._$tooltip = $html; + + // tell the instance that the tooltip element has been created + this.__instance._trigger('created'); + }, + + /** + * Used when the plugin is to be unplugged + * + * @private + */ + __destroy: function() { + this.__instance._off('.'+ self.__namespace); + }, + + /** + * (Re)compute this.__options from the options declared to the instance + * + * @private + */ + __optionsFormat: function() { + + var self = this; + + // get the options + self.__options = self.__instance._optionsExtract(pluginName, self.__defaults()); + + // for backward compatibility, deprecated in v4.0.0 + if (self.__options.position) { + self.__options.side = self.__options.position; + } + + // options formatting + + // format distance as a four-cell array if it ain't one yet and then make + // it an object with top/bottom/left/right properties + if (typeof self.__options.distance != 'object') { + self.__options.distance = [self.__options.distance]; + } + if (self.__options.distance.length < 4) { + + if (self.__options.distance[1] === undefined) self.__options.distance[1] = self.__options.distance[0]; + if (self.__options.distance[2] === undefined) self.__options.distance[2] = self.__options.distance[0]; + if (self.__options.distance[3] === undefined) self.__options.distance[3] = self.__options.distance[1]; + + self.__options.distance = { + top: self.__options.distance[0], + right: self.__options.distance[1], + bottom: self.__options.distance[2], + left: self.__options.distance[3] + }; + } + + // let's transform: + // 'top' into ['top', 'bottom', 'right', 'left'] + // 'right' into ['right', 'left', 'top', 'bottom'] + // 'bottom' into ['bottom', 'top', 'right', 'left'] + // 'left' into ['left', 'right', 'top', 'bottom'] + if (typeof self.__options.side == 'string') { + + var opposites = { + 'top': 'bottom', + 'right': 'left', + 'bottom': 'top', + 'left': 'right' + }; + + self.__options.side = [self.__options.side, opposites[self.__options.side]]; + + if (self.__options.side[0] == 'left' || self.__options.side[0] == 'right') { + self.__options.side.push('top', 'bottom'); + } + else { + self.__options.side.push('right', 'left'); + } + } + + // misc + // disable the arrow in IE6 unless the arrow option was explicitly set to true + if ( $.tooltipster._env.IE === 6 + && self.__options.arrow !== true + ) { + self.__options.arrow = false; + } + }, + + /** + * This method must compute and set the positioning properties of the + * tooltip (left, top, width, height, etc.). It must also make sure the + * tooltip is eventually appended to its parent (since the element may be + * detached from the DOM at the moment the method is called). + * + * We'll evaluate positioning scenarios to find which side can contain the + * tooltip in the best way. We'll consider things relatively to the window + * (unless the user asks not to), then to the document (if need be, or if the + * user explicitly requires the tests to run on the document). For each + * scenario, measures are taken, allowing us to know how well the tooltip + * is going to fit. 
After that, a sorting function will let us know what + * the best scenario is (we also allow the user to choose his favorite + * scenario by using an event). + * + * @param {object} helper An object that contains variables that plugin + * creators may find useful (see below) + * @param {object} helper.geo An object with many layout properties + * about objects of interest (window, document, origin). This should help + * plugin users compute the optimal position of the tooltip + * @private + */ + __reposition: function(event, helper) { + + var self = this, + finalResult, + // to know where to put the tooltip, we need to know on which point + // of the x or y axis we should center it. That coordinate is the target + targets = self.__targetFind(helper), + testResults = []; + + // make sure the tooltip is detached while we make tests on a clone + self.__instance._$tooltip.detach(); + + // we could actually provide the original element to the Ruler and + // not a clone, but it just feels right to keep it out of the + // machinery. + var $clone = self.__instance._$tooltip.clone(), + // start position tests session + ruler = $.tooltipster._getRuler($clone), + satisfied = false, + animation = self.__instance.option('animation'); + + // an animation class could contain properties that distort the size + if (animation) { + $clone.removeClass('tooltipster-'+ animation); + } + + // start evaluating scenarios + $.each(['window', 'document'], function(i, container) { + + var takeTest = null; + + // let the user decide to keep on testing or not + self.__instance._trigger({ + container: container, + helper: helper, + satisfied: satisfied, + takeTest: function(bool) { + takeTest = bool; + }, + results: testResults, + type: 'positionTest' + }); + + if ( takeTest == true + || ( takeTest != false + && satisfied == false + // skip the window scenarios if asked. 
If they are reintegrated by + // the callback of the positionTest event, they will have to be + // excluded using the callback of positionTested + && (container != 'window' || self.__options.viewportAware) + ) + ) { + + // for each allowed side + for (var i=0; i < self.__options.side.length; i++) { + + var distance = { + horizontal: 0, + vertical: 0 + }, + side = self.__options.side[i]; + + if (side == 'top' || side == 'bottom') { + distance.vertical = self.__options.distance[side]; + } + else { + distance.horizontal = self.__options.distance[side]; + } + + // this may have an effect on the size of the tooltip if there are css + // rules for the arrow or something else + self.__sideChange($clone, side); + + $.each(['natural', 'constrained'], function(i, mode) { + + takeTest = null; + + // emit an event on the instance + self.__instance._trigger({ + container: container, + event: event, + helper: helper, + mode: mode, + results: testResults, + satisfied: satisfied, + side: side, + takeTest: function(bool) { + takeTest = bool; + }, + type: 'positionTest' + }); + + if ( takeTest == true + || ( takeTest != false + && satisfied == false + ) + ) { + + var testResult = { + container: container, + // we let the distance as an object here, it can make things a little easier + // during the user's calculations at positionTest/positionTested + distance: distance, + // whether the tooltip can fit in the size of the viewport (does not mean + // that we'll be able to make it initially entirely visible, see 'whole') + fits: null, + mode: mode, + outerSize: null, + side: side, + size: null, + target: targets[side], + // check if the origin has enough surface on screen for the tooltip to + // aim at it without overflowing the viewport (this is due to the thickness + // of the arrow represented by the minIntersection length). + // If not, the tooltip will have to be partly or entirely off screen in + // order to stay docked to the origin. This value will stay null when the + // container is the document, as it is not relevant + whole: null + }; + + // get the size of the tooltip with or without size constraints + var rulerConfigured = (mode == 'natural') ? 
+ ruler.free() : + ruler.constrain( + helper.geo.available[container][side].width - distance.horizontal, + helper.geo.available[container][side].height - distance.vertical + ), + rulerResults = rulerConfigured.measure(); + + testResult.size = rulerResults.size; + testResult.outerSize = { + height: rulerResults.size.height + distance.vertical, + width: rulerResults.size.width + distance.horizontal + }; + + if (mode == 'natural') { + + if( helper.geo.available[container][side].width >= testResult.outerSize.width + && helper.geo.available[container][side].height >= testResult.outerSize.height + ) { + testResult.fits = true; + } + else { + testResult.fits = false; + } + } + else { + testResult.fits = rulerResults.fits; + } + + if (container == 'window') { + + if (!testResult.fits) { + testResult.whole = false; + } + else { + if (side == 'top' || side == 'bottom') { + + testResult.whole = ( + helper.geo.origin.windowOffset.right >= self.__options.minIntersection + && helper.geo.window.size.width - helper.geo.origin.windowOffset.left >= self.__options.minIntersection + ); + } + else { + testResult.whole = ( + helper.geo.origin.windowOffset.bottom >= self.__options.minIntersection + && helper.geo.window.size.height - helper.geo.origin.windowOffset.top >= self.__options.minIntersection + ); + } + } + } + + testResults.push(testResult); + + // we don't need to compute more positions if we have one fully on screen + if (testResult.whole) { + satisfied = true; + } + else { + // don't run the constrained test unless the natural width was greater + // than the available width, otherwise it's pointless as we know it + // wouldn't fit either + if ( testResult.mode == 'natural' + && ( testResult.fits + || testResult.size.width <= helper.geo.available[container][side].width + ) + ) { + return false; + } + } + } + }); + } + } + }); + + // the user may eliminate the unwanted scenarios from testResults, but he's + // not supposed to alter them at this point. functionPosition and the + // position event serve that purpose. + self.__instance._trigger({ + edit: function(r) { + testResults = r; + }, + event: event, + helper: helper, + results: testResults, + type: 'positionTested' + }); + + /** + * Sort the scenarios to find the favorite one. + * + * The favorite scenario is when we can fully display the tooltip on screen, + * even if it means that the middle of the tooltip is no longer centered on + * the middle of the origin (when the origin is near the edge of the screen + * or even partly off screen). We want the tooltip on the preferred side, + * even if it means that we have to use a constrained size rather than a + * natural one (as long as it fits). When the origin is off screen at the top + * the tooltip will be positioned at the bottom (if allowed), if the origin + * is off screen on the right, it will be positioned on the left, etc. + * If there are no scenarios where the tooltip can fit on screen, or if the + * user does not want the tooltip to fit on screen (viewportAware == false), + * we fall back to the scenarios relative to the document. + * + * When the tooltip is bigger than the viewport in either dimension, we stop + * looking at the window scenarios and consider the document scenarios only, + * with the same logic to find on which side it would fit best. + * + * If the tooltip cannot fit the document on any side, we force it at the + * bottom, so at least the user can scroll to see it. 
+ */ + testResults.sort(function(a, b) { + + // best if it's whole (the tooltip fits and adapts to the viewport) + if (a.whole && !b.whole) { + return -1; + } + else if (!a.whole && b.whole) { + return 1; + } + else if (a.whole && b.whole) { + + var ai = self.__options.side.indexOf(a.side), + bi = self.__options.side.indexOf(b.side); + + // use the user's sides fallback array + if (ai < bi) { + return -1; + } + else if (ai > bi) { + return 1; + } + else { + // will be used if the user forced the tests to continue + return a.mode == 'natural' ? -1 : 1; + } + } + else { + + // better if it fits + if (a.fits && !b.fits) { + return -1; + } + else if (!a.fits && b.fits) { + return 1; + } + else if (a.fits && b.fits) { + + var ai = self.__options.side.indexOf(a.side), + bi = self.__options.side.indexOf(b.side); + + // use the user's sides fallback array + if (ai < bi) { + return -1; + } + else if (ai > bi) { + return 1; + } + else { + // will be used if the user forced the tests to continue + return a.mode == 'natural' ? -1 : 1; + } + } + else { + + // if everything failed, this will give a preference to the case where + // the tooltip overflows the document at the bottom + if ( a.container == 'document' + && a.side == 'bottom' + && a.mode == 'natural' + ) { + return -1; + } + else { + return 1; + } + } + } + }); + + finalResult = testResults[0]; + + + // now let's find the coordinates of the tooltip relatively to the window + finalResult.coord = {}; + + switch (finalResult.side) { + + case 'left': + case 'right': + finalResult.coord.top = Math.floor(finalResult.target - finalResult.size.height / 2); + break; + + case 'bottom': + case 'top': + finalResult.coord.left = Math.floor(finalResult.target - finalResult.size.width / 2); + break; + } + + switch (finalResult.side) { + + case 'left': + finalResult.coord.left = helper.geo.origin.windowOffset.left - finalResult.outerSize.width; + break; + + case 'right': + finalResult.coord.left = helper.geo.origin.windowOffset.right + finalResult.distance.horizontal; + break; + + case 'top': + finalResult.coord.top = helper.geo.origin.windowOffset.top - finalResult.outerSize.height; + break; + + case 'bottom': + finalResult.coord.top = helper.geo.origin.windowOffset.bottom + finalResult.distance.vertical; + break; + } + + // if the tooltip can potentially be contained within the viewport dimensions + // and that we are asked to make it fit on screen + if (finalResult.container == 'window') { + + // if the tooltip overflows the viewport, we'll move it accordingly (then it will + // not be centered on the middle of the origin anymore). We only move horizontally + // for top and bottom tooltips and vice versa. 
+ if (finalResult.side == 'top' || finalResult.side == 'bottom') { + + // if there is an overflow on the left + if (finalResult.coord.left < 0) { + + // prevent the overflow unless the origin itself gets off screen (minus the + // margin needed to keep the arrow pointing at the target) + if (helper.geo.origin.windowOffset.right - this.__options.minIntersection >= 0) { + finalResult.coord.left = 0; + } + else { + finalResult.coord.left = helper.geo.origin.windowOffset.right - this.__options.minIntersection - 1; + } + } + // or an overflow on the right + else if (finalResult.coord.left > helper.geo.window.size.width - finalResult.size.width) { + + if (helper.geo.origin.windowOffset.left + this.__options.minIntersection <= helper.geo.window.size.width) { + finalResult.coord.left = helper.geo.window.size.width - finalResult.size.width; + } + else { + finalResult.coord.left = helper.geo.origin.windowOffset.left + this.__options.minIntersection + 1 - finalResult.size.width; + } + } + } + else { + + // overflow at the top + if (finalResult.coord.top < 0) { + + if (helper.geo.origin.windowOffset.bottom - this.__options.minIntersection >= 0) { + finalResult.coord.top = 0; + } + else { + finalResult.coord.top = helper.geo.origin.windowOffset.bottom - this.__options.minIntersection - 1; + } + } + // or at the bottom + else if (finalResult.coord.top > helper.geo.window.size.height - finalResult.size.height) { + + if (helper.geo.origin.windowOffset.top + this.__options.minIntersection <= helper.geo.window.size.height) { + finalResult.coord.top = helper.geo.window.size.height - finalResult.size.height; + } + else { + finalResult.coord.top = helper.geo.origin.windowOffset.top + this.__options.minIntersection + 1 - finalResult.size.height; + } + } + } + } + else { + + // there might be overflow here too but it's easier to handle. If there has + // to be an overflow, we'll make sure it's on the right side of the screen + // (because the browser will extend the document size if there is an overflow + // on the right, but not on the left). The sort function above has already + // made sure that a bottom document overflow is preferred to a top overflow, + // so we don't have to care about it. + + // if there is an overflow on the right + if (finalResult.coord.left > helper.geo.window.size.width - finalResult.size.width) { + + // this may actually create on overflow on the left but we'll fix it in a sec + finalResult.coord.left = helper.geo.window.size.width - finalResult.size.width; + } + + // if there is an overflow on the left + if (finalResult.coord.left < 0) { + + // don't care if it overflows the right after that, we made our best + finalResult.coord.left = 0; + } + } + + + // submit the positioning proposal to the user function which may choose to change + // the side, size and/or the coordinates + + // first, set the rules that corresponds to the proposed side: it may change + // the size of the tooltip, and the custom functionPosition may want to detect the + // size of something before making a decision. 
So let's make things easier for the + // implementor + self.__sideChange($clone, finalResult.side); + + // add some variables to the helper + helper.tooltipClone = $clone[0]; + helper.tooltipParent = self.__instance.option('parent').parent[0]; + // move informative values to the helper + helper.mode = finalResult.mode; + helper.whole = finalResult.whole; + // add some variables to the helper for the functionPosition callback (these + // will also be added to the event fired by self.__instance._trigger but that's + // ok, we're just being consistent) + helper.origin = self.__instance._$origin[0]; + helper.tooltip = self.__instance._$tooltip[0]; + + // leave only the actionable values in there for functionPosition + delete finalResult.container; + delete finalResult.fits; + delete finalResult.mode; + delete finalResult.outerSize; + delete finalResult.whole; + + // keep only the distance on the relevant side, for clarity + finalResult.distance = finalResult.distance.horizontal || finalResult.distance.vertical; + + // beginners may not be comfortable with the concept of editing the object + // passed by reference, so we provide an edit function and pass a clone + var finalResultClone = $.extend(true, {}, finalResult); + + // emit an event on the instance + self.__instance._trigger({ + edit: function(result) { + finalResult = result; + }, + event: event, + helper: helper, + position: finalResultClone, + type: 'position' + }); + + if (self.__options.functionPosition) { + + var result = self.__options.functionPosition.call(self, self.__instance, helper, finalResultClone); + + if (result) finalResult = result; + } + + // end the positioning tests session (the user might have had a + // use for it during the position event, now it's over) + ruler.destroy(); + + // compute the position of the target relatively to the tooltip root + // element so we can place the arrow and make the needed adjustments + var arrowCoord, + maxVal; + + if (finalResult.side == 'top' || finalResult.side == 'bottom') { + + arrowCoord = { + prop: 'left', + val: finalResult.target - finalResult.coord.left + }; + maxVal = finalResult.size.width - this.__options.minIntersection; + } + else { + + arrowCoord = { + prop: 'top', + val: finalResult.target - finalResult.coord.top + }; + maxVal = finalResult.size.height - this.__options.minIntersection; + } + + // cannot lie beyond the boundaries of the tooltip, minus the + // arrow margin + if (arrowCoord.val < this.__options.minIntersection) { + arrowCoord.val = this.__options.minIntersection; + } + else if (arrowCoord.val > maxVal) { + arrowCoord.val = maxVal; + } + + var originParentOffset; + + // let's convert the window-relative coordinates into coordinates relative to the + // future positioned parent that the tooltip will be appended to + if (helper.geo.origin.fixedLineage) { + + // same as windowOffset when the position is fixed + originParentOffset = helper.geo.origin.windowOffset; + } + else { + + // this assumes that the parent of the tooltip is located at + // (0, 0) in the document, typically like when the parent is + // . + // If we ever allow other types of parent, .tooltipster-ruler + // will have to be appended to the parent to inherit css style + // values that affect the display of the text and such. 
+ originParentOffset = { + left: helper.geo.origin.windowOffset.left + helper.geo.window.scroll.left, + top: helper.geo.origin.windowOffset.top + helper.geo.window.scroll.top + }; + } + + finalResult.coord = { + left: originParentOffset.left + (finalResult.coord.left - helper.geo.origin.windowOffset.left), + top: originParentOffset.top + (finalResult.coord.top - helper.geo.origin.windowOffset.top) + }; + + // set position values on the original tooltip element + + self.__sideChange(self.__instance._$tooltip, finalResult.side); + + if (helper.geo.origin.fixedLineage) { + self.__instance._$tooltip + .css('position', 'fixed'); + } + else { + // CSS default + self.__instance._$tooltip + .css('position', ''); + } + + self.__instance._$tooltip + .css({ + left: finalResult.coord.left, + top: finalResult.coord.top, + // we need to set a size even if the tooltip is in its natural size + // because when the tooltip is positioned beyond the width of the body + // (which is by default the width of the window; it will happen when + // you scroll the window horizontally to get to the origin), its text + // content will otherwise break lines at each word to keep up with the + // body overflow strategy. + height: finalResult.size.height, + width: finalResult.size.width + }) + .find('.tooltipster-arrow') + .css({ + 'left': '', + 'top': '' + }) + .css(arrowCoord.prop, arrowCoord.val); + + // append the tooltip HTML element to its parent + self.__instance._$tooltip.appendTo(self.__instance.option('parent')); + + self.__instance._trigger({ + type: 'repositioned', + event: event, + position: finalResult + }); + }, + + /** + * Make whatever modifications are needed when the side is changed. This has + * been made an independant method for easy inheritance in custom plugins based + * on this default plugin. + * + * @param {object} $obj + * @param {string} side + * @private + */ + __sideChange: function($obj, side) { + + $obj + .removeClass('tooltipster-bottom') + .removeClass('tooltipster-left') + .removeClass('tooltipster-right') + .removeClass('tooltipster-top') + .addClass('tooltipster-'+ side); + }, + + /** + * Returns the target that the tooltip should aim at for a given side. + * The calculated value is a distance from the edge of the window + * (left edge for top/bottom sides, top edge for left/right side). The + * tooltip will be centered on that position and the arrow will be + * positioned there (as much as possible). + * + * @param {object} helper + * @return {integer} + * @private + */ + __targetFind: function(helper) { + + var target = {}, + rects = this.__instance._$origin[0].getClientRects(); + + // these lines fix a Chrome bug (issue #491) + if (rects.length > 1) { + var opacity = this.__instance._$origin.css('opacity'); + if(opacity == 1) { + this.__instance._$origin.css('opacity', 0.99); + rects = this.__instance._$origin[0].getClientRects(); + this.__instance._$origin.css('opacity', 1); + } + } + + // by default, the target will be the middle of the origin + if (rects.length < 2) { + + target.top = Math.floor(helper.geo.origin.windowOffset.left + (helper.geo.origin.size.width / 2)); + target.bottom = target.top; + + target.left = Math.floor(helper.geo.origin.windowOffset.top + (helper.geo.origin.size.height / 2)); + target.right = target.left; + } + // if multiple client rects exist, the element may be text split + // up into multiple lines and the middle of the origin may not be + // best option anymore. 
We need to choose the best target client rect + else { + + // top: the first + var targetRect = rects[0]; + target.top = Math.floor(targetRect.left + (targetRect.right - targetRect.left) / 2); + + // right: the middle line, rounded down in case there is an even + // number of lines (looks more centered => check out the + // demo with 4 split lines) + if (rects.length > 2) { + targetRect = rects[Math.ceil(rects.length / 2) - 1]; + } + else { + targetRect = rects[0]; + } + target.right = Math.floor(targetRect.top + (targetRect.bottom - targetRect.top) / 2); + + // bottom: the last + targetRect = rects[rects.length - 1]; + target.bottom = Math.floor(targetRect.left + (targetRect.right - targetRect.left) / 2); + + // left: the middle line, rounded up + if (rects.length > 2) { + targetRect = rects[Math.ceil((rects.length + 1) / 2) - 1]; + } + else { + targetRect = rects[rects.length - 1]; + } + + target.left = Math.floor(targetRect.top + (targetRect.bottom - targetRect.top) / 2); + } + + return target; + } + } +}); + +/* a build task will add "return $;" here */ +return $; + +})); + +// +// $(document).ready(function() { +// $('.tooltip-hovercard').tooltipster({ +// theme: 'tooltipster-shadow', +// animation: 'fade', +// delay: 100, +// contentCloning: true, +// +// }); \ No newline at end of file diff --git a/rhodecode/subscribers.py b/rhodecode/subscribers.py --- a/rhodecode/subscribers.py +++ b/rhodecode/subscribers.py @@ -95,8 +95,9 @@ def add_request_user_context(event): # skip api calls return - auth_user = get_auth_user(request) + auth_user, auth_token = get_auth_user(request) request.user = auth_user + request.user_auth_token = auth_token request.environ['rc_auth_user'] = auth_user request.environ['rc_auth_user_id'] = auth_user.user_id request.environ['rc_req_id'] = req_id diff --git a/rhodecode/templates/admin/admin_log_base.mako b/rhodecode/templates/admin/admin_log_base.mako --- a/rhodecode/templates/admin/admin_log_base.mako +++ b/rhodecode/templates/admin/admin_log_base.mako @@ -62,7 +62,7 @@
-${c.audit_logs.pager('$link_previous ~2~ $link_next')}
+${c.audit_logs.render()}
 %else:
     ${_('No actions yet')}
diff --git a/rhodecode/templates/admin/defaults/defaults.mako b/rhodecode/templates/admin/defaults/defaults.mako
--- a/rhodecode/templates/admin/defaults/defaults.mako
+++ b/rhodecode/templates/admin/defaults/defaults.mako
@@ -29,7 +29,7 @@