# HG changeset patch # User Marcin Kuzminski # Date 2016-06-12 18:04:20 # Node ID 40e6b177914bdce218c326e24ff75e92cba76616 # Parent 2b537e66787dee608717127eaf8c0545e96d88d9 # Parent 87bb625ca67bd0ffcf4de460aae81166bebfa26d Merge branch default into stable diff --git a/.bumpversion.cfg b/.bumpversion.cfg --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 4.0.1 +current_version = 4.1.0 message = release: Bump version {current_version} to {new_version} [bumpversion:file:rhodecode/VERSION] diff --git a/Gruntfile.js b/Gruntfile.js --- a/Gruntfile.js +++ b/Gruntfile.js @@ -27,12 +27,13 @@ module.exports = function(grunt) { '<%= dirs.js.src %>/plugins/jquery.auto-grow-input.js', '<%= dirs.js.src %>/plugins/jquery.autocomplete.js', '<%= dirs.js.src %>/plugins/jquery.debounce.js', + '<%= dirs.js.src %>/plugins/jquery.mark.js', '<%= dirs.js.src %>/plugins/jquery.timeago.js', '<%= dirs.js.src %>/plugins/jquery.timeago-extension.js', // Select2 '<%= dirs.js.src %>/select2/select2.js', - + // Code-mirror '<%= dirs.js.src %>/codemirror/codemirror.js', '<%= dirs.js.src %>/codemirror/codemirror_loadmode.js', @@ -59,7 +60,7 @@ module.exports = function(grunt) { '<%= dirs.js.src %>/rhodecode/widgets/multiselect.js', // Rhodecode components - '<%= dirs.js.src %>/rhodecode/pyroutes.js', + '<%= dirs.js.src %>/rhodecode/init.js', '<%= dirs.js.src %>/rhodecode/codemirror.js', '<%= dirs.js.src %>/rhodecode/comments.js', '<%= dirs.js.src %>/rhodecode/constants.js', diff --git a/configs/development.ini b/configs/development.ini --- a/configs/development.ini +++ b/configs/development.ini @@ -34,9 +34,10 @@ pdebug = false host = 127.0.0.1 port = 5000 -########################## -## WAITRESS WSGI SERVER ## -########################## +################################## +## WAITRESS WSGI SERVER ## +## Recommended for Development ## +################################## use = egg:waitress#main ## number of worker threads threads = 5 @@ -56,7 +57,7 @@ asyncore_use_poll = true ## when this option is set to more than one worker, recommended ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers ## The `instance_id = *` must be set in the [app:main] section below -#workers = 1 +#workers = 2 ## number of threads for each of the worker, must be set to 1 for gevent ## generally recommened to be at 1 #threads = 1 @@ -71,7 +72,7 @@ asyncore_use_poll = true ## restarted, could prevent memory leaks #max_requests = 1000 #max_requests_jitter = 30 -## ammount of time a worker can spend with handling a request before it +## amount of time a worker can spend with handling a request before it ## gets killed and restarted. Set to 6hrs #timeout = 21600 @@ -199,6 +200,21 @@ default_encoding = UTF-8 ## all running rhodecode instances. Leave empty if you don't use it instance_id = +## Fallback authentication plugin. Set this to a plugin ID to force the usage +## of an authentication plugin also if it is disabled by it's settings. +## This could be useful if you are unable to log in to the system due to broken +## authentication settings. Then you can enable e.g. the internal rhodecode auth +## module to log in again and fix the settings. 
+## +## Available builtin plugin IDs (hash is part of the ID): +## egg:rhodecode-enterprise-ce#rhodecode +## egg:rhodecode-enterprise-ce#pam +## egg:rhodecode-enterprise-ce#ldap +## egg:rhodecode-enterprise-ce#jasig_cas +## egg:rhodecode-enterprise-ce#headers +## egg:rhodecode-enterprise-ce#crowd +#rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode + ## alternative return HTTP header for failed authentication. Default HTTP ## response is 401 HTTPUnauthorized. Currently HG clients have troubles with ## handling that causing a series of failed authentication calls. @@ -316,7 +332,7 @@ beaker.cache.repo_cache_long.expire = 25 #################################### ## .session.type is type of storage options for the session, current allowed -## types are file, ext:memcached, ext:database, and memory(default). +## types are file, ext:memcached, ext:database, and memory (default). beaker.session.type = file beaker.session.data_dir = %(here)s/data/sessions/data @@ -356,12 +372,17 @@ beaker.session.auto = false ################################### ## SEARCH INDEXING CONFIGURATION ## ################################### +## Full text search indexer is available in rhodecode-tools under +## `rhodecode-tools index` command +# WHOOSH Backend, doesn't require additional services to run +# it works good with few dozen repos search.module = rhodecode.lib.index.whoosh search.location = %(here)s/data/index + ################################### -## ERROR AND LOG HANDLING SYSTEM ## +## APPENLIGHT CONFIG ## ################################### ## Appenlight is tailored to work with RhodeCode, see @@ -372,7 +393,7 @@ appenlight = false appenlight.server_url = https://api.appenlight.com appenlight.api_key = YOUR_API_KEY -;appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5 +#appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5 # used for JS client appenlight.api_public_key = YOUR_API_PUBLIC_KEY @@ -462,16 +483,26 @@ sqlalchemy.db1.convert_unicode = true ################## vcs.server.enable = true vcs.server = localhost:9900 -# Available protocols: pyro4, http -vcs.server.protocol = pyro4 -# available impl: -# vcsserver.scm_app (EE only, for testing), -# rhodecode.lib.middleware.utils.scm_app_http -# pyro4 +## Web server connectivity protocol, responsible for web based VCS operatations +## Available protocols are: +## `pyro4` - using pyro4 server +## `http` - using http-rpc backend +#vcs.server.protocol = http + +## Push/Pull operations protocol, available options are: +## `pyro4` - using pyro4 server +## `rhodecode.lib.middleware.utils.scm_app_http` - Http based, recommended +## `vcsserver.scm_app` - internal app (EE only) #vcs.scm_app_implementation = rhodecode.lib.middleware.utils.scm_app_http +## Push/Pull operations hooks protocol, available options are: +## `pyro4` - using pyro4 server +## `http` - using http-rpc backend +#vcs.hooks.protocol = http + vcs.server.log_level = debug +## Start VCSServer with this instance as a subprocess, usefull for development vcs.start_server = true vcs.backends = hg, git, svn vcs.connection_timeout = 3600 diff --git a/configs/production.ini b/configs/production.ini --- a/configs/production.ini +++ b/configs/production.ini @@ -34,46 +34,47 @@ pdebug = false host = 127.0.0.1 port = 5000 -########################## -## WAITRESS WSGI SERVER ## -########################## -use = egg:waitress#main +################################## +## WAITRESS WSGI SERVER ## +## Recommended for Development ## 
+################################## +#use = egg:waitress#main ## number of worker threads -threads = 5 +#threads = 5 ## MAX BODY SIZE 100GB -max_request_body_size = 107374182400 +#max_request_body_size = 107374182400 ## Use poll instead of select, fixes file descriptors limits problems. ## May not work on old windows systems. -asyncore_use_poll = true +#asyncore_use_poll = true ########################## ## GUNICORN WSGI SERVER ## ########################## ## run with gunicorn --log-config --paste -#use = egg:gunicorn#main +use = egg:gunicorn#main ## Sets the number of process workers. You must set `instance_id = *` ## when this option is set to more than one worker, recommended ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers ## The `instance_id = *` must be set in the [app:main] section below -#workers = 1 +workers = 2 ## number of threads for each of the worker, must be set to 1 for gevent ## generally recommened to be at 1 #threads = 1 ## process name -#proc_name = rhodecode +proc_name = rhodecode ## type of worker class, one of sync, gevent ## recommended for bigger setup is using of of other than sync one -#worker_class = sync +worker_class = sync ## The maximum number of simultaneous clients. Valid only for Gevent #worker_connections = 10 ## max number of requests that worker will handle before being gracefully ## restarted, could prevent memory leaks -#max_requests = 1000 -#max_requests_jitter = 30 -## ammount of time a worker can spend with handling a request before it +max_requests = 1000 +max_requests_jitter = 30 +## amount of time a worker can spend with handling a request before it ## gets killed and restarted. Set to 6hrs -#timeout = 21600 +timeout = 21600 ## prefix middleware for RhodeCode, disables force_https flag. @@ -173,6 +174,21 @@ default_encoding = UTF-8 ## all running rhodecode instances. Leave empty if you don't use it instance_id = +## Fallback authentication plugin. Set this to a plugin ID to force the usage +## of an authentication plugin also if it is disabled by it's settings. +## This could be useful if you are unable to log in to the system due to broken +## authentication settings. Then you can enable e.g. the internal rhodecode auth +## module to log in again and fix the settings. +## +## Available builtin plugin IDs (hash is part of the ID): +## egg:rhodecode-enterprise-ce#rhodecode +## egg:rhodecode-enterprise-ce#pam +## egg:rhodecode-enterprise-ce#ldap +## egg:rhodecode-enterprise-ce#jasig_cas +## egg:rhodecode-enterprise-ce#headers +## egg:rhodecode-enterprise-ce#crowd +#rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode + ## alternative return HTTP header for failed authentication. Default HTTP ## response is 401 HTTPUnauthorized. Currently HG clients have troubles with ## handling that causing a series of failed authentication calls. @@ -290,7 +306,7 @@ beaker.cache.repo_cache_long.expire = 25 #################################### ## .session.type is type of storage options for the session, current allowed -## types are file, ext:memcached, ext:database, and memory(default). +## types are file, ext:memcached, ext:database, and memory (default). beaker.session.type = file beaker.session.data_dir = %(here)s/data/sessions/data @@ -304,7 +320,7 @@ beaker.session.data_dir = %(here)s/data/ beaker.session.key = rhodecode beaker.session.secret = production-rc-uytcxaz -#beaker.session.lock_dir = %(here)s/data/sessions/lock +beaker.session.lock_dir = %(here)s/data/sessions/lock ## Secure encrypted cookie. 
Requires AES and AES python libraries ## you must disable beaker.session.secret to use this @@ -330,12 +346,17 @@ beaker.session.auto = false ################################### ## SEARCH INDEXING CONFIGURATION ## ################################### +## Full text search indexer is available in rhodecode-tools under +## `rhodecode-tools index` command +# WHOOSH Backend, doesn't require additional services to run +# it works good with few dozen repos search.module = rhodecode.lib.index.whoosh search.location = %(here)s/data/index + ################################### -## ERROR AND LOG HANDLING SYSTEM ## +## APPENLIGHT CONFIG ## ################################### ## Appenlight is tailored to work with RhodeCode, see @@ -346,7 +367,7 @@ appenlight = false appenlight.server_url = https://api.appenlight.com appenlight.api_key = YOUR_API_KEY -;appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5 +#appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5 # used for JS client appenlight.api_public_key = YOUR_API_PUBLIC_KEY @@ -401,11 +422,6 @@ appenlight.log_namespace_blacklist = set debug = false -############## -## STYLING ## -############## -debug_style = false - ######################################################### ### DB CONFIGS - EACH DB WILL HAVE IT'S OWN CONFIG ### ######################################################### @@ -436,16 +452,26 @@ sqlalchemy.db1.convert_unicode = true ################## vcs.server.enable = true vcs.server = localhost:9900 -# Available protocols: pyro4, http -vcs.server.protocol = pyro4 -# available impl: -# vcsserver.scm_app (EE only, for testing), -# rhodecode.lib.middleware.utils.scm_app_http -# pyro4 +## Web server connectivity protocol, responsible for web based VCS operatations +## Available protocols are: +## `pyro4` - using pyro4 server +## `http` - using http-rpc backend +#vcs.server.protocol = http + +## Push/Pull operations protocol, available options are: +## `pyro4` - using pyro4 server +## `rhodecode.lib.middleware.utils.scm_app_http` - Http based, recommended +## `vcsserver.scm_app` - internal app (EE only) #vcs.scm_app_implementation = rhodecode.lib.middleware.utils.scm_app_http +## Push/Pull operations hooks protocol, available options are: +## `pyro4` - using pyro4 server +## `http` - using http-rpc backend +#vcs.hooks.protocol = http + vcs.server.log_level = info +## Start VCSServer with this instance as a subprocess, usefull for development vcs.start_server = false vcs.backends = hg, git, svn vcs.connection_timeout = 3600 diff --git a/default.nix b/default.nix --- a/default.nix +++ b/default.nix @@ -85,7 +85,7 @@ let pythonLocalOverrides = self: super: { rhodecode-enterprise-ce = let - version = "${builtins.readFile ./rhodecode/VERSION}"; + version = builtins.readFile ./rhodecode/VERSION; linkNodeModules = '' echo "Link node packages" # TODO: check if this adds stuff as a dependency, closure size @@ -119,7 +119,9 @@ let # TODO: johbo: Make a nicer way to expose the parts. Maybe # pkgs/default.nix? passthru = { - inherit myPythonPackagesUnfix; + inherit + pythonLocalOverrides + myPythonPackagesUnfix; pythonPackages = self; }; @@ -160,6 +162,7 @@ let ln -s ${self.supervisor}/bin/supervisor* $out/bin/ ln -s ${self.gunicorn}/bin/gunicorn $out/bin/ ln -s ${self.PasteScript}/bin/paster $out/bin/ + ln -s ${self.pyramid}/bin/* $out/bin/ #*/ # rhodecode-tools # TODO: johbo: re-think this. Do the tools import anything from enterprise? 
@@ -169,6 +172,7 @@ let for file in $out/bin/*; do #*/ wrapProgram $file \ --prefix PYTHONPATH : $PYTHONPATH \ + --prefix PATH : $PATH \ --set PYTHONHASHSEED random done diff --git a/docs/admin/apache-reverse-proxy.rst b/docs/admin/apache-reverse-proxy.rst --- a/docs/admin/apache-reverse-proxy.rst +++ b/docs/admin/apache-reverse-proxy.rst @@ -9,24 +9,24 @@ Here is a sample configuration file for ServerName hg.myserver.com ServerAlias hg.myserver.com - ## uncomment root directive if you want to serve static files by nginx - ## requires static_files = false in .ini file - DocumentRoot /path/to/installation/rhodecode/public + ## uncomment root directive if you want to serve static files by + ## Apache requires static_files = false in .ini file + #DocumentRoot /path/to/rhodecode/installation/public Order allow,deny Allow from all - #important ! - #Directive to properly generate url (clone url) for pylons + ## Important ! + ## Directive to properly generate url (clone url) for pylons ProxyPreserveHost On - #rhodecode instance - ProxyPass / http://127.0.0.1:5000/ - ProxyPassReverse / http://127.0.0.1:5000/ + ## RhodeCode instance running + ProxyPass / http://127.0.0.1:10002/ + ProxyPassReverse / http://127.0.0.1:10002/ - #to enable https use line below + ## to enable https use line below #SetEnvIf X-Url-Scheme https HTTPS=1 diff --git a/docs/admin/indexing.rst b/docs/admin/indexing.rst --- a/docs/admin/indexing.rst +++ b/docs/admin/indexing.rst @@ -3,7 +3,15 @@ Full-text Search ---------------- -By default |RCM| uses `Whoosh`_ to index |repos| and provide full-text search. +By default |RC| is configured to use `Whoosh`_ to index |repos| and +provide full-text search. + +|RCE| also provides support for `Elasticsearch`_ as a backend for scalable +search. See :ref:`enable-elasticsearch` for details. + +Indexing +^^^^^^^^ + To run the indexer you need to use an |authtoken| with admin rights to all |repos|. @@ -232,4 +240,33 @@ use the following example :file:`mapping max_filesize = 800MB commit_parse_limit = 20000 +.. _enable-elasticsearch: + +Enabling Elasticsearch +^^^^^^^^^^^^^^^^^^^^^^ + +1. Open the :file:`rhodecode.ini` file for the instance you wish to edit. The + default location is + :file:`home/{user}/.rccontrol/{instance-id}/rhodecode.ini` +2. Find the search configuration section: + +.. code-block:: ini + + ################################### + ## SEARCH INDEXING CONFIGURATION ## + ################################### + + search.module = rhodecode.lib.index.whoosh + search.location = %(here)s/data/index + +and change it to: + +.. code-block:: ini + + search.module = rc_elasticsearch + search.location = http://localhost:9200/ + +where ``search.location`` points to the elasticsearch server. + .. _Whoosh: https://pypi.python.org/pypi/Whoosh/ +.. 
_Elasticsearch: https://www.elastic.co/ \ No newline at end of file diff --git a/docs/admin/nginx-config-example.rst b/docs/admin/nginx-config-example.rst --- a/docs/admin/nginx-config-example.rst +++ b/docs/admin/nginx-config-example.rst @@ -7,11 +7,11 @@ Use the following example to configure N upstream rc { - server 127.0.0.1:5000; + server 127.0.0.1:10002; # add more instances for load balancing - # server 127.0.0.1:5001; - # server 127.0.0.1:5002; + # server 127.0.0.1:10003; + # server 127.0.0.1:10004; } ## gist alias @@ -58,14 +58,15 @@ Use the following example to configure N ## uncomment root directive if you want to serve static files by nginx ## requires static_files = false in .ini file - # root /path/to/installation/rhodecode/public; + # root /path/to/rhodecode/installation/public; include /etc/nginx/proxy.conf; - location / { - try_files $uri @rhode; - } + + location / { + try_files $uri @rhode; + } location @rhode { - proxy_pass http://rc; - } + proxy_pass http://rc; + } } diff --git a/docs/admin/system-overview.rst b/docs/admin/system-overview.rst --- a/docs/admin/system-overview.rst +++ b/docs/admin/system-overview.rst @@ -64,6 +64,14 @@ performance is more important than CPU p environment handling 1000s of users and |repos| you should deploy on a 12+ core 64GB RAM server. In short, the more RAM the better. + +For example: + + - for team of 1 - 5 active users you can run on 1GB RAM machine with 1CPU + - above 250 active users, |RCM| needs at least 8GB of memory. + Number of CPUs is less important, but recommended to have at least 2-3 CPUs + + .. _config-rce-files: Configuration Files diff --git a/docs/common.py b/docs/common.py --- a/docs/common.py +++ b/docs/common.py @@ -23,6 +23,8 @@ rst_epilog = ''' .. |RCV| replace:: RhodeCode Enterprise .. |RCM| replace:: RhodeCode Enterprise .. |RCE| replace:: RhodeCode Enterprise +.. |RCCE| replace:: RhodeCode Community +.. |RCEE| replace:: RhodeCode Enterprise .. |RCX| replace:: RhodeCode Extensions .. |RCT| replace:: RhodeCode Tools .. |RCEBOLD| replace:: **RhodeCode Enterprise** diff --git a/docs/install/database-string.rst b/docs/install/database-string.rst --- a/docs/install/database-string.rst +++ b/docs/install/database-string.rst @@ -5,12 +5,12 @@ Make Database Changes .. important:: - If you do change the |repo| database that |RCM| uses, then you will need to + If you do change the |repo| database that |RCEE| uses, then you will need to upgrade the database, and also remap and rescan the |repos|. More detailed information is available in the :ref:`Alternative upgrade documentation `. -If you need to change database connection details for a |RCM| instance, +If you need to change database connection details for a |RCEE| instance, use the following steps: 1. Open the :file:`rhodecode.ini` file for the instance you wish to edit. The diff --git a/docs/install/quick-start.rst b/docs/install/quick-start.rst --- a/docs/install/quick-start.rst +++ b/docs/install/quick-start.rst @@ -17,10 +17,12 @@ Quick Start Guide credentials during |RCE| installation. See the relevant database documentation for more details. -To get |RCM| up and running, run through the below steps: +To get |RCE| up and running, run through the below steps: 1. Download the latest |RCC| installer from your `rhodecode.com`_ profile - page. If you don't have an account, sign up at `rhodecode.com/register`_. + or main page. + If you don't have an account, sign up at `rhodecode.com/register`_. + 2. 
Run the |RCC| installer and accept the End User Licence using the following example: @@ -45,13 +47,18 @@ 3. Install a VCS Server, and configure i Added process group vcsserver-1 -4. Install |RCE|. If using MySQL or PostgreSQL, during installation you'll be - asked for your database credentials, so have them at hand. You don't need - any for SQLite. +4. Install |RCEE| or |RCCE|. If using MySQL or PostgreSQL, during + installation you'll be asked for your database credentials, so have them at hand. + Mysql or Postgres needs to be running and a new database needs to be created. + You don't need any credentials or to create a database for SQLite. .. code-block:: bash :emphasize-lines: 11-16 + $ rccontrol install Community + + or + $ rccontrol install Enterprise Username [admin]: username @@ -69,8 +76,8 @@ 4. Install |RCE|. If using MySQL or Post Database password: somepassword Database name: example-db-name -5. Check the status of your installation. You |RCE| instance runs on the URL - displayed in the status message. +5. Check the status of your installation. You |RCEE|/|RCCE| instance runs + on the URL displayed in the status message. .. code-block:: bash @@ -79,13 +86,13 @@ 5. Check the status of your installation - NAME: enterprise-1 - STATUS: RUNNING - TYPE: Enterprise - - VERSION: 3.3.0 + - VERSION: 4.1.0 - URL: http://127.0.0.1:10003 - NAME: vcsserver-1 - STATUS: RUNNING - TYPE: VCSServer - - VERSION: 3.3.0 + - VERSION: 4.1.0 - URL: http://127.0.0.1:10001 .. note:: diff --git a/docs/release-notes/release-notes-4.0.0.rst b/docs/release-notes/release-notes-4.0.0.rst --- a/docs/release-notes/release-notes-4.0.0.rst +++ b/docs/release-notes/release-notes-4.0.0.rst @@ -37,6 +37,10 @@ New Features Github, Twitter, Bitbucket and Google. It's possible now to use your Google account to log in to RhodeCode and take advantage of things like 2FA. +- Search: full text search now properly orders commits by date, and shows line + numbers for file content search. + + Security ^^^^^^^^ @@ -46,8 +50,10 @@ Security Performance ^^^^^^^^^^^ -- Optimized admin pannels to faster load large ammount of data +- Optimized admin panels to faster load large amount of data - Improved file tree loading speed +- New HTTP backend is ~10% faster, and doesn't require so many threads + for vcsserver Fixes diff --git a/docs/release-notes/release-notes-4.1.0.rst b/docs/release-notes/release-notes-4.1.0.rst new file mode 100644 --- /dev/null +++ b/docs/release-notes/release-notes-4.1.0.rst @@ -0,0 +1,55 @@ +|RCE| 4.1.0 |RNS| +----------------- + +Release Date +^^^^^^^^^^^^ + +- 2016-06-XX + +General +^^^^^^^ + +- Migrated more views to Pyramid. Those now include login, social plugins, search +- Started Implementing Pyramid Events system in exchange to rcextensions callbacks +- JS routes assets are now generated in development mode automatically +- ini: Add fallback authentication plugin setting. In case only one + authentication backend is enabled users can now enable fallback auth if + they cannot log-in due to external servers being down +- Bumped Mercurial to 3.8.3 version +- Bumped RhodeCode Tools to 0.8.3 version + +New Features +^^^^^^^^^^^^ + +- search: add syntax highlighting, line numbers and line context to file + content search results +- Go To switcher now searches commit hashes as well +- Token based authentication is now in CE edition as well +- User groups: added autocomplete widget to be able to select members of + other group as part of current group. 
+ +Security +^^^^^^^^ + +- Added new action loggers for actions like adding/revoking permissions. +- permissions: show origin of permissions in permissions summary. Allows users + to see where and how permissions are inherited + +Performance +^^^^^^^^^^^ + + + +Fixes +^^^^^ + +- api: gracefully handle errors on repos that are damaged or missing + from filesystem. +- logging: log the original error when a merge failure occurs +- #3965 Cannot change the owner of a user's group by using the API +- database is now initialized inside pyramid context +- fixed wrong check on LDAP plugin about missing ldap server +- Bring back multi-threaded workers to gunicorn for backward compatibility with + previous RhodeCode versions +- Commit dates are now properly handled as UTC. This fixes some issues + with displaying age of commits \ No newline at end of file diff --git a/docs/release-notes/release-notes.rst b/docs/release-notes/release-notes.rst --- a/docs/release-notes/release-notes.rst +++ b/docs/release-notes/release-notes.rst @@ -6,6 +6,10 @@ Release Notes |RCE| 4.x Versions ------------------ +.. toctree:: + :maxdepth: 1 + + release-notes-4.1.0.rst release-notes-4.0.1.rst release-notes-4.0.0.rst diff --git a/pkgs/patch-rhodecode-tools-setup.diff b/pkgs/patch-rhodecode-tools-setup.diff --- a/pkgs/patch-rhodecode-tools-setup.diff +++ b/pkgs/patch-rhodecode-tools-setup.diff @@ -1,13 +1,12 @@ diff --git a/requirements.txt b/requirements.txt --- a/requirements.txt +++ b/requirements.txt -@@ -1,8 +1,8 @@ - click==5.1 - future==0.14.3 +@@ -3,7 +3,7 @@future==0.14.3 six==1.9.0 mako==1.0.1 markupsafe==0.23 -requests==2.5.1 +requests + #responses whoosh==2.7.0 - pyelasticsearch==1.4 + elasticsearch==2.3.0 \ No newline at end of file diff --git a/pkgs/python-packages-overrides.nix b/pkgs/python-packages-overrides.nix --- a/pkgs/python-packages-overrides.nix +++ b/pkgs/python-packages-overrides.nix @@ -21,6 +21,20 @@ self: super: { ''; }); + gunicorn = super.gunicorn.override (attrs: { + propagatedBuildInputs = attrs.propagatedBuildInputs ++ [ + # johbo: futures is needed as long as we are on Python 2, otherwise + # gunicorn explodes if used with multiple threads per worker. + self.futures + ]; + }); + + ipython = super.ipython.override (attrs: { + propagatedBuildInputs = attrs.propagatedBuildInputs ++ [ + self.gnureadline + ]; + }); + kombu = super.kombu.override (attrs: { # The current version of kombu needs some patching to work with the # other libs. Should be removed once we update celery and kombu. 
diff --git a/pkgs/python-packages.nix b/pkgs/python-packages.nix --- a/pkgs/python-packages.nix +++ b/pkgs/python-packages.nix @@ -359,16 +359,6 @@ md5 = "898bc87e54f278055b561316ba73e222"; }; }; - certifi = super.buildPythonPackage { - name = "certifi-2016.2.28"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; []; - src = fetchurl { - url = "https://pypi.python.org/packages/5c/f8/f6c54727c74579c6bbe5926f5deb9677c5810a33e11da58d1a4e2d09d041/certifi-2016.2.28.tar.gz"; - md5 = "5d672aa766e1f773c75cfeccd02d3650"; - }; - }; click = super.buildPythonPackage { name = "click-5.1"; buildInputs = with self; []; @@ -490,13 +480,23 @@ }; }; elasticsearch = super.buildPythonPackage { - name = "elasticsearch-1.9.0"; + name = "elasticsearch-2.3.0"; buildInputs = with self; []; doCheck = false; propagatedBuildInputs = with self; [urllib3]; src = fetchurl { - url = "https://pypi.python.org/packages/13/9b/540e311b31a10c2a904acfb08030c656047e5c7ba479d35df2799e5dccfe/elasticsearch-1.9.0.tar.gz"; - md5 = "3550390baea1639479f79758d66ab032"; + url = "https://pypi.python.org/packages/10/35/5fd52c5f0b0ee405ed4b5195e8bce44c5e041787680dc7b94b8071cac600/elasticsearch-2.3.0.tar.gz"; + md5 = "2550f3b51629cf1ef9636608af92c340"; + }; + }; + elasticsearch-dsl = super.buildPythonPackage { + name = "elasticsearch-dsl-2.0.0"; + buildInputs = with self; []; + doCheck = false; + propagatedBuildInputs = with self; [six python-dateutil elasticsearch]; + src = fetchurl { + url = "https://pypi.python.org/packages/4e/5d/e788ae8dbe2ff4d13426db0a027533386a5c276c77a2654dc0e2007ce04a/elasticsearch-dsl-2.0.0.tar.gz"; + md5 = "4cdfec81bb35383dd3b7d02d7dc5ee68"; }; }; flake8 = super.buildPythonPackage { @@ -540,7 +540,7 @@ }; }; gprof2dot = super.buildPythonPackage { - name = "gprof2dot-2015.12.1"; + name = "gprof2dot-2015.12.01"; buildInputs = with self; []; doCheck = false; propagatedBuildInputs = with self; []; @@ -550,13 +550,13 @@ }; }; greenlet = super.buildPythonPackage { - name = "greenlet-0.4.7"; + name = "greenlet-0.4.9"; buildInputs = with self; []; doCheck = false; propagatedBuildInputs = with self; []; src = fetchurl { - url = "https://pypi.python.org/packages/7a/9f/a1a0d9bdf3203ae1502c5a8434fe89d323599d78a106985bc327351a69d4/greenlet-0.4.7.zip"; - md5 = "c2333a8ff30fa75c5d5ec0e67b461086"; + url = "https://pypi.python.org/packages/4e/3d/9d421539b74e33608b245092870156b2e171fb49f2b51390aa4641eecb4a/greenlet-0.4.9.zip"; + md5 = "c6659cdb2a5e591723e629d2eef22e82"; }; }; gunicorn = super.buildPythonPackage { @@ -603,7 +603,7 @@ name = "ipython-3.1.0"; buildInputs = with self; []; doCheck = false; - propagatedBuildInputs = with self; [gnureadline]; + propagatedBuildInputs = with self; []; src = fetchurl { url = "https://pypi.python.org/packages/06/91/120c0835254c120af89f066afaabf81289bc2726c1fc3ca0555df6882f58/ipython-3.1.0.tar.gz"; md5 = "a749d90c16068687b0ec45a27e72ef8f"; @@ -799,16 +799,6 @@ md5 = "47b4eac84118e2606658122104e62072"; }; }; - pyelasticsearch = super.buildPythonPackage { - name = "pyelasticsearch-1.4"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; [certifi elasticsearch urllib3 simplejson six]; - src = fetchurl { - url = "https://pypi.python.org/packages/2f/3a/7643cfcfc4cbdbb20ada800bbd54ac9705d0c047d7b8f8d5eeeb3047b4eb/pyelasticsearch-1.4.tar.gz"; - md5 = "ed61ebb7b253364e55b4923d11e17049"; - }; - }; pyflakes = super.buildPythonPackage { name = "pyflakes-0.8.1"; buildInputs = with self; []; @@ -1050,20 +1040,20 @@ }; }; 
rhodecode-enterprise-ce = super.buildPythonPackage { - name = "rhodecode-enterprise-ce-4.0.1"; + name = "rhodecode-enterprise-ce-4.1.0"; buildInputs = with self; [WebTest configobj cssselect flake8 lxml mock pytest pytest-cov pytest-runner]; doCheck = true; propagatedBuildInputs = with self; [Babel Beaker FormEncode Mako Markdown MarkupSafe MySQL-python Paste PasteDeploy PasteScript Pygments Pylons Pyro4 Routes SQLAlchemy Tempita URLObject WebError WebHelpers WebHelpers2 WebOb WebTest Whoosh alembic amqplib anyjson appenlight-client authomatic backport-ipaddress celery colander decorator docutils gunicorn infrae.cache ipython iso8601 kombu msgpack-python packaging psycopg2 pycrypto pycurl pyparsing pyramid pyramid-debugtoolbar pyramid-mako pyramid-beaker pysqlite python-dateutil python-ldap python-memcached python-pam recaptcha-client repoze.lru requests simplejson waitress zope.cachedescriptors psutil py-bcrypt]; src = ./.; }; rhodecode-tools = super.buildPythonPackage { - name = "rhodecode-tools-0.7.1"; + name = "rhodecode-tools-0.8.3"; buildInputs = with self; []; doCheck = false; - propagatedBuildInputs = with self; [click future six Mako MarkupSafe requests Whoosh pyelasticsearch]; + propagatedBuildInputs = with self; [click future six Mako MarkupSafe requests Whoosh elasticsearch elasticsearch-dsl]; src = fetchurl { - url = "https://code.rhodecode.com/rhodecode-tools-ce/archive/v0.7.1.zip"; - md5 = "91daea803aaa264ce7a8213bc2220d4c"; + url = "https://code.rhodecode.com/rhodecode-tools-ce/archive/v0.8.3.zip"; + md5 = "9acdfd71b8ddf4056057065f37ab9ccb"; }; }; serpent = super.buildPythonPackage { diff --git a/requirements.txt b/requirements.txt --- a/requirements.txt +++ b/requirements.txt @@ -11,7 +11,6 @@ MySQL-python==1.2.5 Paste==2.0.2 PasteDeploy==1.5.2 PasteScript==1.7.5 -pyelasticsearch==1.4 Pygments==2.0.2 # TODO: This version is not available on PyPI @@ -70,13 +69,14 @@ flake8==2.4.1 future==0.14.3 futures==3.0.2 gprof2dot==2015.12.1 -greenlet==0.4.7 +greenlet==0.4.9 gunicorn==19.6.0 # TODO: Needs subvertpy and blows up without Subversion headers, # actually we should not need this for Enterprise at all. 
# hgsubversion==1.8.2 +gnureadline==6.3.3 infrae.cache==1.0.1 invoke==0.11.1 ipdb==0.8 @@ -124,7 +124,7 @@ pyzmq==14.6.0 # TODO: This is not available in public # rc-testdata==0.2.0 -https://code.rhodecode.com/rhodecode-tools-ce/archive/v0.7.1.zip#md5=91daea803aaa264ce7a8213bc2220d4c +https://code.rhodecode.com/rhodecode-tools-ce/archive/v0.8.3.zip#md5=9acdfd71b8ddf4056057065f37ab9ccb recaptcha-client==1.0.6 diff --git a/rhodecode/VERSION b/rhodecode/VERSION --- a/rhodecode/VERSION +++ b/rhodecode/VERSION @@ -1,1 +1,1 @@ -4.0.1 \ No newline at end of file +4.1.0 \ No newline at end of file diff --git a/rhodecode/__init__.py b/rhodecode/__init__.py --- a/rhodecode/__init__.py +++ b/rhodecode/__init__.py @@ -47,7 +47,7 @@ CONFIG = {} EXTENSIONS = {} __version__ = ('.'.join((str(each) for each in VERSION[:3]))) -__dbversion__ = 51 # defines current db version for migrations +__dbversion__ = 54 # defines current db version for migrations __platform__ = platform.system() __license__ = 'AGPLv3, and Commercial License' __author__ = 'RhodeCode GmbH' diff --git a/rhodecode/api/tests/test_update_repo.py b/rhodecode/api/tests/test_update_repo.py --- a/rhodecode/api/tests/test_update_repo.py +++ b/rhodecode/api/tests/test_update_repo.py @@ -24,67 +24,81 @@ import pytest from rhodecode.model.repo import RepoModel from rhodecode.tests import TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN from rhodecode.api.tests.utils import ( - build_data, api_call, assert_error, assert_ok, crash) + build_data, api_call, assert_error, assert_ok, crash, jsonify) from rhodecode.tests.fixture import Fixture fixture = Fixture() +UPDATE_REPO_NAME = 'api_update_me' + +class SAME_AS_UPDATES(object): """ Constant used for tests below """ @pytest.mark.usefixtures("testuser_api", "app") class TestApiUpdateRepo(object): - @pytest.mark.parametrize("changing_attr, updates", [ - ('owner', {'owner': TEST_USER_REGULAR_LOGIN}), - ('description', {'description': 'new description'}), - ('active', {'active': True}), - ('active', {'active': False}), - ('clone_uri', {'clone_uri': 'http://foo.com/repo'}), - ('clone_uri', {'clone_uri': None}), - ('landing_rev', {'landing_rev': 'branch:master'}), - ('enable_statistics', {'enable_statistics': True}), - ('enable_locking', {'enable_locking': True}), - ('enable_downloads', {'enable_downloads': True}), - ('name', {'name': 'new_repo_name'}), - ('repo_group', {'group': 'test_group_for_update'}), + + @pytest.mark.parametrize("updates, expected", [ + ({'owner': TEST_USER_REGULAR_LOGIN}, SAME_AS_UPDATES), + ({'description': 'new description'}, SAME_AS_UPDATES), + ({'clone_uri': 'http://foo.com/repo'}, SAME_AS_UPDATES), + ({'clone_uri': None}, {'clone_uri': ''}), + ({'clone_uri': ''}, {'clone_uri': ''}), + ({'landing_rev': 'branch:master'}, {'landing_rev': ['branch','master']}), + ({'enable_statistics': True}, SAME_AS_UPDATES), + ({'enable_locking': True}, SAME_AS_UPDATES), + ({'enable_downloads': True}, SAME_AS_UPDATES), + ({'name': 'new_repo_name'}, {'repo_name': 'new_repo_name'}), + ({'group': 'test_group_for_update'}, + {'repo_name': 'test_group_for_update/%s' % UPDATE_REPO_NAME}), ]) - def test_api_update_repo(self, changing_attr, updates, backend): - repo_name = 'api_update_me' + def test_api_update_repo(self, updates, expected, backend): + repo_name = UPDATE_REPO_NAME repo = fixture.create_repo(repo_name, repo_type=backend.alias) - if changing_attr == 'repo_group': + if updates.get('group'): fixture.create_repo_group(updates['group']) + expected_api_data = repo.get_api_data(include_secrets=True) + 
if expected is SAME_AS_UPDATES: + expected_api_data.update(updates) + else: + expected_api_data.update(expected) + + id_, params = build_data( self.apikey, 'update_repo', repoid=repo_name, **updates) response = api_call(self.app, params) - if changing_attr == 'name': + + if updates.get('name'): repo_name = updates['name'] - if changing_attr == 'repo_group': + if updates.get('group'): repo_name = '/'.join([updates['group'], repo_name]) + try: expected = { 'msg': 'updated repo ID:%s %s' % (repo.repo_id, repo_name), - 'repository': repo.get_api_data(include_secrets=True) + 'repository': jsonify(expected_api_data) } assert_ok(id_, expected, given=response.body) finally: fixture.destroy_repo(repo_name) - if changing_attr == 'repo_group': - + if updates.get('group'): fixture.destroy_repo_group(updates['group']) def test_api_update_repo_fork_of_field(self, backend): master_repo = backend.create_repo() repo = backend.create_repo() - updates = { 'fork_of': master_repo.repo_name } + expected_api_data = repo.get_api_data(include_secrets=True) + expected_api_data.update(updates) + id_, params = build_data( self.apikey, 'update_repo', repoid=repo.repo_name, **updates) response = api_call(self.app, params) expected = { 'msg': 'updated repo ID:%s %s' % (repo.repo_id, repo.repo_name), - 'repository': repo.get_api_data(include_secrets=True) + 'repository': jsonify(expected_api_data) } assert_ok(id_, expected, given=response.body) result = response.json['result']['repository'] @@ -131,7 +145,7 @@ class TestApiUpdateRepo(object): @mock.patch.object(RepoModel, 'update', crash) def test_api_update_repo_exception_occurred(self, backend): - repo_name = 'api_update_me' + repo_name = UPDATE_REPO_NAME fixture.create_repo(repo_name, repo_type=backend.alias) id_, params = build_data( self.apikey, 'update_repo', repoid=repo_name, diff --git a/rhodecode/api/tests/test_update_user_group.py b/rhodecode/api/tests/test_update_user_group.py --- a/rhodecode/api/tests/test_update_user_group.py +++ b/rhodecode/api/tests/test_update_user_group.py @@ -25,7 +25,7 @@ from rhodecode.model.user import UserMod from rhodecode.model.user_group import UserGroupModel from rhodecode.tests import TEST_USER_REGULAR_LOGIN from rhodecode.api.tests.utils import ( - build_data, api_call, assert_error, assert_ok, crash) + build_data, api_call, assert_error, assert_ok, crash, jsonify) @pytest.mark.usefixtures("testuser_api", "app") @@ -40,14 +40,18 @@ class TestUpdateUserGroup(object): def test_api_update_user_group(self, changing_attr, updates, user_util): user_group = user_util.create_user_group() group_name = user_group.users_group_name + expected_api_data = user_group.get_api_data() + expected_api_data.update(updates) + id_, params = build_data( self.apikey, 'update_user_group', usergroupid=group_name, **updates) response = api_call(self.app, params) + expected = { 'msg': 'updated user group ID:%s %s' % ( user_group.users_group_id, user_group.users_group_name), - 'user_group': user_group.get_api_data() + 'user_group': jsonify(expected_api_data) } assert_ok(id_, expected, given=response.body) @@ -63,6 +67,10 @@ class TestUpdateUserGroup(object): self, changing_attr, updates, user_util): user_group = user_util.create_user_group() group_name = user_group.users_group_name + expected_api_data = user_group.get_api_data() + expected_api_data.update(updates) + + # grant permission to this user user = UserModel().get_by_username(self.TEST_USER_LOGIN) @@ -75,7 +83,7 @@ class TestUpdateUserGroup(object): expected = { 'msg': 'updated user group ID:%s %s' 
% ( user_group.users_group_id, user_group.users_group_name), - 'user_group': user_group.get_api_data() + 'user_group': jsonify(expected_api_data) } assert_ok(id_, expected, given=response.body) diff --git a/rhodecode/api/views/repo_api.py b/rhodecode/api/views/repo_api.py --- a/rhodecode/api/views/repo_api.py +++ b/rhodecode/api/views/repo_api.py @@ -323,7 +323,7 @@ def get_repo_changeset(request, apiuser, def get_repo_changesets(request, apiuser, repoid, start_rev, limit, details=Optional('basic')): """ - Returns a set of changesets limited by the number of commits starting + Returns a set of commits limited by the number starting from the `start_rev` option. Additional parameters define the amount of details returned by this @@ -338,7 +338,7 @@ def get_repo_changesets(request, apiuser :type repoid: str or int :param start_rev: The starting revision from where to get changesets. :type start_rev: str - :param limit: Limit the number of changesets to this amount + :param limit: Limit the number of commits to this amount :type limit: str or int :param details: Set the level of detail returned. Valid option are: ``basic``, ``extended`` and ``full``. @@ -370,14 +370,17 @@ def get_repo_changesets(request, apiuser vcs_repo = repo.scm_instance() # SVN needs a special case to distinguish its index and commit id - if vcs_repo.alias == 'svn' and (start_rev == '0'): + if vcs_repo and vcs_repo.alias == 'svn' and (start_rev == '0'): start_rev = vcs_repo.commit_ids[0] try: - commits = repo.scm_instance().get_commits( + commits = vcs_repo.get_commits( start_id=start_rev, pre_load=pre_load) except TypeError as e: raise JSONRPCError(e.message) + except Exception: + log.exception('Fetching of commits failed') + raise JSONRPCError('Error occurred during commit fetching') ret = [] for cnt, commit in enumerate(commits): diff --git a/rhodecode/authentication/__init__.py b/rhodecode/authentication/__init__.py --- a/rhodecode/authentication/__init__.py +++ b/rhodecode/authentication/__init__.py @@ -19,6 +19,7 @@ # and proprietary license terms, please see https://rhodecode.com/licenses/ import logging +import importlib from pkg_resources import iter_entry_points from pyramid.authentication import SessionAuthenticationPolicy @@ -27,9 +28,15 @@ from rhodecode.authentication.registry i from rhodecode.authentication.routes import root_factory from rhodecode.authentication.routes import AuthnRootResource from rhodecode.config.routing import ADMIN_PREFIX +from rhodecode.model.settings import SettingsModel + log = logging.getLogger(__name__) +# Plugin ID prefixes to distinct between normal and legacy plugins. +plugin_prefix = 'egg:' +legacy_plugin_prefix = 'py:' + # TODO: Currently this is only used to discover the authentication plugins. # Later on this may be used in a generic way to look up and include all kinds @@ -38,16 +45,45 @@ log = logging.getLogger(__name__) # TODO: When refactoring this think about splitting it up into distinct # discover, load and include phases. 
def _discover_plugins(config, entry_point='enterprise.plugins1'): - _discovered_plugins = {} - for ep in iter_entry_points(entry_point): - plugin_id = 'egg:{}#{}'.format(ep.dist.project_name, ep.name) + plugin_id = '{}{}#{}'.format( + plugin_prefix, ep.dist.project_name, ep.name) log.debug('Plugin discovered: "%s"', plugin_id) - module = ep.load() - plugin = module(plugin_id=plugin_id) - config.include(plugin.includeme) + try: + module = ep.load() + plugin = module(plugin_id=plugin_id) + config.include(plugin.includeme) + except Exception as e: + log.exception( + 'Exception while loading authentication plugin ' + '"{}": {}'.format(plugin_id, e.message)) + + +def _import_legacy_plugin(plugin_id): + module_name = plugin_id.split(legacy_plugin_prefix, 1)[-1] + module = importlib.import_module(module_name) + return module.plugin_factory(plugin_id=plugin_id) + - return _discovered_plugins +def _discover_legacy_plugins(config, prefix=legacy_plugin_prefix): + """ + Function that imports the legacy plugins stored in the 'auth_plugins' + setting in database which are using the specified prefix. Normally 'py:' is + used for the legacy plugins. + """ + auth_plugins = SettingsModel().get_setting_by_name('auth_plugins') + enabled_plugins = auth_plugins.app_settings_value + legacy_plugins = [id_ for id_ in enabled_plugins if id_.startswith(prefix)] + + for plugin_id in legacy_plugins: + log.debug('Legacy plugin discovered: "%s"', plugin_id) + try: + plugin = _import_legacy_plugin(plugin_id) + config.include(plugin.includeme) + except Exception as e: + log.exception( + 'Exception while loading legacy authentication plugin ' + '"{}": {}'.format(plugin_id, e.message)) def includeme(config): @@ -56,7 +92,7 @@ def includeme(config): config.set_authentication_policy(authn_policy) # Create authentication plugin registry and add it to the pyramid registry. - authn_registry = AuthenticationPluginRegistry() + authn_registry = AuthenticationPluginRegistry(config.get_settings()) config.add_directive('add_authn_plugin', authn_registry.add_authn_plugin) config.registry.registerUtility(authn_registry) @@ -83,3 +119,4 @@ def includeme(config): # Auto discover authentication plugins and include their configuration. 
_discover_plugins(config) + _discover_legacy_plugins(config) diff --git a/rhodecode/authentication/base.py b/rhodecode/authentication/base.py --- a/rhodecode/authentication/base.py +++ b/rhodecode/authentication/base.py @@ -25,24 +25,18 @@ Authentication modules import logging import time import traceback +import warnings -from authomatic import Authomatic -from authomatic.adapters import WebObAdapter -from authomatic.providers import oauth2, oauth1 -from pylons import url -from pylons.controllers.util import Response -from pylons.i18n.translation import _ from pyramid.threadlocal import get_current_registry from sqlalchemy.ext.hybrid import hybrid_property -import rhodecode.lib.helpers as h from rhodecode.authentication.interface import IAuthnPluginRegistry from rhodecode.authentication.schema import AuthnPluginSettingsSchemaBase from rhodecode.lib import caches from rhodecode.lib.auth import PasswordGenerator, _RhodeCodeCryptoBCrypt from rhodecode.lib.utils2 import md5_safe, safe_int from rhodecode.lib.utils2 import safe_str -from rhodecode.model.db import User, ExternalIdentity +from rhodecode.model.db import User from rhodecode.model.meta import Session from rhodecode.model.settings import SettingsModel from rhodecode.model.user import UserModel @@ -226,17 +220,23 @@ class RhodeCodeAuthPluginBase(object): """ raise NotImplementedError("Not implemented in base class") + @property + def is_headers_auth(self): + """ + Returns True if this authentication plugin uses HTTP headers as + authentication method. + """ + return False + @hybrid_property def is_container_auth(self): """ - Returns bool if this module uses container auth. - - This property will trigger an automatic call to authenticate on - a visit to the website or during a push/pull. - - :returns: bool + Deprecated method that indicates if this authentication plugin uses + HTTP headers as authentication method. """ - return False + warnings.warn( + 'Use is_headers_auth instead.', category=DeprecationWarning) + return self.is_headers_auth @hybrid_property def allows_creating_users(self): @@ -299,7 +299,7 @@ class RhodeCodeAuthPluginBase(object): """ Helper method for user fetching in plugins, by default it's using simple fetch by username, but this method can be custimized in plugins - eg. container auth plugin to fetch user by environ params + eg. headers auth plugin to fetch user by environ params :param username: username if given to fetch from database :param kwargs: extra arguments needed for user fetching. @@ -477,131 +477,11 @@ class RhodeCodeExternalAuthPlugin(RhodeC return auth -class AuthomaticBase(RhodeCodeExternalAuthPlugin): - - # TODO: Think about how to create and store this secret string. - # We need the secret for the authomatic library. It needs to be the same - # across requests. 
- def _get_authomatic_secret(self, length=40): - secret = self.get_setting_by_name('secret') - if secret is None or secret == 'None' or secret == '': - from Crypto import Random, Hash - secret_bytes = Random.new().read(length) - secret_hash = Hash.SHA256.new() - secret_hash.update(secret_bytes) - secret = secret_hash.hexdigest() - self.create_or_update_setting('secret', secret) - Session.commit() - secret = self.get_setting_by_name('secret') - return secret - - def get_authomatic(self): - scope = [] - if self.name == 'bitbucket': - provider_class = oauth1.Bitbucket - scope = ['account', 'email', 'repository', 'issue', 'issue:write'] - elif self.name == 'github': - provider_class = oauth2.GitHub - scope = ['repo', 'public_repo', 'user:email'] - elif self.name == 'google': - provider_class = oauth2.Google - scope = ['profile', 'email'] - elif self.name == 'twitter': - provider_class = oauth1.Twitter - - authomatic_conf = { - self.name: { - 'class_': provider_class, - 'consumer_key': self.get_setting_by_name('consumer_key'), - 'consumer_secret': self.get_setting_by_name('consumer_secret'), - 'scope': scope, - 'access_headers': {'User-Agent': 'TestAppAgent'}, - } - } - secret = self._get_authomatic_secret() - return Authomatic(config=authomatic_conf, - secret=secret) - - def get_provider_result(self, request): - """ - Provides `authomatic.core.LoginResult` for provider and request - - :param provider_name: - :param request: - :param config: - :return: - """ - response = Response() - adapter = WebObAdapter(request, response) - authomatic_inst = self.get_authomatic() - return authomatic_inst.login(adapter, self.name), response - - def handle_social_data(self, session, user_id, social_data): - """ - Updates user tokens in database whenever necessary - :param request: - :param user: - :param social_data: - :return: - """ - if not self.is_active(): - h.flash(_('This provider is currently disabled'), - category='warning') - return False - - social_data = social_data - update_identity = False - - existing_row = ExternalIdentity.by_external_id_and_provider( - social_data['user']['id'], - social_data['credentials.provider'] - ) - - if existing_row: - Session().delete(existing_row) - update_identity = True - - if not existing_row or update_identity: - if not update_identity: - h.flash(_('Your external identity is now ' - 'connected with your account'), category='success') - - if not social_data['user']['id']: - h.flash(_('No external user id found? Perhaps permissions' - 'for authentication are set incorrectly'), - category='error') - return False - - ex_identity = ExternalIdentity() - ex_identity.external_id = social_data['user']['id'] - ex_identity.external_username = social_data['user']['user_name'] - ex_identity.provider_name = social_data['credentials.provider'] - ex_identity.access_token = social_data['credentials.token'] - ex_identity.token_secret = social_data['credentials.token_secret'] - ex_identity.alt_token = social_data['credentials.refresh_token'] - ex_identity.local_user_id = user_id - Session().add(ex_identity) - session.pop('rhodecode.social_auth', None) - return ex_identity - - def callback_url(self): - try: - return url('social_auth', provider_name=self.name, qualified=True) - except TypeError: - pass - return '' - - def loadplugin(plugin_id): """ Loads and returns an instantiated authentication plugin. Returns the RhodeCodeAuthPluginBase subclass on success, - raises exceptions on failure. 
- - raises: - KeyError -- if no plugin available with given name - TypeError -- if the RhodeCodeAuthPlugin is not a subclass of - ours RhodeCodeAuthPluginBase + or None on failure. """ # TODO: Disusing pyramids thread locals to retrieve the registry. authn_registry = get_current_registry().getUtility(IAuthnPluginRegistry) @@ -622,9 +502,9 @@ def authenticate(username, password, env Authentication function used for access control, It tries to authenticate based on enabled authentication modules. - :param username: username can be empty for container auth - :param password: password can be empty for container auth - :param environ: environ headers passed for container auth + :param username: username can be empty for headers auth + :param password: password can be empty for headers auth + :param environ: environ headers passed for headers auth :param auth_type: type of authentication, either `HTTP_TYPE` or `VCS_TYPE` :param skip_missing: ignores plugins that are in db but not in environment :returns: None if auth failed, plugin_user dict if auth is correct @@ -632,51 +512,41 @@ def authenticate(username, password, env if not auth_type or auth_type not in [HTTP_TYPE, VCS_TYPE]: raise ValueError('auth type must be on of http, vcs got "%s" instead' % auth_type) - container_only = environ and not (username and password) - auth_plugins = SettingsModel().get_auth_plugins() - for plugin_id in auth_plugins: - plugin = loadplugin(plugin_id) + headers_only = environ and not (username and password) - if plugin is None: - log.warning('Authentication plugin missing: "{}"'.format( - plugin_id)) - continue - - if not plugin.is_active(): - log.info('Authentication plugin is inactive: "{}"'.format( - plugin_id)) - continue - + authn_registry = get_current_registry().getUtility(IAuthnPluginRegistry) + for plugin in authn_registry.get_plugins_for_authentication(): plugin.set_auth_type(auth_type) user = plugin.get_user(username) display_user = user.username if user else username - if container_only and not plugin.is_container_auth: - log.debug('Auth type is for container only and plugin `%s` is not ' - 'container plugin, skipping...', plugin_id) + if headers_only and not plugin.is_headers_auth: + log.debug('Auth type is for headers only and plugin `%s` is not ' + 'headers plugin, skipping...', plugin.get_id()) continue # load plugin settings from RhodeCode database plugin_settings = plugin.get_settings() log.debug('Plugin settings:%s', plugin_settings) - log.debug('Trying authentication using ** %s **', plugin_id) + log.debug('Trying authentication using ** %s **', plugin.get_id()) # use plugin's method of user extraction. 
user = plugin.get_user(username, environ=environ, settings=plugin_settings) display_user = user.username if user else username - log.debug('Plugin %s extracted user is `%s`', plugin_id, display_user) + log.debug( + 'Plugin %s extracted user is `%s`', plugin.get_id(), display_user) if not plugin.allows_authentication_from(user): log.debug('Plugin %s does not accept user `%s` for authentication', - plugin_id, display_user) + plugin.get_id(), display_user) continue else: log.debug('Plugin %s accepted user `%s` for authentication', - plugin_id, display_user) + plugin.get_id(), display_user) log.info('Authenticating user `%s` using %s plugin', - display_user, plugin_id) + display_user, plugin.get_id()) _cache_ttl = 0 @@ -691,7 +561,7 @@ def authenticate(username, password, env # get instance of cache manager configured for a namespace cache_manager = get_auth_cache_manager(custom_ttl=_cache_ttl) - log.debug('Cache for plugin `%s` active: %s', plugin_id, + log.debug('Cache for plugin `%s` active: %s', plugin.get_id(), plugin_cache_active) # for environ based password can be empty, but then the validation is @@ -706,7 +576,7 @@ def authenticate(username, password, env # then auth is correct. start = time.time() log.debug('Running plugin `%s` _authenticate method', - plugin_id) + plugin.get_id()) def auth_func(): """ @@ -726,7 +596,7 @@ def authenticate(username, password, env auth_time = time.time() - start log.debug('Authentication for plugin `%s` completed in %.3fs, ' 'expiration time of fetched cache %.1fs.', - plugin_id, auth_time, _cache_ttl) + plugin.get_id(), auth_time, _cache_ttl) log.debug('PLUGIN USER DATA: %s', plugin_user) @@ -735,5 +605,5 @@ def authenticate(username, password, env return plugin_user # we failed to Auth because .auth() method didn't return proper user log.debug("User `%s` failed to authenticate against %s", - display_user, plugin_id) + display_user, plugin.get_id()) return None diff --git a/rhodecode/authentication/plugins/auth_crowd.py b/rhodecode/authentication/plugins/auth_crowd.py --- a/rhodecode/authentication/plugins/auth_crowd.py +++ b/rhodecode/authentication/plugins/auth_crowd.py @@ -34,6 +34,7 @@ from sqlalchemy.ext.hybrid import hybrid from rhodecode.authentication.base import RhodeCodeExternalAuthPlugin from rhodecode.authentication.schema import AuthnPluginSettingsSchemaBase from rhodecode.authentication.routes import AuthnPluginResourceBase +from rhodecode.lib.colander_utils import strip_whitespace from rhodecode.lib.ext_json import json, formatted_json from rhodecode.model.db import User @@ -58,12 +59,14 @@ class CrowdSettingsSchema(AuthnPluginSet colander.String(), default='127.0.0.1', description=_('The FQDN or IP of the Atlassian CROWD Server'), + preparer=strip_whitespace, title=_('Host'), widget='string') port = colander.SchemaNode( colander.Int(), default=8095, description=_('The Port in use by the Atlassian CROWD Server'), + preparer=strip_whitespace, title=_('Port'), validator=colander.Range(min=0, max=65536), widget='int') @@ -71,12 +74,14 @@ class CrowdSettingsSchema(AuthnPluginSet colander.String(), default='', description=_('The Application Name to authenticate to CROWD'), + preparer=strip_whitespace, title=_('Application Name'), widget='string') app_password = colander.SchemaNode( colander.String(), default='', description=_('The password to authenticate to CROWD'), + preparer=strip_whitespace, title=_('Application Password'), widget='password') admin_groups = colander.SchemaNode( @@ -85,6 +90,7 @@ class CrowdSettingsSchema(AuthnPluginSet 
description=_('A comma separated list of group names that identify ' 'users as RhodeCode Administrators'), missing='', + preparer=strip_whitespace, title=_('Admin Groups'), widget='string') @@ -191,12 +197,14 @@ class RhodeCodeAuthPlugin(RhodeCodeExter config.add_view( 'rhodecode.authentication.views.AuthnPluginViewBase', attr='settings_get', + renderer='rhodecode:templates/admin/auth/plugin_settings.html', request_method='GET', route_name='auth_home', context=CrowdAuthnResource) config.add_view( 'rhodecode.authentication.views.AuthnPluginViewBase', attr='settings_post', + renderer='rhodecode:templates/admin/auth/plugin_settings.html', request_method='POST', route_name='auth_home', context=CrowdAuthnResource) diff --git a/rhodecode/authentication/plugins/auth_headers.py b/rhodecode/authentication/plugins/auth_headers.py new file mode 100644 --- /dev/null +++ b/rhodecode/authentication/plugins/auth_headers.py @@ -0,0 +1,225 @@ +# -*- coding: utf-8 -*- + +# Copyright (C) 2012-2016 RhodeCode GmbH +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License, version 3 +# (only), as published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . +# +# This program is dual-licensed. If you wish to learn more about the +# RhodeCode Enterprise Edition, including its added features, Support services, +# and proprietary license terms, please see https://rhodecode.com/licenses/ + +import colander +import logging + +from sqlalchemy.ext.hybrid import hybrid_property + +from rhodecode.authentication.base import RhodeCodeExternalAuthPlugin +from rhodecode.authentication.schema import AuthnPluginSettingsSchemaBase +from rhodecode.authentication.routes import AuthnPluginResourceBase +from rhodecode.lib.colander_utils import strip_whitespace +from rhodecode.lib.utils2 import str2bool, safe_unicode +from rhodecode.model.db import User +from rhodecode.translation import _ + + +log = logging.getLogger(__name__) + + +def plugin_factory(plugin_id, *args, **kwds): + """ + Factory function that is called during plugin discovery. + It returns the plugin instance. + """ + plugin = RhodeCodeAuthPlugin(plugin_id) + return plugin + + +class HeadersAuthnResource(AuthnPluginResourceBase): + pass + + +class HeadersSettingsSchema(AuthnPluginSettingsSchemaBase): + header = colander.SchemaNode( + colander.String(), + default='REMOTE_USER', + description=_('Header to extract the user from'), + preparer=strip_whitespace, + title=_('Header'), + widget='string') + fallback_header = colander.SchemaNode( + colander.String(), + default='HTTP_X_FORWARDED_USER', + description=_('Header to extract the user from when main one fails'), + preparer=strip_whitespace, + title=_('Fallback header'), + widget='string') + clean_username = colander.SchemaNode( + colander.Boolean(), + default=True, + description=_('Perform cleaning of user, if passed user has @ in ' + 'username then first part before @ is taken. 
' + 'If there\'s \\ in the username only the part after ' + ' \\ is taken'), + missing=False, + title=_('Clean username'), + widget='bool') + + +class RhodeCodeAuthPlugin(RhodeCodeExternalAuthPlugin): + + def includeme(self, config): + config.add_authn_plugin(self) + config.add_authn_resource(self.get_id(), HeadersAuthnResource(self)) + config.add_view( + 'rhodecode.authentication.views.AuthnPluginViewBase', + attr='settings_get', + renderer='rhodecode:templates/admin/auth/plugin_settings.html', + request_method='GET', + route_name='auth_home', + context=HeadersAuthnResource) + config.add_view( + 'rhodecode.authentication.views.AuthnPluginViewBase', + attr='settings_post', + renderer='rhodecode:templates/admin/auth/plugin_settings.html', + request_method='POST', + route_name='auth_home', + context=HeadersAuthnResource) + + def get_display_name(self): + return _('Headers') + + def get_settings_schema(self): + return HeadersSettingsSchema() + + @hybrid_property + def name(self): + return 'headers' + + @property + def is_headers_auth(self): + return True + + def use_fake_password(self): + return True + + def user_activation_state(self): + def_user_perms = User.get_default_user().AuthUser.permissions['global'] + return 'hg.extern_activate.auto' in def_user_perms + + def _clean_username(self, username): + # Removing realm and domain from username + username = username.split('@')[0] + username = username.rsplit('\\')[-1] + return username + + def _get_username(self, environ, settings): + username = None + environ = environ or {} + if not environ: + log.debug('got empty environ: %s' % environ) + + settings = settings or {} + if settings.get('header'): + header = settings.get('header') + username = environ.get(header) + log.debug('extracted %s:%s' % (header, username)) + + # fallback mode + if not username and settings.get('fallback_header'): + header = settings.get('fallback_header') + username = environ.get(header) + log.debug('extracted %s:%s' % (header, username)) + + if username and str2bool(settings.get('clean_username')): + log.debug('Received username `%s` from headers' % username) + username = self._clean_username(username) + log.debug('New cleanup user is:%s' % username) + return username + + def get_user(self, username=None, **kwargs): + """ + Helper method for user fetching in plugins, by default it's using + simple fetch by username, but this method can be custimized in plugins + eg. headers auth plugin to fetch user by environ params + :param username: username if given to fetch + :param kwargs: extra arguments needed for user fetching. + """ + environ = kwargs.get('environ') or {} + settings = kwargs.get('settings') or {} + username = self._get_username(environ, settings) + # we got the username, so use default method now + return super(RhodeCodeAuthPlugin, self).get_user(username) + + def auth(self, userobj, username, password, settings, **kwargs): + """ + Get's the headers_auth username (or email). It tries to get username + from REMOTE_USER if this plugin is enabled, if that fails + it tries to get username from HTTP_X_FORWARDED_USER if fallback header + is set. clean_username extracts the username from this data if it's + having @ in it. + Return None on failure. 
On success, return a dictionary of the form: + + see: RhodeCodeAuthPluginBase.auth_func_attrs + + :param userobj: + :param username: + :param password: + :param settings: + :param kwargs: + """ + environ = kwargs.get('environ') + if not environ: + log.debug('Empty environ data skipping...') + return None + + if not userobj: + userobj = self.get_user('', environ=environ, settings=settings) + + # we don't care passed username/password for headers auth plugins. + # only way to log in is using environ + username = None + if userobj: + username = getattr(userobj, 'username') + + if not username: + # we don't have any objects in DB user doesn't exist extract + # username from environ based on the settings + username = self._get_username(environ, settings) + + # if cannot fetch username, it's a no-go for this plugin to proceed + if not username: + return None + + # old attrs fetched from RhodeCode database + admin = getattr(userobj, 'admin', False) + active = getattr(userobj, 'active', True) + email = getattr(userobj, 'email', '') + firstname = getattr(userobj, 'firstname', '') + lastname = getattr(userobj, 'lastname', '') + extern_type = getattr(userobj, 'extern_type', '') + + user_attrs = { + 'username': username, + 'firstname': safe_unicode(firstname or username), + 'lastname': safe_unicode(lastname or ''), + 'groups': [], + 'email': email or '', + 'admin': admin or False, + 'active': active, + 'active_from_extern': True, + 'extern_name': username, + 'extern_type': extern_type, + } + + log.info('user `%s` authenticated correctly' % user_attrs['username']) + return user_attrs diff --git a/rhodecode/authentication/plugins/auth_jasig_cas.py b/rhodecode/authentication/plugins/auth_jasig_cas.py --- a/rhodecode/authentication/plugins/auth_jasig_cas.py +++ b/rhodecode/authentication/plugins/auth_jasig_cas.py @@ -36,6 +36,7 @@ from sqlalchemy.ext.hybrid import hybrid from rhodecode.authentication.base import RhodeCodeExternalAuthPlugin from rhodecode.authentication.schema import AuthnPluginSettingsSchemaBase from rhodecode.authentication.routes import AuthnPluginResourceBase +from rhodecode.lib.colander_utils import strip_whitespace from rhodecode.lib.utils2 import safe_unicode from rhodecode.model.db import User @@ -60,6 +61,7 @@ class JasigCasSettingsSchema(AuthnPlugin colander.String(), default='https://domain.com/cas/v1/tickets', description=_('The url of the Jasig CAS REST service'), + preparer=strip_whitespace, title=_('URL'), widget='string') @@ -72,12 +74,14 @@ class RhodeCodeAuthPlugin(RhodeCodeExter config.add_view( 'rhodecode.authentication.views.AuthnPluginViewBase', attr='settings_get', + renderer='rhodecode:templates/admin/auth/plugin_settings.html', request_method='GET', route_name='auth_home', context=JasigCasAuthnResource) config.add_view( 'rhodecode.authentication.views.AuthnPluginViewBase', attr='settings_post', + renderer='rhodecode:templates/admin/auth/plugin_settings.html', request_method='POST', route_name='auth_home', context=JasigCasAuthnResource) @@ -92,8 +96,8 @@ class RhodeCodeAuthPlugin(RhodeCodeExter def name(self): return "jasig-cas" - @hybrid_property - def is_container_auth(self): + @property + def is_headers_auth(self): return True def use_fake_password(self): diff --git a/rhodecode/authentication/plugins/auth_ldap.py b/rhodecode/authentication/plugins/auth_ldap.py --- a/rhodecode/authentication/plugins/auth_ldap.py +++ b/rhodecode/authentication/plugins/auth_ldap.py @@ -33,6 +33,7 @@ from sqlalchemy.ext.hybrid import hybrid from rhodecode.authentication.base import 
RhodeCodeExternalAuthPlugin from rhodecode.authentication.schema import AuthnPluginSettingsSchemaBase from rhodecode.authentication.routes import AuthnPluginResourceBase +from rhodecode.lib.colander_utils import strip_whitespace from rhodecode.lib.exceptions import ( LdapConnectionError, LdapUsernameError, LdapPasswordError, LdapImportError ) @@ -45,8 +46,9 @@ log = logging.getLogger(__name__) try: import ldap except ImportError: - # means that python-ldap is not installed - ldap = Missing() + # means that python-ldap is not installed, we use Missing object to mark + # ldap lib is Missing + ldap = Missing def plugin_factory(plugin_id, *args, **kwds): @@ -71,12 +73,14 @@ class LdapSettingsSchema(AuthnPluginSett colander.String(), default='', description=_('Host of the LDAP Server'), + preparer=strip_whitespace, title=_('LDAP Host'), widget='string') port = colander.SchemaNode( colander.Int(), default=389, description=_('Port that the LDAP server is listening on'), + preparer=strip_whitespace, title=_('Port'), validator=colander.Range(min=0, max=65536), widget='int') @@ -85,6 +89,7 @@ class LdapSettingsSchema(AuthnPluginSett default='', description=_('User to connect to LDAP'), missing='', + preparer=strip_whitespace, title=_('Account'), widget='string') dn_pass = colander.SchemaNode( @@ -92,6 +97,7 @@ class LdapSettingsSchema(AuthnPluginSett default='', description=_('Password to connect to LDAP'), missing='', + preparer=strip_whitespace, title=_('Password'), widget='password') tls_kind = colander.SchemaNode( @@ -113,6 +119,7 @@ class LdapSettingsSchema(AuthnPluginSett default='', description=_('Base DN to search (e.g., dc=mydomain,dc=com)'), missing='', + preparer=strip_whitespace, title=_('Base DN'), widget='string') filter = colander.SchemaNode( @@ -120,6 +127,7 @@ class LdapSettingsSchema(AuthnPluginSett default='', description=_('Filter to narrow results (e.g., ou=Users, etc)'), missing='', + preparer=strip_whitespace, title=_('LDAP Search Filter'), widget='string') search_scope = colander.SchemaNode( @@ -133,14 +141,16 @@ class LdapSettingsSchema(AuthnPluginSett colander.String(), default='', description=_('LDAP Attribute to map to user name'), + missing_msg=_('The LDAP Login attribute of the CN must be specified'), + preparer=strip_whitespace, title=_('Login Attribute'), - missing_msg=_('The LDAP Login attribute of the CN must be specified'), widget='string') attr_firstname = colander.SchemaNode( colander.String(), default='', description=_('LDAP Attribute to map to first name'), missing='', + preparer=strip_whitespace, title=_('First Name Attribute'), widget='string') attr_lastname = colander.SchemaNode( @@ -148,6 +158,7 @@ class LdapSettingsSchema(AuthnPluginSett default='', description=_('LDAP Attribute to map to last name'), missing='', + preparer=strip_whitespace, title=_('Last Name Attribute'), widget='string') attr_email = colander.SchemaNode( @@ -155,6 +166,7 @@ class LdapSettingsSchema(AuthnPluginSett default='', description=_('LDAP Attribute to map to email address'), missing='', + preparer=strip_whitespace, title=_('Email Attribute'), widget='string') @@ -171,7 +183,7 @@ class AuthLdap(object): tls_kind='PLAIN', tls_reqcert='DEMAND', ldap_version=3, search_scope='SUBTREE', attr_login='uid', ldap_filter='(&(objectClass=user)(!(objectClass=computer)))'): - if isinstance(ldap, Missing): + if ldap == Missing: raise LdapImportError("Missing or incompatible ldap library") self.ldap_version = ldap_version @@ -317,12 +329,14 @@ class RhodeCodeAuthPlugin(RhodeCodeExter 
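# The import guard above now assigns the `Missing` marker itself instead of
# an instance, and AuthLdap checks `ldap == Missing` before touching the
# library. A hedged, stand-alone sketch of that optional-dependency pattern;
# the local `Missing` class and RuntimeError stand in for the real marker and
# the LdapImportError used by the plugin.
class Missing(object):
    """Marker assigned when an optional library cannot be imported."""

try:
    import ldap  # python-ldap is an optional dependency
except ImportError:
    ldap = Missing

def require_ldap():
    if ldap == Missing:
        # mirrors AuthLdap.__init__ raising LdapImportError in this changeset
        raise RuntimeError('Missing or incompatible ldap library')
    return ldap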
config.add_view( 'rhodecode.authentication.views.AuthnPluginViewBase', attr='settings_get', + renderer='rhodecode:templates/admin/auth/plugin_settings.html', request_method='GET', route_name='auth_home', context=LdapAuthnResource) config.add_view( 'rhodecode.authentication.views.AuthnPluginViewBase', attr='settings_post', + renderer='rhodecode:templates/admin/auth/plugin_settings.html', request_method='POST', route_name='auth_home', context=LdapAuthnResource) diff --git a/rhodecode/authentication/plugins/auth_pam.py b/rhodecode/authentication/plugins/auth_pam.py --- a/rhodecode/authentication/plugins/auth_pam.py +++ b/rhodecode/authentication/plugins/auth_pam.py @@ -35,6 +35,7 @@ from sqlalchemy.ext.hybrid import hybrid from rhodecode.authentication.base import RhodeCodeExternalAuthPlugin from rhodecode.authentication.schema import AuthnPluginSettingsSchemaBase from rhodecode.authentication.routes import AuthnPluginResourceBase +from rhodecode.lib.colander_utils import strip_whitespace log = logging.getLogger(__name__) @@ -57,6 +58,7 @@ class PamSettingsSchema(AuthnPluginSetti colander.String(), default='login', description=_('PAM service name to use for authentication.'), + preparer=strip_whitespace, title=_('PAM service name'), widget='string') gecos = colander.SchemaNode( @@ -64,6 +66,7 @@ class PamSettingsSchema(AuthnPluginSetti default='(?P.+),\s*(?P\w+)', description=_('Regular expression for extracting user name/email etc. ' 'from Unix userinfo.'), + preparer=strip_whitespace, title=_('Gecos Regex'), widget='string') @@ -79,12 +82,14 @@ class RhodeCodeAuthPlugin(RhodeCodeExter config.add_view( 'rhodecode.authentication.views.AuthnPluginViewBase', attr='settings_get', + renderer='rhodecode:templates/admin/auth/plugin_settings.html', request_method='GET', route_name='auth_home', context=PamAuthnResource) config.add_view( 'rhodecode.authentication.views.AuthnPluginViewBase', attr='settings_post', + renderer='rhodecode:templates/admin/auth/plugin_settings.html', request_method='POST', route_name='auth_home', context=PamAuthnResource) diff --git a/rhodecode/authentication/plugins/auth_rhodecode.py b/rhodecode/authentication/plugins/auth_rhodecode.py --- a/rhodecode/authentication/plugins/auth_rhodecode.py +++ b/rhodecode/authentication/plugins/auth_rhodecode.py @@ -52,12 +52,14 @@ class RhodeCodeAuthPlugin(RhodeCodeAuthP config.add_view( 'rhodecode.authentication.views.AuthnPluginViewBase', attr='settings_get', + renderer='rhodecode:templates/admin/auth/plugin_settings.html', request_method='GET', route_name='auth_home', context=RhodecodeAuthnResource) config.add_view( 'rhodecode.authentication.views.AuthnPluginViewBase', attr='settings_post', + renderer='rhodecode:templates/admin/auth/plugin_settings.html', request_method='POST', route_name='auth_home', context=RhodecodeAuthnResource) diff --git a/rhodecode/authentication/plugins/auth_token.py b/rhodecode/authentication/plugins/auth_token.py new file mode 100644 --- /dev/null +++ b/rhodecode/authentication/plugins/auth_token.py @@ -0,0 +1,136 @@ +# -*- coding: utf-8 -*- + +# Copyright (C) 2016-2016 RhodeCode GmbH +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License, version 3 +# (only), as published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . +# +# This program is dual-licensed. If you wish to learn more about the +# RhodeCode Enterprise Edition, including its added features, Support services, +# and proprietary license terms, please see https://rhodecode.com/licenses/ + +""" +RhodeCode authentication token plugin for built in internal auth +""" + +import logging + +from sqlalchemy.ext.hybrid import hybrid_property + +from rhodecode.translation import _ +from rhodecode.authentication.base import RhodeCodeAuthPluginBase, VCS_TYPE +from rhodecode.authentication.routes import AuthnPluginResourceBase +from rhodecode.model.db import User, UserApiKeys + + +log = logging.getLogger(__name__) + + +def plugin_factory(plugin_id, *args, **kwds): + plugin = RhodeCodeAuthPlugin(plugin_id) + return plugin + + +class RhodecodeAuthnResource(AuthnPluginResourceBase): + pass + + +class RhodeCodeAuthPlugin(RhodeCodeAuthPluginBase): + """ + Enables usage of authentication tokens for vcs operations. + """ + + def includeme(self, config): + config.add_authn_plugin(self) + config.add_authn_resource(self.get_id(), RhodecodeAuthnResource(self)) + config.add_view( + 'rhodecode.authentication.views.AuthnPluginViewBase', + attr='settings_get', + renderer='rhodecode:templates/admin/auth/plugin_settings.html', + request_method='GET', + route_name='auth_home', + context=RhodecodeAuthnResource) + config.add_view( + 'rhodecode.authentication.views.AuthnPluginViewBase', + attr='settings_post', + renderer='rhodecode:templates/admin/auth/plugin_settings.html', + request_method='POST', + route_name='auth_home', + context=RhodecodeAuthnResource) + + def get_display_name(self): + return _('Rhodecode Token Auth') + + @hybrid_property + def name(self): + return "authtoken" + + def user_activation_state(self): + def_user_perms = User.get_default_user().AuthUser.permissions['global'] + return 'hg.register.auto_activate' in def_user_perms + + def allows_authentication_from( + self, user, allows_non_existing_user=True, + allowed_auth_plugins=None, allowed_auth_sources=None): + """ + Custom method for this auth that doesn't accept empty users. And also + allows rhodecode and authtoken extern_type to auth with this. 
But only + via vcs mode + """ + # only this and rhodecode plugins can use this type + from rhodecode.authentication.plugins import auth_rhodecode + allowed_auth_plugins = [ + self.name, auth_rhodecode.RhodeCodeAuthPlugin.name] + # only for vcs operations + allowed_auth_sources = [VCS_TYPE] + + return super(RhodeCodeAuthPlugin, self).allows_authentication_from( + user, allows_non_existing_user=False, + allowed_auth_plugins=allowed_auth_plugins, + allowed_auth_sources=allowed_auth_sources) + + def auth(self, userobj, username, password, settings, **kwargs): + if not userobj: + log.debug('userobj was:%s skipping' % (userobj, )) + return None + + user_attrs = { + "username": userobj.username, + "firstname": userobj.firstname, + "lastname": userobj.lastname, + "groups": [], + "email": userobj.email, + "admin": userobj.admin, + "active": userobj.active, + "active_from_extern": userobj.active, + "extern_name": userobj.user_id, + "extern_type": userobj.extern_type, + } + + log.debug('Authenticating user with args %s', user_attrs) + if userobj.active: + role = UserApiKeys.ROLE_VCS + active_tokens = [x.api_key for x in + User.extra_valid_auth_tokens(userobj, role=role)] + if userobj.username == username and password in active_tokens: + log.info( + 'user `%s` successfully authenticated via %s', + user_attrs['username'], self.name) + return user_attrs + log.error( + 'user `%s` failed to authenticate via %s, reason: bad or ' + 'inactive token.', username, self.name) + else: + log.warning( + 'user `%s` failed to authenticate via %s, reason: account not ' + 'active.', username, self.name) + return None diff --git a/rhodecode/authentication/registry.py b/rhodecode/authentication/registry.py --- a/rhodecode/authentication/registry.py +++ b/rhodecode/authentication/registry.py @@ -25,14 +25,20 @@ from zope.interface import implementer from rhodecode.authentication.interface import IAuthnPluginRegistry from rhodecode.lib.utils2 import safe_str +from rhodecode.model.settings import SettingsModel log = logging.getLogger(__name__) @implementer(IAuthnPluginRegistry) class AuthenticationPluginRegistry(object): - def __init__(self): + + # INI settings key to set a fallback authentication plugin. + fallback_plugin_key = 'rhodecode.auth_plugin_fallback' + + def __init__(self, settings): self._plugins = {} + self._fallback_plugin = settings.get(self.fallback_plugin_key, None) def add_authn_plugin(self, config, plugin): plugin_id = plugin.get_id() @@ -51,3 +57,31 @@ class AuthenticationPluginRegistry(objec def get_plugin(self, plugin_id): return self._plugins.get(plugin_id, None) + + def get_plugins_for_authentication(self): + """ + Returns a list of plugins which should be consulted when authenticating + a user. It only returns plugins which are enabled and active. + Additionally it includes the fallback plugin from the INI file, if + `rhodecode.auth_plugin_fallback` is set to a plugin ID. + """ + plugins = [] + + # Add all enabled and active plugins to the list. We iterate over the + # auth_plugins setting from DB beacuse it also represents the ordering. + enabled_plugins = SettingsModel().get_auth_plugins() + for plugin_id in enabled_plugins: + plugin = self.get_plugin(plugin_id) + if plugin is not None and plugin.is_active(): + plugins.append(plugin) + + # Add the fallback plugin from ini file. 
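# The fallback plugin ID is read from the INI file, e.g.
#   rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
# A rough, self-contained sketch of the merge performed below; plain strings
# stand in for plugin objects and the is_active() filtering is left out.
def plugins_for_authentication(enabled_from_db, fallback_plugin_id):
    plugins = list(enabled_from_db)            # DB ordering is preserved
    if fallback_plugin_id and fallback_plugin_id not in plugins:
        plugins.append(fallback_plugin_id)     # fallback is consulted last
    return plugins

assert plugins_for_authentication(
    ['egg:rhodecode-enterprise-ce#ldap'],
    'egg:rhodecode-enterprise-ce#rhodecode') == [
        'egg:rhodecode-enterprise-ce#ldap',
        'egg:rhodecode-enterprise-ce#rhodecode']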
+ if self._fallback_plugin: + log.warn( + 'Using fallback authentication plugin from INI file: "%s"', + self._fallback_plugin) + plugin = self.get_plugin(self._fallback_plugin) + if plugin is not None and plugin not in plugins: + plugins.append(plugin) + + return plugins diff --git a/rhodecode/authentication/routes.py b/rhodecode/authentication/routes.py --- a/rhodecode/authentication/routes.py +++ b/rhodecode/authentication/routes.py @@ -21,12 +21,11 @@ import logging from pyramid.exceptions import ConfigurationError -from pyramid.i18n import TranslationStringFactory from rhodecode.lib.utils2 import safe_str from rhodecode.model.settings import SettingsModel +from rhodecode.translation import _ -_ = TranslationStringFactory('rhodecode-enterprise') log = logging.getLogger(__name__) @@ -128,7 +127,7 @@ class AuthnRootResource(AuthnResourceBas # Allow plugin resources with identical names by rename duplicates. unique_name = _ensure_unique_name(resource.__name__) if unique_name != resource.__name__: - log.warn('Name collision for traversal resource "%s" registered', + log.warn('Name collision for traversal resource "%s" registered ' 'by authentication plugin "%s"', resource.__name__, plugin_id) resource.__name__ = unique_name diff --git a/rhodecode/authentication/schema.py b/rhodecode/authentication/schema.py --- a/rhodecode/authentication/schema.py +++ b/rhodecode/authentication/schema.py @@ -20,9 +20,7 @@ import colander -from pyramid.i18n import TranslationStringFactory - -_ = TranslationStringFactory('rhodecode-enterprise') +from rhodecode.translation import _ class AuthnPluginSettingsSchemaBase(colander.MappingSchema): diff --git a/rhodecode/authentication/views.py b/rhodecode/authentication/views.py --- a/rhodecode/authentication/views.py +++ b/rhodecode/authentication/views.py @@ -23,7 +23,6 @@ import formencode.htmlfill import logging from pyramid.httpexceptions import HTTPFound -from pyramid.i18n import TranslationStringFactory from pyramid.renderers import render from pyramid.response import Response @@ -34,11 +33,10 @@ from rhodecode.lib.auth import LoginRequ from rhodecode.model.forms import AuthSettingsForm from rhodecode.model.meta import Session from rhodecode.model.settings import SettingsModel +from rhodecode.translation import _ log = logging.getLogger(__name__) -_ = TranslationStringFactory('rhodecode-enterprise') - class AuthnPluginViewBase(object): @@ -47,51 +45,27 @@ class AuthnPluginViewBase(object): self.context = context self.plugin = context.plugin - # TODO: Think about replacing the htmlfill stuff. - def _render_and_fill(self, template, template_context, request, - form_defaults, validation_errors): - """ - Helper to render a template and fill the HTML form fields with - defaults. Also displays the form errors. - """ - # Render template to string. - html = render(template, template_context, request=request) - - # Fill the HTML form fields with default values and add error messages. - html = formencode.htmlfill.render( - html, - defaults=form_defaults, - errors=validation_errors, - prefix_error=False, - encoding="UTF-8", - force_defaults=False) - - return html - - def settings_get(self): + def settings_get(self, defaults=None, errors=None): """ View that displays the plugin settings as a form. """ - form_defaults = {} - validation_errors = None + defaults = defaults or {} + errors = errors or {} schema = self.plugin.get_settings_schema() # Get default values for the form. 
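# Roughly what the loop below does, shown stand-alone: values already present
# in `defaults` (for instance a re-submitted form after a validation error)
# win, and the stored plugin setting only fills the remaining gaps. The
# plugin's get_setting_by_name() is faked here with a plain dict.
import colander

class DemoSettingsSchema(colander.MappingSchema):
    host = colander.SchemaNode(colander.String(), default='127.0.0.1')
    port = colander.SchemaNode(colander.Int(), default=389)

stored = {'host': 'ldap.example.com', 'port': 389}   # pretend DB content
defaults = {'host': 'ldap.internal'}                 # came back with the form
for node in DemoSettingsSchema():
    defaults.setdefault(node.name, stored.get(node.name))
# defaults == {'host': 'ldap.internal', 'port': 389}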
- for node in schema.children: - value = self.plugin.get_setting_by_name(node.name) or node.default - form_defaults[node.name] = value + for node in schema: + db_value = self.plugin.get_setting_by_name(node.name) + defaults.setdefault(node.name, db_value) template_context = { + 'defaults': defaults, + 'errors': errors, + 'plugin': self.context.plugin, 'resource': self.context, - 'plugin': self.context.plugin } - return Response(self._render_and_fill( - 'rhodecode:templates/admin/auth/plugin_settings.html', - template_context, - self.request, - form_defaults, - validation_errors)) + return template_context def settings_post(self): """ @@ -102,24 +76,12 @@ class AuthnPluginViewBase(object): valid_data = schema.deserialize(self.request.params) except colander.Invalid, e: # Display error message and display form again. - form_defaults = self.request.params - validation_errors = e.asdict() self.request.session.flash( _('Errors exist when saving plugin settings. ' - 'Please check the form inputs.'), + 'Please check the form inputs.'), queue='error') - - template_context = { - 'resource': self.context, - 'plugin': self.context.plugin - } - - return Response(self._render_and_fill( - 'rhodecode:templates/admin/auth/plugin_settings.html', - template_context, - self.request, - form_defaults, - validation_errors)) + defaults = schema.flatten(self.request.params) + return self.settings_get(errors=e.asdict(), defaults=defaults) # Store validated data. for name, value in valid_data.items(): @@ -151,10 +113,10 @@ class AuthSettingsView(object): @LoginRequired() @HasPermissionAllDecorator('hg.admin') - def index(self, defaults={}, errors=None, prefix_error=False): + def index(self, defaults=None, errors=None, prefix_error=False): + defaults = defaults or {} authn_registry = self.request.registry.getUtility(IAuthnPluginRegistry) - default_plugins = ['egg:rhodecode-enterprise-ce#rhodecode'] - enabled_plugins = SettingsModel().get_auth_plugins() or default_plugins + enabled_plugins = SettingsModel().get_auth_plugins() # Create template context and render it. 
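# The reworked settings_post() above relies on colander instead of
# formencode.htmlfill: deserialize() either returns clean data or raises
# colander.Invalid, whose asdict() maps field names to messages that are
# passed straight back into settings_get(). A compact illustration of that
# round-trip with a throw-away schema:
import colander

schema = colander.SchemaNode(colander.Mapping())
schema.add(colander.SchemaNode(colander.Int(), name='port'))

try:
    valid_data = schema.deserialize({'port': 'not-a-number'})
except colander.Invalid as exc:
    errors = exc.asdict()                # e.g. {'port': '"not-a-number" is not a number'}
    defaults = {'port': 'not-a-number'}  # re-display what the user submitted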
template_context = { diff --git a/rhodecode/config/environment.py b/rhodecode/config/environment.py --- a/rhodecode/config/environment.py +++ b/rhodecode/config/environment.py @@ -27,10 +27,12 @@ import logging import rhodecode import platform import re +import io from mako.lookup import TemplateLookup from pylons.configuration import PylonsConfig from pylons.error import handle_mako_error +from pyramid.settings import asbool # don't remove this import it does magic for celery from rhodecode.lib import celerypylons # noqa @@ -39,6 +41,7 @@ import rhodecode.lib.app_globals as app_ from rhodecode.config import utils from rhodecode.config.routing import make_map +from rhodecode.config.jsroutes import generate_jsroutes_content from rhodecode.lib import helpers from rhodecode.lib.auth import set_available_permissions @@ -51,7 +54,6 @@ from rhodecode.model.scm import ScmModel log = logging.getLogger(__name__) - def load_environment(global_conf, app_conf, initial=False, test_env=None, test_index=None): """ @@ -60,7 +62,6 @@ def load_environment(global_conf, app_co """ config = PylonsConfig() - rhodecode.is_test = str2bool(app_conf.get('is_test', 'False')) # Pylons paths root = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) @@ -80,6 +81,16 @@ def load_environment(global_conf, app_co config['app_conf'].get('celery.always.eager')) config['routes.map'] = make_map(config) + + if asbool(config['debug']): + jsroutes = config['routes.map'].jsroutes() + jsroutes_file_content = generate_jsroutes_content(jsroutes) + jsroutes_file_path = os.path.join( + paths['static_files'], 'js', 'rhodecode', 'routes.js') + + with io.open(jsroutes_file_path, 'w', encoding='utf-8') as f: + f.write(jsroutes_file_content) + config['pylons.app_globals'] = app_globals.Globals(config) config['pylons.h'] = helpers rhodecode.CONFIG = config @@ -100,18 +111,6 @@ def load_environment(global_conf, app_co # sets the c attribute access when don't existing attribute are accessed config['pylons.strict_tmpl_context'] = True - config_file_name = os.path.split(config['__file__'])[-1] - test = re.match('^test[\w_]*\.ini$', config_file_name) is not None - if test: - if test_env is None: - test_env = not int(os.environ.get('RC_NO_TMP_PATH', 0)) - - from rhodecode.lib.utils import create_test_env, create_test_index - from rhodecode.tests import TESTS_TMP_PATH - # test repos - if test_env: - create_test_env(TESTS_TMP_PATH, config) - create_test_index(TESTS_TMP_PATH, config, True) # Limit backends to "vcs.backends" from configuration backends = config['vcs.backends'] = aslist( @@ -133,10 +132,6 @@ def load_environment(global_conf, app_co protocol=utils.get_vcs_server_protocol(config), log_level=config['vcs.server.log_level']) - # MULTIPLE DB configs - # Setup the SQLAlchemy database engine - utils.initialize_database(config) - set_available_permissions(config) db_cfg = make_db_config(clear_session=True) @@ -179,3 +174,19 @@ def _use_direct_hook_calls(config): def _get_vcs_hooks_protocol(config): protocol = config.get('vcs.hooks.protocol', 'pyro4').lower() return protocol + + +def load_pyramid_environment(global_config, settings): + # Some parts of the code expect a merge of global and app settings. + settings_merged = global_config.copy() + settings_merged.update(settings) + + # If this is a test run we prepare the test environment like + # creating a test database, test search index and test repositories. + # This has to be done before the database connection is initialized. 
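# How the debug-mode branch added to load_environment() in this file ends up
# producing js/rhodecode/routes.js: routes declared with jsroute=True are
# turned into %-style URL templates and wrapped into pyroutes.register()
# calls. A condensed, self-contained sketch that mirrors JSRoutesMapper and
# generate_jsroutes_content from this changeset without importing them; the
# output path below is purely illustrative.
import io
import re

_argument_prog = re.compile(r'\{(.*?)\}')

def to_js_template(routepath):
    # '/{repo_name}/changeset/{revision}' -> '/%(repo_name)s/changeset/%(revision)s'
    return _argument_prog.sub(
        lambda m: '%%(%s)s' % m.group(1).split(':')[0], routepath)

jsroutes = [
    ('changeset_home',
     to_js_template('/{repo_name}/changeset/{revision}'),
     ['repo_name', 'revision']),
]

content = u'\n'.join(
    u"pyroutes.register('%s', '%s', %s);" % (name, url, fields)
    for name, url, fields in jsroutes)

with io.open('/tmp/routes.js', 'w', encoding='utf-8') as f:
    f.write(content)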
+ if settings['is_test']: + rhodecode.is_test = True + utils.initialize_test_environment(settings_merged) + + # Initialize the database connection. + utils.initialize_database(settings_merged) diff --git a/rhodecode/config/jsroutes.py b/rhodecode/config/jsroutes.py new file mode 100644 --- /dev/null +++ b/rhodecode/config/jsroutes.py @@ -0,0 +1,42 @@ +# -*- coding: utf-8 -*- + +# Copyright (C) 2010-2016 RhodeCode GmbH +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License, version 3 +# (only), as published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . +# +# This program is dual-licensed. If you wish to learn more about the +# RhodeCode Enterprise Edition, including its added features, Support services, +# and proprietary license terms, please see https://rhodecode.com/licenses/ + +def generate_jsroutes_content(jsroutes): + statements = [] + for url_name, url, fields in jsroutes: + statements.append( + "pyroutes.register('%s', '%s', %s);" % (url_name, url, fields)) + return u''' +/****************************************************************************** + * * + * DO NOT CHANGE THIS FILE MANUALLY * + * * + * * + * This file is automatically generated when the app starts up. * + * * + * To add a route here pass jsroute=True to the route definition in the app * + * * + ******************************************************************************/ +function registerRCRoutes() { + // routes registration + %s +} +''' % '\n '.join(statements) + diff --git a/rhodecode/config/middleware.py b/rhodecode/config/middleware.py --- a/rhodecode/config/middleware.py +++ b/rhodecode/config/middleware.py @@ -37,7 +37,8 @@ import routes.util import rhodecode from rhodecode.config import patches -from rhodecode.config.environment import load_environment +from rhodecode.config.environment import ( + load_environment, load_pyramid_environment) from rhodecode.lib.middleware import csrf from rhodecode.lib.middleware.appenlight import wrap_in_appenlight_if_enabled from rhodecode.lib.middleware.disable_vcs import DisableVCSPagesWrapper @@ -160,6 +161,9 @@ def make_pyramid_app(global_config, **se sanitize_settings_and_apply_defaults(settings) config = Configurator(settings=settings) add_pylons_compat_data(config.registry, global_config, settings_pylons) + + load_pyramid_environment(global_config, settings) + includeme(config) includeme_last(config) pyramid_app = config.make_wsgi_app() @@ -182,6 +186,7 @@ def includeme(config): config.include('pyramid_mako') config.include('pyramid_beaker') config.include('rhodecode.authentication') + config.include('rhodecode.login') config.include('rhodecode.tweens') config.include('rhodecode.api') @@ -301,6 +306,7 @@ def sanitize_settings_and_apply_defaults _bool_setting(settings, 'vcs.server.enable', 'true') _bool_setting(settings, 'static_files', 'true') + _bool_setting(settings, 'is_test', 'false') return settings diff --git a/rhodecode/config/routing.py b/rhodecode/config/routing.py --- a/rhodecode/config/routing.py +++ b/rhodecode/config/routing.py @@ -29,6 +29,7 @@ IMPORTANT: if you change any routing her and _route_name variable which uses 
some of stored naming here to do redirects. """ import os +import re from routes import Mapper from rhodecode.config import routing_links @@ -50,9 +51,60 @@ URL_NAME_REQUIREMENTS = { } +class JSRoutesMapper(Mapper): + """ + Wrapper for routes.Mapper to make pyroutes compatible url definitions + """ + _named_route_regex = re.compile(r'^[a-z-_0-9A-Z]+$') + _argument_prog = re.compile('\{(.*?)\}|:\((.*)\)') + def __init__(self, *args, **kw): + super(JSRoutesMapper, self).__init__(*args, **kw) + self._jsroutes = [] + + def connect(self, *args, **kw): + """ + Wrapper for connect to take an extra argument jsroute=True + + :param jsroute: boolean, if True will add the route to the pyroutes list + """ + if kw.pop('jsroute', False): + if not self._named_route_regex.match(args[0]): + raise Exception('only named routes can be added to pyroutes') + self._jsroutes.append(args[0]) + + super(JSRoutesMapper, self).connect(*args, **kw) + + def _extract_route_information(self, route): + """ + Convert a route into tuple(name, path, args), eg: + ('user_profile', '/profile/%(username)s', ['username']) + """ + routepath = route.routepath + def replace(matchobj): + if matchobj.group(1): + return "%%(%s)s" % matchobj.group(1).split(':')[0] + else: + return "%%(%s)s" % matchobj.group(2) + + routepath = self._argument_prog.sub(replace, routepath) + return ( + route.name, + routepath, + [(arg[0].split(':')[0] if arg[0] != '' else arg[1]) + for arg in self._argument_prog.findall(route.routepath)] + ) + + def jsroutes(self): + """ + Return a list of pyroutes.js compatible routes + """ + for route_name in self._jsroutes: + yield self._extract_route_information(self._routenames[route_name]) + + def make_map(config): """Create, configure and return the routes Mapper""" - rmap = Mapper(directory=config['pylons.paths']['controllers'], + rmap = JSRoutesMapper(directory=config['pylons.paths']['controllers'], always_scan=config['debug']) rmap.minimization = False rmap.explicit = False @@ -124,14 +176,14 @@ def make_map(config): #========================================================================== # MAIN PAGE - rmap.connect('home', '/', controller='home', action='index') - rmap.connect('repo_switcher_data', '/_repos_and_groups', controller='home', - action='repo_switcher_data') + rmap.connect('home', '/', controller='home', action='index', jsroute=True) + rmap.connect('goto_switcher_data', '/_goto_data', controller='home', + action='goto_switcher_data') rmap.connect('repo_list_data', '/_repos', controller='home', action='repo_list_data') rmap.connect('user_autocomplete_data', '/_users', controller='home', - action='user_autocomplete_data') + action='user_autocomplete_data', jsroute=True) rmap.connect('user_group_autocomplete_data', '/_user_groups', controller='home', action='user_group_autocomplete_data') @@ -167,7 +219,7 @@ def make_map(config): action='create', conditions={'method': ['POST']}) m.connect('repos', '/repos', action='index', conditions={'method': ['GET']}) - m.connect('new_repo', '/create_repository', + m.connect('new_repo', '/create_repository', jsroute=True, action='create_repository', conditions={'method': ['GET']}) m.connect('/repos/{repo_name}', action='update', conditions={'method': ['PUT'], @@ -303,22 +355,29 @@ def make_map(config): function=check_user_group) # EXTRAS USER GROUP ROUTES - m.connect('edit_user_group_global_perms', '/user_groups/{user_group_id}/edit/global_permissions', + m.connect('edit_user_group_global_perms', + '/user_groups/{user_group_id}/edit/global_permissions', 
action='edit_global_perms', conditions={'method': ['GET']}) - m.connect('edit_user_group_global_perms', '/user_groups/{user_group_id}/edit/global_permissions', + m.connect('edit_user_group_global_perms', + '/user_groups/{user_group_id}/edit/global_permissions', action='update_global_perms', conditions={'method': ['PUT']}) - m.connect('edit_user_group_perms_summary', '/user_groups/{user_group_id}/edit/permissions_summary', + m.connect('edit_user_group_perms_summary', + '/user_groups/{user_group_id}/edit/permissions_summary', action='edit_perms_summary', conditions={'method': ['GET']}) - m.connect('edit_user_group_perms', '/user_groups/{user_group_id}/edit/permissions', + m.connect('edit_user_group_perms', + '/user_groups/{user_group_id}/edit/permissions', action='edit_perms', conditions={'method': ['GET']}) - m.connect('edit_user_group_perms', '/user_groups/{user_group_id}/edit/permissions', + m.connect('edit_user_group_perms', + '/user_groups/{user_group_id}/edit/permissions', action='update_perms', conditions={'method': ['PUT']}) - m.connect('edit_user_group_advanced', '/user_groups/{user_group_id}/edit/advanced', + m.connect('edit_user_group_advanced', + '/user_groups/{user_group_id}/edit/advanced', action='edit_advanced', conditions={'method': ['GET']}) - m.connect('edit_user_group_members', '/user_groups/{user_group_id}/edit/members', + m.connect('edit_user_group_members', + '/user_groups/{user_group_id}/edit/members', jsroute=True, action='edit_members', conditions={'method': ['GET']}) # ADMIN PERMISSIONS ROUTES @@ -496,12 +555,6 @@ def make_map(config): m.connect('my_account_auth_tokens', '/my_account/auth_tokens', action='my_account_auth_tokens_delete', conditions={'method': ['DELETE']}) - m.connect('my_account_oauth', '/my_account/oauth', - action='my_account_oauth', conditions={'method': ['GET']}) - m.connect('my_account_oauth', '/my_account/oauth', - action='my_account_oauth_delete', - conditions={'method': ['DELETE']}) - # NOTIFICATION REST ROUTES with rmap.submapper(path_prefix=ADMIN_PREFIX, controller='admin/notifications') as m: @@ -522,9 +575,9 @@ def make_map(config): controller='admin/gists') as m: m.connect('gists', '/gists', action='create', conditions={'method': ['POST']}) - m.connect('gists', '/gists', + m.connect('gists', '/gists', jsroute=True, action='index', conditions={'method': ['GET']}) - m.connect('new_gist', '/gists/new', + m.connect('new_gist', '/gists/new', jsroute=True, action='new', conditions={'method': ['GET']}) m.connect('/gists/{gist_id}', @@ -557,8 +610,12 @@ def make_map(config): m.connect('admin_add_repo', '/add_repo/{new_repo:[a-z0-9\. 
_-]*}', action='add_repo') m.connect( - 'pull_requests_global', '/pull_requests/{pull_request_id:[0-9]+}', + 'pull_requests_global_0', '/pull_requests/{pull_request_id:[0-9]+}', action='pull_requests') + m.connect( + 'pull_requests_global', '/pull-requests/{pull_request_id:[0-9]+}', + action='pull_requests') + # USER JOURNAL rmap.connect('journal', '%s/journal' % (ADMIN_PREFIX,), @@ -586,7 +643,7 @@ def make_map(config): action='public_journal_atom') rmap.connect('toggle_following', '%s/toggle_following' % (ADMIN_PREFIX,), - controller='journal', action='toggle_following', + controller='journal', action='toggle_following', jsroute=True, conditions={'method': ['POST']}) # FULL TEXT SEARCH @@ -598,27 +655,6 @@ def make_map(config): conditions={'function': check_repo}, requirements=URL_NAME_REQUIREMENTS) - # LOGIN/LOGOUT/REGISTER/SIGN IN - rmap.connect('login_home', '%s/login' % (ADMIN_PREFIX,), controller='login', - action='index') - - rmap.connect('logout_home', '%s/logout' % (ADMIN_PREFIX,), controller='login', - action='logout', conditions={'method': ['POST']}) - - rmap.connect('register', '%s/register' % (ADMIN_PREFIX,), controller='login', - action='register') - - rmap.connect('reset_password', '%s/password_reset' % (ADMIN_PREFIX,), - controller='login', action='password_reset') - - rmap.connect('reset_password_confirmation', - '%s/password_reset_confirmation' % (ADMIN_PREFIX,), - controller='login', action='password_reset_confirmation') - - rmap.connect('social_auth', - '%s/social_auth/{provider_name}' % (ADMIN_PREFIX,), - controller='login', action='social_auth') - # FEEDS rmap.connect('rss_feed_home', '/{repo_name}/feed/rss', controller='feed', action='rss', @@ -644,17 +680,17 @@ def make_map(config): rmap.connect('repo_stats', '/{repo_name}/repo_stats/{commit_id}', controller='summary', action='repo_stats', conditions={'function': check_repo}, - requirements=URL_NAME_REQUIREMENTS) + requirements=URL_NAME_REQUIREMENTS, jsroute=True) rmap.connect('repo_refs_data', '/{repo_name}/refs-data', - controller='summary', action='repo_refs_data', + controller='summary', action='repo_refs_data', jsroute=True, requirements=URL_NAME_REQUIREMENTS) rmap.connect('repo_refs_changelog_data', '/{repo_name}/refs-data-changelog', controller='summary', action='repo_refs_changelog_data', - requirements=URL_NAME_REQUIREMENTS) + requirements=URL_NAME_REQUIREMENTS, jsroute=True) rmap.connect('changeset_home', '/{repo_name}/changeset/{revision}', - controller='changeset', revision='tip', + controller='changeset', revision='tip', jsroute=True, conditions={'function': check_repo}, requirements=URL_NAME_REQUIREMENTS) rmap.connect('changeset_children', '/{repo_name}/changeset_children/{revision}', @@ -667,12 +703,13 @@ def make_map(config): requirements=URL_NAME_REQUIREMENTS) # repo edit options - rmap.connect('edit_repo', '/{repo_name}/settings', + rmap.connect('edit_repo', '/{repo_name}/settings', jsroute=True, controller='admin/repos', action='edit', conditions={'method': ['GET'], 'function': check_repo}, requirements=URL_NAME_REQUIREMENTS) rmap.connect('edit_repo_perms', '/{repo_name}/settings/permissions', + jsroute=True, controller='admin/repos', action='edit_permissions', conditions={'method': ['GET'], 'function': check_repo}, requirements=URL_NAME_REQUIREMENTS) @@ -804,13 +841,13 @@ def make_map(config): requirements=URL_NAME_REQUIREMENTS) rmap.connect('changeset_comment', - '/{repo_name}/changeset/{revision}/comment', + '/{repo_name}/changeset/{revision}/comment', jsroute=True, controller='changeset', 
revision='tip', action='comment', conditions={'function': check_repo}, requirements=URL_NAME_REQUIREMENTS) rmap.connect('changeset_comment_preview', - '/{repo_name}/changeset/comment/preview', + '/{repo_name}/changeset/comment/preview', jsroute=True, controller='changeset', action='preview_comment', conditions={'function': check_repo, 'method': ['POST']}, requirements=URL_NAME_REQUIREMENTS) @@ -819,11 +856,11 @@ def make_map(config): '/{repo_name}/changeset/comment/{comment_id}/delete', controller='changeset', action='delete_comment', conditions={'function': check_repo, 'method': ['DELETE']}, - requirements=URL_NAME_REQUIREMENTS) + requirements=URL_NAME_REQUIREMENTS, jsroute=True) rmap.connect('changeset_info', '/changeset_info/{repo_name}/{revision}', controller='changeset', action='changeset_info', - requirements=URL_NAME_REQUIREMENTS) + requirements=URL_NAME_REQUIREMENTS, jsroute=True) rmap.connect('compare_home', '/{repo_name}/compare', @@ -835,33 +872,33 @@ def make_map(config): '/{repo_name}/compare/{source_ref_type}@{source_ref:.*?}...{target_ref_type}@{target_ref:.*?}', controller='compare', action='compare', conditions={'function': check_repo}, - requirements=URL_NAME_REQUIREMENTS) + requirements=URL_NAME_REQUIREMENTS, jsroute=True) rmap.connect('pullrequest_home', '/{repo_name}/pull-request/new', controller='pullrequests', action='index', conditions={'function': check_repo, 'method': ['GET']}, - requirements=URL_NAME_REQUIREMENTS) + requirements=URL_NAME_REQUIREMENTS, jsroute=True) rmap.connect('pullrequest', '/{repo_name}/pull-request/new', controller='pullrequests', action='create', conditions={'function': check_repo, 'method': ['POST']}, - requirements=URL_NAME_REQUIREMENTS) + requirements=URL_NAME_REQUIREMENTS, jsroute=True) rmap.connect('pullrequest_repo_refs', '/{repo_name}/pull-request/refs/{target_repo_name:.*?[^/]}', controller='pullrequests', action='get_repo_refs', conditions={'function': check_repo, 'method': ['GET']}, - requirements=URL_NAME_REQUIREMENTS) + requirements=URL_NAME_REQUIREMENTS, jsroute=True) rmap.connect('pullrequest_repo_destinations', '/{repo_name}/pull-request/repo-destinations', controller='pullrequests', action='get_repo_destinations', conditions={'function': check_repo, 'method': ['GET']}, - requirements=URL_NAME_REQUIREMENTS) + requirements=URL_NAME_REQUIREMENTS, jsroute=True) rmap.connect('pullrequest_show', '/{repo_name}/pull-request/{pull_request_id}', @@ -875,7 +912,7 @@ def make_map(config): controller='pullrequests', action='update', conditions={'function': check_repo, 'method': ['PUT']}, - requirements=URL_NAME_REQUIREMENTS) + requirements=URL_NAME_REQUIREMENTS, jsroute=True) rmap.connect('pullrequest_merge', '/{repo_name}/pull-request/{pull_request_id}', @@ -896,20 +933,20 @@ def make_map(config): controller='pullrequests', action='show_all', conditions={'function': check_repo, 'method': ['GET']}, - requirements=URL_NAME_REQUIREMENTS) + requirements=URL_NAME_REQUIREMENTS, jsroute=True) rmap.connect('pullrequest_comment', '/{repo_name}/pull-request-comment/{pull_request_id}', controller='pullrequests', action='comment', conditions={'function': check_repo, 'method': ['POST']}, - requirements=URL_NAME_REQUIREMENTS) + requirements=URL_NAME_REQUIREMENTS, jsroute=True) rmap.connect('pullrequest_comment_delete', '/{repo_name}/pull-request-comment/{comment_id}/delete', controller='pullrequests', action='delete_comment', conditions={'function': check_repo, 'method': ['DELETE']}, - requirements=URL_NAME_REQUIREMENTS) + 
requirements=URL_NAME_REQUIREMENTS, jsroute=True) rmap.connect('summary_home_explicit', '/{repo_name}/summary', controller='summary', conditions={'function': check_repo}, @@ -927,7 +964,7 @@ def make_map(config): controller='bookmarks', conditions={'function': check_repo}, requirements=URL_NAME_REQUIREMENTS) - rmap.connect('changelog_home', '/{repo_name}/changelog', + rmap.connect('changelog_home', '/{repo_name}/changelog', jsroute=True, controller='changelog', conditions={'function': check_repo}, requirements=URL_NAME_REQUIREMENTS) @@ -936,21 +973,21 @@ def make_map(config): conditions={'function': check_repo}, requirements=URL_NAME_REQUIREMENTS) - rmap.connect('changelog_file_home', '/{repo_name}/changelog/{revision}/{f_path}', + rmap.connect('changelog_file_home', + '/{repo_name}/changelog/{revision}/{f_path}', controller='changelog', f_path=None, conditions={'function': check_repo}, - requirements=URL_NAME_REQUIREMENTS) + requirements=URL_NAME_REQUIREMENTS, jsroute=True) rmap.connect('changelog_details', '/{repo_name}/changelog_details/{cs}', controller='changelog', action='changelog_details', conditions={'function': check_repo}, requirements=URL_NAME_REQUIREMENTS) - rmap.connect('files_home', - '/{repo_name}/files/{revision}/{f_path}', + rmap.connect('files_home', '/{repo_name}/files/{revision}/{f_path}', controller='files', revision='tip', f_path='', conditions={'function': check_repo}, - requirements=URL_NAME_REQUIREMENTS) + requirements=URL_NAME_REQUIREMENTS, jsroute=True) rmap.connect('files_home_simple_catchrev', '/{repo_name}/files/{revision}', @@ -968,13 +1005,13 @@ def make_map(config): '/{repo_name}/history/{revision}/{f_path}', controller='files', action='history', revision='tip', f_path='', conditions={'function': check_repo}, - requirements=URL_NAME_REQUIREMENTS) + requirements=URL_NAME_REQUIREMENTS, jsroute=True) rmap.connect('files_authors_home', '/{repo_name}/authors/{revision}/{f_path}', controller='files', action='authors', revision='tip', f_path='', conditions={'function': check_repo}, - requirements=URL_NAME_REQUIREMENTS) + requirements=URL_NAME_REQUIREMENTS, jsroute=True) rmap.connect('files_diff_home', '/{repo_name}/diff/{f_path}', controller='files', action='diff', f_path='', @@ -1053,19 +1090,19 @@ def make_map(config): rmap.connect('files_archive_home', '/{repo_name}/archive/{fname}', controller='files', action='archivefile', conditions={'function': check_repo}, - requirements=URL_NAME_REQUIREMENTS) + requirements=URL_NAME_REQUIREMENTS, jsroute=True) rmap.connect('files_nodelist_home', '/{repo_name}/nodelist/{revision}/{f_path}', controller='files', action='nodelist', conditions={'function': check_repo}, - requirements=URL_NAME_REQUIREMENTS) + requirements=URL_NAME_REQUIREMENTS, jsroute=True) rmap.connect('files_metadata_list_home', '/{repo_name}/metadata_list/{revision}/{f_path}', controller='files', action='metadata_list', conditions={'function': check_repo}, - requirements=URL_NAME_REQUIREMENTS) + requirements=URL_NAME_REQUIREMENTS, jsroute=True) rmap.connect('repo_fork_create_home', '/{repo_name}/fork', controller='forks', action='fork_create', @@ -1096,7 +1133,7 @@ def make_map(config): # catch all, at the end _connect_with_slash( - rmap, 'summary_home', '/{repo_name}', + rmap, 'summary_home', '/{repo_name}', jsroute=True, controller='summary', action='index', conditions={'function': check_repo}, requirements=URL_NAME_REQUIREMENTS) diff --git a/rhodecode/config/utils.py b/rhodecode/config/utils.py --- a/rhodecode/config/utils.py +++ 
b/rhodecode/config/utils.py @@ -73,6 +73,18 @@ def initialize_database(config): init_model(engine, encryption_key=config['beaker.session.secret']) +def initialize_test_environment(settings, test_env=None): + if test_env is None: + test_env = not int(os.environ.get('RC_NO_TMP_PATH', 0)) + + from rhodecode.lib.utils import create_test_env, create_test_index + from rhodecode.tests import TESTS_TMP_PATH + # test repos + if test_env: + create_test_env(TESTS_TMP_PATH, settings) + create_test_index(TESTS_TMP_PATH, settings, True) + + def get_vcs_server_protocol(config): protocol = config.get('vcs.server.protocol', 'pyro4') return protocol diff --git a/rhodecode/controllers/admin/my_account.py b/rhodecode/controllers/admin/my_account.py --- a/rhodecode/controllers/admin/my_account.py +++ b/rhodecode/controllers/admin/my_account.py @@ -39,16 +39,15 @@ from rhodecode.lib.auth import ( from rhodecode.lib.base import BaseController, render from rhodecode.lib.utils2 import safe_int, md5 from rhodecode.lib.ext_json import json -from rhodecode.model.db import (Repository, PullRequest, PullRequestReviewers, - UserEmailMap, User, UserFollowing, - ExternalIdentity) +from rhodecode.model.db import ( + Repository, PullRequest, PullRequestReviewers, UserEmailMap, User, + UserFollowing) from rhodecode.model.forms import UserForm, PasswordChangeForm from rhodecode.model.scm import RepoList from rhodecode.model.user import UserModel from rhodecode.model.repo import RepoModel from rhodecode.model.auth_token import AuthTokenModel from rhodecode.model.meta import Session -from rhodecode.model.settings import SettingsModel log = logging.getLogger(__name__) @@ -347,27 +346,3 @@ class MyAccountController(BaseController h.flash(_("Auth token successfully deleted"), category='success') return redirect(url('my_account_auth_tokens')) - - def my_account_oauth(self): - c.active = 'oauth' - self.__load_data() - c.user_oauth_tokens = ExternalIdentity().by_local_user_id( - c.rhodecode_user.user_id).all() - settings = SettingsModel().get_all_settings() - c.social_plugins = SettingsModel().list_enabled_social_plugins( - settings) - return render('admin/my_account/my_account.html') - - @auth.CSRFRequired() - def my_account_oauth_delete(self): - token = ExternalIdentity.by_external_id_and_provider( - request.params.get('external_id'), - request.params.get('provider_name'), - local_user_id=c.rhodecode_user.user_id - ) - if token: - Session().delete(token) - Session().commit() - h.flash(_("OAuth token successfully deleted"), category='success') - - return redirect(url('my_account_oauth')) diff --git a/rhodecode/controllers/admin/user_groups.py b/rhodecode/controllers/admin/user_groups.py --- a/rhodecode/controllers/admin/user_groups.py +++ b/rhodecode/controllers/admin/user_groups.py @@ -36,6 +36,7 @@ from rhodecode.lib import auth from rhodecode.lib import helpers as h from rhodecode.lib.exceptions import UserGroupAssignedException,\ RepoGroupAssignmentError +from rhodecode.lib.utils import jsonify, action_logger from rhodecode.lib.utils2 import safe_unicode, str2bool, safe_int from rhodecode.lib.auth import ( LoginRequired, NotAnonymous, HasUserGroupPermissionAnyDecorator, @@ -181,7 +182,8 @@ class UserGroupsController(BaseControlle h.flash(_('Error occurred during creation of user group %s') \ % request.POST.get('users_group_name'), category='error') - return redirect(url('users_groups')) + return redirect( + url('edit_users_group', user_group_id=user_group.users_group_id)) @HasPermissionAnyDecorator('hg.admin', 
'hg.usergroup.create.true') def new(self): @@ -467,5 +469,12 @@ class UserGroupsController(BaseControlle c.group_members_obj = sorted((x.user for x in c.user_group.members), key=lambda u: u.username.lower()) - c.group_members = [(x.user_id, x.username) for x in c.group_members_obj] + group_members = [(x.user_id, x.username) for x in c.group_members_obj] + + if request.is_xhr: + return jsonify(lambda *a, **k: { + 'members': group_members + }) + + c.group_members = group_members return render('admin/user_groups/user_group_edit.html') diff --git a/rhodecode/controllers/compare.py b/rhodecode/controllers/compare.py --- a/rhodecode/controllers/compare.py +++ b/rhodecode/controllers/compare.py @@ -198,7 +198,9 @@ class CompareController(BaseRepoControll c.statuses = c.rhodecode_db_repo.statuses( [x.raw_id for x in c.commit_ranges]) - if partial: + if partial: # for PR ajax commits loader + if not c.ancestor: + return '' # cannot merge if there is no ancestor return render('compare/compare_commits.html') if c.ancestor: diff --git a/rhodecode/controllers/home.py b/rhodecode/controllers/home.py --- a/rhodecode/controllers/home.py +++ b/rhodecode/controllers/home.py @@ -24,16 +24,17 @@ Home controller for RhodeCode Enterprise import logging import time - +import re -from pylons import tmpl_context as c, request +from pylons import tmpl_context as c, request, url, config from pylons.i18n.translation import _ from sqlalchemy.sql import func from rhodecode.lib.auth import ( - LoginRequired, HasPermissionAllDecorator, + LoginRequired, HasPermissionAllDecorator, AuthUser, HasRepoGroupPermissionAnyDecorator, XHRRequired) from rhodecode.lib.base import BaseController, render +from rhodecode.lib.index import searcher_from_config from rhodecode.lib.ext_json import json from rhodecode.lib.utils import jsonify from rhodecode.lib.utils2 import safe_unicode @@ -134,7 +135,8 @@ class HomeController(BaseController): 'id': obj['name'], 'text': obj['name'], 'type': 'repo', - 'obj': obj['dbrepo'] + 'obj': obj['dbrepo'], + 'url': url('summary_home', repo_name=obj['name']) } for obj in repo_iter] @@ -156,16 +158,45 @@ class HomeController(BaseController): 'id': obj.group_name, 'text': obj.group_name, 'type': 'group', - 'obj': {} + 'obj': {}, + 'url': url('repo_group_home', group_name=obj.group_name) } for obj in repo_groups_iter] + def _get_hash_commit_list(self, hash_starts_with=None, limit=20): + if not hash_starts_with or len(hash_starts_with) < 3: + return [] + + commit_hashes = re.compile('([0-9a-f]{2,40})').findall(hash_starts_with) + + if len(commit_hashes) != 1: + return [] + + commit_hash_prefix = commit_hashes[0] + + auth_user = AuthUser( + user_id=c.rhodecode_user.user_id, ip_addr=self.ip_addr) + searcher = searcher_from_config(config) + result = searcher.search( + 'commit_id:%s*' % commit_hash_prefix, 'commit', auth_user) + + return [ + { + 'id': entry['commit_id'], + 'text': entry['commit_id'], + 'type': 'commit', + 'obj': {'repo': entry['repository']}, + 'url': url('changeset_home', + repo_name=entry['repository'], revision=entry['commit_id']) + } + for entry in result['results']] + @LoginRequired() @XHRRequired() @jsonify - def repo_switcher_data(self): + def goto_switcher_data(self): query = request.GET.get('query') - log.debug('generating switcher repo/groups list, query %s', query) + log.debug('generating goto switcher list, query %s', query) res = [] repo_groups = self._get_repo_group_list(query) @@ -182,6 +213,19 @@ class HomeController(BaseController): 'children': repos }) + commits = 
self._get_hash_commit_list(query) + if commits: + unique_repos = {} + for commit in commits: + unique_repos.setdefault(commit['obj']['repo'], [] + ).append(commit) + + for repo in unique_repos: + res.append({ + 'text': _('Commits in %(repo)s') % {'repo': repo}, + 'children': unique_repos[repo] + }) + data = { 'more': False, 'results': res @@ -203,6 +247,7 @@ class HomeController(BaseController): 'text': _('Repositories'), 'children': repos }) + data = { 'more': False, 'results': res diff --git a/rhodecode/controllers/login.py b/rhodecode/controllers/login.py deleted file mode 100644 --- a/rhodecode/controllers/login.py +++ /dev/null @@ -1,409 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright (C) 2010-2016 RhodeCode GmbH -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License, version 3 -# (only), as published by the Free Software Foundation. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see . -# -# This program is dual-licensed. If you wish to learn more about the -# RhodeCode Enterprise Edition, including its added features, Support services, -# and proprietary license terms, please see https://rhodecode.com/licenses/ - -""" -Login controller for rhodeocode -""" - -import datetime -import formencode -import logging -import urlparse -import uuid - -from formencode import htmlfill -from webob.exc import HTTPFound -from pylons.i18n.translation import _ -from pylons.controllers.util import redirect -from pylons import request, session, tmpl_context as c, url -from recaptcha.client.captcha import submit - -import rhodecode.lib.helpers as h -from rhodecode.lib.auth import ( - AuthUser, HasPermissionAnyDecorator, CSRFRequired) -from rhodecode.authentication.base import loadplugin -from rhodecode.lib.base import BaseController, render -from rhodecode.lib.exceptions import UserCreationError -from rhodecode.lib.utils2 import safe_str -from rhodecode.model.db import User, ExternalIdentity -from rhodecode.model.forms import LoginForm, RegisterForm, PasswordResetForm -from rhodecode.model.login_session import LoginSession -from rhodecode.model.meta import Session -from rhodecode.model.settings import SettingsModel -from rhodecode.model.user import UserModel - -log = logging.getLogger(__name__) - - -class LoginController(BaseController): - - def __before__(self): - super(LoginController, self).__before__() - - def _store_user_in_session(self, username, remember=False): - user = User.get_by_username(username, case_insensitive=True) - auth_user = AuthUser(user.user_id) - auth_user.set_authenticated() - cs = auth_user.get_cookie_store() - session['rhodecode_user'] = cs - user.update_lastlogin() - Session().commit() - - # If they want to be remembered, update the cookie - if remember: - _year = (datetime.datetime.now() + - datetime.timedelta(seconds=60 * 60 * 24 * 365)) - session._set_cookie_expires(_year) - - session.save() - - log.info('user %s is now authenticated and stored in ' - 'session, session attrs %s', username, cs) - - # dumps session attrs back to cookie - session._update_cookie_out() - # we set new cookie - headers = None - if session.request['set_cookie']: - # send set-cookie headers back to response 
to update cookie - headers = [('Set-Cookie', session.request['cookie_out'])] - return headers - - def _validate_came_from(self, came_from): - if not came_from: - return came_from - - parsed = urlparse.urlparse(came_from) - server_parsed = urlparse.urlparse(url.current()) - allowed_schemes = ['http', 'https'] - if parsed.scheme and parsed.scheme not in allowed_schemes: - log.error('Suspicious URL scheme detected %s for url %s' % - (parsed.scheme, parsed)) - came_from = url('home') - elif server_parsed.netloc != parsed.netloc: - log.error('Suspicious NETLOC detected %s for url %s server url ' - 'is: %s' % (parsed.netloc, parsed, server_parsed)) - came_from = url('home') - if any(bad_str in parsed.path for bad_str in ('\r', '\n')): - log.error('Header injection detected `%s` for url %s server url ' % - (parsed.path, parsed)) - came_from = url('home') - return came_from - - def _redirect_to_origin(self, location, headers=None): - request.GET.pop('came_from', None) - raise HTTPFound(location=location, headers=headers) - - def _set_came_from(self): - _default_came_from = url('home') - came_from = self._validate_came_from( - safe_str(request.GET.get('came_from', ''))) - c.came_from = came_from or _default_came_from - - def index(self): - self._set_came_from() - - not_default = c.rhodecode_user.username != User.DEFAULT_USER - ip_allowed = c.rhodecode_user.ip_allowed - c.social_plugins = self._get_active_social_plugins() - - # redirect if already logged in - if c.rhodecode_user.is_authenticated and not_default and ip_allowed: - raise self._redirect_to_origin(location=c.came_from) - - if request.POST: - # import Login Form validator class - login_form = LoginForm()() - try: - session.invalidate() - c.form_result = login_form.to_python(dict(request.POST)) - # form checks for username/password, now we're authenticated - headers = self._store_user_in_session( - username=c.form_result['username'], - remember=c.form_result['remember']) - raise self._redirect_to_origin( - location=c.came_from, headers=headers) - except formencode.Invalid as errors: - defaults = errors.value - # remove password from filling in form again - del defaults['password'] - return htmlfill.render( - render('/login.html'), - defaults=errors.value, - errors=errors.error_dict or {}, - prefix_error=False, - encoding="UTF-8", - force_defaults=False) - except UserCreationError as e: - # container auth or other auth functions that create users on - # the fly can throw this exception signaling that there's issue - # with user creation, explanation should be provided in - # Exception itself - h.flash(e, 'error') - - # check if we use container plugin, and try to login using it. - from rhodecode.authentication.base import authenticate, HTTP_TYPE - try: - log.debug('Running PRE-AUTH for container based authentication') - auth_info = authenticate( - '', '', request.environ, HTTP_TYPE, skip_missing=True) - except UserCreationError as e: - log.error(e) - h.flash(e, 'error') - # render login, with flash message about limit - return render('/login.html') - - if auth_info: - headers = self._store_user_in_session(auth_info.get('username')) - raise self._redirect_to_origin( - location=c.came_from, headers=headers) - return render('/login.html') - - # TODO: Move this to a better place. 
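Editor's note: the removed _validate_came_from above, and the get_came_from helper in the new Pyramid view later in this patch, guard the post-login redirect the same way: only http/https schemes, same host as the server, and no CR/LF in the path. A self-contained sketch of those checks, with illustrative names and the fallback URL passed in explicitly:

    import urlparse  # urllib.parse on Python 3

    def safe_came_from(came_from, server_host, fallback='/'):
        parsed = urlparse.urlparse(came_from or '')
        # reject anything that is not plain http(s)
        if parsed.scheme and parsed.scheme not in ('http', 'https'):
            return fallback
        # reject redirects that leave the current host
        if parsed.netloc and parsed.netloc != server_host:
            return fallback
        # reject CR/LF that could be abused for header injection
        if any(bad in parsed.path for bad in ('\r', '\n')):
            return fallback
        return came_from or fallback

    print(safe_came_from('https://evil.example/x', 'code.example.com'))  # '/'
    print(safe_came_from('/repo/summary', 'code.example.com'))           # '/repo/summary'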
- def _get_active_social_plugins(self): - from rhodecode.authentication.base import AuthomaticBase - activated_plugins = SettingsModel().get_auth_plugins() - social_plugins = [] - for plugin_id in activated_plugins: - plugin = loadplugin(plugin_id) - if isinstance(plugin, AuthomaticBase) and plugin.is_active(): - social_plugins.append(plugin) - return social_plugins - - @HasPermissionAnyDecorator('hg.admin', 'hg.register.auto_activate', - 'hg.register.manual_activate') - def register(self): - c.auto_active = 'hg.register.auto_activate' in User.get_default_user()\ - .AuthUser.permissions['global'] - - settings = SettingsModel().get_all_settings() - captcha_private_key = settings.get('rhodecode_captcha_private_key') - c.captcha_active = bool(captcha_private_key) - c.captcha_public_key = settings.get('rhodecode_captcha_public_key') - c.register_message = settings.get('rhodecode_register_message') or '' - - c.social_plugins = self._get_active_social_plugins() - - social_data = session.get('rhodecode.social_auth') - c.form_data = {} - if social_data: - c.form_data = {'username': social_data['user'].get('user_name'), - 'password': str(uuid.uuid4()), - 'email': social_data['user'].get('email') - } - - if request.POST: - register_form = RegisterForm()() - try: - form_result = register_form.to_python(dict(request.POST)) - form_result['active'] = c.auto_active - - if c.captcha_active: - response = submit( - request.POST.get('recaptcha_challenge_field'), - request.POST.get('recaptcha_response_field'), - private_key=captcha_private_key, - remoteip=self.ip_addr) - if c.captcha_active and not response.is_valid: - _value = form_result - _msg = _('bad captcha') - error_dict = {'recaptcha_field': _msg} - raise formencode.Invalid(_msg, _value, None, - error_dict=error_dict) - - new_user = UserModel().create_registration(form_result) - if social_data: - plugin_name = 'egg:rhodecode-enterprise-ee#{}'.format( - social_data['credentials.provider'] - ) - auth_plugin = loadplugin(plugin_name) - if auth_plugin: - auth_plugin.handle_social_data( - session, new_user.user_id, social_data) - h.flash(_('You have successfully registered with RhodeCode'), - category='success') - Session().commit() - return redirect(url('login_home')) - - except formencode.Invalid as errors: - return htmlfill.render( - render('/register.html'), - defaults=errors.value, - errors=errors.error_dict or {}, - prefix_error=False, - encoding="UTF-8", - force_defaults=False) - except UserCreationError as e: - # container auth or other auth functions that create users on - # the fly can throw this exception signaling that there's issue - # with user creation, explanation should be provided in - # Exception itself - h.flash(e, 'error') - - return render('/register.html') - - def password_reset(self): - settings = SettingsModel().get_all_settings() - captcha_private_key = settings.get('rhodecode_captcha_private_key') - c.captcha_active = bool(captcha_private_key) - c.captcha_public_key = settings.get('rhodecode_captcha_public_key') - - if request.POST: - password_reset_form = PasswordResetForm()() - try: - form_result = password_reset_form.to_python(dict(request.POST)) - if c.captcha_active: - response = submit( - request.POST.get('recaptcha_challenge_field'), - request.POST.get('recaptcha_response_field'), - private_key=captcha_private_key, - remoteip=self.ip_addr) - if c.captcha_active and not response.is_valid: - _value = form_result - _msg = _('bad captcha') - error_dict = {'recaptcha_field': _msg} - raise formencode.Invalid(_msg, _value, None, 
- error_dict=error_dict) - UserModel().reset_password_link(form_result) - h.flash(_('Your password reset link was sent'), - category='success') - return redirect(url('login_home')) - - except formencode.Invalid as errors: - return htmlfill.render( - render('/password_reset.html'), - defaults=errors.value, - errors=errors.error_dict or {}, - prefix_error=False, - encoding="UTF-8", - force_defaults=False) - - return render('/password_reset.html') - - def password_reset_confirmation(self): - if request.GET and request.GET.get('key'): - try: - user = User.get_by_auth_token(request.GET.get('key')) - data = {'email': user.email} - UserModel().reset_password(data) - h.flash(_( - 'Your password reset was successful, ' - 'a new password has been sent to your email'), - category='success') - except Exception as e: - log.error(e) - return redirect(url('reset_password')) - - return redirect(url('login_home')) - - @CSRFRequired() - def logout(self): - LoginSession().destroy_user_session() - return redirect(url('home')) - - def social_auth(self, provider_name): - plugin_name = 'egg:rhodecode-enterprise-ee#{}'.format( - provider_name - ) - auth_plugin = loadplugin(plugin_name) - if not auth_plugin: - return self._handle_social_auth_error(request, 'No auth plugin') - - result, response = auth_plugin.get_provider_result(request) - if result: - if result.error: - return self._handle_social_auth_error(request, result.error) - elif result.user: - return self._handle_social_auth_success(request, result) - return response - - def _handle_social_auth_error(self, request, result): - log.error(result) - h.flash(_('There was an error during OAuth processing.'), - category='error') - return redirect(url('home')) - - def _normalize_social_data(self, result): - social_data = { - 'user': {'data': result.user.data}, - 'credentials.provider': result.user.credentials.provider_name, - 'credentials.token': result.user.credentials.token, - 'credentials.token_secret': result.user.credentials.token_secret, - 'credentials.refresh_token': result.user.credentials.refresh_token - } - # normalize data - social_data['user']['id'] = result.user.id - user_name = result.user.username or '' - # use email name as username for google - if (social_data['credentials.provider'] == 'google' and - result.user.email): - user_name = result.user.email - - social_data['user']['user_name'] = user_name - social_data['user']['email'] = result.user.email or '' - return social_data - - def _handle_social_auth_success(self, request, result): - self._set_came_from() - - # Hooray, we have the user! - # OAuth 2.0 and OAuth 1.0a provide only limited user data on login, - # We need to update the user to get more info. 
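Editor's note: the register and password-reset flows above (and their Pyramid replacements later in this patch) gate on the same recaptcha pattern: skip the check when no private key is configured, otherwise submit the challenge/response pair and turn a failed verification into a formencode.Invalid on the captcha field. A condensed sketch of that gate, assuming a submit() callable compatible with the recaptcha client used here:

    import formencode

    def validate_captcha(params, private_key, remote_ip, submit):
        # no private key configured means captcha is disabled
        if not private_key:
            return
        response = submit(
            params.get('recaptcha_challenge_field'),
            params.get('recaptcha_response_field'),
            private_key=private_key,
            remoteip=remote_ip)
        if not response.is_valid:
            raise formencode.Invalid(
                'bad captcha', params, None,
                error_dict={'recaptcha_field': 'bad captcha'})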
- if result.user: - result.user.update() - - social_data = self._normalize_social_data(result) - - session['rhodecode.social_auth'] = social_data - - plugin_name = 'egg:rhodecode-enterprise-ee#{}'.format( - social_data['credentials.provider'] - ) - auth_plugin = loadplugin(plugin_name) - - # user is logged so bind his external identity with account - if request.user and request.user.username != User.DEFAULT_USER: - if auth_plugin: - auth_plugin.handle_social_data( - session, request.user.user_id, social_data) - session.pop('rhodecode.social_auth', None) - Session().commit() - return redirect(url('my_account_oauth')) - else: - user = ExternalIdentity.user_by_external_id_and_provider( - social_data['user']['id'], - social_data['credentials.provider'] - ) - - # user tokens are already found in our db - if user: - if auth_plugin: - auth_plugin.handle_social_data( - session, user.user_id, social_data) - session.pop('rhodecode.social_auth', None) - headers = self._store_user_in_session(user.username) - raise self._redirect_to_origin( - location=c.came_from, headers=headers) - else: - msg = _('You need to finish registration ' - 'process to bind your external identity to your ' - 'account or sign in to existing account') - h.flash(msg, category='success') - return redirect(url('register')) diff --git a/rhodecode/controllers/pullrequests.py b/rhodecode/controllers/pullrequests.py --- a/rhodecode/controllers/pullrequests.py +++ b/rhodecode/controllers/pullrequests.py @@ -590,6 +590,8 @@ class PullrequestsController(BaseRepoCon PullRequestModel().close_pull_request( pull_request.pull_request_id, user) Session().commit() + msg = _('Pull request was successfully merged and closed.') + h.flash(msg, category='success') else: log.debug( "The merge was not successful. 
Merge response: %s", diff --git a/rhodecode/controllers/search.py b/rhodecode/controllers/search.py --- a/rhodecode/controllers/search.py +++ b/rhodecode/controllers/search.py @@ -56,30 +56,33 @@ class SearchController(BaseRepoControlle search_params = schema.deserialize( dict(search_query=request.GET.get('q'), search_type=request.GET.get('type'), + search_sort=request.GET.get('sort'), page_limit=request.GET.get('page_limit'), requested_page=request.GET.get('page')) ) except validation_schema.Invalid as e: errors = e.children + def url_generator(**kw): + q = urllib.quote(safe_str(search_query)) + return update_params( + "?q=%s&type=%s" % (q, safe_str(search_type)), **kw) + search_query = search_params.get('search_query') search_type = search_params.get('search_type') - + search_sort = search_params.get('search_sort') if search_params.get('search_query'): page_limit = search_params['page_limit'] requested_page = search_params['requested_page'] - def url_generator(**kw): - q = urllib.quote(safe_str(search_query)) - return update_params( - "?q=%s&type=%s" % (q, safe_str(search_type)), **kw) c.perm_user = AuthUser(user_id=c.rhodecode_user.user_id, ip_addr=self.ip_addr) try: search_result = searcher.search( - search_query, search_type, c.perm_user, repo_name) + search_query, search_type, c.perm_user, repo_name, + requested_page, page_limit, search_sort) formatted_results = Page( search_result['results'], page=requested_page, @@ -97,6 +100,8 @@ class SearchController(BaseRepoControlle errors = [ validation_schema.Invalid(node, search_result['error'])] + c.sort = search_sort + c.url_generator = url_generator c.errors = errors c.formatted_results = formatted_results c.runtime = execution_time diff --git a/rhodecode/events.py b/rhodecode/events.py new file mode 100644 --- /dev/null +++ b/rhodecode/events.py @@ -0,0 +1,31 @@ +# Copyright (C) 2016-2016 RhodeCode GmbH +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License, version 3 +# (only), as published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . +# +# This program is dual-licensed. If you wish to learn more about the +# RhodeCode Enterprise Edition, including its added features, Support services, +# and proprietary license terms, please see https://rhodecode.com/licenses/ + +from zope.interface import implementer +from rhodecode.interfaces import IUserRegistered + + +@implementer(IUserRegistered) +class UserRegistered(object): + """ + An instance of this class is emitted as an :term:`event` whenever a user + account is registered. + """ + def __init__(self, user, session): + self.user = user + self.session = session diff --git a/rhodecode/interfaces.py b/rhodecode/interfaces.py new file mode 100644 --- /dev/null +++ b/rhodecode/interfaces.py @@ -0,0 +1,28 @@ +# Copyright (C) 2016-2016 RhodeCode GmbH +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License, version 3 +# (only), as published by the Free Software Foundation. 
+# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . +# +# This program is dual-licensed. If you wish to learn more about the +# RhodeCode Enterprise Edition, including its added features, Support services, +# and proprietary license terms, please see https://rhodecode.com/licenses/ + +from zope.interface import Attribute, Interface + + +class IUserRegistered(Interface): + """ + An event type that is emitted whenever a new user registers a user + account. + """ + user = Attribute('The user object.') + session = Attribute('The session while processing the register form post.') diff --git a/rhodecode/lib/auth.py b/rhodecode/lib/auth.py --- a/rhodecode/lib/auth.py +++ b/rhodecode/lib/auth.py @@ -299,6 +299,54 @@ def _cached_perms_data(user_id, scope, u explicit, algo) return permissions.calculate() +class PermOrigin: + ADMIN = 'superadmin' + + REPO_USER = 'user:%s' + REPO_USERGROUP = 'usergroup:%s' + REPO_OWNER = 'repo.owner' + REPO_DEFAULT = 'repo.default' + REPO_PRIVATE = 'repo.private' + + REPOGROUP_USER = 'user:%s' + REPOGROUP_USERGROUP = 'usergroup:%s' + REPOGROUP_OWNER = 'group.owner' + REPOGROUP_DEFAULT = 'group.default' + + USERGROUP_USER = 'user:%s' + USERGROUP_USERGROUP = 'usergroup:%s' + USERGROUP_OWNER = 'usergroup.owner' + USERGROUP_DEFAULT = 'usergroup.default' + + +class PermOriginDict(dict): + """ + A special dict used for tracking permissions along with their origins. + + `__setitem__` has been overridden to expect a tuple(perm, origin) + `__getitem__` will return only the perm + `.perm_origin_stack` will return the stack of (perm, origin) set per key + + >>> perms = PermOriginDict() + >>> perms['resource'] = 'read', 'default' + >>> perms['resource'] + 'read' + >>> perms['resource'] = 'write', 'admin' + >>> perms['resource'] + 'write' + >>> perms.perm_origin_stack + {'resource': [('read', 'default'), ('write', 'admin')]} + """ + + + def __init__(self, *args, **kw): + dict.__init__(self, *args, **kw) + self.perm_origin_stack = {} + + def __setitem__(self, key, (perm, origin)): + self.perm_origin_stack.setdefault(key, []).append((perm, origin)) + dict.__setitem__(self, key, perm) + class PermissionCalculator(object): @@ -318,9 +366,9 @@ class PermissionCalculator(object): self.default_user_id = User.get_default_user(cache=True).user_id - self.permissions_repositories = {} - self.permissions_repository_groups = {} - self.permissions_user_groups = {} + self.permissions_repositories = PermOriginDict() + self.permissions_repository_groups = PermOriginDict() + self.permissions_user_groups = PermOriginDict() self.permissions_global = set() self.default_repo_perms = Permission.get_default_repo_perms( @@ -355,19 +403,19 @@ class PermissionCalculator(object): for perm in self.default_repo_perms: r_k = perm.UserRepoToPerm.repository.repo_name p = 'repository.admin' - self.permissions_repositories[r_k] = p + self.permissions_repositories[r_k] = p, PermOrigin.ADMIN # repository groups for perm in self.default_repo_groups_perms: rg_k = perm.UserRepoGroupToPerm.group.group_name p = 'group.admin' - self.permissions_repository_groups[rg_k] = p + self.permissions_repository_groups[rg_k] = p, PermOrigin.ADMIN # user groups for perm in self.default_user_group_perms: u_k = 
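Editor's note: PermOriginDict, introduced above, keeps plain dict reads returning just the permission while recording every (permission, origin) assignment, which is what makes "why does this user have admin here?" answerable. A short usage sketch assuming the class above is importable; names and values are illustrative:

    perms = PermOriginDict()

    # defaults first, then an owner override, in the order the calculator applies them
    perms['acme/infra-repo'] = ('repository.read', 'repo.default')
    perms['acme/infra-repo'] = ('repository.admin', 'repo.owner')

    print(perms['acme/infra-repo'])
    # 'repository.admin'  -- normal dict access still returns only the permission

    print(perms.perm_origin_stack['acme/infra-repo'])
    # [('repository.read', 'repo.default'), ('repository.admin', 'repo.owner')]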
perm.UserUserGroupToPerm.user_group.users_group_name p = 'usergroup.admin' - self.permissions_user_groups[u_k] = p + self.permissions_user_groups[u_k] = p, PermOrigin.ADMIN return self._permission_structure() @@ -438,8 +486,7 @@ class PermissionCalculator(object): self.permissions_global = self.permissions_global.difference( _configurable) for perm in perms: - self.permissions_global.add( - perm.permission.permission_name) + self.permissions_global.add(perm.permission.permission_name) # user explicit global permissions user_perms = Session().query(UserToPerm)\ @@ -478,13 +525,16 @@ class PermissionCalculator(object): # on given repo for perm in self.default_repo_perms: r_k = perm.UserRepoToPerm.repository.repo_name + o = PermOrigin.REPO_DEFAULT if perm.Repository.private and not ( perm.Repository.user_id == self.user_id): # disable defaults for private repos, p = 'repository.none' + o = PermOrigin.REPO_PRIVATE elif perm.Repository.user_id == self.user_id: # set admin if owner p = 'repository.admin' + o = PermOrigin.REPO_OWNER else: p = perm.Permission.permission_name # if we decide this user isn't inheriting permissions from @@ -492,15 +542,17 @@ class PermissionCalculator(object): # permissions work if not user_inherit_object_permissions: p = 'repository.none' - self.permissions_repositories[r_k] = p + self.permissions_repositories[r_k] = p, o # defaults for repository groups taken from `default` user permission # on given group for perm in self.default_repo_groups_perms: rg_k = perm.UserRepoGroupToPerm.group.group_name + o = PermOrigin.REPOGROUP_DEFAULT if perm.RepoGroup.user_id == self.user_id: # set admin if owner p = 'group.admin' + o = PermOrigin.REPOGROUP_OWNER else: p = perm.Permission.permission_name @@ -508,18 +560,19 @@ class PermissionCalculator(object): # user we set him to .none so only explicit permissions work if not user_inherit_object_permissions: p = 'group.none' - self.permissions_repository_groups[rg_k] = p + self.permissions_repository_groups[rg_k] = p, o # defaults for user groups taken from `default` user permission # on given user group for perm in self.default_user_group_perms: u_k = perm.UserUserGroupToPerm.user_group.users_group_name p = perm.Permission.permission_name + o = PermOrigin.USERGROUP_DEFAULT # if we decide this user isn't inheriting permissions from default # user we set him to .none so only explicit permissions work if not user_inherit_object_permissions: p = 'usergroup.none' - self.permissions_user_groups[u_k] = p + self.permissions_user_groups[u_k] = p, o def _calculate_repository_permissions(self): """ @@ -538,17 +591,20 @@ class PermissionCalculator(object): multiple_counter = collections.defaultdict(int) for perm in user_repo_perms_from_user_group: r_k = perm.UserGroupRepoToPerm.repository.repo_name + ug_k = perm.UserGroupRepoToPerm.users_group.users_group_name multiple_counter[r_k] += 1 p = perm.Permission.permission_name + o = PermOrigin.REPO_USERGROUP % ug_k if perm.Repository.user_id == self.user_id: # set admin if owner p = 'repository.admin' + o = PermOrigin.REPO_OWNER else: if multiple_counter[r_k] > 1: cur_perm = self.permissions_repositories[r_k] p = self._choose_permission(p, cur_perm) - self.permissions_repositories[r_k] = p + self.permissions_repositories[r_k] = p, o # user explicit permissions for repositories, overrides any specified # by the group permission @@ -556,16 +612,18 @@ class PermissionCalculator(object): self.user_id, self.scope_repo_id) for perm in user_repo_perms: r_k = perm.UserRepoToPerm.repository.repo_name + o = 
PermOrigin.REPO_USER % perm.UserRepoToPerm.user.username # set admin if owner if perm.Repository.user_id == self.user_id: p = 'repository.admin' + o = PermOrigin.REPO_OWNER else: p = perm.Permission.permission_name if not self.explicit: cur_perm = self.permissions_repositories.get( r_k, 'repository.none') p = self._choose_permission(p, cur_perm) - self.permissions_repositories[r_k] = p + self.permissions_repositories[r_k] = p, o def _calculate_repository_group_permissions(self): """ @@ -583,32 +641,39 @@ class PermissionCalculator(object): multiple_counter = collections.defaultdict(int) for perm in user_repo_group_perms_from_user_group: g_k = perm.UserGroupRepoGroupToPerm.group.group_name + ug_k = perm.UserGroupRepoGroupToPerm.users_group.users_group_name + o = PermOrigin.REPOGROUP_USERGROUP % ug_k multiple_counter[g_k] += 1 p = perm.Permission.permission_name if perm.RepoGroup.user_id == self.user_id: # set admin if owner p = 'group.admin' + o = PermOrigin.REPOGROUP_OWNER else: if multiple_counter[g_k] > 1: cur_perm = self.permissions_repository_groups[g_k] p = self._choose_permission(p, cur_perm) - self.permissions_repository_groups[g_k] = p + self.permissions_repository_groups[g_k] = p, o # user explicit permissions for repository groups user_repo_groups_perms = Permission.get_default_group_perms( self.user_id, self.scope_repo_group_id) for perm in user_repo_groups_perms: rg_k = perm.UserRepoGroupToPerm.group.group_name + u_k = perm.UserRepoGroupToPerm.user.username + o = PermOrigin.REPOGROUP_USER % u_k + if perm.RepoGroup.user_id == self.user_id: # set admin if owner p = 'group.admin' + o = PermOrigin.REPOGROUP_OWNER else: p = perm.Permission.permission_name if not self.explicit: cur_perm = self.permissions_repository_groups.get( rg_k, 'group.none') p = self._choose_permission(p, cur_perm) - self.permissions_repository_groups[rg_k] = p + self.permissions_repository_groups[rg_k] = p, o def _calculate_user_group_permissions(self): """ @@ -623,24 +688,29 @@ class PermissionCalculator(object): for perm in user_group_from_user_group: g_k = perm.UserGroupUserGroupToPerm\ .target_user_group.users_group_name + u_k = perm.UserGroupUserGroupToPerm\ + .user_group.users_group_name + o = PermOrigin.USERGROUP_USERGROUP % u_k multiple_counter[g_k] += 1 p = perm.Permission.permission_name if multiple_counter[g_k] > 1: cur_perm = self.permissions_user_groups[g_k] p = self._choose_permission(p, cur_perm) - self.permissions_user_groups[g_k] = p + self.permissions_user_groups[g_k] = p, o # user explicit permission for user groups user_user_groups_perms = Permission.get_default_user_group_perms( self.user_id, self.scope_user_group_id) for perm in user_user_groups_perms: - u_k = perm.UserUserGroupToPerm.user_group.users_group_name + ug_k = perm.UserUserGroupToPerm.user_group.users_group_name + u_k = perm.UserUserGroupToPerm.user.username p = perm.Permission.permission_name + o = PermOrigin.USERGROUP_USER % u_k if not self.explicit: cur_perm = self.permissions_user_groups.get( - u_k, 'usergroup.none') + ug_k, 'usergroup.none') p = self._choose_permission(p, cur_perm) - self.permissions_user_groups[u_k] = p + self.permissions_user_groups[ug_k] = p, o def _choose_permission(self, new_perm, cur_perm): new_perm_val = Permission.PERM_WEIGHTS[new_perm] @@ -865,6 +935,10 @@ class AuthUser(object): return auth_tokens @property + def is_default(self): + return self.username == User.DEFAULT_USER + + @property def is_admin(self): return self.admin @@ -1095,6 +1169,7 @@ class LoginRequired(object): return 
get_cython_compat_decorator(self.__wrapper, func) def __wrapper(self, func, *fargs, **fkwargs): + from rhodecode.lib import helpers as h cls = fargs[0] user = cls._rhodecode_user loc = "%s:%s" % (cls.__class__.__name__, func.__name__) @@ -1102,7 +1177,6 @@ class LoginRequired(object): # check if our IP is allowed ip_access_valid = True if not user.ip_allowed: - from rhodecode.lib import helpers as h h.flash(h.literal(_('IP %s not allowed' % (user.ip_addr,))), category='warning') ip_access_valid = False @@ -1154,7 +1228,7 @@ class LoginRequired(object): log.debug('redirecting to login page with %s' % (came_from,)) return redirect( - url('login_home', came_from=came_from)) + h.route_path('login', _query={'came_from': came_from})) class NotAnonymous(object): @@ -1180,7 +1254,8 @@ class NotAnonymous(object): h.flash(_('You need to be a registered user to ' 'perform this action'), category='warning') - return redirect(url('login_home', came_from=came_from)) + return redirect( + h.route_path('login', _query={'came_from': came_from})) else: return func(*fargs, **fkwargs) @@ -1263,7 +1338,8 @@ class PermsDecorator(object): import rhodecode.lib.helpers as h h.flash(_('You need to be signed in to view this page'), category='warning') - return redirect(url('login_home', came_from=came_from)) + return redirect( + h.route_path('login', _query={'came_from': came_from})) else: # redirect with forbidden ret code diff --git a/rhodecode/lib/colander_utils.py b/rhodecode/lib/colander_utils.py new file mode 100644 --- /dev/null +++ b/rhodecode/lib/colander_utils.py @@ -0,0 +1,30 @@ +# -*- coding: utf-8 -*- + +# Copyright (C) 2016-2016 RhodeCode GmbH +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License, version 3 +# (only), as published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . +# +# This program is dual-licensed. If you wish to learn more about the +# RhodeCode Enterprise Edition, including its added features, Support services, +# and proprietary license terms, please see https://rhodecode.com/licenses/ + + +def strip_whitespace(value): + """ + Removes leading/trailing whitespace, newlines, and tabs from the value. + Implements the `colander.interface.Preparer` interface. 
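Editor's note: strip_whitespace is meant to be plugged into colander schema nodes as a preparer, so values are trimmed before validation runs. A small illustration with a hypothetical schema, not one from this code base (the helper is re-spelled here with str so the snippet also runs on Python 3; the code base itself uses basestring):

    import colander

    def strip_whitespace(value):
        if isinstance(value, str):
            return value.strip(' \t\n\r')
        return value

    class SearchQuerySchema(colander.MappingSchema):
        # hypothetical node; shows where a Preparer plugs in
        search_query = colander.SchemaNode(
            colander.String(), preparer=strip_whitespace, missing='')

    print(SearchQuerySchema().deserialize({'search_query': '  foo bar \n'}))
    # {'search_query': 'foo bar'}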
+ """ + if isinstance(value, basestring): + return value.strip(' \t\n\r') + else: + return value diff --git a/rhodecode/lib/datelib.py b/rhodecode/lib/datelib.py --- a/rhodecode/lib/datelib.py +++ b/rhodecode/lib/datelib.py @@ -35,7 +35,7 @@ def makedate(): return time.mktime(lt), tz -def date_fromtimestamp(unixts, tzoffset=0): +def utcdate_fromtimestamp(unixts, tzoffset=0): """ Makes a local datetime object out of unix timestamp @@ -43,7 +43,7 @@ def date_fromtimestamp(unixts, tzoffset= :param tzoffset: """ - return datetime.datetime.fromtimestamp(float(unixts)) + return datetime.datetime.utcfromtimestamp(float(unixts)) def date_astimestamp(value): diff --git a/rhodecode/lib/db_manage.py b/rhodecode/lib/db_manage.py --- a/rhodecode/lib/db_manage.py +++ b/rhodecode/lib/db_manage.py @@ -537,7 +537,6 @@ class DbManage(object): ('clone_uri_tmpl', Repository.DEFAULT_CLONE_URI, 'unicode'), ('support_url', '', 'unicode'), ('update_url', RhodeCodeSetting.DEFAULT_UPDATE_URL, 'unicode'), - ('license_key', '', 'unicode'), ('show_revision_number', True, 'bool'), ('show_sha_length', 12, 'int'), ] diff --git a/rhodecode/lib/dbmigrate/versions/052_version_4_1_0.py b/rhodecode/lib/dbmigrate/versions/052_version_4_1_0.py new file mode 100644 --- /dev/null +++ b/rhodecode/lib/dbmigrate/versions/052_version_4_1_0.py @@ -0,0 +1,80 @@ +# -*- coding: utf-8 -*- + +import logging + +from sqlalchemy.orm.attributes import flag_modified + +from rhodecode.lib.dbmigrate.versions import _reset_base +from rhodecode.model import init_model_encryption, meta + +log = logging.getLogger(__name__) + + +def upgrade(migrate_engine): + """ + Upgrade operations go here. + Don't create your own engine; bind migrate_engine to your metadata + """ + _reset_base(migrate_engine) + from rhodecode.lib.dbmigrate.schema import db_3_7_0_0 + init_model_encryption(db_3_7_0_0) + fixups(db_3_7_0_0, meta.Session) + + +def downgrade(migrate_engine): + pass + + +AUTH_PLUGINS_SETTING = "auth_plugins" + +PLUGIN_RENAME_MAP = { + 'egg:rhodecode-enterprise-ce#container': 'egg:rhodecode-enterprise-ce#headers', +} + +SETTINGS_RENAME_MAP = { + 'auth_container_cache_ttl': 'auth_headers_cache_ttl', + 'auth_container_clean_username': 'auth_headers_clean_username', + 'auth_container_enabled': 'auth_headers_enabled', + 'auth_container_fallback_header': 'auth_headers_fallback_header', + 'auth_container_header': 'auth_headers_header', +} + + +def rename_plugins(models, Session): + query = models.RhodeCodeSetting.query().filter( + models.RhodeCodeSetting.app_settings_name == AUTH_PLUGINS_SETTING) + plugin_setting = query.scalar() + plugins = plugin_setting.app_settings_value + + new_plugins = [] + + for plugin_id in plugins: + new_plugin_id = PLUGIN_RENAME_MAP.get(plugin_id, None) + if new_plugin_id: + new_plugins.append(new_plugin_id) + else: + new_plugins.append(plugin_id) + + plugin_setting.app_settings_value = ','.join(new_plugins) + + log.info("Rename of auth plugin IDs") + log.info("Original setting value: %s", plugins) + log.info("New setting value: %s", new_plugins) + + +def rename_plugin_settings(models, Session): + for old_name, new_name in SETTINGS_RENAME_MAP.items(): + query = models.RhodeCodeSetting.query().filter( + models.RhodeCodeSetting.app_settings_name == old_name) + setting = query.scalar() + if setting: + setting.app_settings_name = new_name + log.info( + 'Rename of plugin setting "%s" to "%s"', old_name, new_name) + + +def fixups(models, Session): + rename_plugins(models, Session) + rename_plugin_settings(models, Session) + + 
Session().commit() diff --git a/rhodecode/lib/dbmigrate/versions/053_version_4_1_0.py b/rhodecode/lib/dbmigrate/versions/053_version_4_1_0.py new file mode 100644 --- /dev/null +++ b/rhodecode/lib/dbmigrate/versions/053_version_4_1_0.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- + +import logging + +from rhodecode.lib.dbmigrate.versions import _reset_base +from rhodecode.model import init_model_encryption, meta + +log = logging.getLogger(__name__) + + +def upgrade(migrate_engine): + """ + Upgrade operations go here. + Don't create your own engine; bind migrate_engine to your metadata + """ + _reset_base(migrate_engine) + from rhodecode.lib.dbmigrate.schema import db_3_7_0_0 + init_model_encryption(db_3_7_0_0) + fixups(db_3_7_0_0, meta.Session) + + +def downgrade(migrate_engine): + pass + + +AUTH_PLUGINS_SETTING = "auth_plugins" + +PLUGIN_RENAME_MAP = { + 'egg:rhodecode-enterprise-ee#token': 'egg:rhodecode-enterprise-ce#token', +} + + +def rename_plugins(models, Session): + query = models.RhodeCodeSetting.query().filter( + models.RhodeCodeSetting.app_settings_name == AUTH_PLUGINS_SETTING) + plugin_setting = query.scalar() + plugins = plugin_setting.app_settings_value + + new_plugins = [] + + for plugin_id in plugins: + new_plugin_id = PLUGIN_RENAME_MAP.get(plugin_id, None) + if new_plugin_id: + new_plugins.append(new_plugin_id) + else: + new_plugins.append(plugin_id) + + plugin_setting.app_settings_value = ','.join(new_plugins) + + log.info("Rename of auth plugin IDs") + log.info("Original setting value: %s", plugins) + log.info("New setting value: %s", new_plugins) + + +def fixups(models, Session): + rename_plugins(models, Session) + Session().commit() diff --git a/rhodecode/lib/dbmigrate/versions/054_version_4_1_0.py b/rhodecode/lib/dbmigrate/versions/054_version_4_1_0.py new file mode 100644 --- /dev/null +++ b/rhodecode/lib/dbmigrate/versions/054_version_4_1_0.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- + +import logging +from collections import namedtuple + +from rhodecode.lib.dbmigrate.versions import _reset_base +from rhodecode.model import init_model_encryption, meta + +log = logging.getLogger(__name__) + + +def upgrade(migrate_engine): + """ + Upgrade operations go here. + Don't create your own engine; bind migrate_engine to your metadata + """ + _reset_base(migrate_engine) + from rhodecode.lib.dbmigrate.schema import db_3_7_0_0 + init_model_encryption(db_3_7_0_0) + fixups(db_3_7_0_0, meta.Session) + + +def downgrade(migrate_engine): + pass + + +AUTH_PLUGINS_SETTING = "auth_plugins" + +EXTERN_TYPE_RENAME_MAP = { + 'container': 'headers', +} + +# Only used for logging purposes. 
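Editor's note: migrations 052 and 053 above follow the same pattern: read the auth_plugins setting (a list of plugin IDs), map old IDs to new ones while leaving unknown IDs untouched, and write the value back as a comma-separated string. The core of it, reduced to plain data with the IDs used in 053:

    PLUGIN_RENAME_MAP = {
        'egg:rhodecode-enterprise-ee#token': 'egg:rhodecode-enterprise-ce#token',
    }

    def rename_plugin_ids(plugin_ids, rename_map):
        # unknown ids pass through untouched
        return [rename_map.get(pid, pid) for pid in plugin_ids]

    current = ['egg:rhodecode-enterprise-ce#rhodecode',
               'egg:rhodecode-enterprise-ee#token']
    print(','.join(rename_plugin_ids(current, PLUGIN_RENAME_MAP)))
    # egg:rhodecode-enterprise-ce#rhodecode,egg:rhodecode-enterprise-ce#token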
+RenameExternTypeOperation = namedtuple( + 'RenameExternTypeOperation', ['user', 'old', 'new']) + + +def fixups(models, Session): + operations = [] + + # Rename the extern_type attribute + query = models.User.query().filter( + models.User.extern_type.in_(EXTERN_TYPE_RENAME_MAP.keys())) + for user in query: + old = user.extern_type + new = EXTERN_TYPE_RENAME_MAP[old] + user.extern_type = new + Session.add(user) + operations.append(RenameExternTypeOperation(user, old, new)) + + log.info("Migration of users 'extern_type' attribute.") + for op in operations: + log.info("%s", op) + + Session().commit() diff --git a/rhodecode/lib/helpers.py b/rhodecode/lib/helpers.py --- a/rhodecode/lib/helpers.py +++ b/rhodecode/lib/helpers.py @@ -36,11 +36,14 @@ import urlparse import time import string import hashlib +import pygments from datetime import datetime from functools import partial from pygments.formatters.html import HtmlFormatter from pygments import highlight as code_highlight +from pygments.lexers import ( + get_lexer_by_name, get_lexer_for_filename, get_lexer_for_mimetype) from pylons import url from pylons.i18n.translation import _, ungettext from pyramid.threadlocal import get_current_request @@ -68,8 +71,8 @@ from rhodecode.lib.annotate import annot from rhodecode.lib.action_parser import action_parser from rhodecode.lib.utils import repo_name_slug, get_custom_lexer from rhodecode.lib.utils2 import str2bool, safe_unicode, safe_str, \ - get_commit_safe, datetime_to_time, time_to_datetime, AttributeDict, \ - safe_int, md5, md5_safe + get_commit_safe, datetime_to_time, time_to_datetime, time_to_utcdatetime, \ + AttributeDict, safe_int, md5, md5_safe from rhodecode.lib.markup_renderer import MarkupRenderer from rhodecode.lib.vcs.exceptions import CommitDoesNotExistError from rhodecode.lib.vcs.backends.base import BaseChangeset, EmptyCommit @@ -307,6 +310,176 @@ class CodeHtmlFormatter(HtmlFormatter): yield 0, '' +class SearchContentCodeHtmlFormatter(CodeHtmlFormatter): + def __init__(self, **kw): + # only show these line numbers if set + self.only_lines = kw.pop('only_line_numbers', []) + self.query_terms = kw.pop('query_terms', []) + self.max_lines = kw.pop('max_lines', 5) + self.line_context = kw.pop('line_context', 3) + self.url = kw.pop('url', None) + + super(CodeHtmlFormatter, self).__init__(**kw) + + def _wrap_code(self, source): + for cnt, it in enumerate(source): + i, t = it + t = '
<pre>%s</pre>' % t + yield i, t + + def _wrap_tablelinenos(self, inner): + yield 0, '<table class="%s">' % self.cssclass + + last_shown_line_number = 0 + current_line_number = 1 + + for t, line in inner: + if not t: + yield t, line + continue + + if current_line_number in self.only_lines: + if last_shown_line_number + 1 != current_line_number: + yield 0, '<tr>' + yield 0, '<td class="line">...</td>' + yield 0, '<td class="code"></td>' + yield 0, '</tr>' + + yield 0, '<tr>' + if self.url: + yield 0, '<td class="line"><a href="%s#L%i">%i</a></td>' % ( + self.url, current_line_number, current_line_number) + else: + yield 0, '<td class="line"><a href="">%i</a></td>' % ( + current_line_number) + yield 0, '<td class="code">' + line + '</td>' + yield 0, '</tr>' + + last_shown_line_number = current_line_number + + current_line_number += 1 + + + yield 0, '</table>
' + + +def extract_phrases(text_query): + """ + Extracts phrases from search term string making sure phrases + contained in double quotes are kept together - and discarding empty values + or fully whitespace values eg. + + 'some text "a phrase" more' => ['some', 'text', 'a phrase', 'more'] + + """ + + in_phrase = False + buf = '' + phrases = [] + for char in text_query: + if in_phrase: + if char == '"': # end phrase + phrases.append(buf) + buf = '' + in_phrase = False + continue + else: + buf += char + continue + else: + if char == '"': # start phrase + in_phrase = True + phrases.append(buf) + buf = '' + continue + elif char == ' ': + phrases.append(buf) + buf = '' + continue + else: + buf += char + + phrases.append(buf) + phrases = [phrase.strip() for phrase in phrases if phrase.strip()] + return phrases + + +def get_matching_offsets(text, phrases): + """ + Returns a list of string offsets in `text` that the list of `terms` match + + >>> get_matching_offsets('some text here', ['some', 'here']) + [(0, 4), (10, 14)] + + """ + offsets = [] + for phrase in phrases: + for match in re.finditer(phrase, text): + offsets.append((match.start(), match.end())) + + return offsets + + +def normalize_text_for_matching(x): + """ + Replaces all non alnum characters to spaces and lower cases the string, + useful for comparing two text strings without punctuation + """ + return re.sub(r'[^\w]', ' ', x.lower()) + + +def get_matching_line_offsets(lines, terms): + """ Return a set of `lines` indices (starting from 1) matching a + text search query, along with `context` lines above/below matching lines + + :param lines: list of strings representing lines + :param terms: search term string to match in lines eg. 'some text' + :param context: number of lines above/below a matching line to add to result + :param max_lines: cut off for lines of interest + eg. 
+ + >>> get_matching_line_offsets(''' +words words words +words words words +some text some +words words words +words words words +text here what +''', 'text', context=1) + {3: [(5, 9)], 6: [(0, 4)]] + """ + matching_lines = {} + phrases = [normalize_text_for_matching(phrase) + for phrase in extract_phrases(terms)] + + for line_index, line in enumerate(lines, start=1): + match_offsets = get_matching_offsets( + normalize_text_for_matching(line), phrases) + if match_offsets: + matching_lines[line_index] = match_offsets + + return matching_lines + +def get_lexer_safe(mimetype=None, filepath=None): + """ + Tries to return a relevant pygments lexer using mimetype/filepath name, + defaulting to plain text if none could be found + """ + lexer = None + try: + if mimetype: + lexer = get_lexer_for_mimetype(mimetype) + if not lexer: + lexer = get_lexer_for_filename(path) + except pygments.util.ClassNotFound: + pass + + if not lexer: + lexer = get_lexer_by_name('text') + + return lexer + + def pygmentize(filenode, **kwargs): """ pygmentize function using pygments @@ -476,13 +649,20 @@ short_id = lambda x: x[:12] hide_credentials = lambda x: ''.join(credentials_filter(x)) -def age_component(datetime_iso, value=None): +def age_component(datetime_iso, value=None, time_is_local=False): title = value or format_date(datetime_iso) - # detect if we have a timezone info, if not assume UTC + # detect if we have a timezone info, otherwise, add it if isinstance(datetime_iso, datetime) and not datetime_iso.tzinfo: tzinfo = '+00:00' + if time_is_local: + tzinfo = time.strftime("+%H:%M", + time.gmtime( + (datetime.now() - datetime.utcnow()).seconds + 1 + ) + ) + return literal( ''.format( diff --git a/rhodecode/lib/index/__init__.py b/rhodecode/lib/index/__init__.py --- a/rhodecode/lib/index/__init__.py +++ b/rhodecode/lib/index/__init__.py @@ -42,7 +42,6 @@ class BaseSearch(object): def search(self, query, document_type, search_user, repo_name=None): raise Exception('NotImplemented') - def searcher_from_config(config, prefix='search.'): _config = {} for key in config.keys(): diff --git a/rhodecode/lib/index/whoosh.py b/rhodecode/lib/index/whoosh.py --- a/rhodecode/lib/index/whoosh.py +++ b/rhodecode/lib/index/whoosh.py @@ -25,6 +25,7 @@ Index schema for RhodeCode from __future__ import absolute_import import logging import os +import re from pylons.i18n.translation import _ @@ -59,6 +60,7 @@ FRAGMENTER = ContextFragmenter(200) log = logging.getLogger(__name__) + class Search(BaseSearch): name = 'whoosh' @@ -90,7 +92,19 @@ class Search(BaseSearch): if self.searcher: self.searcher.close() - def search(self, query, document_type, search_user, repo_name=None): + def _extend_query(self, query): + hashes = re.compile('([0-9a-f]{5,40})').findall(query) + if hashes: + hashes_or_query = ' OR '.join('commit_id:%s*' % h for h in hashes) + query = u'(%s) OR %s' % (query, hashes_or_query) + return query + + def search(self, query, document_type, search_user, repo_name=None, + requested_page=1, page_limit=10, sort=None): + + original_query = query + query = self._extend_query(query) + log.debug(u'QUERY: %s on %s', query, document_type) result = { 'results': [], @@ -109,13 +123,18 @@ class Search(BaseSearch): query = qp.parse(unicode(query)) log.debug('query: %s (%s)' % (query, repr(query))) - sortedby = None + reverse, sortedby = False, None if search_type == 'message': - sortedby = sorting.FieldFacet('commit_idx', reverse=True) + if sort == 'oldfirst': + sortedby = 'date' + reverse = False + elif sort == 'newfirst': + sortedby 
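Editor's note: the new _extend_query above lets users paste a commit hash (or a prefix of at least five hex characters) into the normal search box; each hash-like token is OR-ed in as a commit_id prefix query. Reduced to a standalone function:

    import re

    def extend_query_with_hashes(query):
        # any hash-looking token also matches commit_id prefixes,
        # mirroring _extend_query above
        hashes = re.compile('([0-9a-f]{5,40})').findall(query)
        if hashes:
            hashes_or = ' OR '.join('commit_id:%s*' % h for h in hashes)
            query = u'(%s) OR %s' % (query, hashes_or)
        return query

    print(extend_query_with_hashes(u'deadbeef12'))
    # (deadbeef12) OR commit_id:deadbeef12*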
= 'date' + reverse = True whoosh_results = self.searcher.search( query, filter=allowed_repos_filter, limit=None, - sortedby=sortedby,) + sortedby=sortedby, reverse=reverse) # fixes for 32k limit that whoosh uses for highlight whoosh_results.fragmenter.charlimit = None diff --git a/rhodecode/lib/index/whoosh_fallback_schema.py b/rhodecode/lib/index/whoosh_fallback_schema.py --- a/rhodecode/lib/index/whoosh_fallback_schema.py +++ b/rhodecode/lib/index/whoosh_fallback_schema.py @@ -63,7 +63,7 @@ COMMIT_SCHEMA = Schema( repository_id=NUMERIC(unique=True, stored=True), commit_idx=NUMERIC(stored=True, sortable=True), commit_idx_sort=ID(), - date=NUMERIC(stored=True), + date=NUMERIC(stored=True, sortable=True), owner=TEXT(stored=True), author=TEXT(stored=True), message=FieldType(format=Characters(), analyzer=ANALYZER, diff --git a/rhodecode/lib/utils.py b/rhodecode/lib/utils.py --- a/rhodecode/lib/utils.py +++ b/rhodecode/lib/utils.py @@ -755,10 +755,10 @@ def create_test_env(repos_test_path, con # PART TWO make test repo log.debug('making test vcs repositories') - idx_path = config['app_conf']['search.location'] - data_path = config['app_conf']['cache_dir'] + idx_path = config['search.location'] + data_path = config['cache_dir'] - #clean index and data + # clean index and data if idx_path and os.path.exists(idx_path): log.debug('remove %s', idx_path) shutil.rmtree(idx_path) @@ -767,7 +767,7 @@ def create_test_env(repos_test_path, con log.debug('remove %s', data_path) shutil.rmtree(data_path) - #CREATE DEFAULT TEST REPOS + # CREATE DEFAULT TEST REPOS cur_dir = dn(dn(abspath(__file__))) with tarfile.open(jn(cur_dir, 'tests', 'fixtures', 'vcs_test_hg.tar.gz')) as tar: @@ -787,7 +787,6 @@ def create_test_env(repos_test_path, con tar.extractall(jn(TESTS_TMP_PATH, SVN_REPO)) - #============================================================================== # PASTER COMMANDS #============================================================================== diff --git a/rhodecode/lib/utils2.py b/rhodecode/lib/utils2.py --- a/rhodecode/lib/utils2.py +++ b/rhodecode/lib/utils2.py @@ -608,6 +608,16 @@ def time_to_datetime(tm): return datetime.datetime.fromtimestamp(tm) +def time_to_utcdatetime(tm): + if tm: + if isinstance(tm, basestring): + try: + tm = float(tm) + except ValueError: + return + return datetime.datetime.utcfromtimestamp(tm) + + MENTIONS_REGEX = re.compile( # ^@ or @ without any special chars in front r'(?:^@|[^a-zA-Z0-9\-\_\.]@)' diff --git a/rhodecode/lib/vcs/backends/base.py b/rhodecode/lib/vcs/backends/base.py --- a/rhodecode/lib/vcs/backends/base.py +++ b/rhodecode/lib/vcs/backends/base.py @@ -409,7 +409,9 @@ class BaseRepository(object): shadow_repository_path, target_ref, source_repo, source_ref, message, user_name, user_email, dry_run=dry_run) except RepositoryError: - log.exception('Unexpected failure when running merge') + log.exception( + 'Unexpected failure when running merge, dry-run=%s', + dry_run) return MergeResponse( False, False, None, MergeFailureReason.UNKNOWN) diff --git a/rhodecode/lib/vcs/backends/git/commit.py b/rhodecode/lib/vcs/backends/git/commit.py --- a/rhodecode/lib/vcs/backends/git/commit.py +++ b/rhodecode/lib/vcs/backends/git/commit.py @@ -30,7 +30,7 @@ from StringIO import StringIO from zope.cachedescriptors.property import Lazy as LazyProperty -from rhodecode.lib.datelib import date_fromtimestamp +from rhodecode.lib.datelib import utcdate_fromtimestamp from rhodecode.lib.utils import safe_unicode, safe_str from rhodecode.lib.utils2 import safe_int from 
rhodecode.lib.vcs.conf import settings @@ -95,7 +95,7 @@ class GitCommit(base.BaseCommit): if value: value = safe_unicode(value) elif attr == "date": - value = date_fromtimestamp(*value) + value = utcdate_fromtimestamp(*value) elif attr == "parents": value = self._make_commits(value) self.__dict__[attr] = value @@ -135,7 +135,7 @@ class GitCommit(base.BaseCommit): def date(self): unix_ts, tz = self._remote.get_object_attrs( self.raw_id, self._date_property, self._date_tz_property) - return date_fromtimestamp(unix_ts, tz) + return utcdate_fromtimestamp(unix_ts, tz) @LazyProperty def status(self): diff --git a/rhodecode/lib/vcs/backends/git/repository.py b/rhodecode/lib/vcs/backends/git/repository.py --- a/rhodecode/lib/vcs/backends/git/repository.py +++ b/rhodecode/lib/vcs/backends/git/repository.py @@ -31,7 +31,7 @@ import time from zope.cachedescriptors.property import Lazy as LazyProperty from rhodecode.lib.compat import OrderedDict -from rhodecode.lib.datelib import makedate, date_fromtimestamp +from rhodecode.lib.datelib import makedate, utcdate_fromtimestamp from rhodecode.lib.utils import safe_unicode, safe_str from rhodecode.lib.vcs import connection, path as vcspath from rhodecode.lib.vcs.backends.base import ( @@ -269,7 +269,7 @@ class GitRepository(BaseRepository): Returns last change made on this repository as `datetime.datetime` object. """ - return date_fromtimestamp(self._get_mtime(), makedate()[1]) + return utcdate_fromtimestamp(self._get_mtime(), makedate()[1]) def _get_mtime(self): try: @@ -853,7 +853,8 @@ class GitRepository(BaseRepository): shadow_repo._checkout(pr_branch, create=True) try: shadow_repo._local_fetch(source_repo.path, source_ref.name) - except RepositoryError: + except RepositoryError as e: + log.exception('Failure when doing local fetch on git shadow repo') return MergeResponse( False, False, None, MergeFailureReason.MISSING_COMMIT) @@ -863,7 +864,8 @@ class GitRepository(BaseRepository): shadow_repo._local_merge(merge_message, merger_name, merger_email, [source_ref.commit_id]) merge_possible = True - except RepositoryError: + except RepositoryError as e: + log.exception('Failure when doing local merge on git shadow repo') merge_possible = False merge_failure_reason = MergeFailureReason.MERGE_FAILED @@ -877,7 +879,9 @@ class GitRepository(BaseRepository): # cannot retrieve the merge commit. 
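Editor's note: the date_fromtimestamp to utcdate_fromtimestamp rename in these hunks is behavioural, not cosmetic: commit dates are now derived with utcfromtimestamp and no longer depend on the server's local timezone. The difference in one snippet:

    import datetime

    ts = 1465747460.0  # 2016-06-12 16:04:20 UTC

    local_dt = datetime.datetime.fromtimestamp(ts)   # old: server-local time
    utc_dt = datetime.datetime.utcfromtimestamp(ts)  # new: always UTC

    print(utc_dt)             # 2016-06-12 16:04:20
    print(local_dt - utc_dt)  # the server's UTC offset (0:00:00 on a UTC box)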
shadow_repo = GitRepository(shadow_repository_path) merge_commit_id = shadow_repo.branches[pr_branch] - except RepositoryError: + except RepositoryError as e: + log.exception( + 'Failure when doing local push on git shadow repo') merge_succeeded = False merge_failure_reason = MergeFailureReason.PUSH_FAILED else: diff --git a/rhodecode/lib/vcs/backends/hg/commit.py b/rhodecode/lib/vcs/backends/hg/commit.py --- a/rhodecode/lib/vcs/backends/hg/commit.py +++ b/rhodecode/lib/vcs/backends/hg/commit.py @@ -26,7 +26,7 @@ import os from zope.cachedescriptors.property import Lazy as LazyProperty -from rhodecode.lib.datelib import date_fromtimestamp +from rhodecode.lib.datelib import utcdate_fromtimestamp from rhodecode.lib.utils import safe_str, safe_unicode from rhodecode.lib.vcs import path as vcspath from rhodecode.lib.vcs.backends import base @@ -78,7 +78,7 @@ class MercurialCommit(base.BaseCommit): elif attr == "affected_files": value = map(safe_unicode, value) elif attr == "date": - value = date_fromtimestamp(*value) + value = utcdate_fromtimestamp(*value) elif attr in ["children", "parents"]: value = self._make_commits(value) self.__dict__[attr] = value @@ -114,7 +114,7 @@ class MercurialCommit(base.BaseCommit): @LazyProperty def date(self): - return date_fromtimestamp(*self._remote.ctx_date(self.idx)) + return utcdate_fromtimestamp(*self._remote.ctx_date(self.idx)) @LazyProperty def status(self): diff --git a/rhodecode/lib/vcs/backends/hg/repository.py b/rhodecode/lib/vcs/backends/hg/repository.py --- a/rhodecode/lib/vcs/backends/hg/repository.py +++ b/rhodecode/lib/vcs/backends/hg/repository.py @@ -22,6 +22,7 @@ HG repository module """ +import logging import binascii import os import re @@ -31,9 +32,8 @@ import urllib from zope.cachedescriptors.property import Lazy as LazyProperty from rhodecode.lib.compat import OrderedDict -from rhodecode.lib.datelib import ( - date_fromtimestamp, makedate, date_to_timestamp_plus_offset, - date_astimestamp) +from rhodecode.lib.datelib import (date_to_timestamp_plus_offset, + utcdate_fromtimestamp, makedate, date_astimestamp) from rhodecode.lib.utils import safe_unicode, safe_str from rhodecode.lib.vcs import connection from rhodecode.lib.vcs.backends.base import ( @@ -50,6 +50,8 @@ from rhodecode.lib.vcs.exceptions import hexlify = binascii.hexlify nullid = "\0" * 20 +log = logging.getLogger(__name__) + class MercurialRepository(BaseRepository): """ @@ -365,7 +367,7 @@ class MercurialRepository(BaseRepository Returns last change made on this repository as `datetime.datetime` object """ - return date_fromtimestamp(self._get_mtime(), makedate()[1]) + return utcdate_fromtimestamp(self._get_mtime(), makedate()[1]) def _get_mtime(self): try: @@ -605,6 +607,10 @@ class MercurialRepository(BaseRepository self._update(bookmark_name) return self._identify(), True except RepositoryError: + # The rebase-abort may raise another exception which 'hides' + # the original one, therefore we log it here. 
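Editor's note: the comment above is the whole point of the added log.exception call: if the subsequent rebase-abort also raises, the original traceback would otherwise be lost. The pattern in isolation, with an illustrative stand-in for the cleanup:

    import logging

    logging.basicConfig(level=logging.DEBUG)
    log = logging.getLogger('rhodecode.merge')

    def abort_rebase():
        # stand-in for the real `rebase --abort` / `update --clean` cleanup
        pass

    try:
        raise RuntimeError('simulated rebase conflict')
    except RuntimeError:
        # log the original failure (with traceback) before running the
        # cleanup, which may itself raise and hide the root cause
        log.exception('Error while rebasing shadow repo during merge.')
        abort_rebase()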
+ log.exception('Error while rebasing shadow repo during merge.') + # Cleanup any rebase leftovers self._remote.rebase(abort=True) self._remote.update(clean=True) @@ -642,6 +648,8 @@ class MercurialRepository(BaseRepository shadow_repository_path = self._get_shadow_repository_path(workspace_id) if not os.path.exists(shadow_repository_path): self._local_clone(shadow_repository_path) + log.debug( + 'Prepared shadow repository in %s', shadow_repository_path) return shadow_repository_path @@ -664,12 +672,15 @@ class MercurialRepository(BaseRepository shadow_repo = self._get_shadow_instance(shadow_repository_path) + log.debug('Pulling in target reference %s', target_ref) self._validate_pull_reference(target_ref) shadow_repo._local_pull(self.path, target_ref) try: + log.debug('Pulling in source reference %s', source_ref) source_repo._validate_pull_reference(source_ref) shadow_repo._local_pull(source_repo.path, source_ref) - except CommitDoesNotExistError: + except CommitDoesNotExistError as e: + log.exception('Failure when doing local pull on hg shadow repo') return MergeResponse( False, False, None, MergeFailureReason.MISSING_COMMIT) @@ -681,7 +692,8 @@ class MercurialRepository(BaseRepository target_ref, merge_message, merger_name, merger_email, source_ref) merge_possible = True - except RepositoryError: + except RepositoryError as e: + log.exception('Failure when doing local merge on hg shadow repo') merge_possible = False merge_failure_reason = MergeFailureReason.MERGE_FAILED @@ -706,6 +718,9 @@ class MercurialRepository(BaseRepository enable_hooks=True) merge_succeeded = True except RepositoryError: + log.exception( + 'Failure when doing local push from the shadow ' + 'repository to the target repository.') merge_succeeded = False merge_failure_reason = MergeFailureReason.PUSH_FAILED else: diff --git a/rhodecode/login/__init__.py b/rhodecode/login/__init__.py new file mode 100644 --- /dev/null +++ b/rhodecode/login/__init__.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- + +# Copyright (C) 2016-2016 RhodeCode GmbH +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License, version 3 +# (only), as published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . +# +# This program is dual-licensed. If you wish to learn more about the +# RhodeCode Enterprise Edition, including its added features, Support services, +# and proprietary license terms, please see https://rhodecode.com/licenses/ + + +from rhodecode.config.routing import ADMIN_PREFIX + + +def includeme(config): + + config.add_route( + name='login', + pattern=ADMIN_PREFIX + '/login') + config.add_route( + name='logout', + pattern=ADMIN_PREFIX + '/logout') + config.add_route( + name='register', + pattern=ADMIN_PREFIX + '/register') + config.add_route( + name='reset_password', + pattern=ADMIN_PREFIX + '/password_reset') + config.add_route( + name='reset_password_confirmation', + pattern=ADMIN_PREFIX + '/password_reset_confirmation') + + # Scan module for configuration decorators. 
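Editor's note: these named routes are what the earlier lib/auth.py changes rely on: redirects are now built with h.route_path('login', _query={'came_from': ...}) instead of the pylons url('login_home', ...). A small sketch of the URL generation using pyramid.testing, assuming the '/_admin' prefix that rhodecode.config.routing uses:

    from pyramid import testing

    ADMIN_PREFIX = '/_admin'  # assumed value of rhodecode.config.routing.ADMIN_PREFIX

    config = testing.setUp()
    config.add_route('login', ADMIN_PREFIX + '/login')

    request = testing.DummyRequest()
    print(request.route_path('login', _query={'came_from': '/repo/summary'}))
    # /_admin/login?came_from=%2Frepo%2Fsummary

    testing.tearDown()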
+ config.scan() diff --git a/rhodecode/login/views.py b/rhodecode/login/views.py new file mode 100644 --- /dev/null +++ b/rhodecode/login/views.py @@ -0,0 +1,337 @@ +# -*- coding: utf-8 -*- + +# Copyright (C) 2016-2016 RhodeCode GmbH +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License, version 3 +# (only), as published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . +# +# This program is dual-licensed. If you wish to learn more about the +# RhodeCode Enterprise Edition, including its added features, Support services, +# and proprietary license terms, please see https://rhodecode.com/licenses/ + +import datetime +import formencode +import logging +import urlparse + +from pylons import url +from pyramid.httpexceptions import HTTPFound +from pyramid.view import view_config +from recaptcha.client.captcha import submit + +from rhodecode.authentication.base import authenticate, HTTP_TYPE +from rhodecode.events import UserRegistered +from rhodecode.lib.auth import ( + AuthUser, HasPermissionAnyDecorator, CSRFRequired) +from rhodecode.lib.base import get_ip_addr +from rhodecode.lib.exceptions import UserCreationError +from rhodecode.lib.utils2 import safe_str +from rhodecode.model.db import User +from rhodecode.model.forms import LoginForm, RegisterForm, PasswordResetForm +from rhodecode.model.login_session import LoginSession +from rhodecode.model.meta import Session +from rhodecode.model.settings import SettingsModel +from rhodecode.model.user import UserModel +from rhodecode.translation import _ + + +log = logging.getLogger(__name__) + + +def _store_user_in_session(session, username, remember=False): + user = User.get_by_username(username, case_insensitive=True) + auth_user = AuthUser(user.user_id) + auth_user.set_authenticated() + cs = auth_user.get_cookie_store() + session['rhodecode_user'] = cs + user.update_lastlogin() + Session().commit() + + # If they want to be remembered, update the cookie + if remember: + _year = (datetime.datetime.now() + + datetime.timedelta(seconds=60 * 60 * 24 * 365)) + session._set_cookie_expires(_year) + + session.save() + + log.info('user %s is now authenticated and stored in ' + 'session, session attrs %s', username, cs) + + # dumps session attrs back to cookie + session._update_cookie_out() + # we set new cookie + headers = None + if session.request['set_cookie']: + # send set-cookie headers back to response to update cookie + headers = [('Set-Cookie', session.request['cookie_out'])] + return headers + + +def get_came_from(request): + came_from = safe_str(request.GET.get('came_from', '')) + parsed = urlparse.urlparse(came_from) + allowed_schemes = ['http', 'https'] + if parsed.scheme and parsed.scheme not in allowed_schemes: + log.error('Suspicious URL scheme detected %s for url %s' % + (parsed.scheme, parsed)) + came_from = url('home') + elif parsed.netloc and request.host != parsed.netloc: + log.error('Suspicious NETLOC detected %s for url %s server url ' + 'is: %s' % (parsed.netloc, parsed, request.host)) + came_from = url('home') + elif any(bad_str in parsed.path for bad_str in ('\r', '\n')): + log.error('Header injection 
detected `%s` for url %s server url ' % + (parsed.path, parsed)) + came_from = url('home') + + return came_from or url('home') + + +class LoginView(object): + + def __init__(self, context, request): + self.request = request + self.context = context + self.session = request.session + self._rhodecode_user = request.user + + def _get_template_context(self): + return { + 'came_from': get_came_from(self.request), + 'defaults': {}, + 'errors': {}, + } + + @view_config( + route_name='login', request_method='GET', + renderer='rhodecode:templates/login.html') + def login(self): + came_from = get_came_from(self.request) + user = self.request.user + + # redirect if already logged in + if user.is_authenticated and not user.is_default and user.ip_allowed: + raise HTTPFound(came_from) + + # check if we use headers plugin, and try to login using it. + try: + log.debug('Running PRE-AUTH for headers based authentication') + auth_info = authenticate( + '', '', self.request.environ, HTTP_TYPE, skip_missing=True) + if auth_info: + headers = _store_user_in_session( + self.session, auth_info.get('username')) + raise HTTPFound(came_from, headers=headers) + except UserCreationError as e: + log.error(e) + self.session.flash(e, queue='error') + + return self._get_template_context() + + @view_config( + route_name='login', request_method='POST', + renderer='rhodecode:templates/login.html') + def login_post(self): + came_from = get_came_from(self.request) + session = self.request.session + login_form = LoginForm()() + + try: + session.invalidate() + form_result = login_form.to_python(self.request.params) + # form checks for username/password, now we're authenticated + headers = _store_user_in_session( + self.session, + username=form_result['username'], + remember=form_result['remember']) + raise HTTPFound(came_from, headers=headers) + except formencode.Invalid as errors: + defaults = errors.value + # remove password from filling in form again + del defaults['password'] + render_ctx = self._get_template_context() + render_ctx.update({ + 'errors': errors.error_dict, + 'defaults': defaults, + }) + return render_ctx + + except UserCreationError as e: + # headers auth or other auth functions that create users on + # the fly can throw this exception signaling that there's issue + # with user creation, explanation should be provided in + # Exception itself + session.flash(e, queue='error') + return self._get_template_context() + + @CSRFRequired() + @view_config(route_name='logout', request_method='POST') + def logout(self): + LoginSession().destroy_user_session() + return HTTPFound(url('home')) + + @HasPermissionAnyDecorator( + 'hg.admin', 'hg.register.auto_activate', 'hg.register.manual_activate') + @view_config( + route_name='register', request_method='GET', + renderer='rhodecode:templates/register.html',) + def register(self, defaults=None, errors=None): + defaults = defaults or {} + errors = errors or {} + + settings = SettingsModel().get_all_settings() + captcha_public_key = settings.get('rhodecode_captcha_public_key') + captcha_private_key = settings.get('rhodecode_captcha_private_key') + captcha_active = bool(captcha_private_key) + register_message = settings.get('rhodecode_register_message') or '' + auto_active = 'hg.register.auto_activate' in User.get_default_user()\ + .AuthUser.permissions['global'] + + render_ctx = self._get_template_context() + render_ctx.update({ + 'defaults': defaults, + 'errors': errors, + 'auto_active': auto_active, + 'captcha_active': captcha_active, + 'captcha_public_key': 
captcha_public_key, + 'register_message': register_message, + }) + return render_ctx + + @view_config( + route_name='register', request_method='POST', + renderer='rhodecode:templates/register.html') + def register_post(self): + captcha_private_key = SettingsModel().get_setting_by_name( + 'rhodecode_captcha_private_key') + captcha_active = bool(captcha_private_key) + auto_active = 'hg.register.auto_activate' in User.get_default_user()\ + .AuthUser.permissions['global'] + + register_form = RegisterForm()() + try: + form_result = register_form.to_python(self.request.params) + form_result['active'] = auto_active + + if captcha_active: + response = submit( + self.request.params.get('recaptcha_challenge_field'), + self.request.params.get('recaptcha_response_field'), + private_key=captcha_private_key, + remoteip=get_ip_addr(self.request.environ)) + if captcha_active and not response.is_valid: + _value = form_result + _msg = _('bad captcha') + error_dict = {'recaptcha_field': _msg} + raise formencode.Invalid(_msg, _value, None, + error_dict=error_dict) + + new_user = UserModel().create_registration(form_result) + event = UserRegistered(user=new_user, session=self.session) + self.request.registry.notify(event) + self.session.flash( + _('You have successfully registered with RhodeCode'), + queue='success') + Session().commit() + + redirect_ro = self.request.route_path('login') + raise HTTPFound(redirect_ro) + + except formencode.Invalid as errors: + del errors.value['password'] + del errors.value['password_confirmation'] + return self.register( + defaults=errors.value, errors=errors.error_dict) + + except UserCreationError as e: + # container auth or other auth functions that create users on + # the fly can throw this exception signaling that there's issue + # with user creation, explanation should be provided in + # Exception itself + self.session.flash(e, queue='error') + return self.register() + + @view_config( + route_name='reset_password', request_method=('GET', 'POST'), + renderer='rhodecode:templates/password_reset.html') + def password_reset(self): + settings = SettingsModel().get_all_settings() + captcha_private_key = settings.get('rhodecode_captcha_private_key') + captcha_active = bool(captcha_private_key) + captcha_public_key = settings.get('rhodecode_captcha_public_key') + + render_ctx = { + 'captcha_active': captcha_active, + 'captcha_public_key': captcha_public_key, + 'defaults': {}, + 'errors': {}, + } + + if self.request.POST: + password_reset_form = PasswordResetForm()() + try: + form_result = password_reset_form.to_python( + self.request.params) + if captcha_active: + response = submit( + self.request.params.get('recaptcha_challenge_field'), + self.request.params.get('recaptcha_response_field'), + private_key=captcha_private_key, + remoteip=get_ip_addr(self.request.environ)) + if captcha_active and not response.is_valid: + _value = form_result + _msg = _('bad captcha') + error_dict = {'recaptcha_field': _msg} + raise formencode.Invalid(_msg, _value, None, + error_dict=error_dict) + + # Generate reset URL and send mail. + user_email = form_result['email'] + user = User.get_by_email(user_email) + password_reset_url = self.request.route_url( + 'reset_password_confirmation', + _query={'key': user.api_key}) + UserModel().reset_password_link( + form_result, password_reset_url) + + # Display success message and redirect. 
+ self.session.flash( + _('Your password reset link was sent'), + queue='success') + return HTTPFound(self.request.route_path('login')) + + except formencode.Invalid as errors: + render_ctx.update({ + 'defaults': errors.value, + 'errors': errors.error_dict, + }) + + return render_ctx + + @view_config(route_name='reset_password_confirmation', + request_method='GET') + def password_reset_confirmation(self): + if self.request.GET and self.request.GET.get('key'): + try: + user = User.get_by_auth_token(self.request.GET.get('key')) + data = {'email': user.email} + UserModel().reset_password(data) + self.session.flash( + _('Your password reset was successful, ' + 'a new password has been sent to your email'), + queue='success') + except Exception as e: + log.error(e) + return HTTPFound(self.request.route_path('reset_password')) + + return HTTPFound(self.request.route_path('login')) diff --git a/rhodecode/model/db.py b/rhodecode/model/db.py --- a/rhodecode/model/db.py +++ b/rhodecode/model/db.py @@ -1593,7 +1593,7 @@ class Repository(Base, BaseModel): 'repo_id': repo.repo_id, 'repo_name': repo.repo_name, 'repo_type': repo.repo_type, - 'clone_uri': repo.clone_uri, + 'clone_uri': repo.clone_uri or '', 'private': repo.private, 'created_on': repo.created_on, 'description': repo.description, @@ -2794,7 +2794,9 @@ class CacheKey(Base, BaseModel): Session().commit() except Exception: - log.error(traceback.format_exc()) + log.exception( + 'Cache key invalidation failed for repository %s', + safe_str(repo_name)) Session().rollback() @classmethod diff --git a/rhodecode/model/pull_request.py b/rhodecode/model/pull_request.py --- a/rhodecode/model/pull_request.py +++ b/rhodecode/model/pull_request.py @@ -396,10 +396,15 @@ class PullRequestModel(BaseModel): return commit_ids def merge(self, pull_request, user, extras): + log.debug("Merging pull request %s", pull_request.pull_request_id) merge_state = self._merge_pull_request(pull_request, user, extras) if merge_state.executed: + log.debug( + "Merge was successful, updating the pull request comments.") self._comment_and_close_pr(pull_request, user, merge_state) self._log_action('user_merged_pull_request', user, pull_request) + else: + log.warn("Merge failed, not updating the pull request.") return merge_state def _merge_pull_request(self, pull_request, user, extras): @@ -907,15 +912,20 @@ class PullRequestModel(BaseModel): """ Try to merge the pull request and return the merge status. 
""" + log.debug( + "Trying out if the pull request %s can be merged.", + pull_request.pull_request_id) target_vcs = pull_request.target_repo.scm_instance() target_ref = self._refresh_reference( pull_request.target_ref_parts, target_vcs) target_locked = pull_request.target_repo.locked if target_locked and target_locked[0]: + log.debug("The target repository is locked.") merge_state = MergeResponse( False, False, None, MergeFailureReason.TARGET_IS_LOCKED) elif self._needs_merge_state_refresh(pull_request, target_ref): + log.debug("Refreshing the merge status of the repository.") merge_state = self._refresh_merge_state( pull_request, target_vcs, target_ref) else: @@ -923,6 +933,7 @@ class PullRequestModel(BaseModel): _last_merge_status == MergeFailureReason.NONE merge_state = MergeResponse( possible, False, None, pull_request._last_merge_status) + log.debug("Merge response: %s", merge_state) return merge_state def _refresh_reference(self, reference, vcs_repository): diff --git a/rhodecode/model/scm.py b/rhodecode/model/scm.py --- a/rhodecode/model/scm.py +++ b/rhodecode/model/scm.py @@ -449,7 +449,7 @@ class ScmModel(BaseModel): return tip def _sanitize_path(self, f_path): - if f_path.startswith('/') or f_path.startswith('.') or '../' in f_path: + if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path: raise NonRelativePathError('%s is not an relative path' % f_path) if f_path: f_path = os.path.normpath(f_path) diff --git a/rhodecode/model/user.py b/rhodecode/model/user.py --- a/rhodecode/model/user.py +++ b/rhodecode/model/user.py @@ -493,7 +493,7 @@ class UserModel(BaseModel): log.error(traceback.format_exc()) raise - def reset_password_link(self, data): + def reset_password_link(self, data, pwd_reset_url): from rhodecode.lib.celerylib import tasks, run_task from rhodecode.model.notification import EmailNotificationModel user_email = data['email'] @@ -502,12 +502,8 @@ class UserModel(BaseModel): if user: log.debug('password reset user found %s', user) - password_reset_url = url( - 'reset_password_confirmation', key=user.api_key, - qualified=True) - email_kwargs = { - 'password_reset_url': password_reset_url, + 'password_reset_url': pwd_reset_url, 'user': user, 'email': user_email, 'date': datetime.datetime.now() diff --git a/rhodecode/model/user_group.py b/rhodecode/model/user_group.py --- a/rhodecode/model/user_group.py +++ b/rhodecode/model/user_group.py @@ -216,7 +216,13 @@ class UserGroupModel(BaseModel): if 'user' in form_data: owner = form_data['user'] if isinstance(owner, basestring): - user_group.user = User.get_by_username(form_data['user']) + owner = User.get_by_username(form_data['user']) + + if not isinstance(owner, User): + raise ValueError( + 'invalid owner for user group: %s' % form_data['user']) + + user_group.user = owner if 'users_group_members' in form_data: members_id_list = self._clean_members_data( diff --git a/rhodecode/model/validation_schema.py b/rhodecode/model/validation_schema.py --- a/rhodecode/model/validation_schema.py +++ b/rhodecode/model/validation_schema.py @@ -51,6 +51,11 @@ class SearchParamsSchema(colander.Mappin colander.String(), missing='content', validator=colander.OneOf(['content', 'path', 'commit', 'repository'])) + search_sort = colander.SchemaNode( + colander.String(), + missing='newfirst', + validator=colander.OneOf( + ['oldfirst', 'newfirst'])) page_limit = colander.SchemaNode( colander.Integer(), missing=10, diff --git a/rhodecode/model/validators.py b/rhodecode/model/validators.py --- a/rhodecode/model/validators.py +++ 
b/rhodecode/model/validators.py @@ -38,9 +38,11 @@ from sqlalchemy.sql.expression import tr from sqlalchemy.util import OrderedSet from webhelpers.pylonslib.secure_form import authentication_token +from rhodecode.authentication import ( + legacy_plugin_prefix, _import_legacy_plugin) +from rhodecode.authentication.base import loadplugin from rhodecode.config.routing import ADMIN_PREFIX from rhodecode.lib.auth import HasRepoGroupPermissionAny, HasPermissionAny -from rhodecode.lib.exceptions import LdapImportError from rhodecode.lib.utils import repo_name_slug, make_db_config from rhodecode.lib.utils2 import safe_int, str2bool, aslist, md5 from rhodecode.lib.vcs.backends.git.repository import GitRepository @@ -437,8 +439,7 @@ def ValidAuth(): password = value['password'] username = value['username'] - if not authenticate(username, password, '', - HTTP_TYPE, + if not authenticate(username, password, '', HTTP_TYPE, skip_missing=True): user = User.get_by_username(username) if user and not user.active: @@ -448,7 +449,7 @@ def ValidAuth(): msg, value, state, error_dict={'username': msg} ) else: - log.warning('user %s failed to authenticate', username) + log.warning('user `%s` failed to authenticate', username) msg = M(self, 'invalid_username', state) msg2 = M(self, 'invalid_password', state) raise formencode.Invalid( @@ -986,28 +987,71 @@ def ValidAuthPlugins(): 'import_duplicate': _( u'Plugins %(loaded)s and %(next_to_load)s ' u'both export the same name'), + 'missing_includeme': _( + u'The plugin "%(plugin_id)s" is missing an includeme ' + u'function.'), + 'import_error': _( + u'Can not load plugin "%(plugin_id)s"'), + 'no_plugin': _( + u'No plugin available with ID "%(plugin_id)s"'), } def _to_python(self, value, state): # filter empty values return filter(lambda s: s not in [None, ''], value) - def validate_python(self, value, state): - from rhodecode.authentication.base import loadplugin - module_list = value - unique_names = {} + def _validate_legacy_plugin_id(self, plugin_id, value, state): + """ + Validates that the plugin import works. It also checks that the + plugin has an includeme attribute. + """ try: - for module in module_list: - plugin = loadplugin(module) - plugin_name = plugin.name - if plugin_name in unique_names: - msg = M(self, 'import_duplicate', state, - loaded=unique_names[plugin_name], - next_to_load=plugin_name) - raise formencode.Invalid(msg, value, state) - unique_names[plugin_name] = plugin - except (KeyError, AttributeError, TypeError) as e: - raise formencode.Invalid(str(e), value, state) + plugin = _import_legacy_plugin(plugin_id) + except Exception as e: + log.exception( + 'Exception during import of auth legacy plugin "{}"' + .format(plugin_id)) + msg = M(self, 'import_error', plugin_id=plugin_id) + raise formencode.Invalid(msg, value, state) + + if not hasattr(plugin, 'includeme'): + msg = M(self, 'missing_includeme', plugin_id=plugin_id) + raise formencode.Invalid(msg, value, state) + + return plugin + + def _validate_plugin_id(self, plugin_id, value, state): + """ + Plugins are already imported during app start up. Therefore this + validation only retrieves the plugin from the plugin registry and + if it returns something not None everything is OK. + """ + plugin = loadplugin(plugin_id) + + if plugin is None: + msg = M(self, 'no_plugin', plugin_id=plugin_id) + raise formencode.Invalid(msg, value, state) + + return plugin + + def validate_python(self, value, state): + unique_names = {} + for plugin_id in value: + + # Validate legacy or normal plugin. 
+ if plugin_id.startswith(legacy_plugin_prefix): + plugin = self._validate_legacy_plugin_id( + plugin_id, value, state) + else: + plugin = self._validate_plugin_id(plugin_id, value, state) + + # Only allow unique plugin names. + if plugin.name in unique_names: + msg = M(self, 'import_duplicate', state, + loaded=unique_names[plugin.name], + next_to_load=plugin) + raise formencode.Invalid(msg, value, state) + unique_names[plugin.name] = plugin return _validator diff --git a/rhodecode/public/css/code-block.less b/rhodecode/public/css/code-block.less --- a/rhodecode/public/css/code-block.less +++ b/rhodecode/public/css/code-block.less @@ -514,6 +514,26 @@ div.search-code-body { .match { background-color: #faffa6;} .break { display: block; width: 100%; background-color: #DDE7EF; color: #747474; } } + .code-highlighttable { + border-collapse: collapse; + + tr:hover { + background: #fafafa; + } + td.code { + padding-left: 10px; + } + td.line { + border-right: 1px solid #ccc !important; + padding-right: 10px; + text-align: right; + font-family: "Lucida Console",Monaco,monospace; + span { + white-space: pre-wrap; + color: #666666; + } + } + } } div.annotatediv { margin-left: 2px; margin-right: 4px; } diff --git a/rhodecode/public/css/main-content.less b/rhodecode/public/css/main-content.less --- a/rhodecode/public/css/main-content.less +++ b/rhodecode/public/css/main-content.less @@ -353,7 +353,12 @@ .middle-group{ width: 10%; text-align: center; - padding-top: 6em; + padding-top: 4em; + i { + font-size: 18px; + cursor: pointer; + line-height: 2em; + } } } diff --git a/rhodecode/public/css/main.less b/rhodecode/public/css/main.less --- a/rhodecode/public/css/main.less +++ b/rhodecode/public/css/main.less @@ -1234,6 +1234,13 @@ table.issuetracker { .reviewer { float: left; } + + &.to-delete { + .user, + .reviewer { + text-decoration: line-through; + } + } } .reviewer_member_remove { diff --git a/rhodecode/public/css/tags.less b/rhodecode/public/css/tags.less --- a/rhodecode/public/css/tags.less +++ b/rhodecode/public/css/tags.less @@ -80,6 +80,11 @@ [tag="recommends"] { &:extend(.tag7); } [tag="see"] { &:extend(.tag8); } +.perm_overriden { + text-decoration: line-through; + opacity: 0.6; +} + .perm_tag { &:extend(.tag); diff --git a/rhodecode/public/js/src/rhodecode/pyroutes.js b/rhodecode/public/js/rhodecode/routes.js rename from rhodecode/public/js/src/rhodecode/pyroutes.js rename to rhodecode/public/js/rhodecode/routes.js --- a/rhodecode/public/js/src/rhodecode/pyroutes.js +++ b/rhodecode/public/js/rhodecode/routes.js @@ -1,45 +1,50 @@ -/* This file is automatically generated. DO NOT change it manually. - * If this file needs to be modified, edit - * rhodecode/utils/file_generation/js_routes_data.py - * and run the script invoke -r scripts/ generate.js-routes . - */ + +/****************************************************************************** + * * + * DO NOT CHANGE THIS FILE MANUALLY * + * * + * * + * This file is automatically generated when the app starts up. 
* + * * + * To add a route here pass jsroute=True to the route definition in the app * + * * + ******************************************************************************/ function registerRCRoutes() { // routes registration pyroutes.register('home', '/', []); - pyroutes.register('new_gist', '/_admin/gists/new', []); - pyroutes.register('gists', '/_admin/gists', []); + pyroutes.register('user_autocomplete_data', '/_users', []); pyroutes.register('new_repo', '/_admin/create_repository', []); - pyroutes.register('summary_home', '/%(repo_name)s', ['repo_name']); - pyroutes.register('changelog_home', '/%(repo_name)s/changelog', ['repo_name']); - pyroutes.register('files_home', '/%(repo_name)s/files/%(revision)s/%(f_path)s', ['repo_name', 'revision', 'f_path']); + pyroutes.register('edit_user_group_members', '/_admin/user_groups/%(user_group_id)s/edit/members', ['user_group_id']); + pyroutes.register('gists', '/_admin/gists', []); + pyroutes.register('new_gist', '/_admin/gists/new', []); + pyroutes.register('toggle_following', '/_admin/toggle_following', []); + pyroutes.register('repo_stats', '/%(repo_name)s/repo_stats/%(commit_id)s', ['repo_name', 'commit_id']); + pyroutes.register('repo_refs_data', '/%(repo_name)s/refs-data', ['repo_name']); + pyroutes.register('repo_refs_changelog_data', '/%(repo_name)s/refs-data-changelog', ['repo_name']); + pyroutes.register('changeset_home', '/%(repo_name)s/changeset/%(revision)s', ['repo_name', 'revision']); pyroutes.register('edit_repo', '/%(repo_name)s/settings', ['repo_name']); pyroutes.register('edit_repo_perms', '/%(repo_name)s/settings/permissions', ['repo_name']); - pyroutes.register('pullrequest_home', '/%(repo_name)s/pull-request/new', ['repo_name']); - pyroutes.register('user_autocomplete_data', '/_users', []); - pyroutes.register('toggle_following', '/_admin/toggle_following', []); - pyroutes.register('repo_stats', '/%(repo_name)s/repo_stats/%(commit_id)s', ['repo_name', 'commit_id']); - pyroutes.register('changeset_info', '/changeset_info/%(repo_name)s/%(revision)s', ['repo_name', 'revision']); - pyroutes.register('changeset_home', '/%(repo_name)s/changeset/%(revision)s', ['repo_name', 'revision']); pyroutes.register('changeset_comment', '/%(repo_name)s/changeset/%(revision)s/comment', ['repo_name', 'revision']); pyroutes.register('changeset_comment_preview', '/%(repo_name)s/changeset/comment/preview', ['repo_name']); pyroutes.register('changeset_comment_delete', '/%(repo_name)s/changeset/comment/%(comment_id)s/delete', ['repo_name', 'comment_id']); - pyroutes.register('repo_refs_data', '/%(repo_name)s/refs-data', ['repo_name']); - pyroutes.register('repo_refs_changelog_data', '/%(repo_name)s/refs-data-changelog', ['repo_name']); + pyroutes.register('changeset_info', '/changeset_info/%(repo_name)s/%(revision)s', ['repo_name', 'revision']); + pyroutes.register('compare_url', '/%(repo_name)s/compare/%(source_ref_type)s@%(source_ref)s...%(target_ref_type)s@%(target_ref)s', ['repo_name', 'source_ref_type', 'source_ref', 'target_ref_type', 'target_ref']); + pyroutes.register('pullrequest_home', '/%(repo_name)s/pull-request/new', ['repo_name']); + pyroutes.register('pullrequest', '/%(repo_name)s/pull-request/new', ['repo_name']); + pyroutes.register('pullrequest_repo_refs', '/%(repo_name)s/pull-request/refs/%(target_repo_name)s', ['repo_name', 'target_repo_name']); + pyroutes.register('pullrequest_repo_destinations', '/%(repo_name)s/pull-request/repo-destinations', ['repo_name']); + pyroutes.register('pullrequest_update', 
'/%(repo_name)s/pull-request/%(pull_request_id)s', ['repo_name', 'pull_request_id']); + pyroutes.register('pullrequest_show_all', '/%(repo_name)s/pull-request', ['repo_name']); + pyroutes.register('pullrequest_comment', '/%(repo_name)s/pull-request-comment/%(pull_request_id)s', ['repo_name', 'pull_request_id']); + pyroutes.register('pullrequest_comment_delete', '/%(repo_name)s/pull-request-comment/%(comment_id)s/delete', ['repo_name', 'comment_id']); + pyroutes.register('changelog_home', '/%(repo_name)s/changelog', ['repo_name']); + pyroutes.register('changelog_file_home', '/%(repo_name)s/changelog/%(revision)s/%(f_path)s', ['repo_name', 'revision', 'f_path']); + pyroutes.register('files_home', '/%(repo_name)s/files/%(revision)s/%(f_path)s', ['repo_name', 'revision', 'f_path']); + pyroutes.register('files_history_home', '/%(repo_name)s/history/%(revision)s/%(f_path)s', ['repo_name', 'revision', 'f_path']); + pyroutes.register('files_authors_home', '/%(repo_name)s/authors/%(revision)s/%(f_path)s', ['repo_name', 'revision', 'f_path']); pyroutes.register('files_archive_home', '/%(repo_name)s/archive/%(fname)s', ['repo_name', 'fname']); pyroutes.register('files_nodelist_home', '/%(repo_name)s/nodelist/%(revision)s/%(f_path)s', ['repo_name', 'revision', 'f_path']); pyroutes.register('files_metadata_list_home', '/%(repo_name)s/metadata_list/%(revision)s/%(f_path)s', ['repo_name', 'revision', 'f_path']); - pyroutes.register('files_history_home', '/%(repo_name)s/history/%(revision)s/%(f_path)s', ['repo_name', 'revision', 'f_path']); - pyroutes.register('files_authors_home', '/%(repo_name)s/authors/%(revision)s/%(f_path)s', ['repo_name', 'revision', 'f_path']); - pyroutes.register('changelog_file_home', '/%(repo_name)s/changelog/%(revision)s/%(f_path)s', ['repo_name', 'revision', 'f_path']); - pyroutes.register('pullrequest', '/%(repo_name)s/pull-request/new', ['repo_name']); - pyroutes.register('pullrequest_home', '/%(repo_name)s/pull-request/new', ['repo_name']); - pyroutes.register('pullrequest_show_all', '/%(repo_name)s/pull-request', ['repo_name']); - pyroutes.register('pullrequest_comment', '/%(repo_name)s/pull-request-comment/%(pull_request_id)s', ['repo_name', 'pull_request_id']); - pyroutes.register('pullrequest_comment_delete', '/%(repo_name)s/pull-request-comment/%(comment_id)s/delete', ['repo_name', 'comment_id']); - pyroutes.register('pullrequest_update', '/%(repo_name)s/pull-request/%(pull_request_id)s', ['repo_name', 'pull_request_id']); - pyroutes.register('pullrequest_repo_refs', '/%(repo_name)s/pull-request/refs/%(target_repo_name)s', ['repo_name', 'target_repo_name']); - pyroutes.register('pullrequest_repo_destinations', '/%(repo_name)s/pull-request/repo-destinations', ['repo_name']); - pyroutes.register('compare_url', '/%(repo_name)s/compare/%(source_ref_type)s@%(source_ref)s...%(target_ref_type)s@%(target_ref)s', ['repo_name', 'source_ref_type', 'source_ref', 'target_ref_type', 'target_ref']); + pyroutes.register('summary_home_slash', '/%(repo_name)s/', ['repo_name']); + pyroutes.register('summary_home', '/%(repo_name)s', ['repo_name']); } - -registerRCRoutes(); \ No newline at end of file diff --git a/rhodecode/public/js/src/plugins/jquery.mark.js b/rhodecode/public/js/src/plugins/jquery.mark.js new file mode 100755 --- /dev/null +++ b/rhodecode/public/js/src/plugins/jquery.mark.js @@ -0,0 +1,490 @@ +/*!*************************************************** + * mark.js v6.1.0 + * https://github.com/julmot/mark.js + * Copyright (c) 2014–2016, Julian Motz + * Released under the MIT 
license https://git.io/vwTVl + *****************************************************/ + +"use strict"; + +var _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; }; + +var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }(); + +var _typeof = typeof Symbol === "function" && typeof Symbol.iterator === "symbol" ? function (obj) { return typeof obj; } : function (obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol ? "symbol" : typeof obj; }; + +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } + +(function (factory, window, document) { + if (typeof define === "function" && define.amd) { + define(["jquery"], function (jQuery) { + return factory(window, document, jQuery); + }); + } else if ((typeof exports === "undefined" ? "undefined" : _typeof(exports)) === "object") { + factory(window, document, require("jquery")); + } else { + factory(window, document, jQuery); + } +})(function (window, document, $) { + var Mark = function () { + function Mark(ctx) { + _classCallCheck(this, Mark); + + this.ctx = ctx; + } + + _createClass(Mark, [{ + key: "log", + value: function log(msg) { + var level = arguments.length <= 1 || arguments[1] === undefined ? "debug" : arguments[1]; + + var log = this.opt.log; + if (!this.opt.debug) { + return; + } + if ((typeof log === "undefined" ? 
"undefined" : _typeof(log)) === "object" && typeof log[level] === "function") { + log[level]("mark.js: " + msg); + } + } + }, { + key: "escapeStr", + value: function escapeStr(str) { + return str.replace(/[\-\[\]\/\{\}\(\)\*\+\?\.\\\^\$\|]/g, "\\$&"); + } + }, { + key: "createRegExp", + value: function createRegExp(str) { + str = this.escapeStr(str); + if (Object.keys(this.opt.synonyms).length) { + str = this.createSynonymsRegExp(str); + } + if (this.opt.diacritics) { + str = this.createDiacriticsRegExp(str); + } + str = this.createAccuracyRegExp(str); + return str; + } + }, { + key: "createSynonymsRegExp", + value: function createSynonymsRegExp(str) { + var syn = this.opt.synonyms; + for (var index in syn) { + if (syn.hasOwnProperty(index)) { + var value = syn[index], + k1 = this.escapeStr(index), + k2 = this.escapeStr(value); + str = str.replace(new RegExp("(" + k1 + "|" + k2 + ")", "gmi"), "(" + k1 + "|" + k2 + ")"); + } + } + return str; + } + }, { + key: "createDiacriticsRegExp", + value: function createDiacriticsRegExp(str) { + var dct = ["aÀÁÂÃÄÅàáâãäåĀāąĄ", "cÇçćĆčČ", "dđĐďĎ", "eÈÉÊËèéêëěĚĒēęĘ", "iÌÍÎÏìíîïĪī", "lłŁ", "nÑñňŇńŃ", "oÒÓÔÕÕÖØòóôõöøŌō", "rřŘ", "sŠšśŚ", "tťŤ", "uÙÚÛÜùúûüůŮŪū", "yŸÿýÝ", "zŽžżŻźŹ"]; + var handled = []; + str.split("").forEach(function (ch) { + dct.every(function (dct) { + if (dct.indexOf(ch) !== -1) { + if (handled.indexOf(dct) > -1) { + return false; + } + + str = str.replace(new RegExp("[" + dct + "]", "gmi"), "[" + dct + "]"); + handled.push(dct); + } + return true; + }); + }); + return str; + } + }, { + key: "createAccuracyRegExp", + value: function createAccuracyRegExp(str) { + switch (this.opt.accuracy) { + case "partially": + return "()(" + str + ")"; + case "complementary": + return "()(\\S*" + str + "\\S*)"; + case "exactly": + return "(^|\\s)(" + str + ")(?=\\s|$)"; + } + } + }, { + key: "getSeparatedKeywords", + value: function getSeparatedKeywords(sv) { + var _this = this; + + var stack = []; + sv.forEach(function (kw) { + if (!_this.opt.separateWordSearch) { + if (kw.trim()) { + stack.push(kw); + } + } else { + kw.split(" ").forEach(function (kwSplitted) { + if (kwSplitted.trim()) { + stack.push(kwSplitted); + } + }); + } + }); + return { + "keywords": stack, + "length": stack.length + }; + } + }, { + key: "getElements", + value: function getElements() { + var ctx = void 0, + stack = []; + if (typeof this.ctx === "undefined") { + ctx = []; + } else if (this.ctx instanceof HTMLElement) { + ctx = [this.ctx]; + } else if (Array.isArray(this.ctx)) { + ctx = this.ctx; + } else { + ctx = Array.prototype.slice.call(this.ctx); + } + ctx.forEach(function (ctx) { + stack.push(ctx); + var childs = ctx.querySelectorAll("*"); + if (childs.length) { + stack = stack.concat(Array.prototype.slice.call(childs)); + } + }); + if (!ctx.length) { + this.log("Empty context", "warn"); + } + return { + "elements": stack, + "length": stack.length + }; + } + }, { + key: "matches", + value: function matches(el, selector) { + return (el.matches || el.matchesSelector || el.msMatchesSelector || el.mozMatchesSelector || el.webkitMatchesSelector || el.oMatchesSelector).call(el, selector); + } + }, { + key: "matchesFilter", + value: function matchesFilter(el, exclM) { + var _this2 = this; + + var remain = true; + var fltr = this.opt.filter.concat(["script", "style", "title"]); + if (!this.opt.iframes) { + fltr = fltr.concat(["iframe"]); + } + if (exclM) { + fltr = fltr.concat(["*[data-markjs='true']"]); + } + fltr.every(function (filter) { + if (_this2.matches(el, filter)) { + 
return remain = false; + } + return true; + }); + return !remain; + } + }, { + key: "onIframeReady", + value: function onIframeReady(ifr, successFn, errorFn) { + try { + (function () { + var ifrWin = ifr.contentWindow, + bl = "about:blank", + compl = "complete"; + var callCallback = function callCallback() { + try { + if (ifrWin.document === null) { + throw new Error("iframe inaccessible"); + } + successFn(ifrWin.document); + } catch (e) { + errorFn(); + } + }; + var isBlank = function isBlank() { + var src = ifr.getAttribute("src").trim(), + href = ifrWin.location.href; + return href === bl && src !== bl && src; + }; + var observeOnload = function observeOnload() { + var listener = function listener() { + try { + if (!isBlank()) { + ifr.removeEventListener("load", listener); + callCallback(); + } + } catch (e) { + errorFn(); + } + }; + ifr.addEventListener("load", listener); + }; + if (ifrWin.document.readyState === compl) { + if (isBlank()) { + observeOnload(); + } else { + callCallback(); + } + } else { + observeOnload(); + } + })(); + } catch (e) { + errorFn(); + } + } + }, { + key: "forEachElementInIframe", + value: function forEachElementInIframe(ifr, cb) { + var _this3 = this; + + var end = arguments.length <= 2 || arguments[2] === undefined ? function () {} : arguments[2]; + + var open = 0; + var checkEnd = function checkEnd() { + if (--open < 1) { + end(); + } + }; + this.onIframeReady(ifr, function (con) { + var stack = Array.prototype.slice.call(con.querySelectorAll("*")); + if ((open = stack.length) === 0) { + checkEnd(); + } + stack.forEach(function (el) { + if (el.tagName.toLowerCase() === "iframe") { + (function () { + var j = 0; + _this3.forEachElementInIframe(el, function (iel, len) { + cb(iel, len); + if (len - 1 === j) { + checkEnd(); + } + j++; + }, checkEnd); + })(); + } else { + cb(el, stack.length); + checkEnd(); + } + }); + }, function () { + var src = ifr.getAttribute("src"); + _this3.log("iframe '" + src + "' could not be accessed", "warn"); + checkEnd(); + }); + } + }, { + key: "forEachElement", + value: function forEachElement(cb) { + var _this4 = this; + + var end = arguments.length <= 1 || arguments[1] === undefined ? function () {} : arguments[1]; + var exclM = arguments.length <= 2 || arguments[2] === undefined ? true : arguments[2]; + + var _getElements = this.getElements(); + + var stack = _getElements.elements; + var open = _getElements.length; + + var checkEnd = function checkEnd() { + if (--open === 0) { + end(); + } + }; + checkEnd(++open); + stack.forEach(function (el) { + if (!_this4.matchesFilter(el, exclM)) { + if (el.tagName.toLowerCase() === "iframe") { + _this4.forEachElementInIframe(el, function (iel) { + if (!_this4.matchesFilter(iel, exclM)) { + cb(iel); + } + }, checkEnd); + return; + } else { + cb(el); + } + } + checkEnd(); + }); + } + }, { + key: "forEachNode", + value: function forEachNode(cb) { + var end = arguments.length <= 1 || arguments[1] === undefined ? function () {} : arguments[1]; + + this.forEachElement(function (n) { + for (n = n.firstChild; n; n = n.nextSibling) { + if (n.nodeType === 3 && n.textContent.trim()) { + cb(n); + } + } + }, end); + } + }, { + key: "wrapMatches", + value: function wrapMatches(node, regex, custom, cb) { + var hEl = !this.opt.element ? "mark" : this.opt.element, + index = custom ? 
0 : 2; + var match = void 0; + while ((match = regex.exec(node.textContent)) !== null) { + var pos = match.index; + if (!custom) { + pos += match[index - 1].length; + } + var startNode = node.splitText(pos); + + node = startNode.splitText(match[index].length); + if (startNode.parentNode !== null) { + var repl = document.createElement(hEl); + repl.setAttribute("data-markjs", "true"); + if (this.opt.className) { + repl.setAttribute("class", this.opt.className); + } + repl.textContent = match[index]; + startNode.parentNode.replaceChild(repl, startNode); + cb(repl); + } + regex.lastIndex = 0; + } + } + }, { + key: "unwrapMatches", + value: function unwrapMatches(node) { + var parent = node.parentNode; + var docFrag = document.createDocumentFragment(); + while (node.firstChild) { + docFrag.appendChild(node.removeChild(node.firstChild)); + } + parent.replaceChild(docFrag, node); + parent.normalize(); + } + }, { + key: "markRegExp", + value: function markRegExp(regexp, opt) { + var _this5 = this; + + this.opt = opt; + this.log("Searching with expression \"" + regexp + "\""); + var found = false; + var eachCb = function eachCb(element) { + found = true; + _this5.opt.each(element); + }; + this.forEachNode(function (node) { + _this5.wrapMatches(node, regexp, true, eachCb); + }, function () { + if (!found) { + _this5.opt.noMatch(regexp); + } + _this5.opt.complete(); + _this5.opt.done(); + }); + } + }, { + key: "mark", + value: function mark(sv, opt) { + var _this6 = this; + + this.opt = opt; + sv = typeof sv === "string" ? [sv] : sv; + + var _getSeparatedKeywords = this.getSeparatedKeywords(sv); + + var kwArr = _getSeparatedKeywords.keywords; + var kwArrLen = _getSeparatedKeywords.length; + + if (kwArrLen === 0) { + this.opt.complete(); + this.opt.done(); + } + kwArr.forEach(function (kw) { + var regex = new RegExp(_this6.createRegExp(kw), "gmi"), + found = false; + var eachCb = function eachCb(element) { + found = true; + _this6.opt.each(element); + }; + _this6.log("Searching with expression \"" + regex + "\""); + _this6.forEachNode(function (node) { + _this6.wrapMatches(node, regex, false, eachCb); + }, function () { + if (!found) { + _this6.opt.noMatch(kw); + } + if (kwArr[kwArrLen - 1] === kw) { + _this6.opt.complete(); + _this6.opt.done(); + } + }); + }); + } + }, { + key: "unmark", + value: function unmark(opt) { + var _this7 = this; + + this.opt = opt; + var sel = this.opt.element ? this.opt.element : "*"; + sel += "[data-markjs]"; + if (this.opt.className) { + sel += "." 
+ this.opt.className; + } + this.log("Removal selector \"" + sel + "\""); + this.forEachElement(function (el) { + if (_this7.matches(el, sel)) { + _this7.unwrapMatches(el); + } + }, function () { + _this7.opt.complete(); + _this7.opt.done(); + }, false); + } + }, { + key: "opt", + set: function set(val) { + this._opt = _extends({}, { + "element": "", + "className": "", + "filter": [], + "iframes": false, + "separateWordSearch": true, + "diacritics": true, + "synonyms": {}, + "accuracy": "partially", + "each": function each() {}, + "noMatch": function noMatch() {}, + "done": function done() {}, + "complete": function complete() {}, + "debug": false, + "log": window.console + }, val); + }, + get: function get() { + return this._opt; + } + }]); + + return Mark; + }(); + + $.fn.mark = function (sv, opt) { + new Mark(this).mark(sv, opt); + return this; + }; + $.fn.markRegExp = function (regexp, opt) { + new Mark(this).markRegExp(regexp, opt); + return this; + }; + $.fn.unmark = function (opt) { + new Mark(this).unmark(opt); + return this; + }; +}, window, document); diff --git a/rhodecode/public/js/src/plugins/jquery.timeago-extension.js b/rhodecode/public/js/src/plugins/jquery.timeago-extension.js --- a/rhodecode/public/js/src/plugins/jquery.timeago-extension.js +++ b/rhodecode/public/js/src/plugins/jquery.timeago-extension.js @@ -190,7 +190,7 @@ var AgeModule = (function () { }, createTimeComponent: function(dateTime, text) { - return ''.format(dateTime, text); + return ''.format(dateTime, text); } } })(); diff --git a/rhodecode/public/js/src/rhodecode/init.js b/rhodecode/public/js/src/rhodecode/init.js new file mode 100644 --- /dev/null +++ b/rhodecode/public/js/src/rhodecode/init.js @@ -0,0 +1,26 @@ +// # Copyright (C) 2010-2016 RhodeCode GmbH +// # +// # This program is free software: you can redistribute it and/or modify +// # it under the terms of the GNU Affero General Public License, version 3 +// # (only), as published by the Free Software Foundation. +// # +// # This program is distributed in the hope that it will be useful, +// # but WITHOUT ANY WARRANTY; without even the implied warranty of +// # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// # GNU General Public License for more details. +// # +// # You should have received a copy of the GNU Affero General Public License +// # along with this program. If not, see . +// # +// # This program is dual-licensed. 
If you wish to learn more about the +// # RhodeCode Enterprise Edition, including its added features, Support services, +// # and proprietary license terms, please see https://rhodecode.com/licenses/ + + +/* + * Deferred functions that must run before any rhodecode javascript go here + */ + +registerRCRoutes(); + +// TODO: move i18n here diff --git a/rhodecode/public/js/src/rhodecode/pullrequests.js b/rhodecode/public/js/src/rhodecode/pullrequests.js --- a/rhodecode/public/js/src/rhodecode/pullrequests.js +++ b/rhodecode/public/js/src/rhodecode/pullrequests.js @@ -30,7 +30,6 @@ var removeReviewMember = function(review if (reviewer){ // mark as to-remove var obj = $('#reviewer_{0}_name'.format(reviewer_id)); - obj.css("text-decoration", "line-through"); obj.addClass('to-delete'); // now delete the input $('#reviewer_{0}_input'.format(reviewer_id)).remove(); diff --git a/rhodecode/subscribers.py b/rhodecode/subscribers.py --- a/rhodecode/subscribers.py +++ b/rhodecode/subscribers.py @@ -20,9 +20,11 @@ import pylons -from pyramid.i18n import get_localizer, TranslationStringFactory -tsf = TranslationStringFactory('rc_root') +from pyramid.i18n import get_localizer +from pyramid.threadlocal import get_current_request + +from rhodecode.translation import _ as tsf def add_renderer_globals(event): @@ -33,8 +35,11 @@ def add_renderer_globals(event): event['c'] = pylons.tmpl_context event['url'] = pylons.url + # TODO: When executed in pyramid view context the request is not available + # in the event. Find a better solution to get the request. + request = event['request'] or get_current_request() + # Add Pyramid translation as '_' to context - request = event['request'] event['_'] = request.translate event['localizer'] = request.localizer diff --git a/rhodecode/templates/admin/auth/plugin_settings.html b/rhodecode/templates/admin/auth/plugin_settings.html --- a/rhodecode/templates/admin/auth/plugin_settings.html +++ b/rhodecode/templates/admin/auth/plugin_settings.html @@ -49,46 +49,44 @@
${h.secure_form(request.resource_path(resource, route_name='auth_home'))}
+ %for node in plugin.get_settings_schema(): - <% label_cls = ("label-checkbox" if (node.widget == "bool") else "") %> + <% label_css_class = ("label-checkbox" if (node.widget == "bool") else "") %>
-
- %if node.widget in ["string", "int", "unicode"]: -
- ${h.text(node.name, class_="medium")} -

${node.description}

-
- %elif node.widget == "password": -
- ${h.password(node.name, class_="medium")} -

${node.description}

-
- %elif node.widget == "bool": -
-
${h.checkbox(node.name, True)}
- ${node.description} -
- %elif node.widget == "select": -
- ${h.select(node.name, node.default, node.validator.choices)} -

${node.description}

-
- %elif node.widget == "readonly": -
+
+
+ %if node.widget in ["string", "int", "unicode"]: + ${h.text(node.name, defaults.get(node.name), class_="medium")} + %elif node.widget == "password": + ${h.password(node.name, defaults.get(node.name), class_="medium")} + %elif node.widget == "bool": +
${h.checkbox(node.name, True, checked=defaults.get(node.name))}
+ %elif node.widget == "select": + ${h.select(node.name, defaults.get(node.name), node.validator.choices)} + %elif node.widget == "readonly": ${node.default} -

${node.description}

-
- %else: -
+ %else: This field is of type ${node.typ}, which cannot be displayed. Must be one of [string|int|bool|select]. -

${node.description}

-
- %endif + %endif + %if node.name in errors: + ${errors.get(node.name)} +
+ %endif +

${node.description}

+
%endfor + + ## Allow derived templates to add something below the form + ## input fields + %if hasattr(next, 'below_form_fields'): + ${next.below_form_fields()} + %endif +
${h.submit('save',_('Save'),class_="btn")}
+
${h.end_form()}
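
The reworked plugin_settings.html above now renders every field from plugin.get_settings_schema(): it branches on each node's widget hint ("string"/"int"/"unicode", "password", "bool", "select", "readonly"), pre-fills values from `defaults`, and prints a per-field message from `errors`. As a rough sketch, not part of this patch, of what such a schema could look like, written with the colander API this changeset already uses in validation_schema.py; the node names, defaults and choices below are invented for illustration:

import colander


class ExampleAuthSettingsSchema(colander.MappingSchema):
    # rendered with h.text() because widget is "string"
    host = colander.SchemaNode(
        colander.String(),
        default='auth.example.com',
        description='Hostname of the external authentication service',
        widget='string')

    # rendered with h.password()
    service_password = colander.SchemaNode(
        colander.String(),
        missing='',
        description='Password used to bind to the service',
        widget='password')

    # rendered with h.checkbox()
    enabled = colander.SchemaNode(
        colander.Boolean(),
        default=False,
        description='Enable or disable this authentication plugin',
        widget='bool')

    # rendered with h.select(); the template reads node.validator.choices
    connection_mode = colander.SchemaNode(
        colander.String(),
        default='PLAIN',
        validator=colander.OneOf(['PLAIN', 'TLS']),
        description='Connection security mode',
        widget='select')

A plugin's get_settings_schema() would return an instance of such a class, and the template then emits one labelled input, help text and error message per child node.
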
diff --git a/rhodecode/templates/admin/gists/show.html b/rhodecode/templates/admin/gists/show.html --- a/rhodecode/templates/admin/gists/show.html +++ b/rhodecode/templates/admin/gists/show.html @@ -66,7 +66,7 @@ %if c.gist.gist_expires == -1: ${_('never')} %else: - ${h.age_component(h.time_to_datetime(c.gist.gist_expires))} + ${h.age_component(h.time_to_utcdatetime(c.gist.gist_expires))} %endif diff --git a/rhodecode/templates/admin/my_account/my_account.html b/rhodecode/templates/admin/my_account/my_account.html --- a/rhodecode/templates/admin/my_account/my_account.html +++ b/rhodecode/templates/admin/my_account/my_account.html @@ -29,7 +29,11 @@
  • ${_('My Profile')}
  • ${_('Password')}
  • ${_('Auth Tokens')}
  • -
  • ${_('OAuth Identities')}
  • + ## TODO: Find a better integration of oauth views into navigation. + %try: +
  • ${_('OAuth Identities')}
  • + %except KeyError: + %endtry
  • ${_('My Emails')}
  • ${_('My Repositories')}
  • ${_('Watched')}
  • diff --git a/rhodecode/templates/admin/my_account/my_account_auth_tokens.html b/rhodecode/templates/admin/my_account/my_account_auth_tokens.html --- a/rhodecode/templates/admin/my_account/my_account_auth_tokens.html +++ b/rhodecode/templates/admin/my_account/my_account_auth_tokens.html @@ -42,9 +42,9 @@ ${_('expires')}: ${_('never')} %else: %if auth_token.expired: - ${_('expired')}: ${h.age_component(h.time_to_datetime(auth_token.expires))} + ${_('expired')}: ${h.age_component(h.time_to_utcdatetime(auth_token.expires))} %else: - ${_('expires')}: ${h.age_component(h.time_to_datetime(auth_token.expires))} + ${_('expires')}: ${h.age_component(h.time_to_utcdatetime(auth_token.expires))} %endif %endif diff --git a/rhodecode/templates/admin/my_account/my_account_oauth.html b/rhodecode/templates/admin/my_account/my_account_oauth.html deleted file mode 100644 --- a/rhodecode/templates/admin/my_account/my_account_oauth.html +++ /dev/null @@ -1,62 +0,0 @@ -<%namespace file="/base/social_buttons.html" import="render_social_buttons"/> - -
    -
    -

    ${_('Oauth Identities')}

    -
    -
    -

    - ${_('External services currently connected with your Rhodecode user')}. -

    - -

    - - % if not c.social_plugins: - ${_('No social authentication plugins are enabled by administrator')}. - %endif - - ${render_social_buttons(c.social_plugins)} -

    - - % if c.user_oauth_tokens: - - - - - - - - - - - % for token in c.user_oauth_tokens: - - - - - - % endfor - -
    - Provider - - -
    - ${token.provider_name} - - ${h.secure_form(url('my_account_oauth', provider_name=token.provider_name, external_id=token.external_id), method='delete')} - - ${h.end_form()} -
    - % else: -

    ${_('You have no accounts linked yet')}.

    - % endif -
    -
    diff --git a/rhodecode/templates/admin/repos/repo_creating.html b/rhodecode/templates/admin/repos/repo_creating.html --- a/rhodecode/templates/admin/repos/repo_creating.html +++ b/rhodecode/templates/admin/repos/repo_creating.html @@ -50,19 +50,19 @@ if (resp.status == 200) { var jsonResponse = resp.responseJSON; - if (jsonResponse === undefined){ - setTimeout(function(){ + if (jsonResponse === undefined) { + setTimeout(function () { // we might have a backend problem, try dashboard again window.location = "${h.url('summary_home', repo_name = c.repo)}"; - }, 1000); - } - - if (skipCheck || jsonResponse.result === true) { - // success, means go to dashboard - window.location = "${h.url('summary_home', repo_name = c.repo)}"; + }, 3000); } else { - // Schedule the next request when the current one's complete - setTimeout(worker, 1000); + if (skipCheck || jsonResponse.result === true) { + // success, means go to dashboard + window.location = "${h.url('summary_home', repo_name = c.repo)}"; + } else { + // Schedule the next request when the current one's complete + setTimeout(worker, 1000); + } } } else { diff --git a/rhodecode/templates/admin/user_groups/user_group_edit_settings.html b/rhodecode/templates/admin/user_groups/user_group_edit_settings.html --- a/rhodecode/templates/admin/user_groups/user_group_edit_settings.html +++ b/rhodecode/templates/admin/user_groups/user_group_edit_settings.html @@ -43,11 +43,14 @@
    - + + ${h.text('from_user_group', + placeholder="user/usergroup", + class_="medium")}
    - + ${h.select('users_group_members',[x[0] for x in c.group_members],c.group_members,multiple=True,size=8,)}
    ${_('Remove all elements')} @@ -60,7 +63,8 @@
    - + ${h.select('available_members',[],c.available_members,multiple=True,size=8,)}
    ${_('Add all elements')} @@ -86,6 +90,42 @@ 'dropdownAutoWidth': true }); + $('#from_user_group').autocomplete({ + serviceUrl: pyroutes.url('user_autocomplete_data'), + minChars:2, + maxHeight:400, + width:300, + deferRequestBy: 300, //miliseconds + showNoSuggestionNotice: true, + params: { user_groups:true }, + formatResult: autocompleteFormatResult, + lookupFilter: autocompleteFilterResult, + onSelect: function(element, suggestion){ + + function preSelectUserIds(uids) { + $('#available_members').val(uids); + $('#users_group_members').val(uids); + } + + if (suggestion.value_type == 'user_group') { + $.getJSON( + pyroutes.url('edit_user_group_members', + {'user_group_id': suggestion.id}), + function(data) { + var uids = []; + $.each(data.members, function(idx, user) { + var userid = user[0], + username = user[1]; + uids.push(userid.toString()); + }); + preSelectUserIds(uids) + } + ); + } else if (suggestion.value_type == 'user') { + preSelectUserIds([suggestion.id.toString()]); + } + } + }); UsersAutoComplete('user', '${c.rhodecode_user.user_id}'); }) diff --git a/rhodecode/templates/admin/users/user_edit_auth_tokens.html b/rhodecode/templates/admin/users/user_edit_auth_tokens.html --- a/rhodecode/templates/admin/users/user_edit_auth_tokens.html +++ b/rhodecode/templates/admin/users/user_edit_auth_tokens.html @@ -38,9 +38,9 @@ ${_('expires')}: ${_('never')} %else: %if auth_token.expired: - ${_('expired')}: ${h.age_component(h.time_to_datetime(auth_token.expires))} + ${_('expired')}: ${h.age_component(h.time_to_utcdatetime(auth_token.expires))} %else: - ${_('expires')}: ${h.age_component(h.time_to_datetime(auth_token.expires))} + ${_('expires')}: ${h.age_component(h.time_to_utcdatetime(auth_token.expires))} %endif %endif diff --git a/rhodecode/templates/base/base.html b/rhodecode/templates/base/base.html --- a/rhodecode/templates/base/base.html +++ b/rhodecode/templates/base/base.html @@ -297,7 +297,7 @@
    %if c.rhodecode_user.username == h.DEFAULT_USER:

    ${_('Sign in to your account')}

    - ${h.form(h.url('login_home',came_from=h.url.current()), needs_csrf_token=False)} + ${h.form(h.route_path('login', _query={'came_from': h.url.current()}), needs_csrf_token=False)}
    @@ -312,7 +312,7 @@
    - ${h.link_to(_('(Forgot password?)'),h.url('reset_password'))} + ${h.link_to(_('(Forgot password?)'),h.route_path('reset_password'))}
    ${h.password('password',class_='focus',tabindex=2)} @@ -321,7 +321,7 @@
    %if h.HasPermissionAny('hg.admin', 'hg.register.auto_activate', 'hg.register.manual_activate')(): - ${h.link_to(_("Don't have an account ?"),h.url('register'))} + ${h.link_to(_("Don't have an account ?"),h.route_path('register'))} %endif
    @@ -341,7 +341,7 @@
    diff --git a/rhodecode/templates/login.html b/rhodecode/templates/login.html --- a/rhodecode/templates/login.html +++ b/rhodecode/templates/login.html @@ -1,6 +1,5 @@ ## -*- coding: utf-8 -*- <%inherit file="base/root.html"/> -<%namespace file="base/social_buttons.html" import="render_social_buttons"/> <%def name="title()"> ${_('Sign In')} @@ -35,21 +34,35 @@
    - ${h.form(h.url.current(**request.GET), needs_csrf_token=False)} + ${h.form(request.route_path('login', _query={'came_from': came_from}), needs_csrf_token=False)} + - ${h.text('username',class_='focus')} + ${h.text('username', class_='focus', value=defaults.get('username'))} + %if 'username' in errors: + ${errors.get('username')} +
    + %endif + - ${h.password('password',class_='focus')} - + ${h.password('password', class_='focus')} + %if 'password' in errors: + ${errors.get('password')} +
    + %endif + + ${h.checkbox('remember', value=True, checked=defaults.get('remember'))} + - ${h.submit('sign_in',_('Sign In'),class_="btn sign-in")} + + ${h.submit('sign_in', _('Sign In'), class_="btn sign-in")} + ${h.end_form()}
    - - % if c.social_plugins: -

    ${_('Sign In using one of external services')}:

    - -

    - ${render_social_buttons(c.social_plugins, 'login')} -

    - % endif - + <%block name="below_login_button" />
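
The rewritten login.html above no longer reads pylons `c.` attributes; it expects the Pyramid view to hand it `came_from`, `defaults` and `errors`, which is what LoginView._get_template_context() and the formencode error handling earlier in this patch provide. A minimal sketch of the render-context shape the template consumes; the concrete values are invented examples, not taken from the patch:

def login_render_context(request, defaults=None, errors=None):
    """Shape of the dict login.html reads; values shown are illustrative."""
    return {
        # sanitized redirect target, produced by get_came_from() in the view
        'came_from': request.GET.get('came_from', '/'),
        # re-filled form values, e.g. {'username': 'admin', 'remember': True}
        'defaults': defaults or {},
        # per-field messages, e.g. {'password': 'invalid password'}
        'errors': errors or {},
    }
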
    diff --git a/rhodecode/templates/password_reset.html b/rhodecode/templates/password_reset.html --- a/rhodecode/templates/password_reset.html +++ b/rhodecode/templates/password_reset.html @@ -33,22 +33,30 @@
    - ${h.form(url('password_reset'), needs_csrf_token=False)} + ${h.form(request.route_path('reset_password'), needs_csrf_token=False)} - ${h.text('email')} + ${h.text('email', defaults.get('email'))} + %if 'email' in errors: + ${errors.get('email')} +
    + %endif - %if c.captcha_active: + %if captcha_active: %endif - ${h.submit('send',_('Send password reset email'),class_="btn sign-in")} + ${h.submit('send', _('Send password reset email'), class_="btn sign-in")}
    ${_('Password reset link will be sent to the matching email address')}
    ${h.end_form()} @@ -57,14 +65,14 @@
    -%if c.captcha_active: +%if captcha_active: %endif \ No newline at end of file + diff --git a/rhodecode/templates/pullrequests/pullrequest.html b/rhodecode/templates/pullrequests/pullrequest.html --- a/rhodecode/templates/pullrequests/pullrequest.html +++ b/rhodecode/templates/pullrequests/pullrequest.html @@ -271,7 +271,8 @@ 'source_ref_type': 'rev', 'target_ref': sourceRef[2], 'target_ref_type': 'rev', - 'merge': true + 'merge': true, + '_': Date.now() // bypass browser caching }; // gather the source/target ref and repo here if (sourceRef.length !== 3 || targetRef.length !== 3) { diff --git a/rhodecode/templates/register.html b/rhodecode/templates/register.html --- a/rhodecode/templates/register.html +++ b/rhodecode/templates/register.html @@ -1,6 +1,5 @@ ## -*- coding: utf-8 -*- <%inherit file="base/root.html"/> -<%namespace file="base/social_buttons.html" import="render_social_buttons"/> <%def name="title()"> ${_('Create an Account')} @@ -34,65 +33,91 @@
    - ${h.form(url('register'), needs_csrf_token= False)} + ${h.form(request.route_path('register'), needs_csrf_token=False)} + - ${h.text('username', c.form_data.get('username'))} + ${h.text('username', defaults.get('username'))} + %if 'username' in errors: + ${errors.get('username')} +
    + %endif + - ${h.password('password', c.form_data.get('password'))} - - ${h.password('password_confirmation', c.form_data.get('password'))} + ${h.password('password', defaults.get('password'))} + %if 'password' in errors: + ${errors.get('password')} +
    + %endif + + + ${h.password('password_confirmation', defaults.get('password_confirmation'))} + %if 'password_confirmation' in errors: + ${errors.get('password_confirmation')} +
    + %endif + - ${h.text('firstname')} + ${h.text('firstname', defaults.get('firstname'))} + %if 'firstname' in errors: + ${errors.get('firstname')} +
    + %endif + - ${h.text('lastname')} + ${h.text('lastname', defaults.get('lastname'))} + %if 'lastname' in errors: + ${errors.get('lastname')} +
    + %endif + - ${h.text('email', c.form_data.get('email'))} + ${h.text('email', defaults.get('email'))} + %if 'email' in errors: + ${errors.get('email')} +
    + %endif - %if c.captcha_active: + %if captcha_active:
    - + ${h.hidden('recaptcha_field')}
    + %if 'recaptcha_field' in errors: + ${errors.get('recaptcha_field')} +
    + %endif
    %endif - %if not c.auto_active: + %if not auto_active:

    ${_('Account activation requires admin approval.')}

    %endif

    - ${c.register_message|n} + ${register_message|n}

    ${h.submit('sign_up',_('Create Account'),class_="btn sign-in")} ${h.end_form()}
    - - % if c.social_plugins: -

    ${_('Register using one of external services')}:

    - -

    - ${render_social_buttons(c.social_plugins, 'register')} -

    - % endif - + <%block name="below_register_button" />
    -%if c.captcha_active: +%if captcha_active: %endif diff --git a/rhodecode/templates/search/search_commit.html b/rhodecode/templates/search/search_commit.html --- a/rhodecode/templates/search/search_commit.html +++ b/rhodecode/templates/search/search_commit.html @@ -6,7 +6,13 @@ ${_('Commit')} ${_('Commit message')} - ${_('Age')} + + %if c.sort == 'newfirst': + ${_('Age (new first)')} + %else: + ${_('Age (old first)')} + %endif + ${_('Author')} %for entry in c.formatted_results: @@ -33,14 +39,14 @@
-        %if entry['message_hl']:
+        %if entry.get('message_hl'):
           ${h.literal(entry['message_hl'])}
         %else:
           ${h.urlify_commit_message(entry['message'], entry['repository'])}
         %endif
-        ${h.age_component(h.time_to_datetime(entry['date']))}
+        ${h.age_component(h.time_to_utcdatetime(entry['date']))}
diff --git a/rhodecode/templates/search/search_content.html b/rhodecode/templates/search/search_content.html
--- a/rhodecode/templates/search/search_content.html
+++ b/rhodecode/templates/search/search_content.html
@@ -1,3 +1,40 @@
+<%def name="highlight_text_file(terms, text, url, line_context=3,
+                                max_lines=10,
+                                mimetype=None, filepath=None)">
+<%
+lines = text.split('\n')
+lines_of_interest = set()
+matching_lines = h.get_matching_line_offsets(lines, terms)
+shown_matching_lines = 0
+
+for line_number in matching_lines:
+    if len(lines_of_interest) < max_lines:
+        lines_of_interest |= set(range(
+            max(line_number - line_context, 0),
+            min(line_number + line_context, len(lines) + 1)))
+        shown_matching_lines += 1
+
+%>
+${h.code_highlight(
+    text,
+    h.get_lexer_safe(
+        mimetype=mimetype,
+        filepath=filepath,
+    ),
+    h.SearchContentCodeHtmlFormatter(
+        linenos=True,
+        cssclass="code-highlight",
+        url=url,
+        query_terms=terms,
+        only_line_numbers=lines_of_interest
+))|n}
+%if len(matching_lines) > shown_matching_lines:
+
+    ${len(matching_lines) - shown_matching_lines} ${_('more matches in this file')}
+
+%endif
+
+
 %for entry in c.formatted_results:
   ## search results are additionally filtered, and this check is just a safe gate
@@ -29,7 +66,7 @@
    ${_('Show Full History')} - | + | ${h.link_to(_('Annotation'), h.url('files_annotate_home', repo_name=entry.get('repository',''),revision=entry.get('commit_id', 'tip'),f_path=entry.get('f_path','')))} | ${h.link_to(_('Raw'), h.url('files_raw_home', repo_name=entry.get('repository',''),revision=entry.get('commit_id', 'tip'),f_path=entry.get('f_path','')))} | @@ -38,8 +75,10 @@
    -
    ${h.literal(entry['content_short_hl'])}
    -
+          ${highlight_text_file(c.cur_query, entry['content'],
+            url=h.url('files_home',repo_name=entry['repository'],revision=entry.get('commit_id', 'tip'),f_path=entry['f_path']),
+            mimetype=entry.get('mimetype'), filepath=entry.get('path'))}
+
     % endif
 %endfor
@@ -49,3 +88,14 @@
     ${c.formatted_results.pager('$link_previous ~2~ $link_next')}
    %endif + +%if c.cur_query: + +%endif \ No newline at end of file diff --git a/rhodecode/tests/__init__.py b/rhodecode/tests/__init__.py --- a/rhodecode/tests/__init__.py +++ b/rhodecode/tests/__init__.py @@ -43,11 +43,13 @@ from nose.plugins.skip import SkipTest import pytest from rhodecode import is_windows +from rhodecode.config.routing import ADMIN_PREFIX from rhodecode.model.meta import Session from rhodecode.model.db import User from rhodecode.lib import auth from rhodecode.lib.helpers import flash, link_to from rhodecode.lib.utils2 import safe_unicode, safe_str +from rhodecode.tests.utils import get_session_from_response # TODO: johbo: Solve time zone related issues and remove this tweak os.environ['TZ'] = 'UTC' @@ -177,26 +179,29 @@ class TestController(object): def login_user_session( app, username=TEST_USER_ADMIN_LOGIN, password=TEST_USER_ADMIN_PASS): - response = app.post(url(controller='login', action='index'), - {'username': username, - 'password': password}) - + from rhodecode.tests.functional.test_login import login_url + response = app.post( + login_url, + {'username': username, 'password': password}) if 'invalid user name' in response.body: pytest.fail('could not login using %s %s' % (username, password)) assert response.status == '302 Found' - ses = response.session['rhodecode_user'] - assert ses.get('username') == username response = response.follow() - assert ses.get('is_authenticated') + assert response.status == '200 OK' - return response.session + session = get_session_from_response(response) + assert 'rhodecode_user' in session + rc_user = session['rhodecode_user'] + assert rc_user.get('username') == username + assert rc_user.get('is_authenticated') + + return session def logout_user_session(app, csrf_token): - app.post( - url(controller='login', action='logout'), - {'csrf_token': csrf_token}, status=302) + from rhodecode.tests.functional.test_login import logut_url + app.post(logut_url, {'csrf_token': csrf_token}, status=302) def login_user(app, username=TEST_USER_ADMIN_LOGIN, diff --git a/rhodecode/tests/functional/test_admin_auth_settings.py b/rhodecode/tests/functional/test_admin_auth_settings.py --- a/rhodecode/tests/functional/test_admin_auth_settings.py +++ b/rhodecode/tests/functional/test_admin_auth_settings.py @@ -20,7 +20,8 @@ import pytest -from rhodecode.tests import assert_session_flash, url +from rhodecode.tests import assert_session_flash +from rhodecode.tests.utils import AssertResponse from rhodecode.model.db import Session from rhodecode.model.settings import SettingsModel @@ -150,12 +151,14 @@ class TestAuthSettingsController(object) 'egg:rhodecode-enterprise-ce#rhodecode,' 'egg:rhodecode-enterprise-ce#ldap', csrf_token) + invalid_port_value = 'invalid-port-number' response = self._post_ldap_settings(params, override={ - 'port': 'invalid-port-number', + 'port': invalid_port_value, }) - response.mustcontain( - '"invalid-port-number"' - ' is not a number') + assertr = AssertResponse(response) + assertr.element_contains( + '.form .field #port ~ .error-message', + invalid_port_value) def test_ldap_error_form(self, csrf_token): params = self._enable_plugins( diff --git a/rhodecode/tests/functional/test_admin_my_account.py b/rhodecode/tests/functional/test_admin_my_account.py --- a/rhodecode/tests/functional/test_admin_my_account.py +++ b/rhodecode/tests/functional/test_admin_my_account.py @@ -339,53 +339,3 @@ class TestMyAccountController(TestContro new_password_hash = response.session['rhodecode_user']['password'] assert old_password_hash != 
new_password_hash - - def test_my_account_oauth_tokens_empty(self): - usr = self.log_user('test_regular2', 'test12') - User.get(usr['user_id']) - response = self.app.get(url('my_account_oauth')) - response.mustcontain(no=['Connect with GitHub']) - response.mustcontain('You have no accounts linked yet') - - def test_my_account_oauth_tokens_present(self): - from rhodecode.model.db import ExternalIdentity - usr = self.log_user('test_regular2', 'test12') - user = User.get(usr['user_id']) - - ex_identity = ExternalIdentity() - ex_identity.external_id = '55' - ex_identity.provider_name = 'twitter' - ex_identity.local_user_id = user.user_id - db_session = Session() - db_session.add(ex_identity) - Session.flush() - db_session.commit() - try: - response = self.app.get(url('my_account_oauth')) - response.mustcontain('twitter', - no=['You have no accounts linked yet']) - finally: - db_session = Session() - db_session.delete(ex_identity) - db_session.commit() - - def test_my_account_oauth_tokens_delete(self): - from rhodecode.model.db import ExternalIdentity - usr = self.log_user('test_regular2', 'test12') - user = User.get(usr['user_id']) - - ex_identity = ExternalIdentity() - ex_identity.external_id = '99' - ex_identity.provider_name = 'twitter' - ex_identity.local_user_id = user.user_id - db_session = Session() - db_session.add(ex_identity) - Session.flush() - db_session.commit() - assert ExternalIdentity.query().count() == 1 - response = self.app.post( - url('my_account_oauth', provider_name='twitter', - external_id='99'), - {'_method': 'delete', 'csrf_token': self.csrf_token}) - assert_session_flash(response, 'OAuth token successfully deleted') - assert ExternalIdentity.query().count() == 0 diff --git a/rhodecode/tests/functional/test_admin_settings.py b/rhodecode/tests/functional/test_admin_settings.py --- a/rhodecode/tests/functional/test_admin_settings.py +++ b/rhodecode/tests/functional/test_admin_settings.py @@ -22,6 +22,7 @@ import mock import pytest import rhodecode +from rhodecode.config.routing import ADMIN_PREFIX from rhodecode.lib.utils2 import md5 from rhodecode.model.db import RhodeCodeUi from rhodecode.model.meta import Session @@ -157,7 +158,7 @@ class TestAdminSettingsGlobal: 'csrf_token': csrf_token, }) - response = self.app.get(url('register')) + response = self.app.get(ADMIN_PREFIX + '/register') response.mustcontain('captcha') def test_captcha_deactivate(self, csrf_token): @@ -167,7 +168,7 @@ class TestAdminSettingsGlobal: 'csrf_token': csrf_token, }) - response = self.app.get(url('register')) + response = self.app.get(ADMIN_PREFIX + '/register') response.mustcontain(no=['captcha']) def test_title_change(self, csrf_token): diff --git a/rhodecode/tests/functional/test_admin_user_groups.py b/rhodecode/tests/functional/test_admin_user_groups.py --- a/rhodecode/tests/functional/test_admin_user_groups.py +++ b/rhodecode/tests/functional/test_admin_user_groups.py @@ -35,7 +35,8 @@ class TestAdminUsersGroupsController(Tes def test_index(self): self.log_user() - self.app.get(url('users_groups')) + response = self.app.get(url('users_groups')) + response.status_int == 200 def test_create(self): self.log_user() @@ -148,7 +149,19 @@ class TestAdminUsersGroupsController(Tes fixture.destroy_user_group(users_group_name) def test_edit(self): - self.app.get(url('edit_users_group', user_group_id=1)) + self.log_user() + ug = fixture.create_user_group(TEST_USER_GROUP, skip_if_exists=True) + response = self.app.get( + url('edit_users_group', user_group_id=ug.users_group_id)) + 
fixture.destroy_user_group(TEST_USER_GROUP) + + def test_edit_user_group_members(self): + self.log_user() + ug = fixture.create_user_group(TEST_USER_GROUP, skip_if_exists=True) + response = self.app.get( + url('edit_user_group_members', user_group_id=ug.users_group_id)) + response.mustcontain('No members yet') + fixture.destroy_user_group(TEST_USER_GROUP) def test_usergroup_escape(self): user = User.get_by_username('test_admin') diff --git a/rhodecode/tests/functional/test_compare_local.py b/rhodecode/tests/functional/test_compare_local.py --- a/rhodecode/tests/functional/test_compare_local.py +++ b/rhodecode/tests/functional/test_compare_local.py @@ -77,7 +77,7 @@ class TestCompareController: 'hg': { 'tag': 'v0.2.0', 'branch': 'default', - 'response': (147, 5700, 10176) + 'response': (147, 5701, 10177) }, 'git': { 'tag': 'v0.2.2', diff --git a/rhodecode/tests/functional/test_home.py b/rhodecode/tests/functional/test_home.py --- a/rhodecode/tests/functional/test_home.py +++ b/rhodecode/tests/functional/test_home.py @@ -181,19 +181,25 @@ class TestUserAutocompleteData(TestContr def assert_and_get_content(result): repos = [] groups = [] + commits = [] for data in result: for data_item in data['children']: assert data_item['id'] assert data_item['text'] + assert data_item['url'] if data_item['type'] == 'repo': repos.append(data_item) - else: + elif data_item['type'] == 'group': groups.append(data_item) + elif data_item['type'] == 'commit': + commits.append(data_item) + else: + raise Exception('invalid type %s' % data_item['type']) - return repos, groups + return repos, groups, commits -class TestRepoSwitcherData(TestController): +class TestGotoSwitcherData(TestController): required_repos_with_groups = [ 'abc', 'abc-fork', @@ -253,39 +259,41 @@ class TestRepoSwitcherData(TestControlle self.log_user() response = self.app.get( - url(controller='home', action='repo_switcher_data'), + url(controller='home', action='goto_switcher_data'), headers={'X-REQUESTED-WITH': 'XMLHttpRequest', }, status=200) result = json.loads(response.body)['results'] - repos, groups = assert_and_get_content(result) + repos, groups, commits = assert_and_get_content(result) assert len(repos) == len(Repository.get_all()) assert len(groups) == len(RepoGroup.get_all()) + assert len(commits) == 0 def test_returns_list_of_repos_and_groups_filtered(self): self.log_user() response = self.app.get( - url(controller='home', action='repo_switcher_data'), + url(controller='home', action='goto_switcher_data'), headers={'X-REQUESTED-WITH': 'XMLHttpRequest', }, params={'query': 'abc'}, status=200) result = json.loads(response.body)['results'] - repos, groups = assert_and_get_content(result) + repos, groups, commits = assert_and_get_content(result) assert len(repos) == 13 assert len(groups) == 5 + assert len(commits) == 0 def test_returns_list_of_properly_sorted_and_filtered(self): self.log_user() response = self.app.get( - url(controller='home', action='repo_switcher_data'), + url(controller='home', action='goto_switcher_data'), headers={'X-REQUESTED-WITH': 'XMLHttpRequest', }, params={'query': 'abc'}, status=200) result = json.loads(response.body)['results'] - repos, groups = assert_and_get_content(result) + repos, groups, commits = assert_and_get_content(result) test_repos = [x['text'] for x in repos[:4]] assert ['abc', 'abcd', 'a/abc', 'abcde'] == test_repos @@ -300,54 +308,58 @@ class TestRepoListData(TestController): self.log_user() response = self.app.get( - url(controller='home', action='repo_switcher_data'), + 
url(controller='home', action='repo_list_data'), headers={'X-REQUESTED-WITH': 'XMLHttpRequest', }, status=200) result = json.loads(response.body)['results'] - repos, groups = assert_and_get_content(result) + repos, groups, commits = assert_and_get_content(result) assert len(repos) == len(Repository.get_all()) assert len(groups) == 0 + assert len(commits) == 0 def test_returns_list_of_repos_and_groups_filtered(self): self.log_user() response = self.app.get( - url(controller='home', action='repo_switcher_data'), + url(controller='home', action='repo_list_data'), headers={'X-REQUESTED-WITH': 'XMLHttpRequest', }, params={'query': 'vcs_test_git'}, status=200) result = json.loads(response.body)['results'] - repos, groups = assert_and_get_content(result) + repos, groups, commits = assert_and_get_content(result) assert len(repos) == len(Repository.query().filter( Repository.repo_name.ilike('%vcs_test_git%')).all()) assert len(groups) == 0 + assert len(commits) == 0 def test_returns_list_of_repos_and_groups_filtered_with_type(self): self.log_user() response = self.app.get( - url(controller='home', action='repo_switcher_data'), + url(controller='home', action='repo_list_data'), headers={'X-REQUESTED-WITH': 'XMLHttpRequest', }, params={'query': 'vcs_test_git', 'repo_type': 'git'}, status=200) result = json.loads(response.body)['results'] - repos, groups = assert_and_get_content(result) + repos, groups, commits = assert_and_get_content(result) assert len(repos) == len(Repository.query().filter( Repository.repo_name.ilike('%vcs_test_git%')).all()) assert len(groups) == 0 + assert len(commits) == 0 def test_returns_list_of_repos_non_ascii_query(self): self.log_user() response = self.app.get( - url(controller='home', action='repo_switcher_data'), + url(controller='home', action='repo_list_data'), headers={'X-REQUESTED-WITH': 'XMLHttpRequest', }, params={'query': 'ć_vcs_test_ą', 'repo_type': 'git'}, status=200) result = json.loads(response.body)['results'] - repos, groups = assert_and_get_content(result) + repos, groups, commits = assert_and_get_content(result) assert len(repos) == 0 assert len(groups) == 0 + assert len(commits) == 0 diff --git a/rhodecode/tests/functional/test_login.py b/rhodecode/tests/functional/test_login.py --- a/rhodecode/tests/functional/test_login.py +++ b/rhodecode/tests/functional/test_login.py @@ -23,9 +23,11 @@ import urlparse import mock import pytest +from rhodecode.config.routing import ADMIN_PREFIX from rhodecode.tests import ( assert_session_flash, url, HG_REPO, TEST_USER_ADMIN_LOGIN) from rhodecode.tests.fixture import Fixture +from rhodecode.tests.utils import AssertResponse, get_session_from_response from rhodecode.lib.auth import check_password, generate_auth_token from rhodecode.lib import helpers as h from rhodecode.model.auth_token import AuthTokenModel @@ -35,6 +37,14 @@ from rhodecode.model.meta import Session fixture = Fixture() +# Hardcode URLs because we don't have a request object to use +# pyramids URL generation methods. 
+login_url = ADMIN_PREFIX + '/login' +logut_url = ADMIN_PREFIX + '/logout' +register_url = ADMIN_PREFIX + '/register' +pwd_reset_url = ADMIN_PREFIX + '/password_reset' +pwd_reset_confirm_url = ADMIN_PREFIX + '/password_reset_confirmation' + @pytest.mark.usefixtures('app') class TestLoginController: @@ -52,37 +62,38 @@ class TestLoginController: assert Notification.query().all() == [] def test_index(self): - response = self.app.get(url(controller='login', action='index')) + response = self.app.get(login_url) assert response.status == '200 OK' # Test response... def test_login_admin_ok(self): - response = self.app.post(url(controller='login', action='index'), + response = self.app.post(login_url, {'username': 'test_admin', 'password': 'test12'}) assert response.status == '302 Found' - username = response.session['rhodecode_user'].get('username') + session = get_session_from_response(response) + username = session['rhodecode_user'].get('username') assert username == 'test_admin' response = response.follow() response.mustcontain('/%s' % HG_REPO) def test_login_regular_ok(self): - response = self.app.post(url(controller='login', action='index'), + response = self.app.post(login_url, {'username': 'test_regular', 'password': 'test12'}) assert response.status == '302 Found' - username = response.session['rhodecode_user'].get('username') + session = get_session_from_response(response) + username = session['rhodecode_user'].get('username') assert username == 'test_regular' response = response.follow() response.mustcontain('/%s' % HG_REPO) def test_login_ok_came_from(self): test_came_from = '/_admin/users?branch=stable' - response = self.app.post(url(controller='login', action='index', - came_from=test_came_from), - {'username': 'test_admin', - 'password': 'test12'}) + _url = '{}?came_from={}'.format(login_url, test_came_from) + response = self.app.post( + _url, {'username': 'test_admin', 'password': 'test12'}) assert response.status == '302 Found' assert 'branch=stable' in response.location response = response.follow() @@ -100,33 +111,30 @@ class TestLoginController: assert 'branch=stable' in response_query[0][1] def test_login_form_with_get_args(self): - kwargs = {'branch': 'stable'} - response = self.app.get( - url(controller='login', action='index', - came_from='/_admin/users', **kwargs)) - assert 'branch=stable' in response.form.action + _url = '{}?came_from=/_admin/users,branch=stable'.format(login_url) + response = self.app.get(_url) + assert 'branch%3Dstable' in response.form.action @pytest.mark.parametrize("url_came_from", [ - ('data:text/html,',), - ('mailto:test@rhodecode.org',), - ('file:///etc/passwd',), - ('ftp://some.ftp.server',), - ('http://other.domain',), - ('/\r\nX-Forwarded-Host: http://example.org',), + 'data:text/html,', + 'mailto:test@rhodecode.org', + 'file:///etc/passwd', + 'ftp://some.ftp.server', + 'http://other.domain', + '/\r\nX-Forwarded-Host: http://example.org', ]) def test_login_bad_came_froms(self, url_came_from): - response = self.app.post(url(controller='login', action='index', - came_from=url_came_from), - {'username': 'test_admin', - 'password': 'test12'}) + _url = '{}?came_from={}'.format(login_url, url_came_from) + response = self.app.post( + _url, + {'username': 'test_admin', 'password': 'test12'}) assert response.status == '302 Found' - assert response.tmpl_context.came_from == '/' - response = response.follow() assert response.status == '200 OK' + assert response.request.path == '/' def test_login_short_password(self): - response = 
self.app.post(url(controller='login', action='index'), + response = self.app.post(login_url, {'username': 'test_admin', 'password': 'as'}) assert response.status == '200 OK' @@ -135,7 +143,7 @@ class TestLoginController: def test_login_wrong_non_ascii_password(self, user_regular): response = self.app.post( - url(controller='login', action='index'), + login_url, {'username': user_regular.username, 'password': u'invalid-non-asci\xe4'.encode('utf8')}) @@ -146,13 +154,13 @@ class TestLoginController: password = u'valid-non-ascii\xe4' user = user_util.create_user(password=password) response = self.app.post( - url(controller='login', action='index'), + login_url, {'username': user.username, 'password': password.encode('utf-8')}) assert response.status_code == 302 def test_login_wrong_username_password(self): - response = self.app.post(url(controller='login', action='index'), + response = self.app.post(login_url, {'username': 'error', 'password': 'test12'}) @@ -170,12 +178,13 @@ class TestLoginController: Session().add(user) Session().commit() self.destroy_users.add(temp_user) - response = self.app.post(url(controller='login', action='index'), + response = self.app.post(login_url, {'username': temp_user, 'password': 'test123'}) assert response.status == '302 Found' - username = response.session['rhodecode_user'].get('username') + session = get_session_from_response(response) + username = session['rhodecode_user'].get('username') assert username == temp_user response = response.follow() response.mustcontain('/%s' % HG_REPO) @@ -186,13 +195,13 @@ class TestLoginController: # REGISTRATIONS def test_register(self): - response = self.app.get(url(controller='login', action='register')) + response = self.app.get(register_url) response.mustcontain('Create an Account') def test_register_err_same_username(self): uname = 'test_admin' response = self.app.post( - url(controller='login', action='register'), + register_url, { 'username': uname, 'password': 'test12', @@ -203,13 +212,14 @@ class TestLoginController: } ) + assertr = AssertResponse(response) msg = validators.ValidUsername()._messages['username_exists'] - msg = h.html_escape(msg % {'username': uname}) - response.mustcontain(msg) + msg = msg % {'username': uname} + assertr.element_contains('#username+.error-message', msg) def test_register_err_same_email(self): response = self.app.post( - url(controller='login', action='register'), + register_url, { 'username': 'test_admin_0', 'password': 'test12', @@ -220,12 +230,13 @@ class TestLoginController: } ) + assertr = AssertResponse(response) msg = validators.UniqSystemEmail()()._messages['email_taken'] - response.mustcontain(msg) + assertr.element_contains('#email+.error-message', msg) def test_register_err_same_email_case_sensitive(self): response = self.app.post( - url(controller='login', action='register'), + register_url, { 'username': 'test_admin_1', 'password': 'test12', @@ -235,12 +246,13 @@ class TestLoginController: 'lastname': 'test' } ) + assertr = AssertResponse(response) msg = validators.UniqSystemEmail()()._messages['email_taken'] - response.mustcontain(msg) + assertr.element_contains('#email+.error-message', msg) def test_register_err_wrong_data(self): response = self.app.post( - url(controller='login', action='register'), + register_url, { 'username': 'xs', 'password': 'test', @@ -256,7 +268,7 @@ class TestLoginController: def test_register_err_username(self): response = self.app.post( - url(controller='login', action='register'), + register_url, { 'username': 'error user', 'password': 
'test12', @@ -277,7 +289,7 @@ class TestLoginController: def test_register_err_case_sensitive(self): usr = 'Test_Admin' response = self.app.post( - url(controller='login', action='register'), + register_url, { 'username': usr, 'password': 'test12', @@ -288,14 +300,14 @@ class TestLoginController: } ) - response.mustcontain('An email address must contain a single @') + assertr = AssertResponse(response) msg = validators.ValidUsername()._messages['username_exists'] - msg = h.html_escape(msg % {'username': usr}) - response.mustcontain(msg) + msg = msg % {'username': usr} + assertr.element_contains('#username+.error-message', msg) def test_register_special_chars(self): response = self.app.post( - url(controller='login', action='register'), + register_url, { 'username': 'xxxaxn', 'password': 'ąćźżąśśśś', @@ -311,7 +323,7 @@ class TestLoginController: def test_register_password_mismatch(self): response = self.app.post( - url(controller='login', action='register'), + register_url, { 'username': 'xs', 'password': '123qwe', @@ -332,7 +344,7 @@ class TestLoginController: lastname = 'testlastname' response = self.app.post( - url(controller='login', action='register'), + register_url, { 'username': username, 'password': password, @@ -360,7 +372,7 @@ class TestLoginController: def test_forgot_password_wrong_mail(self): bad_email = 'marcin@wrongmail.org' response = self.app.post( - url(controller='login', action='password_reset'), + pwd_reset_url, {'email': bad_email, } ) @@ -369,8 +381,7 @@ class TestLoginController: response.mustcontain() def test_forgot_password(self): - response = self.app.get(url(controller='login', - action='password_reset')) + response = self.app.get(pwd_reset_url) assert response.status == '200 OK' username = 'test_password_reset_1' @@ -389,8 +400,7 @@ class TestLoginController: Session().add(new) Session().commit() - response = self.app.post(url(controller='login', - action='password_reset'), + response = self.app.post(pwd_reset_url, {'email': email, }) assert_session_flash( @@ -401,20 +411,18 @@ class TestLoginController: # BAD KEY key = "bad" - response = self.app.get(url(controller='login', - action='password_reset_confirmation', - key=key)) + confirm_url = '{}?key={}'.format(pwd_reset_confirm_url, key) + response = self.app.get(confirm_url) assert response.status == '302 Found' - assert response.location.endswith(url('reset_password')) + assert response.location.endswith(pwd_reset_url) # GOOD KEY key = User.get_by_username(username).api_key - response = self.app.get(url(controller='login', - action='password_reset_confirmation', - key=key)) + confirm_url = '{}?key={}'.format(pwd_reset_confirm_url, key) + response = self.app.get(confirm_url) assert response.status == '302 Found' - assert response.location.endswith(url('login_home')) + assert response.location.endswith(login_url) assert_session_flash( response, diff --git a/rhodecode/tests/functional/test_pullrequests.py b/rhodecode/tests/functional/test_pullrequests.py --- a/rhodecode/tests/functional/test_pullrequests.py +++ b/rhodecode/tests/functional/test_pullrequests.py @@ -99,12 +99,13 @@ class TestPullrequestsController: in response) != pr_merge_enabled def test_close_status_visibility(self, pr_util, csrf_token): + from rhodecode.tests.functional.test_login import login_url, logut_url # Logout response = self.app.post( - url(controller='login', action='logout'), + logut_url, params={'csrf_token': csrf_token}) # Login as regular user - response = self.app.post(url(controller='login', action='index'), + response = 
self.app.post(login_url, {'username': 'test_regular', 'password': 'test12'}) diff --git a/rhodecode/tests/functional/test_search.py b/rhodecode/tests/functional/test_search.py --- a/rhodecode/tests/functional/test_search.py +++ b/rhodecode/tests/functional/test_search.py @@ -129,6 +129,10 @@ class TestSearchController(TestControlle ('author:marcin@python-blog.com ' 'commit_id:b986218ba1c9b0d6a259fac9b050b1724ed8e545', 1, [ ('hg', 'b986218ba1c9b0d6a259fac9b050b1724ed8e545')]), + ('b986218ba1c9b0d6a259fac9b050b1724ed8e545', 1, [ + ('hg', 'b986218ba1c9b0d6a259fac9b050b1724ed8e545')]), + ('b986218b', 1, [ + ('hg', 'b986218ba1c9b0d6a259fac9b050b1724ed8e545')]), ]) def test_search_commit_messages( self, query, expected_hits, expected_commits, enabled_backends): diff --git a/rhodecode/tests/lib/auth_modules/test_auth_modules.py b/rhodecode/tests/lib/auth_modules/test_auth_modules.py --- a/rhodecode/tests/lib/auth_modules/test_auth_modules.py +++ b/rhodecode/tests/lib/auth_modules/test_auth_modules.py @@ -155,3 +155,29 @@ class TestRhodeCodeAuthPlugin(object): self.password_generator_mock = password_generator_patch.start() self.password_generator_mock.return_value = 'new-password' self.finalizers.append(password_generator_patch.stop) + + +def test_missing_ldap(): + from rhodecode.model.validators import Missing + + try: + import ldap_not_existing + except ImportError: + # means that python-ldap is not installed + ldap_not_existing = Missing + + # missing is singleton + assert ldap_not_existing == Missing + + +def test_import_ldap(): + from rhodecode.model.validators import Missing + + try: + import ldap + except ImportError: + # means that python-ldap is not installed + ldap = Missing + + # missing is singleton + assert False is (ldap == Missing) diff --git a/rhodecode/tests/lib/test_auth.py b/rhodecode/tests/lib/test_auth.py --- a/rhodecode/tests/lib/test_auth.py +++ b/rhodecode/tests/lib/test_auth.py @@ -32,6 +32,36 @@ from rhodecode.model.user import UserMod from rhodecode.model.user_group import UserGroupModel +def test_perm_origin_dict(): + pod = auth.PermOriginDict() + pod['thing'] = 'read', 'default' + assert pod['thing'] == 'read' + + assert pod.perm_origin_stack == { + 'thing': [('read', 'default')]} + + pod['thing'] = 'write', 'admin' + assert pod['thing'] == 'write' + + assert pod.perm_origin_stack == { + 'thing': [('read', 'default'), ('write', 'admin')]} + + pod['other'] = 'write', 'default' + + assert pod.perm_origin_stack == { + 'other': [('write', 'default')], + 'thing': [('read', 'default'), ('write', 'admin')]} + + pod['other'] = 'none', 'override' + + assert pod.perm_origin_stack == { + 'other': [('write', 'default'), ('none', 'override')], + 'thing': [('read', 'default'), ('write', 'admin')]} + + with pytest.raises(ValueError): + pod['thing'] = 'read' + + def test_cached_perms_data(user_regular, backend_random): permissions = get_permissions(user_regular) repo_name = backend_random.repo.repo_name diff --git a/rhodecode/tests/lib/test_helpers.py b/rhodecode/tests/lib/test_helpers.py --- a/rhodecode/tests/lib/test_helpers.py +++ b/rhodecode/tests/lib/test_helpers.py @@ -155,3 +155,42 @@ def test_get_visual_attr(pylonsapp): def test_chop_at(test_text, inclusive, expected_text): assert helpers.chop_at_smart( test_text, '\n', inclusive, '...') == expected_text + + +@pytest.mark.parametrize('test_text, expected_output', [ + ('some text', ['some', 'text']), + ('some text', ['some', 'text']), + ('some text "with a phrase"', ['some', 'text', 'with a phrase']), + ('"a phrase" "another 
phrase"', ['a phrase', 'another phrase']), + ('"justphrase"', ['justphrase']), + ('""', []), + ('', []), + (' ', []), + ('" "', []), +]) +def test_extract_phrases(test_text, expected_output): + assert helpers.extract_phrases(test_text) == expected_output + + +@pytest.mark.parametrize('test_text, text_phrases, expected_output', [ + ('some text here', ['some', 'here'], [(0, 4), (10, 14)]), + ('here here there', ['here'], [(0, 4), (5, 9), (11, 15)]), + ('irrelevant', ['not found'], []), + ('irrelevant', ['not found'], []), +]) +def test_get_matching_offsets(test_text, text_phrases, expected_output): + assert helpers.get_matching_offsets( + test_text, text_phrases) == expected_output + +def test_normalize_text_for_matching(): + assert helpers.normalize_text_for_matching( + 'OJjfe)*#$*@)$JF*)3r2f80h') == 'ojjfe jf 3r2f80h' + +def test_get_matching_line_offsets(): + assert helpers.get_matching_line_offsets([ + 'words words words', + 'words words words', + 'some text some', + 'words words words', + 'words words words', + 'text here what'], 'text') == {3: [(5, 9)], 6: [(0, 4)]} \ No newline at end of file diff --git a/rhodecode/tests/utils.py b/rhodecode/tests/utils.py --- a/rhodecode/tests/utils.py +++ b/rhodecode/tests/utils.py @@ -270,3 +270,13 @@ def is_url_reachable(url): except urllib2.URLError: return False return True + + +def get_session_from_response(response): + """ + This returns the session from a response object. Pylons has some magic + to make the session available as `response.session`. But pyramid + doesn't expose it. + """ + # TODO: Try to look up the session key also. + return response.request.environ['beaker.session'] diff --git a/rhodecode/translation.py b/rhodecode/translation.py new file mode 100644 --- /dev/null +++ b/rhodecode/translation.py @@ -0,0 +1,22 @@ +# Copyright (C) 2016-2016 RhodeCode GmbH +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License, version 3 +# (only), as published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . +# +# This program is dual-licensed. If you wish to learn more about the +# RhodeCode Enterprise Edition, including its added features, Support services, +# and proprietary license terms, please see https://rhodecode.com/licenses/ + +from pyramid.i18n import TranslationStringFactory + +# Create a translation string factory for the 'rhodecode' domain. +_ = TranslationStringFactory('rhodecode') diff --git a/scripts/node-packages.nix b/scripts/node-packages.nix deleted file mode 100644 --- a/scripts/node-packages.nix +++ /dev/null @@ -1,3341 +0,0 @@ -{ self, fetchurl, fetchgit ? 
null, lib }: - -{ - by-spec."abbrev"."1" = - self.by-version."abbrev"."1.0.7"; - by-version."abbrev"."1.0.7" = lib.makeOverridable self.buildNodePackage { - name = "abbrev-1.0.7"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/abbrev/-/abbrev-1.0.7.tgz"; - name = "abbrev-1.0.7.tgz"; - sha1 = "5b6035b2ee9d4fb5cf859f08a9be81b208491843"; - }) - ]; - buildInputs = - (self.nativeDeps."abbrev" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "abbrev" ]; - }; - by-spec."amdefine".">=0.0.4" = - self.by-version."amdefine"."1.0.0"; - by-version."amdefine"."1.0.0" = lib.makeOverridable self.buildNodePackage { - name = "amdefine-1.0.0"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/amdefine/-/amdefine-1.0.0.tgz"; - name = "amdefine-1.0.0.tgz"; - sha1 = "fd17474700cb5cc9c2b709f0be9d23ce3c198c33"; - }) - ]; - buildInputs = - (self.nativeDeps."amdefine" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "amdefine" ]; - }; - by-spec."ansi-regex"."^0.2.0" = - self.by-version."ansi-regex"."0.2.1"; - by-version."ansi-regex"."0.2.1" = lib.makeOverridable self.buildNodePackage { - name = "ansi-regex-0.2.1"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/ansi-regex/-/ansi-regex-0.2.1.tgz"; - name = "ansi-regex-0.2.1.tgz"; - sha1 = "0d8e946967a3d8143f93e24e298525fc1b2235f9"; - }) - ]; - buildInputs = - (self.nativeDeps."ansi-regex" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "ansi-regex" ]; - }; - by-spec."ansi-regex"."^0.2.1" = - self.by-version."ansi-regex"."0.2.1"; - by-spec."ansi-regex"."^2.0.0" = - self.by-version."ansi-regex"."2.0.0"; - by-version."ansi-regex"."2.0.0" = lib.makeOverridable self.buildNodePackage { - name = "ansi-regex-2.0.0"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/ansi-regex/-/ansi-regex-2.0.0.tgz"; - name = "ansi-regex-2.0.0.tgz"; - sha1 = "c5061b6e0ef8a81775e50f5d66151bf6bf371107"; - }) - ]; - buildInputs = - (self.nativeDeps."ansi-regex" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "ansi-regex" ]; - }; - by-spec."ansi-styles"."^1.1.0" = - self.by-version."ansi-styles"."1.1.0"; - by-version."ansi-styles"."1.1.0" = lib.makeOverridable self.buildNodePackage { - name = "ansi-styles-1.1.0"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/ansi-styles/-/ansi-styles-1.1.0.tgz"; - name = "ansi-styles-1.1.0.tgz"; - sha1 = "eaecbf66cd706882760b2f4691582b8f55d7a7de"; - }) - ]; - buildInputs = - (self.nativeDeps."ansi-styles" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "ansi-styles" ]; - }; - by-spec."ansi-styles"."^2.1.0" = - self.by-version."ansi-styles"."2.1.0"; - by-version."ansi-styles"."2.1.0" = lib.makeOverridable self.buildNodePackage { - name = "ansi-styles-2.1.0"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/ansi-styles/-/ansi-styles-2.1.0.tgz"; - name = "ansi-styles-2.1.0.tgz"; - sha1 = "990f747146927b559a932bf92959163d60c0d0e2"; - }) - ]; - buildInputs = - (self.nativeDeps."ansi-styles" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "ansi-styles" ]; - }; - by-spec."argparse"."~ 0.1.11" = - self.by-version."argparse"."0.1.16"; - by-version."argparse"."0.1.16" = lib.makeOverridable self.buildNodePackage { - name = "argparse-0.1.16"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/argparse/-/argparse-0.1.16.tgz"; - name = 
"argparse-0.1.16.tgz"; - sha1 = "cfd01e0fbba3d6caed049fbd758d40f65196f57c"; - }) - ]; - buildInputs = - (self.nativeDeps."argparse" or []); - deps = { - "underscore-1.7.0" = self.by-version."underscore"."1.7.0"; - "underscore.string-2.4.0" = self.by-version."underscore.string"."2.4.0"; - }; - peerDependencies = [ - ]; - passthru.names = [ "argparse" ]; - }; - by-spec."asap"."~1.0.0" = - self.by-version."asap"."1.0.0"; - by-version."asap"."1.0.0" = lib.makeOverridable self.buildNodePackage { - name = "asap-1.0.0"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/asap/-/asap-1.0.0.tgz"; - name = "asap-1.0.0.tgz"; - sha1 = "b2a45da5fdfa20b0496fc3768cc27c12fa916a7d"; - }) - ]; - buildInputs = - (self.nativeDeps."asap" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "asap" ]; - }; - by-spec."asn1".">=0.2.3 <0.3.0" = - self.by-version."asn1"."0.2.3"; - by-version."asn1"."0.2.3" = lib.makeOverridable self.buildNodePackage { - name = "asn1-0.2.3"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/asn1/-/asn1-0.2.3.tgz"; - name = "asn1-0.2.3.tgz"; - sha1 = "dac8787713c9966849fc8180777ebe9c1ddf3b86"; - }) - ]; - buildInputs = - (self.nativeDeps."asn1" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "asn1" ]; - }; - by-spec."assert-plus".">=0.2.0 <0.3.0" = - self.by-version."assert-plus"."0.2.0"; - by-version."assert-plus"."0.2.0" = lib.makeOverridable self.buildNodePackage { - name = "assert-plus-0.2.0"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/assert-plus/-/assert-plus-0.2.0.tgz"; - name = "assert-plus-0.2.0.tgz"; - sha1 = "d74e1b87e7affc0db8aadb7021f3fe48101ab234"; - }) - ]; - buildInputs = - (self.nativeDeps."assert-plus" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "assert-plus" ]; - }; - by-spec."assert-plus"."^0.1.5" = - self.by-version."assert-plus"."0.1.5"; - by-version."assert-plus"."0.1.5" = lib.makeOverridable self.buildNodePackage { - name = "assert-plus-0.1.5"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/assert-plus/-/assert-plus-0.1.5.tgz"; - name = "assert-plus-0.1.5.tgz"; - sha1 = "ee74009413002d84cec7219c6ac811812e723160"; - }) - ]; - buildInputs = - (self.nativeDeps."assert-plus" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "assert-plus" ]; - }; - by-spec."assert-plus"."^0.2.0" = - self.by-version."assert-plus"."0.2.0"; - by-spec."async"."^0.9.0" = - self.by-version."async"."0.9.2"; - by-version."async"."0.9.2" = lib.makeOverridable self.buildNodePackage { - name = "async-0.9.2"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/async/-/async-0.9.2.tgz"; - name = "async-0.9.2.tgz"; - sha1 = "aea74d5e61c1f899613bf64bda66d4c78f2fd17d"; - }) - ]; - buildInputs = - (self.nativeDeps."async" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "async" ]; - }; - by-spec."async"."^1.4.0" = - self.by-version."async"."1.5.2"; - by-version."async"."1.5.2" = lib.makeOverridable self.buildNodePackage { - name = "async-1.5.2"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/async/-/async-1.5.2.tgz"; - name = "async-1.5.2.tgz"; - sha1 = "ec6a61ae56480c0c3cb241c95618e20892f9672a"; - }) - ]; - buildInputs = - (self.nativeDeps."async" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "async" ]; - }; - by-spec."async"."~0.1.22" = - self.by-version."async"."0.1.22"; - by-version."async"."0.1.22" 
= lib.makeOverridable self.buildNodePackage { - name = "async-0.1.22"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/async/-/async-0.1.22.tgz"; - name = "async-0.1.22.tgz"; - sha1 = "0fc1aaa088a0e3ef0ebe2d8831bab0dcf8845061"; - }) - ]; - buildInputs = - (self.nativeDeps."async" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "async" ]; - }; - by-spec."async"."~0.2.9" = - self.by-version."async"."0.2.10"; - by-version."async"."0.2.10" = lib.makeOverridable self.buildNodePackage { - name = "async-0.2.10"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/async/-/async-0.2.10.tgz"; - name = "async-0.2.10.tgz"; - sha1 = "b6bbe0b0674b9d719708ca38de8c237cb526c3d1"; - }) - ]; - buildInputs = - (self.nativeDeps."async" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "async" ]; - }; - by-spec."aws-sign2"."~0.6.0" = - self.by-version."aws-sign2"."0.6.0"; - by-version."aws-sign2"."0.6.0" = lib.makeOverridable self.buildNodePackage { - name = "aws-sign2-0.6.0"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/aws-sign2/-/aws-sign2-0.6.0.tgz"; - name = "aws-sign2-0.6.0.tgz"; - sha1 = "14342dd38dbcc94d0e5b87d763cd63612c0e794f"; - }) - ]; - buildInputs = - (self.nativeDeps."aws-sign2" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "aws-sign2" ]; - }; - by-spec."balanced-match"."^0.3.0" = - self.by-version."balanced-match"."0.3.0"; - by-version."balanced-match"."0.3.0" = lib.makeOverridable self.buildNodePackage { - name = "balanced-match-0.3.0"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/balanced-match/-/balanced-match-0.3.0.tgz"; - name = "balanced-match-0.3.0.tgz"; - sha1 = "a91cdd1ebef1a86659e70ff4def01625fc2d6756"; - }) - ]; - buildInputs = - (self.nativeDeps."balanced-match" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "balanced-match" ]; - }; - by-spec."bl"."~1.0.0" = - self.by-version."bl"."1.0.1"; - by-version."bl"."1.0.1" = lib.makeOverridable self.buildNodePackage { - name = "bl-1.0.1"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/bl/-/bl-1.0.1.tgz"; - name = "bl-1.0.1.tgz"; - sha1 = "0e6df7330308c46515751676cafa7334dc9852fd"; - }) - ]; - buildInputs = - (self.nativeDeps."bl" or []); - deps = { - "readable-stream-2.0.5" = self.by-version."readable-stream"."2.0.5"; - }; - peerDependencies = [ - ]; - passthru.names = [ "bl" ]; - }; - by-spec."boom"."2.x.x" = - self.by-version."boom"."2.10.1"; - by-version."boom"."2.10.1" = lib.makeOverridable self.buildNodePackage { - name = "boom-2.10.1"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/boom/-/boom-2.10.1.tgz"; - name = "boom-2.10.1.tgz"; - sha1 = "39c8918ceff5799f83f9492a848f625add0c766f"; - }) - ]; - buildInputs = - (self.nativeDeps."boom" or []); - deps = { - "hoek-2.16.3" = self.by-version."hoek"."2.16.3"; - }; - peerDependencies = [ - ]; - passthru.names = [ "boom" ]; - }; - by-spec."brace-expansion"."^1.0.0" = - self.by-version."brace-expansion"."1.1.2"; - by-version."brace-expansion"."1.1.2" = lib.makeOverridable self.buildNodePackage { - name = "brace-expansion-1.1.2"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.2.tgz"; - name = "brace-expansion-1.1.2.tgz"; - sha1 = "f21445d0488b658e2771efd870eff51df29f04ef"; - }) - ]; - buildInputs = - (self.nativeDeps."brace-expansion" or []); - deps = { - 
"balanced-match-0.3.0" = self.by-version."balanced-match"."0.3.0"; - "concat-map-0.0.1" = self.by-version."concat-map"."0.0.1"; - }; - peerDependencies = [ - ]; - passthru.names = [ "brace-expansion" ]; - }; - by-spec."caseless"."~0.11.0" = - self.by-version."caseless"."0.11.0"; - by-version."caseless"."0.11.0" = lib.makeOverridable self.buildNodePackage { - name = "caseless-0.11.0"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/caseless/-/caseless-0.11.0.tgz"; - name = "caseless-0.11.0.tgz"; - sha1 = "715b96ea9841593cc33067923f5ec60ebda4f7d7"; - }) - ]; - buildInputs = - (self.nativeDeps."caseless" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "caseless" ]; - }; - by-spec."chalk"."^0.5.1" = - self.by-version."chalk"."0.5.1"; - by-version."chalk"."0.5.1" = lib.makeOverridable self.buildNodePackage { - name = "chalk-0.5.1"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/chalk/-/chalk-0.5.1.tgz"; - name = "chalk-0.5.1.tgz"; - sha1 = "663b3a648b68b55d04690d49167aa837858f2174"; - }) - ]; - buildInputs = - (self.nativeDeps."chalk" or []); - deps = { - "ansi-styles-1.1.0" = self.by-version."ansi-styles"."1.1.0"; - "escape-string-regexp-1.0.4" = self.by-version."escape-string-regexp"."1.0.4"; - "has-ansi-0.1.0" = self.by-version."has-ansi"."0.1.0"; - "strip-ansi-0.3.0" = self.by-version."strip-ansi"."0.3.0"; - "supports-color-0.2.0" = self.by-version."supports-color"."0.2.0"; - }; - peerDependencies = [ - ]; - passthru.names = [ "chalk" ]; - }; - by-spec."chalk"."^1.0.0" = - self.by-version."chalk"."1.1.1"; - by-version."chalk"."1.1.1" = lib.makeOverridable self.buildNodePackage { - name = "chalk-1.1.1"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/chalk/-/chalk-1.1.1.tgz"; - name = "chalk-1.1.1.tgz"; - sha1 = "509afb67066e7499f7eb3535c77445772ae2d019"; - }) - ]; - buildInputs = - (self.nativeDeps."chalk" or []); - deps = { - "ansi-styles-2.1.0" = self.by-version."ansi-styles"."2.1.0"; - "escape-string-regexp-1.0.4" = self.by-version."escape-string-regexp"."1.0.4"; - "has-ansi-2.0.0" = self.by-version."has-ansi"."2.0.0"; - "strip-ansi-3.0.0" = self.by-version."strip-ansi"."3.0.0"; - "supports-color-2.0.0" = self.by-version."supports-color"."2.0.0"; - }; - peerDependencies = [ - ]; - passthru.names = [ "chalk" ]; - }; - by-spec."chalk"."^1.1.1" = - self.by-version."chalk"."1.1.1"; - by-spec."cli"."0.6.x" = - self.by-version."cli"."0.6.6"; - by-version."cli"."0.6.6" = lib.makeOverridable self.buildNodePackage { - name = "cli-0.6.6"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/cli/-/cli-0.6.6.tgz"; - name = "cli-0.6.6.tgz"; - sha1 = "02ad44a380abf27adac5e6f0cdd7b043d74c53e3"; - }) - ]; - buildInputs = - (self.nativeDeps."cli" or []); - deps = { - "glob-3.2.11" = self.by-version."glob"."3.2.11"; - "exit-0.1.2" = self.by-version."exit"."0.1.2"; - }; - peerDependencies = [ - ]; - passthru.names = [ "cli" ]; - }; - by-spec."coffee-script"."~1.3.3" = - self.by-version."coffee-script"."1.3.3"; - by-version."coffee-script"."1.3.3" = lib.makeOverridable self.buildNodePackage { - name = "coffee-script-1.3.3"; - bin = true; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/coffee-script/-/coffee-script-1.3.3.tgz"; - name = "coffee-script-1.3.3.tgz"; - sha1 = "150d6b4cb522894369efed6a2101c20bc7f4a4f4"; - }) - ]; - buildInputs = - (self.nativeDeps."coffee-script" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "coffee-script" ]; - }; - 
by-spec."colors"."~0.6.2" = - self.by-version."colors"."0.6.2"; - by-version."colors"."0.6.2" = lib.makeOverridable self.buildNodePackage { - name = "colors-0.6.2"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/colors/-/colors-0.6.2.tgz"; - name = "colors-0.6.2.tgz"; - sha1 = "2423fe6678ac0c5dae8852e5d0e5be08c997abcc"; - }) - ]; - buildInputs = - (self.nativeDeps."colors" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "colors" ]; - }; - by-spec."combined-stream"."^1.0.5" = - self.by-version."combined-stream"."1.0.5"; - by-version."combined-stream"."1.0.5" = lib.makeOverridable self.buildNodePackage { - name = "combined-stream-1.0.5"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/combined-stream/-/combined-stream-1.0.5.tgz"; - name = "combined-stream-1.0.5.tgz"; - sha1 = "938370a57b4a51dea2c77c15d5c5fdf895164009"; - }) - ]; - buildInputs = - (self.nativeDeps."combined-stream" or []); - deps = { - "delayed-stream-1.0.0" = self.by-version."delayed-stream"."1.0.0"; - }; - peerDependencies = [ - ]; - passthru.names = [ "combined-stream" ]; - }; - by-spec."combined-stream"."~1.0.5" = - self.by-version."combined-stream"."1.0.5"; - by-spec."commander"."^2.9.0" = - self.by-version."commander"."2.9.0"; - by-version."commander"."2.9.0" = lib.makeOverridable self.buildNodePackage { - name = "commander-2.9.0"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/commander/-/commander-2.9.0.tgz"; - name = "commander-2.9.0.tgz"; - sha1 = "9c99094176e12240cb22d6c5146098400fe0f7d4"; - }) - ]; - buildInputs = - (self.nativeDeps."commander" or []); - deps = { - "graceful-readlink-1.0.1" = self.by-version."graceful-readlink"."1.0.1"; - }; - peerDependencies = [ - ]; - passthru.names = [ "commander" ]; - }; - by-spec."concat-map"."0.0.1" = - self.by-version."concat-map"."0.0.1"; - by-version."concat-map"."0.0.1" = lib.makeOverridable self.buildNodePackage { - name = "concat-map-0.0.1"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz"; - name = "concat-map-0.0.1.tgz"; - sha1 = "d8a96bd77fd68df7793a73036a3ba0d5405d477b"; - }) - ]; - buildInputs = - (self.nativeDeps."concat-map" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "concat-map" ]; - }; - by-spec."console-browserify"."1.1.x" = - self.by-version."console-browserify"."1.1.0"; - by-version."console-browserify"."1.1.0" = lib.makeOverridable self.buildNodePackage { - name = "console-browserify-1.1.0"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/console-browserify/-/console-browserify-1.1.0.tgz"; - name = "console-browserify-1.1.0.tgz"; - sha1 = "f0241c45730a9fc6323b206dbf38edc741d0bb10"; - }) - ]; - buildInputs = - (self.nativeDeps."console-browserify" or []); - deps = { - "date-now-0.1.4" = self.by-version."date-now"."0.1.4"; - }; - peerDependencies = [ - ]; - passthru.names = [ "console-browserify" ]; - }; - by-spec."core-util-is"."~1.0.0" = - self.by-version."core-util-is"."1.0.2"; - by-version."core-util-is"."1.0.2" = lib.makeOverridable self.buildNodePackage { - name = "core-util-is-1.0.2"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz"; - name = "core-util-is-1.0.2.tgz"; - sha1 = "b5fd54220aa2bc5ab57aab7140c940754503c1a7"; - }) - ]; - buildInputs = - (self.nativeDeps."core-util-is" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "core-util-is" ]; - 
}; - by-spec."cryptiles"."2.x.x" = - self.by-version."cryptiles"."2.0.5"; - by-version."cryptiles"."2.0.5" = lib.makeOverridable self.buildNodePackage { - name = "cryptiles-2.0.5"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/cryptiles/-/cryptiles-2.0.5.tgz"; - name = "cryptiles-2.0.5.tgz"; - sha1 = "3bdfecdc608147c1c67202fa291e7dca59eaa3b8"; - }) - ]; - buildInputs = - (self.nativeDeps."cryptiles" or []); - deps = { - "boom-2.10.1" = self.by-version."boom"."2.10.1"; - }; - peerDependencies = [ - ]; - passthru.names = [ "cryptiles" ]; - }; - by-spec."dashdash".">=1.10.1 <2.0.0" = - self.by-version."dashdash"."1.12.2"; - by-version."dashdash"."1.12.2" = lib.makeOverridable self.buildNodePackage { - name = "dashdash-1.12.2"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/dashdash/-/dashdash-1.12.2.tgz"; - name = "dashdash-1.12.2.tgz"; - sha1 = "1c6f70588498d047b8cd5777b32ba85a5e25be36"; - }) - ]; - buildInputs = - (self.nativeDeps."dashdash" or []); - deps = { - "assert-plus-0.2.0" = self.by-version."assert-plus"."0.2.0"; - }; - peerDependencies = [ - ]; - passthru.names = [ "dashdash" ]; - }; - by-spec."date-now"."^0.1.4" = - self.by-version."date-now"."0.1.4"; - by-version."date-now"."0.1.4" = lib.makeOverridable self.buildNodePackage { - name = "date-now-0.1.4"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/date-now/-/date-now-0.1.4.tgz"; - name = "date-now-0.1.4.tgz"; - sha1 = "eaf439fd4d4848ad74e5cc7dbef200672b9e345b"; - }) - ]; - buildInputs = - (self.nativeDeps."date-now" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "date-now" ]; - }; - by-spec."dateformat"."1.0.2-1.2.3" = - self.by-version."dateformat"."1.0.2-1.2.3"; - by-version."dateformat"."1.0.2-1.2.3" = lib.makeOverridable self.buildNodePackage { - name = "dateformat-1.0.2-1.2.3"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/dateformat/-/dateformat-1.0.2-1.2.3.tgz"; - name = "dateformat-1.0.2-1.2.3.tgz"; - sha1 = "b0220c02de98617433b72851cf47de3df2cdbee9"; - }) - ]; - buildInputs = - (self.nativeDeps."dateformat" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "dateformat" ]; - }; - by-spec."debug"."~0.7.0" = - self.by-version."debug"."0.7.4"; - by-version."debug"."0.7.4" = lib.makeOverridable self.buildNodePackage { - name = "debug-0.7.4"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/debug/-/debug-0.7.4.tgz"; - name = "debug-0.7.4.tgz"; - sha1 = "06e1ea8082c2cb14e39806e22e2f6f757f92af39"; - }) - ]; - buildInputs = - (self.nativeDeps."debug" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "debug" ]; - }; - by-spec."delayed-stream"."~1.0.0" = - self.by-version."delayed-stream"."1.0.0"; - by-version."delayed-stream"."1.0.0" = lib.makeOverridable self.buildNodePackage { - name = "delayed-stream-1.0.0"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz"; - name = "delayed-stream-1.0.0.tgz"; - sha1 = "df3ae199acadfb7d440aaae0b29e2272b24ec619"; - }) - ]; - buildInputs = - (self.nativeDeps."delayed-stream" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "delayed-stream" ]; - }; - by-spec."dom-serializer"."0" = - self.by-version."dom-serializer"."0.1.0"; - by-version."dom-serializer"."0.1.0" = lib.makeOverridable self.buildNodePackage { - name = "dom-serializer-0.1.0"; - bin = false; - src = [ - (fetchurl { - url = 
"http://registry.npmjs.org/dom-serializer/-/dom-serializer-0.1.0.tgz"; - name = "dom-serializer-0.1.0.tgz"; - sha1 = "073c697546ce0780ce23be4a28e293e40bc30c82"; - }) - ]; - buildInputs = - (self.nativeDeps."dom-serializer" or []); - deps = { - "domelementtype-1.1.3" = self.by-version."domelementtype"."1.1.3"; - "entities-1.1.1" = self.by-version."entities"."1.1.1"; - }; - peerDependencies = [ - ]; - passthru.names = [ "dom-serializer" ]; - }; - by-spec."domelementtype"."1" = - self.by-version."domelementtype"."1.3.0"; - by-version."domelementtype"."1.3.0" = lib.makeOverridable self.buildNodePackage { - name = "domelementtype-1.3.0"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/domelementtype/-/domelementtype-1.3.0.tgz"; - name = "domelementtype-1.3.0.tgz"; - sha1 = "b17aed82e8ab59e52dd9c19b1756e0fc187204c2"; - }) - ]; - buildInputs = - (self.nativeDeps."domelementtype" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "domelementtype" ]; - }; - by-spec."domelementtype"."~1.1.1" = - self.by-version."domelementtype"."1.1.3"; - by-version."domelementtype"."1.1.3" = lib.makeOverridable self.buildNodePackage { - name = "domelementtype-1.1.3"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/domelementtype/-/domelementtype-1.1.3.tgz"; - name = "domelementtype-1.1.3.tgz"; - sha1 = "bd28773e2642881aec51544924299c5cd822185b"; - }) - ]; - buildInputs = - (self.nativeDeps."domelementtype" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "domelementtype" ]; - }; - by-spec."domhandler"."2.3" = - self.by-version."domhandler"."2.3.0"; - by-version."domhandler"."2.3.0" = lib.makeOverridable self.buildNodePackage { - name = "domhandler-2.3.0"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/domhandler/-/domhandler-2.3.0.tgz"; - name = "domhandler-2.3.0.tgz"; - sha1 = "2de59a0822d5027fabff6f032c2b25a2a8abe738"; - }) - ]; - buildInputs = - (self.nativeDeps."domhandler" or []); - deps = { - "domelementtype-1.3.0" = self.by-version."domelementtype"."1.3.0"; - }; - peerDependencies = [ - ]; - passthru.names = [ "domhandler" ]; - }; - by-spec."domutils"."1.5" = - self.by-version."domutils"."1.5.1"; - by-version."domutils"."1.5.1" = lib.makeOverridable self.buildNodePackage { - name = "domutils-1.5.1"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/domutils/-/domutils-1.5.1.tgz"; - name = "domutils-1.5.1.tgz"; - sha1 = "dcd8488a26f563d61079e48c9f7b7e32373682cf"; - }) - ]; - buildInputs = - (self.nativeDeps."domutils" or []); - deps = { - "dom-serializer-0.1.0" = self.by-version."dom-serializer"."0.1.0"; - "domelementtype-1.3.0" = self.by-version."domelementtype"."1.3.0"; - }; - peerDependencies = [ - ]; - passthru.names = [ "domutils" ]; - }; - by-spec."ecc-jsbn".">=0.0.1 <1.0.0" = - self.by-version."ecc-jsbn"."0.1.1"; - by-version."ecc-jsbn"."0.1.1" = lib.makeOverridable self.buildNodePackage { - name = "ecc-jsbn-0.1.1"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.1.tgz"; - name = "ecc-jsbn-0.1.1.tgz"; - sha1 = "0fc73a9ed5f0d53c38193398523ef7e543777505"; - }) - ]; - buildInputs = - (self.nativeDeps."ecc-jsbn" or []); - deps = { - "jsbn-0.1.0" = self.by-version."jsbn"."0.1.0"; - }; - peerDependencies = [ - ]; - passthru.names = [ "ecc-jsbn" ]; - }; - by-spec."entities"."1.0" = - self.by-version."entities"."1.0.0"; - by-version."entities"."1.0.0" = lib.makeOverridable self.buildNodePackage { - name = 
"entities-1.0.0"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/entities/-/entities-1.0.0.tgz"; - name = "entities-1.0.0.tgz"; - sha1 = "b2987aa3821347fcde642b24fdfc9e4fb712bf26"; - }) - ]; - buildInputs = - (self.nativeDeps."entities" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "entities" ]; - }; - by-spec."entities"."~1.1.1" = - self.by-version."entities"."1.1.1"; - by-version."entities"."1.1.1" = lib.makeOverridable self.buildNodePackage { - name = "entities-1.1.1"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/entities/-/entities-1.1.1.tgz"; - name = "entities-1.1.1.tgz"; - sha1 = "6e5c2d0a5621b5dadaecef80b90edfb5cd7772f0"; - }) - ]; - buildInputs = - (self.nativeDeps."entities" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "entities" ]; - }; - by-spec."errno"."^0.1.1" = - self.by-version."errno"."0.1.4"; - by-version."errno"."0.1.4" = lib.makeOverridable self.buildNodePackage { - name = "errno-0.1.4"; - bin = true; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/errno/-/errno-0.1.4.tgz"; - name = "errno-0.1.4.tgz"; - sha1 = "b896e23a9e5e8ba33871fc996abd3635fc9a1c7d"; - }) - ]; - buildInputs = - (self.nativeDeps."errno" or []); - deps = { - "prr-0.0.0" = self.by-version."prr"."0.0.0"; - }; - peerDependencies = [ - ]; - passthru.names = [ "errno" ]; - }; - by-spec."escape-string-regexp"."^1.0.0" = - self.by-version."escape-string-regexp"."1.0.4"; - by-version."escape-string-regexp"."1.0.4" = lib.makeOverridable self.buildNodePackage { - name = "escape-string-regexp-1.0.4"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.4.tgz"; - name = "escape-string-regexp-1.0.4.tgz"; - sha1 = "b85e679b46f72d03fbbe8a3bf7259d535c21b62f"; - }) - ]; - buildInputs = - (self.nativeDeps."escape-string-regexp" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "escape-string-regexp" ]; - }; - by-spec."escape-string-regexp"."^1.0.2" = - self.by-version."escape-string-regexp"."1.0.4"; - by-spec."esprima"."~ 1.0.2" = - self.by-version."esprima"."1.0.4"; - by-version."esprima"."1.0.4" = lib.makeOverridable self.buildNodePackage { - name = "esprima-1.0.4"; - bin = true; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/esprima/-/esprima-1.0.4.tgz"; - name = "esprima-1.0.4.tgz"; - sha1 = "9f557e08fc3b4d26ece9dd34f8fbf476b62585ad"; - }) - ]; - buildInputs = - (self.nativeDeps."esprima" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "esprima" ]; - }; - by-spec."eventemitter2"."~0.4.13" = - self.by-version."eventemitter2"."0.4.14"; - by-version."eventemitter2"."0.4.14" = lib.makeOverridable self.buildNodePackage { - name = "eventemitter2-0.4.14"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/eventemitter2/-/eventemitter2-0.4.14.tgz"; - name = "eventemitter2-0.4.14.tgz"; - sha1 = "8f61b75cde012b2e9eb284d4545583b5643b61ab"; - }) - ]; - buildInputs = - (self.nativeDeps."eventemitter2" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "eventemitter2" ]; - }; - by-spec."exit"."0.1.2" = - self.by-version."exit"."0.1.2"; - by-version."exit"."0.1.2" = lib.makeOverridable self.buildNodePackage { - name = "exit-0.1.2"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/exit/-/exit-0.1.2.tgz"; - name = "exit-0.1.2.tgz"; - sha1 = "0632638f8d877cc82107d30a0fff1a17cba1cd0c"; - }) - ]; - buildInputs = - 
(self.nativeDeps."exit" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "exit" ]; - }; - by-spec."exit"."0.1.x" = - self.by-version."exit"."0.1.2"; - by-spec."exit"."~0.1.1" = - self.by-version."exit"."0.1.2"; - by-spec."extend"."~3.0.0" = - self.by-version."extend"."3.0.0"; - by-version."extend"."3.0.0" = lib.makeOverridable self.buildNodePackage { - name = "extend-3.0.0"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/extend/-/extend-3.0.0.tgz"; - name = "extend-3.0.0.tgz"; - sha1 = "5a474353b9f3353ddd8176dfd37b91c83a46f1d4"; - }) - ]; - buildInputs = - (self.nativeDeps."extend" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "extend" ]; - }; - by-spec."extsprintf"."1.0.2" = - self.by-version."extsprintf"."1.0.2"; - by-version."extsprintf"."1.0.2" = lib.makeOverridable self.buildNodePackage { - name = "extsprintf-1.0.2"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/extsprintf/-/extsprintf-1.0.2.tgz"; - name = "extsprintf-1.0.2.tgz"; - sha1 = "e1080e0658e300b06294990cc70e1502235fd550"; - }) - ]; - buildInputs = - (self.nativeDeps."extsprintf" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "extsprintf" ]; - }; - by-spec."faye-websocket"."~0.4.3" = - self.by-version."faye-websocket"."0.4.4"; - by-version."faye-websocket"."0.4.4" = lib.makeOverridable self.buildNodePackage { - name = "faye-websocket-0.4.4"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/faye-websocket/-/faye-websocket-0.4.4.tgz"; - name = "faye-websocket-0.4.4.tgz"; - sha1 = "c14c5b3bf14d7417ffbfd990c0a7495cd9f337bc"; - }) - ]; - buildInputs = - (self.nativeDeps."faye-websocket" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "faye-websocket" ]; - }; - by-spec."findup-sync"."~0.1.2" = - self.by-version."findup-sync"."0.1.3"; - by-version."findup-sync"."0.1.3" = lib.makeOverridable self.buildNodePackage { - name = "findup-sync-0.1.3"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/findup-sync/-/findup-sync-0.1.3.tgz"; - name = "findup-sync-0.1.3.tgz"; - sha1 = "7f3e7a97b82392c653bf06589bd85190e93c3683"; - }) - ]; - buildInputs = - (self.nativeDeps."findup-sync" or []); - deps = { - "glob-3.2.11" = self.by-version."glob"."3.2.11"; - "lodash-2.4.2" = self.by-version."lodash"."2.4.2"; - }; - peerDependencies = [ - ]; - passthru.names = [ "findup-sync" ]; - }; - by-spec."forever-agent"."~0.6.1" = - self.by-version."forever-agent"."0.6.1"; - by-version."forever-agent"."0.6.1" = lib.makeOverridable self.buildNodePackage { - name = "forever-agent-0.6.1"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz"; - name = "forever-agent-0.6.1.tgz"; - sha1 = "fbc71f0c41adeb37f96c577ad1ed42d8fdacca91"; - }) - ]; - buildInputs = - (self.nativeDeps."forever-agent" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "forever-agent" ]; - }; - by-spec."form-data"."~1.0.0-rc3" = - self.by-version."form-data"."1.0.0-rc3"; - by-version."form-data"."1.0.0-rc3" = lib.makeOverridable self.buildNodePackage { - name = "form-data-1.0.0-rc3"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/form-data/-/form-data-1.0.0-rc3.tgz"; - name = "form-data-1.0.0-rc3.tgz"; - sha1 = "d35bc62e7fbc2937ae78f948aaa0d38d90607577"; - }) - ]; - buildInputs = - (self.nativeDeps."form-data" or []); - deps = { - "async-1.5.2" = self.by-version."async"."1.5.2"; - 
"combined-stream-1.0.5" = self.by-version."combined-stream"."1.0.5"; - "mime-types-2.1.9" = self.by-version."mime-types"."2.1.9"; - }; - peerDependencies = [ - ]; - passthru.names = [ "form-data" ]; - }; - by-spec."gaze"."~0.5.1" = - self.by-version."gaze"."0.5.2"; - by-version."gaze"."0.5.2" = lib.makeOverridable self.buildNodePackage { - name = "gaze-0.5.2"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/gaze/-/gaze-0.5.2.tgz"; - name = "gaze-0.5.2.tgz"; - sha1 = "40b709537d24d1d45767db5a908689dfe69ac44f"; - }) - ]; - buildInputs = - (self.nativeDeps."gaze" or []); - deps = { - "globule-0.1.0" = self.by-version."globule"."0.1.0"; - }; - peerDependencies = [ - ]; - passthru.names = [ "gaze" ]; - }; - by-spec."generate-function"."^2.0.0" = - self.by-version."generate-function"."2.0.0"; - by-version."generate-function"."2.0.0" = lib.makeOverridable self.buildNodePackage { - name = "generate-function-2.0.0"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/generate-function/-/generate-function-2.0.0.tgz"; - name = "generate-function-2.0.0.tgz"; - sha1 = "6858fe7c0969b7d4e9093337647ac79f60dfbe74"; - }) - ]; - buildInputs = - (self.nativeDeps."generate-function" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "generate-function" ]; - }; - by-spec."generate-object-property"."^1.1.0" = - self.by-version."generate-object-property"."1.2.0"; - by-version."generate-object-property"."1.2.0" = lib.makeOverridable self.buildNodePackage { - name = "generate-object-property-1.2.0"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/generate-object-property/-/generate-object-property-1.2.0.tgz"; - name = "generate-object-property-1.2.0.tgz"; - sha1 = "9c0e1c40308ce804f4783618b937fa88f99d50d0"; - }) - ]; - buildInputs = - (self.nativeDeps."generate-object-property" or []); - deps = { - "is-property-1.0.2" = self.by-version."is-property"."1.0.2"; - }; - peerDependencies = [ - ]; - passthru.names = [ "generate-object-property" ]; - }; - by-spec."getobject"."~0.1.0" = - self.by-version."getobject"."0.1.0"; - by-version."getobject"."0.1.0" = lib.makeOverridable self.buildNodePackage { - name = "getobject-0.1.0"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/getobject/-/getobject-0.1.0.tgz"; - name = "getobject-0.1.0.tgz"; - sha1 = "047a449789fa160d018f5486ed91320b6ec7885c"; - }) - ]; - buildInputs = - (self.nativeDeps."getobject" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "getobject" ]; - }; - by-spec."glob"."~ 3.2.1" = - self.by-version."glob"."3.2.11"; - by-version."glob"."3.2.11" = lib.makeOverridable self.buildNodePackage { - name = "glob-3.2.11"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/glob/-/glob-3.2.11.tgz"; - name = "glob-3.2.11.tgz"; - sha1 = "4a973f635b9190f715d10987d5c00fd2815ebe3d"; - }) - ]; - buildInputs = - (self.nativeDeps."glob" or []); - deps = { - "inherits-2.0.1" = self.by-version."inherits"."2.0.1"; - "minimatch-0.3.0" = self.by-version."minimatch"."0.3.0"; - }; - peerDependencies = [ - ]; - passthru.names = [ "glob" ]; - }; - by-spec."glob"."~3.1.21" = - self.by-version."glob"."3.1.21"; - by-version."glob"."3.1.21" = lib.makeOverridable self.buildNodePackage { - name = "glob-3.1.21"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/glob/-/glob-3.1.21.tgz"; - name = "glob-3.1.21.tgz"; - sha1 = "d29e0a055dea5138f4d07ed40e8982e83c2066cd"; - }) - ]; - buildInputs = - 
(self.nativeDeps."glob" or []); - deps = { - "minimatch-0.2.14" = self.by-version."minimatch"."0.2.14"; - "graceful-fs-1.2.3" = self.by-version."graceful-fs"."1.2.3"; - "inherits-1.0.2" = self.by-version."inherits"."1.0.2"; - }; - peerDependencies = [ - ]; - passthru.names = [ "glob" ]; - }; - by-spec."glob"."~3.2.9" = - self.by-version."glob"."3.2.11"; - by-spec."globule"."~0.1.0" = - self.by-version."globule"."0.1.0"; - by-version."globule"."0.1.0" = lib.makeOverridable self.buildNodePackage { - name = "globule-0.1.0"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/globule/-/globule-0.1.0.tgz"; - name = "globule-0.1.0.tgz"; - sha1 = "d9c8edde1da79d125a151b79533b978676346ae5"; - }) - ]; - buildInputs = - (self.nativeDeps."globule" or []); - deps = { - "lodash-1.0.2" = self.by-version."lodash"."1.0.2"; - "glob-3.1.21" = self.by-version."glob"."3.1.21"; - "minimatch-0.2.14" = self.by-version."minimatch"."0.2.14"; - }; - peerDependencies = [ - ]; - passthru.names = [ "globule" ]; - }; - by-spec."graceful-fs"."^3.0.5" = - self.by-version."graceful-fs"."3.0.8"; - by-version."graceful-fs"."3.0.8" = lib.makeOverridable self.buildNodePackage { - name = "graceful-fs-3.0.8"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/graceful-fs/-/graceful-fs-3.0.8.tgz"; - name = "graceful-fs-3.0.8.tgz"; - sha1 = "ce813e725fa82f7e6147d51c9a5ca68270551c22"; - }) - ]; - buildInputs = - (self.nativeDeps."graceful-fs" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "graceful-fs" ]; - }; - by-spec."graceful-fs"."~1.2.0" = - self.by-version."graceful-fs"."1.2.3"; - by-version."graceful-fs"."1.2.3" = lib.makeOverridable self.buildNodePackage { - name = "graceful-fs-1.2.3"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/graceful-fs/-/graceful-fs-1.2.3.tgz"; - name = "graceful-fs-1.2.3.tgz"; - sha1 = "15a4806a57547cb2d2dbf27f42e89a8c3451b364"; - }) - ]; - buildInputs = - (self.nativeDeps."graceful-fs" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "graceful-fs" ]; - }; - by-spec."graceful-readlink".">= 1.0.0" = - self.by-version."graceful-readlink"."1.0.1"; - by-version."graceful-readlink"."1.0.1" = lib.makeOverridable self.buildNodePackage { - name = "graceful-readlink-1.0.1"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/graceful-readlink/-/graceful-readlink-1.0.1.tgz"; - name = "graceful-readlink-1.0.1.tgz"; - sha1 = "4cafad76bc62f02fa039b2f94e9a3dd3a391a725"; - }) - ]; - buildInputs = - (self.nativeDeps."graceful-readlink" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "graceful-readlink" ]; - }; - by-spec."grunt".">=0.4.0" = - self.by-version."grunt"."0.4.5"; - by-version."grunt"."0.4.5" = lib.makeOverridable self.buildNodePackage { - name = "grunt-0.4.5"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/grunt/-/grunt-0.4.5.tgz"; - name = "grunt-0.4.5.tgz"; - sha1 = "56937cd5194324adff6d207631832a9d6ba4e7f0"; - }) - ]; - buildInputs = - (self.nativeDeps."grunt" or []); - deps = { - "async-0.1.22" = self.by-version."async"."0.1.22"; - "coffee-script-1.3.3" = self.by-version."coffee-script"."1.3.3"; - "colors-0.6.2" = self.by-version."colors"."0.6.2"; - "dateformat-1.0.2-1.2.3" = self.by-version."dateformat"."1.0.2-1.2.3"; - "eventemitter2-0.4.14" = self.by-version."eventemitter2"."0.4.14"; - "findup-sync-0.1.3" = self.by-version."findup-sync"."0.1.3"; - "glob-3.1.21" = self.by-version."glob"."3.1.21"; - 
"hooker-0.2.3" = self.by-version."hooker"."0.2.3"; - "iconv-lite-0.2.11" = self.by-version."iconv-lite"."0.2.11"; - "minimatch-0.2.14" = self.by-version."minimatch"."0.2.14"; - "nopt-1.0.10" = self.by-version."nopt"."1.0.10"; - "rimraf-2.2.8" = self.by-version."rimraf"."2.2.8"; - "lodash-0.9.2" = self.by-version."lodash"."0.9.2"; - "underscore.string-2.2.1" = self.by-version."underscore.string"."2.2.1"; - "which-1.0.9" = self.by-version."which"."1.0.9"; - "js-yaml-2.0.5" = self.by-version."js-yaml"."2.0.5"; - "exit-0.1.2" = self.by-version."exit"."0.1.2"; - "getobject-0.1.0" = self.by-version."getobject"."0.1.0"; - "grunt-legacy-util-0.2.0" = self.by-version."grunt-legacy-util"."0.2.0"; - "grunt-legacy-log-0.1.3" = self.by-version."grunt-legacy-log"."0.1.3"; - }; - peerDependencies = [ - ]; - passthru.names = [ "grunt" ]; - }; - by-spec."grunt"."^0.4.5" = - self.by-version."grunt"."0.4.5"; - "grunt" = self.by-version."grunt"."0.4.5"; - by-spec."grunt"."~0.4.0" = - self.by-version."grunt"."0.4.5"; - by-spec."grunt-contrib-concat"."^0.5.1" = - self.by-version."grunt-contrib-concat"."0.5.1"; - by-version."grunt-contrib-concat"."0.5.1" = lib.makeOverridable self.buildNodePackage { - name = "grunt-contrib-concat-0.5.1"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/grunt-contrib-concat/-/grunt-contrib-concat-0.5.1.tgz"; - name = "grunt-contrib-concat-0.5.1.tgz"; - sha1 = "953c6efdfdfd2c107ab9c85077f2d4b24d31cd49"; - }) - ]; - buildInputs = - (self.nativeDeps."grunt-contrib-concat" or []); - deps = { - "chalk-0.5.1" = self.by-version."chalk"."0.5.1"; - "source-map-0.3.0" = self.by-version."source-map"."0.3.0"; - }; - peerDependencies = [ - self.by-version."grunt"."0.4.5" - ]; - passthru.names = [ "grunt-contrib-concat" ]; - }; - "grunt-contrib-concat" = self.by-version."grunt-contrib-concat"."0.5.1"; - by-spec."grunt-contrib-jshint"."^0.12.0" = - self.by-version."grunt-contrib-jshint"."0.12.0"; - by-version."grunt-contrib-jshint"."0.12.0" = lib.makeOverridable self.buildNodePackage { - name = "grunt-contrib-jshint-0.12.0"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/grunt-contrib-jshint/-/grunt-contrib-jshint-0.12.0.tgz"; - name = "grunt-contrib-jshint-0.12.0.tgz"; - sha1 = "f6b2f06fc715264837a7ab6c69a1ce1a689c2c29"; - }) - ]; - buildInputs = - (self.nativeDeps."grunt-contrib-jshint" or []); - deps = { - "hooker-0.2.3" = self.by-version."hooker"."0.2.3"; - "jshint-2.9.1" = self.by-version."jshint"."2.9.1"; - }; - peerDependencies = [ - self.by-version."grunt"."0.4.5" - ]; - passthru.names = [ "grunt-contrib-jshint" ]; - }; - "grunt-contrib-jshint" = self.by-version."grunt-contrib-jshint"."0.12.0"; - by-spec."grunt-contrib-less"."^1.1.0" = - self.by-version."grunt-contrib-less"."1.1.0"; - by-version."grunt-contrib-less"."1.1.0" = lib.makeOverridable self.buildNodePackage { - name = "grunt-contrib-less-1.1.0"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/grunt-contrib-less/-/grunt-contrib-less-1.1.0.tgz"; - name = "grunt-contrib-less-1.1.0.tgz"; - sha1 = "44d5c5521ad76f3675a12374125d019b5dd03f51"; - }) - ]; - buildInputs = - (self.nativeDeps."grunt-contrib-less" or []); - deps = { - "async-0.9.2" = self.by-version."async"."0.9.2"; - "chalk-1.1.1" = self.by-version."chalk"."1.1.1"; - "less-2.5.3" = self.by-version."less"."2.5.3"; - "lodash-3.10.1" = self.by-version."lodash"."3.10.1"; - }; - peerDependencies = [ - self.by-version."grunt"."0.4.5" - ]; - passthru.names = [ "grunt-contrib-less" ]; - }; - 
"grunt-contrib-less" = self.by-version."grunt-contrib-less"."1.1.0"; - by-spec."grunt-contrib-watch"."^0.6.1" = - self.by-version."grunt-contrib-watch"."0.6.1"; - by-version."grunt-contrib-watch"."0.6.1" = lib.makeOverridable self.buildNodePackage { - name = "grunt-contrib-watch-0.6.1"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/grunt-contrib-watch/-/grunt-contrib-watch-0.6.1.tgz"; - name = "grunt-contrib-watch-0.6.1.tgz"; - sha1 = "64fdcba25a635f5b4da1b6ce6f90da0aeb6e3f15"; - }) - ]; - buildInputs = - (self.nativeDeps."grunt-contrib-watch" or []); - deps = { - "gaze-0.5.2" = self.by-version."gaze"."0.5.2"; - "tiny-lr-fork-0.0.5" = self.by-version."tiny-lr-fork"."0.0.5"; - "lodash-2.4.2" = self.by-version."lodash"."2.4.2"; - "async-0.2.10" = self.by-version."async"."0.2.10"; - }; - peerDependencies = [ - self.by-version."grunt"."0.4.5" - ]; - passthru.names = [ "grunt-contrib-watch" ]; - }; - "grunt-contrib-watch" = self.by-version."grunt-contrib-watch"."0.6.1"; - by-spec."grunt-legacy-log"."~0.1.0" = - self.by-version."grunt-legacy-log"."0.1.3"; - by-version."grunt-legacy-log"."0.1.3" = lib.makeOverridable self.buildNodePackage { - name = "grunt-legacy-log-0.1.3"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/grunt-legacy-log/-/grunt-legacy-log-0.1.3.tgz"; - name = "grunt-legacy-log-0.1.3.tgz"; - sha1 = "ec29426e803021af59029f87d2f9cd7335a05531"; - }) - ]; - buildInputs = - (self.nativeDeps."grunt-legacy-log" or []); - deps = { - "colors-0.6.2" = self.by-version."colors"."0.6.2"; - "grunt-legacy-log-utils-0.1.1" = self.by-version."grunt-legacy-log-utils"."0.1.1"; - "hooker-0.2.3" = self.by-version."hooker"."0.2.3"; - "lodash-2.4.2" = self.by-version."lodash"."2.4.2"; - "underscore.string-2.3.3" = self.by-version."underscore.string"."2.3.3"; - }; - peerDependencies = [ - ]; - passthru.names = [ "grunt-legacy-log" ]; - }; - by-spec."grunt-legacy-log-utils"."~0.1.1" = - self.by-version."grunt-legacy-log-utils"."0.1.1"; - by-version."grunt-legacy-log-utils"."0.1.1" = lib.makeOverridable self.buildNodePackage { - name = "grunt-legacy-log-utils-0.1.1"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/grunt-legacy-log-utils/-/grunt-legacy-log-utils-0.1.1.tgz"; - name = "grunt-legacy-log-utils-0.1.1.tgz"; - sha1 = "c0706b9dd9064e116f36f23fe4e6b048672c0f7e"; - }) - ]; - buildInputs = - (self.nativeDeps."grunt-legacy-log-utils" or []); - deps = { - "lodash-2.4.2" = self.by-version."lodash"."2.4.2"; - "underscore.string-2.3.3" = self.by-version."underscore.string"."2.3.3"; - "colors-0.6.2" = self.by-version."colors"."0.6.2"; - }; - peerDependencies = [ - ]; - passthru.names = [ "grunt-legacy-log-utils" ]; - }; - by-spec."grunt-legacy-util"."~0.2.0" = - self.by-version."grunt-legacy-util"."0.2.0"; - by-version."grunt-legacy-util"."0.2.0" = lib.makeOverridable self.buildNodePackage { - name = "grunt-legacy-util-0.2.0"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/grunt-legacy-util/-/grunt-legacy-util-0.2.0.tgz"; - name = "grunt-legacy-util-0.2.0.tgz"; - sha1 = "93324884dbf7e37a9ff7c026dff451d94a9e554b"; - }) - ]; - buildInputs = - (self.nativeDeps."grunt-legacy-util" or []); - deps = { - "hooker-0.2.3" = self.by-version."hooker"."0.2.3"; - "async-0.1.22" = self.by-version."async"."0.1.22"; - "lodash-0.9.2" = self.by-version."lodash"."0.9.2"; - "exit-0.1.2" = self.by-version."exit"."0.1.2"; - "underscore.string-2.2.1" = self.by-version."underscore.string"."2.2.1"; - "getobject-0.1.0" = 
self.by-version."getobject"."0.1.0"; - "which-1.0.9" = self.by-version."which"."1.0.9"; - }; - peerDependencies = [ - ]; - passthru.names = [ "grunt-legacy-util" ]; - }; - by-spec."har-validator"."~2.0.2" = - self.by-version."har-validator"."2.0.6"; - by-version."har-validator"."2.0.6" = lib.makeOverridable self.buildNodePackage { - name = "har-validator-2.0.6"; - bin = true; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/har-validator/-/har-validator-2.0.6.tgz"; - name = "har-validator-2.0.6.tgz"; - sha1 = "cdcbc08188265ad119b6a5a7c8ab70eecfb5d27d"; - }) - ]; - buildInputs = - (self.nativeDeps."har-validator" or []); - deps = { - "chalk-1.1.1" = self.by-version."chalk"."1.1.1"; - "commander-2.9.0" = self.by-version."commander"."2.9.0"; - "is-my-json-valid-2.12.4" = self.by-version."is-my-json-valid"."2.12.4"; - "pinkie-promise-2.0.0" = self.by-version."pinkie-promise"."2.0.0"; - }; - peerDependencies = [ - ]; - passthru.names = [ "har-validator" ]; - }; - by-spec."has-ansi"."^0.1.0" = - self.by-version."has-ansi"."0.1.0"; - by-version."has-ansi"."0.1.0" = lib.makeOverridable self.buildNodePackage { - name = "has-ansi-0.1.0"; - bin = true; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/has-ansi/-/has-ansi-0.1.0.tgz"; - name = "has-ansi-0.1.0.tgz"; - sha1 = "84f265aae8c0e6a88a12d7022894b7568894c62e"; - }) - ]; - buildInputs = - (self.nativeDeps."has-ansi" or []); - deps = { - "ansi-regex-0.2.1" = self.by-version."ansi-regex"."0.2.1"; - }; - peerDependencies = [ - ]; - passthru.names = [ "has-ansi" ]; - }; - by-spec."has-ansi"."^2.0.0" = - self.by-version."has-ansi"."2.0.0"; - by-version."has-ansi"."2.0.0" = lib.makeOverridable self.buildNodePackage { - name = "has-ansi-2.0.0"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/has-ansi/-/has-ansi-2.0.0.tgz"; - name = "has-ansi-2.0.0.tgz"; - sha1 = "34f5049ce1ecdf2b0649af3ef24e45ed35416d91"; - }) - ]; - buildInputs = - (self.nativeDeps."has-ansi" or []); - deps = { - "ansi-regex-2.0.0" = self.by-version."ansi-regex"."2.0.0"; - }; - peerDependencies = [ - ]; - passthru.names = [ "has-ansi" ]; - }; - by-spec."hawk"."~3.1.0" = - self.by-version."hawk"."3.1.3"; - by-version."hawk"."3.1.3" = lib.makeOverridable self.buildNodePackage { - name = "hawk-3.1.3"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/hawk/-/hawk-3.1.3.tgz"; - name = "hawk-3.1.3.tgz"; - sha1 = "078444bd7c1640b0fe540d2c9b73d59678e8e1c4"; - }) - ]; - buildInputs = - (self.nativeDeps."hawk" or []); - deps = { - "hoek-2.16.3" = self.by-version."hoek"."2.16.3"; - "boom-2.10.1" = self.by-version."boom"."2.10.1"; - "cryptiles-2.0.5" = self.by-version."cryptiles"."2.0.5"; - "sntp-1.0.9" = self.by-version."sntp"."1.0.9"; - }; - peerDependencies = [ - ]; - passthru.names = [ "hawk" ]; - }; - by-spec."hoek"."2.x.x" = - self.by-version."hoek"."2.16.3"; - by-version."hoek"."2.16.3" = lib.makeOverridable self.buildNodePackage { - name = "hoek-2.16.3"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/hoek/-/hoek-2.16.3.tgz"; - name = "hoek-2.16.3.tgz"; - sha1 = "20bb7403d3cea398e91dc4710a8ff1b8274a25ed"; - }) - ]; - buildInputs = - (self.nativeDeps."hoek" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "hoek" ]; - }; - by-spec."hooker"."^0.2.3" = - self.by-version."hooker"."0.2.3"; - by-version."hooker"."0.2.3" = lib.makeOverridable self.buildNodePackage { - name = "hooker-0.2.3"; - bin = false; - src = [ - (fetchurl { - url = 
"http://registry.npmjs.org/hooker/-/hooker-0.2.3.tgz"; - name = "hooker-0.2.3.tgz"; - sha1 = "b834f723cc4a242aa65963459df6d984c5d3d959"; - }) - ]; - buildInputs = - (self.nativeDeps."hooker" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "hooker" ]; - }; - by-spec."hooker"."~0.2.3" = - self.by-version."hooker"."0.2.3"; - by-spec."htmlparser2"."3.8.x" = - self.by-version."htmlparser2"."3.8.3"; - by-version."htmlparser2"."3.8.3" = lib.makeOverridable self.buildNodePackage { - name = "htmlparser2-3.8.3"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/htmlparser2/-/htmlparser2-3.8.3.tgz"; - name = "htmlparser2-3.8.3.tgz"; - sha1 = "996c28b191516a8be86501a7d79757e5c70c1068"; - }) - ]; - buildInputs = - (self.nativeDeps."htmlparser2" or []); - deps = { - "domhandler-2.3.0" = self.by-version."domhandler"."2.3.0"; - "domutils-1.5.1" = self.by-version."domutils"."1.5.1"; - "domelementtype-1.3.0" = self.by-version."domelementtype"."1.3.0"; - "readable-stream-1.1.13" = self.by-version."readable-stream"."1.1.13"; - "entities-1.0.0" = self.by-version."entities"."1.0.0"; - }; - peerDependencies = [ - ]; - passthru.names = [ "htmlparser2" ]; - }; - by-spec."http-signature"."~1.1.0" = - self.by-version."http-signature"."1.1.0"; - by-version."http-signature"."1.1.0" = lib.makeOverridable self.buildNodePackage { - name = "http-signature-1.1.0"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/http-signature/-/http-signature-1.1.0.tgz"; - name = "http-signature-1.1.0.tgz"; - sha1 = "5d2d7e9b6ef49980ad5b128d8e4ef09a31c90d95"; - }) - ]; - buildInputs = - (self.nativeDeps."http-signature" or []); - deps = { - "assert-plus-0.1.5" = self.by-version."assert-plus"."0.1.5"; - "jsprim-1.2.2" = self.by-version."jsprim"."1.2.2"; - "sshpk-1.7.3" = self.by-version."sshpk"."1.7.3"; - }; - peerDependencies = [ - ]; - passthru.names = [ "http-signature" ]; - }; - by-spec."iconv-lite"."~0.2.11" = - self.by-version."iconv-lite"."0.2.11"; - by-version."iconv-lite"."0.2.11" = lib.makeOverridable self.buildNodePackage { - name = "iconv-lite-0.2.11"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/iconv-lite/-/iconv-lite-0.2.11.tgz"; - name = "iconv-lite-0.2.11.tgz"; - sha1 = "1ce60a3a57864a292d1321ff4609ca4bb965adc8"; - }) - ]; - buildInputs = - (self.nativeDeps."iconv-lite" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "iconv-lite" ]; - }; - by-spec."image-size"."~0.3.5" = - self.by-version."image-size"."0.3.5"; - by-version."image-size"."0.3.5" = lib.makeOverridable self.buildNodePackage { - name = "image-size-0.3.5"; - bin = true; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/image-size/-/image-size-0.3.5.tgz"; - name = "image-size-0.3.5.tgz"; - sha1 = "83240eab2fb5b00b04aab8c74b0471e9cba7ad8c"; - }) - ]; - buildInputs = - (self.nativeDeps."image-size" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "image-size" ]; - }; - by-spec."inherits"."1" = - self.by-version."inherits"."1.0.2"; - by-version."inherits"."1.0.2" = lib.makeOverridable self.buildNodePackage { - name = "inherits-1.0.2"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/inherits/-/inherits-1.0.2.tgz"; - name = "inherits-1.0.2.tgz"; - sha1 = "ca4309dadee6b54cc0b8d247e8d7c7a0975bdc9b"; - }) - ]; - buildInputs = - (self.nativeDeps."inherits" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "inherits" ]; - }; - by-spec."inherits"."2" = - 
self.by-version."inherits"."2.0.1"; - by-version."inherits"."2.0.1" = lib.makeOverridable self.buildNodePackage { - name = "inherits-2.0.1"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/inherits/-/inherits-2.0.1.tgz"; - name = "inherits-2.0.1.tgz"; - sha1 = "b17d08d326b4423e568eff719f91b0b1cbdf69f1"; - }) - ]; - buildInputs = - (self.nativeDeps."inherits" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "inherits" ]; - }; - by-spec."inherits"."~2.0.1" = - self.by-version."inherits"."2.0.1"; - by-spec."is-my-json-valid"."^2.12.4" = - self.by-version."is-my-json-valid"."2.12.4"; - by-version."is-my-json-valid"."2.12.4" = lib.makeOverridable self.buildNodePackage { - name = "is-my-json-valid-2.12.4"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/is-my-json-valid/-/is-my-json-valid-2.12.4.tgz"; - name = "is-my-json-valid-2.12.4.tgz"; - sha1 = "d4ed2bc1d7f88daf8d0f763b3e3e39a69bd37880"; - }) - ]; - buildInputs = - (self.nativeDeps."is-my-json-valid" or []); - deps = { - "generate-function-2.0.0" = self.by-version."generate-function"."2.0.0"; - "generate-object-property-1.2.0" = self.by-version."generate-object-property"."1.2.0"; - "jsonpointer-2.0.0" = self.by-version."jsonpointer"."2.0.0"; - "xtend-4.0.1" = self.by-version."xtend"."4.0.1"; - }; - peerDependencies = [ - ]; - passthru.names = [ "is-my-json-valid" ]; - }; - by-spec."is-property"."^1.0.0" = - self.by-version."is-property"."1.0.2"; - by-version."is-property"."1.0.2" = lib.makeOverridable self.buildNodePackage { - name = "is-property-1.0.2"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/is-property/-/is-property-1.0.2.tgz"; - name = "is-property-1.0.2.tgz"; - sha1 = "57fe1c4e48474edd65b09911f26b1cd4095dda84"; - }) - ]; - buildInputs = - (self.nativeDeps."is-property" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "is-property" ]; - }; - by-spec."is-typedarray"."~1.0.0" = - self.by-version."is-typedarray"."1.0.0"; - by-version."is-typedarray"."1.0.0" = lib.makeOverridable self.buildNodePackage { - name = "is-typedarray-1.0.0"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz"; - name = "is-typedarray-1.0.0.tgz"; - sha1 = "e479c80858df0c1b11ddda6940f96011fcda4a9a"; - }) - ]; - buildInputs = - (self.nativeDeps."is-typedarray" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "is-typedarray" ]; - }; - by-spec."isarray"."0.0.1" = - self.by-version."isarray"."0.0.1"; - by-version."isarray"."0.0.1" = lib.makeOverridable self.buildNodePackage { - name = "isarray-0.0.1"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz"; - name = "isarray-0.0.1.tgz"; - sha1 = "8a18acfca9a8f4177e09abfc6038939b05d1eedf"; - }) - ]; - buildInputs = - (self.nativeDeps."isarray" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "isarray" ]; - }; - by-spec."isstream"."~0.1.2" = - self.by-version."isstream"."0.1.2"; - by-version."isstream"."0.1.2" = lib.makeOverridable self.buildNodePackage { - name = "isstream-0.1.2"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz"; - name = "isstream-0.1.2.tgz"; - sha1 = "47e63f7af55afa6f92e1500e690eb8b8529c099a"; - }) - ]; - buildInputs = - (self.nativeDeps."isstream" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "isstream" ]; - }; - 
by-spec."jodid25519".">=1.0.0 <2.0.0" = - self.by-version."jodid25519"."1.0.2"; - by-version."jodid25519"."1.0.2" = lib.makeOverridable self.buildNodePackage { - name = "jodid25519-1.0.2"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/jodid25519/-/jodid25519-1.0.2.tgz"; - name = "jodid25519-1.0.2.tgz"; - sha1 = "06d4912255093419477d425633606e0e90782967"; - }) - ]; - buildInputs = - (self.nativeDeps."jodid25519" or []); - deps = { - "jsbn-0.1.0" = self.by-version."jsbn"."0.1.0"; - }; - peerDependencies = [ - ]; - passthru.names = [ "jodid25519" ]; - }; - by-spec."js-yaml"."~2.0.5" = - self.by-version."js-yaml"."2.0.5"; - by-version."js-yaml"."2.0.5" = lib.makeOverridable self.buildNodePackage { - name = "js-yaml-2.0.5"; - bin = true; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/js-yaml/-/js-yaml-2.0.5.tgz"; - name = "js-yaml-2.0.5.tgz"; - sha1 = "a25ae6509999e97df278c6719da11bd0687743a8"; - }) - ]; - buildInputs = - (self.nativeDeps."js-yaml" or []); - deps = { - "argparse-0.1.16" = self.by-version."argparse"."0.1.16"; - "esprima-1.0.4" = self.by-version."esprima"."1.0.4"; - }; - peerDependencies = [ - ]; - passthru.names = [ "js-yaml" ]; - }; - by-spec."jsbn".">=0.1.0 <0.2.0" = - self.by-version."jsbn"."0.1.0"; - by-version."jsbn"."0.1.0" = lib.makeOverridable self.buildNodePackage { - name = "jsbn-0.1.0"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/jsbn/-/jsbn-0.1.0.tgz"; - name = "jsbn-0.1.0.tgz"; - sha1 = "650987da0dd74f4ebf5a11377a2aa2d273e97dfd"; - }) - ]; - buildInputs = - (self.nativeDeps."jsbn" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "jsbn" ]; - }; - by-spec."jsbn"."~0.1.0" = - self.by-version."jsbn"."0.1.0"; - by-spec."jshint"."^2.9.1-rc3" = - self.by-version."jshint"."2.9.1"; - by-version."jshint"."2.9.1" = lib.makeOverridable self.buildNodePackage { - name = "jshint-2.9.1"; - bin = true; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/jshint/-/jshint-2.9.1.tgz"; - name = "jshint-2.9.1.tgz"; - sha1 = "3136b68f8b6fa37423aacb8ec5e18a1ada7a2638"; - }) - ]; - buildInputs = - (self.nativeDeps."jshint" or []); - deps = { - "cli-0.6.6" = self.by-version."cli"."0.6.6"; - "console-browserify-1.1.0" = self.by-version."console-browserify"."1.1.0"; - "exit-0.1.2" = self.by-version."exit"."0.1.2"; - "htmlparser2-3.8.3" = self.by-version."htmlparser2"."3.8.3"; - "minimatch-2.0.10" = self.by-version."minimatch"."2.0.10"; - "shelljs-0.3.0" = self.by-version."shelljs"."0.3.0"; - "strip-json-comments-1.0.4" = self.by-version."strip-json-comments"."1.0.4"; - "lodash-3.7.0" = self.by-version."lodash"."3.7.0"; - }; - peerDependencies = [ - ]; - passthru.names = [ "jshint" ]; - }; - "jshint" = self.by-version."jshint"."2.9.1"; - by-spec."jshint"."~2.9.1" = - self.by-version."jshint"."2.9.1"; - by-spec."json-schema"."0.2.2" = - self.by-version."json-schema"."0.2.2"; - by-version."json-schema"."0.2.2" = lib.makeOverridable self.buildNodePackage { - name = "json-schema-0.2.2"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/json-schema/-/json-schema-0.2.2.tgz"; - name = "json-schema-0.2.2.tgz"; - sha1 = "50354f19f603917c695f70b85afa77c3b0f23506"; - }) - ]; - buildInputs = - (self.nativeDeps."json-schema" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "json-schema" ]; - }; - by-spec."json-stringify-safe"."~5.0.1" = - self.by-version."json-stringify-safe"."5.0.1"; - by-version."json-stringify-safe"."5.0.1" = lib.makeOverridable 
self.buildNodePackage { - name = "json-stringify-safe-5.0.1"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz"; - name = "json-stringify-safe-5.0.1.tgz"; - sha1 = "1296a2d58fd45f19a0f6ce01d65701e2c735b6eb"; - }) - ]; - buildInputs = - (self.nativeDeps."json-stringify-safe" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "json-stringify-safe" ]; - }; - by-spec."jsonpointer"."2.0.0" = - self.by-version."jsonpointer"."2.0.0"; - by-version."jsonpointer"."2.0.0" = lib.makeOverridable self.buildNodePackage { - name = "jsonpointer-2.0.0"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/jsonpointer/-/jsonpointer-2.0.0.tgz"; - name = "jsonpointer-2.0.0.tgz"; - sha1 = "3af1dd20fe85463910d469a385e33017d2a030d9"; - }) - ]; - buildInputs = - (self.nativeDeps."jsonpointer" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "jsonpointer" ]; - }; - by-spec."jsprim"."^1.2.2" = - self.by-version."jsprim"."1.2.2"; - by-version."jsprim"."1.2.2" = lib.makeOverridable self.buildNodePackage { - name = "jsprim-1.2.2"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/jsprim/-/jsprim-1.2.2.tgz"; - name = "jsprim-1.2.2.tgz"; - sha1 = "f20c906ac92abd58e3b79ac8bc70a48832512da1"; - }) - ]; - buildInputs = - (self.nativeDeps."jsprim" or []); - deps = { - "extsprintf-1.0.2" = self.by-version."extsprintf"."1.0.2"; - "json-schema-0.2.2" = self.by-version."json-schema"."0.2.2"; - "verror-1.3.6" = self.by-version."verror"."1.3.6"; - }; - peerDependencies = [ - ]; - passthru.names = [ "jsprim" ]; - }; - by-spec."less"."~2.5.0" = - self.by-version."less"."2.5.3"; - by-version."less"."2.5.3" = lib.makeOverridable self.buildNodePackage { - name = "less-2.5.3"; - bin = true; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/less/-/less-2.5.3.tgz"; - name = "less-2.5.3.tgz"; - sha1 = "9ff586e8a703515fc18dc99c7bc498d2f3ad4849"; - }) - ]; - buildInputs = - (self.nativeDeps."less" or []); - deps = { - "errno-0.1.4" = self.by-version."errno"."0.1.4"; - "graceful-fs-3.0.8" = self.by-version."graceful-fs"."3.0.8"; - "image-size-0.3.5" = self.by-version."image-size"."0.3.5"; - "mime-1.3.4" = self.by-version."mime"."1.3.4"; - "mkdirp-0.5.1" = self.by-version."mkdirp"."0.5.1"; - "promise-6.1.0" = self.by-version."promise"."6.1.0"; - "request-2.67.0" = self.by-version."request"."2.67.0"; - "source-map-0.4.4" = self.by-version."source-map"."0.4.4"; - }; - peerDependencies = [ - ]; - passthru.names = [ "less" ]; - }; - by-spec."lodash"."3.7.x" = - self.by-version."lodash"."3.7.0"; - by-version."lodash"."3.7.0" = lib.makeOverridable self.buildNodePackage { - name = "lodash-3.7.0"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/lodash/-/lodash-3.7.0.tgz"; - name = "lodash-3.7.0.tgz"; - sha1 = "3678bd8ab995057c07ade836ed2ef087da811d45"; - }) - ]; - buildInputs = - (self.nativeDeps."lodash" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "lodash" ]; - }; - by-spec."lodash"."^3.2.0" = - self.by-version."lodash"."3.10.1"; - by-version."lodash"."3.10.1" = lib.makeOverridable self.buildNodePackage { - name = "lodash-3.10.1"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/lodash/-/lodash-3.10.1.tgz"; - name = "lodash-3.10.1.tgz"; - sha1 = "5bf45e8e49ba4189e17d482789dfd15bd140b7b6"; - }) - ]; - buildInputs = - (self.nativeDeps."lodash" or []); - deps = { - }; - peerDependencies = [ - ]; - 
passthru.names = [ "lodash" ]; - }; - by-spec."lodash"."~0.9.2" = - self.by-version."lodash"."0.9.2"; - by-version."lodash"."0.9.2" = lib.makeOverridable self.buildNodePackage { - name = "lodash-0.9.2"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/lodash/-/lodash-0.9.2.tgz"; - name = "lodash-0.9.2.tgz"; - sha1 = "8f3499c5245d346d682e5b0d3b40767e09f1a92c"; - }) - ]; - buildInputs = - (self.nativeDeps."lodash" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "lodash" ]; - }; - by-spec."lodash"."~1.0.1" = - self.by-version."lodash"."1.0.2"; - by-version."lodash"."1.0.2" = lib.makeOverridable self.buildNodePackage { - name = "lodash-1.0.2"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/lodash/-/lodash-1.0.2.tgz"; - name = "lodash-1.0.2.tgz"; - sha1 = "8f57560c83b59fc270bd3d561b690043430e2551"; - }) - ]; - buildInputs = - (self.nativeDeps."lodash" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "lodash" ]; - }; - by-spec."lodash"."~2.4.1" = - self.by-version."lodash"."2.4.2"; - by-version."lodash"."2.4.2" = lib.makeOverridable self.buildNodePackage { - name = "lodash-2.4.2"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/lodash/-/lodash-2.4.2.tgz"; - name = "lodash-2.4.2.tgz"; - sha1 = "fadd834b9683073da179b3eae6d9c0d15053f73e"; - }) - ]; - buildInputs = - (self.nativeDeps."lodash" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "lodash" ]; - }; - by-spec."lru-cache"."2" = - self.by-version."lru-cache"."2.7.3"; - by-version."lru-cache"."2.7.3" = lib.makeOverridable self.buildNodePackage { - name = "lru-cache-2.7.3"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/lru-cache/-/lru-cache-2.7.3.tgz"; - name = "lru-cache-2.7.3.tgz"; - sha1 = "6d4524e8b955f95d4f5b58851ce21dd72fb4e952"; - }) - ]; - buildInputs = - (self.nativeDeps."lru-cache" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "lru-cache" ]; - }; - by-spec."mime"."^1.2.11" = - self.by-version."mime"."1.3.4"; - by-version."mime"."1.3.4" = lib.makeOverridable self.buildNodePackage { - name = "mime-1.3.4"; - bin = true; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/mime/-/mime-1.3.4.tgz"; - name = "mime-1.3.4.tgz"; - sha1 = "115f9e3b6b3daf2959983cb38f149a2d40eb5d53"; - }) - ]; - buildInputs = - (self.nativeDeps."mime" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "mime" ]; - }; - by-spec."mime-db"."~1.21.0" = - self.by-version."mime-db"."1.21.0"; - by-version."mime-db"."1.21.0" = lib.makeOverridable self.buildNodePackage { - name = "mime-db-1.21.0"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/mime-db/-/mime-db-1.21.0.tgz"; - name = "mime-db-1.21.0.tgz"; - sha1 = "9b5239e3353cf6eb015a00d890261027c36d4bac"; - }) - ]; - buildInputs = - (self.nativeDeps."mime-db" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "mime-db" ]; - }; - by-spec."mime-types"."^2.1.3" = - self.by-version."mime-types"."2.1.9"; - by-version."mime-types"."2.1.9" = lib.makeOverridable self.buildNodePackage { - name = "mime-types-2.1.9"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/mime-types/-/mime-types-2.1.9.tgz"; - name = "mime-types-2.1.9.tgz"; - sha1 = "dfb396764b5fdf75be34b1f4104bc3687fb635f8"; - }) - ]; - buildInputs = - (self.nativeDeps."mime-types" or []); - deps = { - "mime-db-1.21.0" = self.by-version."mime-db"."1.21.0"; - }; - 
peerDependencies = [ - ]; - passthru.names = [ "mime-types" ]; - }; - by-spec."mime-types"."~2.1.7" = - self.by-version."mime-types"."2.1.9"; - by-spec."minimatch"."0.3" = - self.by-version."minimatch"."0.3.0"; - by-version."minimatch"."0.3.0" = lib.makeOverridable self.buildNodePackage { - name = "minimatch-0.3.0"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/minimatch/-/minimatch-0.3.0.tgz"; - name = "minimatch-0.3.0.tgz"; - sha1 = "275d8edaac4f1bb3326472089e7949c8394699dd"; - }) - ]; - buildInputs = - (self.nativeDeps."minimatch" or []); - deps = { - "lru-cache-2.7.3" = self.by-version."lru-cache"."2.7.3"; - "sigmund-1.0.1" = self.by-version."sigmund"."1.0.1"; - }; - peerDependencies = [ - ]; - passthru.names = [ "minimatch" ]; - }; - by-spec."minimatch"."2.0.x" = - self.by-version."minimatch"."2.0.10"; - by-version."minimatch"."2.0.10" = lib.makeOverridable self.buildNodePackage { - name = "minimatch-2.0.10"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/minimatch/-/minimatch-2.0.10.tgz"; - name = "minimatch-2.0.10.tgz"; - sha1 = "8d087c39c6b38c001b97fca7ce6d0e1e80afbac7"; - }) - ]; - buildInputs = - (self.nativeDeps."minimatch" or []); - deps = { - "brace-expansion-1.1.2" = self.by-version."brace-expansion"."1.1.2"; - }; - peerDependencies = [ - ]; - passthru.names = [ "minimatch" ]; - }; - by-spec."minimatch"."~0.2.11" = - self.by-version."minimatch"."0.2.14"; - by-version."minimatch"."0.2.14" = lib.makeOverridable self.buildNodePackage { - name = "minimatch-0.2.14"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/minimatch/-/minimatch-0.2.14.tgz"; - name = "minimatch-0.2.14.tgz"; - sha1 = "c74e780574f63c6f9a090e90efbe6ef53a6a756a"; - }) - ]; - buildInputs = - (self.nativeDeps."minimatch" or []); - deps = { - "lru-cache-2.7.3" = self.by-version."lru-cache"."2.7.3"; - "sigmund-1.0.1" = self.by-version."sigmund"."1.0.1"; - }; - peerDependencies = [ - ]; - passthru.names = [ "minimatch" ]; - }; - by-spec."minimatch"."~0.2.12" = - self.by-version."minimatch"."0.2.14"; - by-spec."minimist"."0.0.8" = - self.by-version."minimist"."0.0.8"; - by-version."minimist"."0.0.8" = lib.makeOverridable self.buildNodePackage { - name = "minimist-0.0.8"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz"; - name = "minimist-0.0.8.tgz"; - sha1 = "857fcabfc3397d2625b8228262e86aa7a011b05d"; - }) - ]; - buildInputs = - (self.nativeDeps."minimist" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "minimist" ]; - }; - by-spec."mkdirp"."^0.5.0" = - self.by-version."mkdirp"."0.5.1"; - by-version."mkdirp"."0.5.1" = lib.makeOverridable self.buildNodePackage { - name = "mkdirp-0.5.1"; - bin = true; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz"; - name = "mkdirp-0.5.1.tgz"; - sha1 = "30057438eac6cf7f8c4767f38648d6697d75c903"; - }) - ]; - buildInputs = - (self.nativeDeps."mkdirp" or []); - deps = { - "minimist-0.0.8" = self.by-version."minimist"."0.0.8"; - }; - peerDependencies = [ - ]; - passthru.names = [ "mkdirp" ]; - }; - by-spec."node-uuid"."~1.4.7" = - self.by-version."node-uuid"."1.4.7"; - by-version."node-uuid"."1.4.7" = lib.makeOverridable self.buildNodePackage { - name = "node-uuid-1.4.7"; - bin = true; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/node-uuid/-/node-uuid-1.4.7.tgz"; - name = "node-uuid-1.4.7.tgz"; - sha1 = "6da5a17668c4b3dd59623bda11cf7fa4c1f60a6f"; - }) - ]; - buildInputs = - 
(self.nativeDeps."node-uuid" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "node-uuid" ]; - }; - by-spec."nopt"."~1.0.10" = - self.by-version."nopt"."1.0.10"; - by-version."nopt"."1.0.10" = lib.makeOverridable self.buildNodePackage { - name = "nopt-1.0.10"; - bin = true; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/nopt/-/nopt-1.0.10.tgz"; - name = "nopt-1.0.10.tgz"; - sha1 = "6ddd21bd2a31417b92727dd585f8a6f37608ebee"; - }) - ]; - buildInputs = - (self.nativeDeps."nopt" or []); - deps = { - "abbrev-1.0.7" = self.by-version."abbrev"."1.0.7"; - }; - peerDependencies = [ - ]; - passthru.names = [ "nopt" ]; - }; - by-spec."nopt"."~2.0.0" = - self.by-version."nopt"."2.0.0"; - by-version."nopt"."2.0.0" = lib.makeOverridable self.buildNodePackage { - name = "nopt-2.0.0"; - bin = true; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/nopt/-/nopt-2.0.0.tgz"; - name = "nopt-2.0.0.tgz"; - sha1 = "ca7416f20a5e3f9c3b86180f96295fa3d0b52e0d"; - }) - ]; - buildInputs = - (self.nativeDeps."nopt" or []); - deps = { - "abbrev-1.0.7" = self.by-version."abbrev"."1.0.7"; - }; - peerDependencies = [ - ]; - passthru.names = [ "nopt" ]; - }; - by-spec."noptify"."~0.0.3" = - self.by-version."noptify"."0.0.3"; - by-version."noptify"."0.0.3" = lib.makeOverridable self.buildNodePackage { - name = "noptify-0.0.3"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/noptify/-/noptify-0.0.3.tgz"; - name = "noptify-0.0.3.tgz"; - sha1 = "58f654a73d9753df0c51d9686dc92104a67f4bbb"; - }) - ]; - buildInputs = - (self.nativeDeps."noptify" or []); - deps = { - "nopt-2.0.0" = self.by-version."nopt"."2.0.0"; - }; - peerDependencies = [ - ]; - passthru.names = [ "noptify" ]; - }; - by-spec."oauth-sign"."~0.8.0" = - self.by-version."oauth-sign"."0.8.0"; - by-version."oauth-sign"."0.8.0" = lib.makeOverridable self.buildNodePackage { - name = "oauth-sign-0.8.0"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/oauth-sign/-/oauth-sign-0.8.0.tgz"; - name = "oauth-sign-0.8.0.tgz"; - sha1 = "938fdc875765ba527137d8aec9d178e24debc553"; - }) - ]; - buildInputs = - (self.nativeDeps."oauth-sign" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "oauth-sign" ]; - }; - by-spec."pinkie"."^2.0.0" = - self.by-version."pinkie"."2.0.1"; - by-version."pinkie"."2.0.1" = lib.makeOverridable self.buildNodePackage { - name = "pinkie-2.0.1"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/pinkie/-/pinkie-2.0.1.tgz"; - name = "pinkie-2.0.1.tgz"; - sha1 = "4236c86fc29f261c2045bbe81f78cbb2a5e8306c"; - }) - ]; - buildInputs = - (self.nativeDeps."pinkie" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "pinkie" ]; - }; - by-spec."pinkie-promise"."^2.0.0" = - self.by-version."pinkie-promise"."2.0.0"; - by-version."pinkie-promise"."2.0.0" = lib.makeOverridable self.buildNodePackage { - name = "pinkie-promise-2.0.0"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/pinkie-promise/-/pinkie-promise-2.0.0.tgz"; - name = "pinkie-promise-2.0.0.tgz"; - sha1 = "4c83538de1f6e660c29e0a13446844f7a7e88259"; - }) - ]; - buildInputs = - (self.nativeDeps."pinkie-promise" or []); - deps = { - "pinkie-2.0.1" = self.by-version."pinkie"."2.0.1"; - }; - peerDependencies = [ - ]; - passthru.names = [ "pinkie-promise" ]; - }; - by-spec."process-nextick-args"."~1.0.6" = - self.by-version."process-nextick-args"."1.0.6"; - by-version."process-nextick-args"."1.0.6" = lib.makeOverridable 
self.buildNodePackage { - name = "process-nextick-args-1.0.6"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/process-nextick-args/-/process-nextick-args-1.0.6.tgz"; - name = "process-nextick-args-1.0.6.tgz"; - sha1 = "0f96b001cea90b12592ce566edb97ec11e69bd05"; - }) - ]; - buildInputs = - (self.nativeDeps."process-nextick-args" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "process-nextick-args" ]; - }; - by-spec."promise"."^6.0.1" = - self.by-version."promise"."6.1.0"; - by-version."promise"."6.1.0" = lib.makeOverridable self.buildNodePackage { - name = "promise-6.1.0"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/promise/-/promise-6.1.0.tgz"; - name = "promise-6.1.0.tgz"; - sha1 = "2ce729f6b94b45c26891ad0602c5c90e04c6eef6"; - }) - ]; - buildInputs = - (self.nativeDeps."promise" or []); - deps = { - "asap-1.0.0" = self.by-version."asap"."1.0.0"; - }; - peerDependencies = [ - ]; - passthru.names = [ "promise" ]; - }; - by-spec."prr"."~0.0.0" = - self.by-version."prr"."0.0.0"; - by-version."prr"."0.0.0" = lib.makeOverridable self.buildNodePackage { - name = "prr-0.0.0"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/prr/-/prr-0.0.0.tgz"; - name = "prr-0.0.0.tgz"; - sha1 = "1a84b85908325501411853d0081ee3fa86e2926a"; - }) - ]; - buildInputs = - (self.nativeDeps."prr" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "prr" ]; - }; - by-spec."qs"."~0.5.2" = - self.by-version."qs"."0.5.6"; - by-version."qs"."0.5.6" = lib.makeOverridable self.buildNodePackage { - name = "qs-0.5.6"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/qs/-/qs-0.5.6.tgz"; - name = "qs-0.5.6.tgz"; - sha1 = "31b1ad058567651c526921506b9a8793911a0384"; - }) - ]; - buildInputs = - (self.nativeDeps."qs" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "qs" ]; - }; - by-spec."qs"."~5.2.0" = - self.by-version."qs"."5.2.0"; - by-version."qs"."5.2.0" = lib.makeOverridable self.buildNodePackage { - name = "qs-5.2.0"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/qs/-/qs-5.2.0.tgz"; - name = "qs-5.2.0.tgz"; - sha1 = "a9f31142af468cb72b25b30136ba2456834916be"; - }) - ]; - buildInputs = - (self.nativeDeps."qs" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "qs" ]; - }; - by-spec."readable-stream"."1.1" = - self.by-version."readable-stream"."1.1.13"; - by-version."readable-stream"."1.1.13" = lib.makeOverridable self.buildNodePackage { - name = "readable-stream-1.1.13"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/readable-stream/-/readable-stream-1.1.13.tgz"; - name = "readable-stream-1.1.13.tgz"; - sha1 = "f6eef764f514c89e2b9e23146a75ba106756d23e"; - }) - ]; - buildInputs = - (self.nativeDeps."readable-stream" or []); - deps = { - "core-util-is-1.0.2" = self.by-version."core-util-is"."1.0.2"; - "isarray-0.0.1" = self.by-version."isarray"."0.0.1"; - "string_decoder-0.10.31" = self.by-version."string_decoder"."0.10.31"; - "inherits-2.0.1" = self.by-version."inherits"."2.0.1"; - }; - peerDependencies = [ - ]; - passthru.names = [ "readable-stream" ]; - }; - by-spec."readable-stream"."~2.0.5" = - self.by-version."readable-stream"."2.0.5"; - by-version."readable-stream"."2.0.5" = lib.makeOverridable self.buildNodePackage { - name = "readable-stream-2.0.5"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/readable-stream/-/readable-stream-2.0.5.tgz"; - 
name = "readable-stream-2.0.5.tgz"; - sha1 = "a2426f8dcd4551c77a33f96edf2886a23c829669"; - }) - ]; - buildInputs = - (self.nativeDeps."readable-stream" or []); - deps = { - "core-util-is-1.0.2" = self.by-version."core-util-is"."1.0.2"; - "inherits-2.0.1" = self.by-version."inherits"."2.0.1"; - "isarray-0.0.1" = self.by-version."isarray"."0.0.1"; - "process-nextick-args-1.0.6" = self.by-version."process-nextick-args"."1.0.6"; - "string_decoder-0.10.31" = self.by-version."string_decoder"."0.10.31"; - "util-deprecate-1.0.2" = self.by-version."util-deprecate"."1.0.2"; - }; - peerDependencies = [ - ]; - passthru.names = [ "readable-stream" ]; - }; - by-spec."request"."^2.51.0" = - self.by-version."request"."2.67.0"; - by-version."request"."2.67.0" = lib.makeOverridable self.buildNodePackage { - name = "request-2.67.0"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/request/-/request-2.67.0.tgz"; - name = "request-2.67.0.tgz"; - sha1 = "8af74780e2bf11ea0ae9aa965c11f11afd272742"; - }) - ]; - buildInputs = - (self.nativeDeps."request" or []); - deps = { - "bl-1.0.1" = self.by-version."bl"."1.0.1"; - "caseless-0.11.0" = self.by-version."caseless"."0.11.0"; - "extend-3.0.0" = self.by-version."extend"."3.0.0"; - "forever-agent-0.6.1" = self.by-version."forever-agent"."0.6.1"; - "form-data-1.0.0-rc3" = self.by-version."form-data"."1.0.0-rc3"; - "json-stringify-safe-5.0.1" = self.by-version."json-stringify-safe"."5.0.1"; - "mime-types-2.1.9" = self.by-version."mime-types"."2.1.9"; - "node-uuid-1.4.7" = self.by-version."node-uuid"."1.4.7"; - "qs-5.2.0" = self.by-version."qs"."5.2.0"; - "tunnel-agent-0.4.2" = self.by-version."tunnel-agent"."0.4.2"; - "tough-cookie-2.2.1" = self.by-version."tough-cookie"."2.2.1"; - "http-signature-1.1.0" = self.by-version."http-signature"."1.1.0"; - "oauth-sign-0.8.0" = self.by-version."oauth-sign"."0.8.0"; - "hawk-3.1.3" = self.by-version."hawk"."3.1.3"; - "aws-sign2-0.6.0" = self.by-version."aws-sign2"."0.6.0"; - "stringstream-0.0.5" = self.by-version."stringstream"."0.0.5"; - "combined-stream-1.0.5" = self.by-version."combined-stream"."1.0.5"; - "isstream-0.1.2" = self.by-version."isstream"."0.1.2"; - "is-typedarray-1.0.0" = self.by-version."is-typedarray"."1.0.0"; - "har-validator-2.0.6" = self.by-version."har-validator"."2.0.6"; - }; - peerDependencies = [ - ]; - passthru.names = [ "request" ]; - }; - by-spec."rimraf"."~2.2.8" = - self.by-version."rimraf"."2.2.8"; - by-version."rimraf"."2.2.8" = lib.makeOverridable self.buildNodePackage { - name = "rimraf-2.2.8"; - bin = true; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/rimraf/-/rimraf-2.2.8.tgz"; - name = "rimraf-2.2.8.tgz"; - sha1 = "e439be2aaee327321952730f99a8929e4fc50582"; - }) - ]; - buildInputs = - (self.nativeDeps."rimraf" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "rimraf" ]; - }; - by-spec."shelljs"."0.3.x" = - self.by-version."shelljs"."0.3.0"; - by-version."shelljs"."0.3.0" = lib.makeOverridable self.buildNodePackage { - name = "shelljs-0.3.0"; - bin = true; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/shelljs/-/shelljs-0.3.0.tgz"; - name = "shelljs-0.3.0.tgz"; - sha1 = "3596e6307a781544f591f37da618360f31db57b1"; - }) - ]; - buildInputs = - (self.nativeDeps."shelljs" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "shelljs" ]; - }; - by-spec."sigmund"."~1.0.0" = - self.by-version."sigmund"."1.0.1"; - by-version."sigmund"."1.0.1" = lib.makeOverridable self.buildNodePackage { - name = 
"sigmund-1.0.1"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/sigmund/-/sigmund-1.0.1.tgz"; - name = "sigmund-1.0.1.tgz"; - sha1 = "3ff21f198cad2175f9f3b781853fd94d0d19b590"; - }) - ]; - buildInputs = - (self.nativeDeps."sigmund" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "sigmund" ]; - }; - by-spec."sntp"."1.x.x" = - self.by-version."sntp"."1.0.9"; - by-version."sntp"."1.0.9" = lib.makeOverridable self.buildNodePackage { - name = "sntp-1.0.9"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/sntp/-/sntp-1.0.9.tgz"; - name = "sntp-1.0.9.tgz"; - sha1 = "6541184cc90aeea6c6e7b35e2659082443c66198"; - }) - ]; - buildInputs = - (self.nativeDeps."sntp" or []); - deps = { - "hoek-2.16.3" = self.by-version."hoek"."2.16.3"; - }; - peerDependencies = [ - ]; - passthru.names = [ "sntp" ]; - }; - by-spec."source-map"."^0.3.0" = - self.by-version."source-map"."0.3.0"; - by-version."source-map"."0.3.0" = lib.makeOverridable self.buildNodePackage { - name = "source-map-0.3.0"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/source-map/-/source-map-0.3.0.tgz"; - name = "source-map-0.3.0.tgz"; - sha1 = "8586fb9a5a005e5b501e21cd18b6f21b457ad1f9"; - }) - ]; - buildInputs = - (self.nativeDeps."source-map" or []); - deps = { - "amdefine-1.0.0" = self.by-version."amdefine"."1.0.0"; - }; - peerDependencies = [ - ]; - passthru.names = [ "source-map" ]; - }; - by-spec."source-map"."^0.4.2" = - self.by-version."source-map"."0.4.4"; - by-version."source-map"."0.4.4" = lib.makeOverridable self.buildNodePackage { - name = "source-map-0.4.4"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/source-map/-/source-map-0.4.4.tgz"; - name = "source-map-0.4.4.tgz"; - sha1 = "eba4f5da9c0dc999de68032d8b4f76173652036b"; - }) - ]; - buildInputs = - (self.nativeDeps."source-map" or []); - deps = { - "amdefine-1.0.0" = self.by-version."amdefine"."1.0.0"; - }; - peerDependencies = [ - ]; - passthru.names = [ "source-map" ]; - }; - by-spec."sshpk"."^1.7.0" = - self.by-version."sshpk"."1.7.3"; - by-version."sshpk"."1.7.3" = lib.makeOverridable self.buildNodePackage { - name = "sshpk-1.7.3"; - bin = true; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/sshpk/-/sshpk-1.7.3.tgz"; - name = "sshpk-1.7.3.tgz"; - sha1 = "caa8ef95e30765d856698b7025f9f211ab65962f"; - }) - ]; - buildInputs = - (self.nativeDeps."sshpk" or []); - deps = { - "asn1-0.2.3" = self.by-version."asn1"."0.2.3"; - "assert-plus-0.2.0" = self.by-version."assert-plus"."0.2.0"; - "dashdash-1.12.2" = self.by-version."dashdash"."1.12.2"; - "jsbn-0.1.0" = self.by-version."jsbn"."0.1.0"; - "tweetnacl-0.13.3" = self.by-version."tweetnacl"."0.13.3"; - "jodid25519-1.0.2" = self.by-version."jodid25519"."1.0.2"; - "ecc-jsbn-0.1.1" = self.by-version."ecc-jsbn"."0.1.1"; - }; - peerDependencies = [ - ]; - passthru.names = [ "sshpk" ]; - }; - by-spec."string_decoder"."~0.10.x" = - self.by-version."string_decoder"."0.10.31"; - by-version."string_decoder"."0.10.31" = lib.makeOverridable self.buildNodePackage { - name = "string_decoder-0.10.31"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz"; - name = "string_decoder-0.10.31.tgz"; - sha1 = "62e203bc41766c6c28c9fc84301dab1c5310fa94"; - }) - ]; - buildInputs = - (self.nativeDeps."string_decoder" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "string_decoder" ]; - }; - by-spec."stringstream"."~0.0.4" = - 
self.by-version."stringstream"."0.0.5"; - by-version."stringstream"."0.0.5" = lib.makeOverridable self.buildNodePackage { - name = "stringstream-0.0.5"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/stringstream/-/stringstream-0.0.5.tgz"; - name = "stringstream-0.0.5.tgz"; - sha1 = "4e484cd4de5a0bbbee18e46307710a8a81621878"; - }) - ]; - buildInputs = - (self.nativeDeps."stringstream" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "stringstream" ]; - }; - by-spec."strip-ansi"."^0.3.0" = - self.by-version."strip-ansi"."0.3.0"; - by-version."strip-ansi"."0.3.0" = lib.makeOverridable self.buildNodePackage { - name = "strip-ansi-0.3.0"; - bin = true; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/strip-ansi/-/strip-ansi-0.3.0.tgz"; - name = "strip-ansi-0.3.0.tgz"; - sha1 = "25f48ea22ca79187f3174a4db8759347bb126220"; - }) - ]; - buildInputs = - (self.nativeDeps."strip-ansi" or []); - deps = { - "ansi-regex-0.2.1" = self.by-version."ansi-regex"."0.2.1"; - }; - peerDependencies = [ - ]; - passthru.names = [ "strip-ansi" ]; - }; - by-spec."strip-ansi"."^3.0.0" = - self.by-version."strip-ansi"."3.0.0"; - by-version."strip-ansi"."3.0.0" = lib.makeOverridable self.buildNodePackage { - name = "strip-ansi-3.0.0"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.0.tgz"; - name = "strip-ansi-3.0.0.tgz"; - sha1 = "7510b665567ca914ccb5d7e072763ac968be3724"; - }) - ]; - buildInputs = - (self.nativeDeps."strip-ansi" or []); - deps = { - "ansi-regex-2.0.0" = self.by-version."ansi-regex"."2.0.0"; - }; - peerDependencies = [ - ]; - passthru.names = [ "strip-ansi" ]; - }; - by-spec."strip-json-comments"."1.0.x" = - self.by-version."strip-json-comments"."1.0.4"; - by-version."strip-json-comments"."1.0.4" = lib.makeOverridable self.buildNodePackage { - name = "strip-json-comments-1.0.4"; - bin = true; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/strip-json-comments/-/strip-json-comments-1.0.4.tgz"; - name = "strip-json-comments-1.0.4.tgz"; - sha1 = "1e15fbcac97d3ee99bf2d73b4c656b082bbafb91"; - }) - ]; - buildInputs = - (self.nativeDeps."strip-json-comments" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "strip-json-comments" ]; - }; - by-spec."supports-color"."^0.2.0" = - self.by-version."supports-color"."0.2.0"; - by-version."supports-color"."0.2.0" = lib.makeOverridable self.buildNodePackage { - name = "supports-color-0.2.0"; - bin = true; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/supports-color/-/supports-color-0.2.0.tgz"; - name = "supports-color-0.2.0.tgz"; - sha1 = "d92de2694eb3f67323973d7ae3d8b55b4c22190a"; - }) - ]; - buildInputs = - (self.nativeDeps."supports-color" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "supports-color" ]; - }; - by-spec."supports-color"."^2.0.0" = - self.by-version."supports-color"."2.0.0"; - by-version."supports-color"."2.0.0" = lib.makeOverridable self.buildNodePackage { - name = "supports-color-2.0.0"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/supports-color/-/supports-color-2.0.0.tgz"; - name = "supports-color-2.0.0.tgz"; - sha1 = "535d045ce6b6363fa40117084629995e9df324c7"; - }) - ]; - buildInputs = - (self.nativeDeps."supports-color" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "supports-color" ]; - }; - by-spec."tiny-lr-fork"."0.0.5" = - self.by-version."tiny-lr-fork"."0.0.5"; - by-version."tiny-lr-fork"."0.0.5" = 
lib.makeOverridable self.buildNodePackage { - name = "tiny-lr-fork-0.0.5"; - bin = true; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/tiny-lr-fork/-/tiny-lr-fork-0.0.5.tgz"; - name = "tiny-lr-fork-0.0.5.tgz"; - sha1 = "1e99e1e2a8469b736ab97d97eefa98c71f76ed0a"; - }) - ]; - buildInputs = - (self.nativeDeps."tiny-lr-fork" or []); - deps = { - "qs-0.5.6" = self.by-version."qs"."0.5.6"; - "faye-websocket-0.4.4" = self.by-version."faye-websocket"."0.4.4"; - "noptify-0.0.3" = self.by-version."noptify"."0.0.3"; - "debug-0.7.4" = self.by-version."debug"."0.7.4"; - }; - peerDependencies = [ - ]; - passthru.names = [ "tiny-lr-fork" ]; - }; - by-spec."tough-cookie"."~2.2.0" = - self.by-version."tough-cookie"."2.2.1"; - by-version."tough-cookie"."2.2.1" = lib.makeOverridable self.buildNodePackage { - name = "tough-cookie-2.2.1"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/tough-cookie/-/tough-cookie-2.2.1.tgz"; - name = "tough-cookie-2.2.1.tgz"; - sha1 = "3b0516b799e70e8164436a1446e7e5877fda118e"; - }) - ]; - buildInputs = - (self.nativeDeps."tough-cookie" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "tough-cookie" ]; - }; - by-spec."tunnel-agent"."~0.4.1" = - self.by-version."tunnel-agent"."0.4.2"; - by-version."tunnel-agent"."0.4.2" = lib.makeOverridable self.buildNodePackage { - name = "tunnel-agent-0.4.2"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.4.2.tgz"; - name = "tunnel-agent-0.4.2.tgz"; - sha1 = "1104e3f36ac87125c287270067d582d18133bfee"; - }) - ]; - buildInputs = - (self.nativeDeps."tunnel-agent" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "tunnel-agent" ]; - }; - by-spec."tweetnacl".">=0.13.0 <1.0.0" = - self.by-version."tweetnacl"."0.13.3"; - by-version."tweetnacl"."0.13.3" = lib.makeOverridable self.buildNodePackage { - name = "tweetnacl-0.13.3"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/tweetnacl/-/tweetnacl-0.13.3.tgz"; - name = "tweetnacl-0.13.3.tgz"; - sha1 = "d628b56f3bcc3d5ae74ba9d4c1a704def5ab4b56"; - }) - ]; - buildInputs = - (self.nativeDeps."tweetnacl" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "tweetnacl" ]; - }; - by-spec."underscore"."~1.7.0" = - self.by-version."underscore"."1.7.0"; - by-version."underscore"."1.7.0" = lib.makeOverridable self.buildNodePackage { - name = "underscore-1.7.0"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/underscore/-/underscore-1.7.0.tgz"; - name = "underscore-1.7.0.tgz"; - sha1 = "6bbaf0877500d36be34ecaa584e0db9fef035209"; - }) - ]; - buildInputs = - (self.nativeDeps."underscore" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "underscore" ]; - }; - by-spec."underscore.string"."~2.2.1" = - self.by-version."underscore.string"."2.2.1"; - by-version."underscore.string"."2.2.1" = lib.makeOverridable self.buildNodePackage { - name = "underscore.string-2.2.1"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/underscore.string/-/underscore.string-2.2.1.tgz"; - name = "underscore.string-2.2.1.tgz"; - sha1 = "d7c0fa2af5d5a1a67f4253daee98132e733f0f19"; - }) - ]; - buildInputs = - (self.nativeDeps."underscore.string" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "underscore.string" ]; - }; - by-spec."underscore.string"."~2.3.3" = - self.by-version."underscore.string"."2.3.3"; - by-version."underscore.string"."2.3.3" = 
lib.makeOverridable self.buildNodePackage { - name = "underscore.string-2.3.3"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/underscore.string/-/underscore.string-2.3.3.tgz"; - name = "underscore.string-2.3.3.tgz"; - sha1 = "71c08bf6b428b1133f37e78fa3a21c82f7329b0d"; - }) - ]; - buildInputs = - (self.nativeDeps."underscore.string" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "underscore.string" ]; - }; - by-spec."underscore.string"."~2.4.0" = - self.by-version."underscore.string"."2.4.0"; - by-version."underscore.string"."2.4.0" = lib.makeOverridable self.buildNodePackage { - name = "underscore.string-2.4.0"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/underscore.string/-/underscore.string-2.4.0.tgz"; - name = "underscore.string-2.4.0.tgz"; - sha1 = "8cdd8fbac4e2d2ea1e7e2e8097c42f442280f85b"; - }) - ]; - buildInputs = - (self.nativeDeps."underscore.string" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "underscore.string" ]; - }; - by-spec."util-deprecate"."~1.0.1" = - self.by-version."util-deprecate"."1.0.2"; - by-version."util-deprecate"."1.0.2" = lib.makeOverridable self.buildNodePackage { - name = "util-deprecate-1.0.2"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz"; - name = "util-deprecate-1.0.2.tgz"; - sha1 = "450d4dc9fa70de732762fbd2d4a28981419a0ccf"; - }) - ]; - buildInputs = - (self.nativeDeps."util-deprecate" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "util-deprecate" ]; - }; - by-spec."verror"."1.3.6" = - self.by-version."verror"."1.3.6"; - by-version."verror"."1.3.6" = lib.makeOverridable self.buildNodePackage { - name = "verror-1.3.6"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/verror/-/verror-1.3.6.tgz"; - name = "verror-1.3.6.tgz"; - sha1 = "cff5df12946d297d2baaefaa2689e25be01c005c"; - }) - ]; - buildInputs = - (self.nativeDeps."verror" or []); - deps = { - "extsprintf-1.0.2" = self.by-version."extsprintf"."1.0.2"; - }; - peerDependencies = [ - ]; - passthru.names = [ "verror" ]; - }; - by-spec."which"."~1.0.5" = - self.by-version."which"."1.0.9"; - by-version."which"."1.0.9" = lib.makeOverridable self.buildNodePackage { - name = "which-1.0.9"; - bin = true; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/which/-/which-1.0.9.tgz"; - name = "which-1.0.9.tgz"; - sha1 = "460c1da0f810103d0321a9b633af9e575e64486f"; - }) - ]; - buildInputs = - (self.nativeDeps."which" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "which" ]; - }; - by-spec."xtend"."^4.0.0" = - self.by-version."xtend"."4.0.1"; - by-version."xtend"."4.0.1" = lib.makeOverridable self.buildNodePackage { - name = "xtend-4.0.1"; - bin = false; - src = [ - (fetchurl { - url = "http://registry.npmjs.org/xtend/-/xtend-4.0.1.tgz"; - name = "xtend-4.0.1.tgz"; - sha1 = "a5c6d532be656e23db820efb943a1f04998d63af"; - }) - ]; - buildInputs = - (self.nativeDeps."xtend" or []); - deps = { - }; - peerDependencies = [ - ]; - passthru.names = [ "xtend" ]; - }; -} diff --git a/scripts/python-packages-overrides.nix b/scripts/python-packages-overrides.nix deleted file mode 100644 --- a/scripts/python-packages-overrides.nix +++ /dev/null @@ -1,109 +0,0 @@ -# Overrides for the generated python-packages.nix -# -# This function is intended to be used as an extension to the generated file -# python-packages.nix. 
The main objective is to add needed dependencies of C -# libraries and tweak the build instructions where needed. - -{ pkgs, basePythonPackages }: - -let - sed = "sed -i"; -in - -self: super: { - - kombu = super.kombu.override (attrs: { - preConfigure = '' - # Disable msgpack support to avoid conflict. - # https://github.com/celery/kombu/pull/143/files - # - # This can be dropped once celery and kombu are updated to more - # recent versions. - ${sed} -e \ - 's:msgpack.packs, msgpack.unpacks:msgpack.packb, msgpack.unpackb:' \ - kombu/serialization.py - ''; - }); - - lxml = super.lxml.override (attrs: { - buildInputs = with self; [ - pkgs.libxml2 - pkgs.libxslt - ]; - }); - - mercurial = super.mercurial.override (attrs: { - propagatedBuildInputs = attrs.propagatedBuildInputs ++ [ - self.python.modules.curses - ] ++ pkgs.lib.optional pkgs.stdenv.isDarwin - pkgs.darwin.apple_sdk.frameworks.ApplicationServices; - }); - - psutil = super.psutil.override (attrs: { - buildInputs = attrs.buildInputs ++ - pkgs.lib.optional pkgs.stdenv.isDarwin pkgs.darwin.IOKit; - }); - - psycopg2 = super.psycopg2.override (attrs: { - buildInputs = attrs.buildInputs ++ - pkgs.lib.optional pkgs.stdenv.isDarwin pkgs.openssl; - propagatedBuildInputs = attrs.propagatedBuildInputs ++ [ - pkgs.postgresql - ]; - }); - - pycurl = super.pycurl.override (attrs: { - propagatedBuildInputs = attrs.propagatedBuildInputs ++ [ - pkgs.curl - pkgs.openssl - ]; - }); - - Pylons = super.Pylons.override (attrs: { - name = "Pylons-1.0.1-patch1"; - src = pkgs.fetchgit { - url = "https://code.rhodecode.com/upstream/pylons"; - rev = "707354ee4261b9c10450404fc9852ccea4fd667d"; - sha256 = "b2763274c2780523a335f83a1df65be22ebe4ff413a7bc9e9288d23c1f62032e"; - }; - }); - - pyramid = super.pyramid.override (attrs: { - postFixup = '' - wrapPythonPrograms - # TODO: johbo: "wrapPython" adds this magic line which - # confuses pserve. - ${sed} '/import sys; sys.argv/d' $out/bin/.pserve-wrapped - ''; - }); - - Pyro4 = super.Pyro4.override (attrs: { - # TODO: Was not able to generate this version, needs further - # investigation. - name = "Pyro4-4.35"; - src = pkgs.fetchurl { - url = "https://pypi.python.org/packages/source/P/Pyro4/Pyro4-4.35.src.tar.gz"; - md5 = "cbe6cb855f086a0f092ca075005855f3"; - }; - }); - - pysqlite = super.pysqlite.override (attrs: { - propagatedBuildInputs = [ - pkgs.sqlite - ]; - }); - - python-ldap = super.python-ldap.override (attrs: { - propagatedBuildInputs = attrs.propagatedBuildInputs ++ [ - pkgs.cyrus_sasl - pkgs.openldap - pkgs.openssl - ]; - NIX_CFLAGS_COMPILE = "-I${pkgs.cyrus_sasl}/include/sasl"; - }); - - # Avoid that setuptools is replaced, this leads to trouble - # with buildPythonPackage. 
- setuptools = basePythonPackages.setuptools; - -} diff --git a/scripts/python-packages.nix b/scripts/python-packages.nix deleted file mode 100644 --- a/scripts/python-packages.nix +++ /dev/null @@ -1,1203 +0,0 @@ -{ - Babel = super.buildPythonPackage { - name = "Babel-1.3"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; [pytz]; - src = fetchurl { - url = "https://pypi.python.org/packages/source/B/Babel/Babel-1.3.tar.gz"; - md5 = "5264ceb02717843cbc9ffce8e6e06bdb"; - }; - }; - Beaker = super.buildPythonPackage { - name = "Beaker-1.7.0"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; []; - src = fetchurl { - url = "https://pypi.python.org/packages/source/B/Beaker/Beaker-1.7.0.tar.gz"; - md5 = "386be3f7fe427358881eee4622b428b3"; - }; - }; - CProfileV = super.buildPythonPackage { - name = "CProfileV-1.0.6"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; [bottle]; - src = fetchurl { - url = "https://pypi.python.org/packages/source/C/CProfileV/CProfileV-1.0.6.tar.gz"; - md5 = "08c7c242b6e64237bc53c5d13537e03d"; - }; - }; - Cython = super.buildPythonPackage { - name = "Cython-0.22"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; []; - src = fetchurl { - url = "https://pypi.python.org/packages/source/C/Cython/cython-0.22.tar.gz"; - md5 = "1ae25add4ef7b63ee9b4af697300d6b6"; - }; - }; - Fabric = super.buildPythonPackage { - name = "Fabric-1.10.0"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; [paramiko]; - src = fetchurl { - url = "https://pypi.python.org/packages/source/F/Fabric/Fabric-1.10.0.tar.gz"; - md5 = "2cb96473387f0e7aa035210892352f4a"; - }; - }; - FormEncode = super.buildPythonPackage { - name = "FormEncode-1.2.4"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; []; - src = fetchurl { - url = "https://pypi.python.org/packages/source/F/FormEncode/FormEncode-1.2.4.tar.gz"; - md5 = "6bc17fb9aed8aea198975e888e2077f4"; - }; - }; - Jinja2 = super.buildPythonPackage { - name = "Jinja2-2.7.3"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; [MarkupSafe]; - src = fetchurl { - url = "https://pypi.python.org/packages/source/J/Jinja2/Jinja2-2.7.3.tar.gz"; - md5 = "b9dffd2f3b43d673802fe857c8445b1a"; - }; - }; - Mako = super.buildPythonPackage { - name = "Mako-1.0.1"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; [MarkupSafe]; - src = fetchurl { - url = "https://pypi.python.org/packages/source/M/Mako/Mako-1.0.1.tar.gz"; - md5 = "9f0aafd177b039ef67b90ea350497a54"; - }; - }; - Markdown = super.buildPythonPackage { - name = "Markdown-2.6.2"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; []; - src = fetchurl { - url = "https://pypi.python.org/packages/source/M/Markdown/Markdown-2.6.2.tar.gz"; - md5 = "256d19afcc564dc4ce4c229bb762f7ae"; - }; - }; - MarkupSafe = super.buildPythonPackage { - name = "MarkupSafe-0.23"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; []; - src = fetchurl { - url = "https://pypi.python.org/packages/source/M/MarkupSafe/MarkupSafe-0.23.tar.gz"; - md5 = "f5ab3deee4c37cd6a922fb81e730da6e"; - }; - }; - MySQL-python = super.buildPythonPackage { - name = "MySQL-python-1.2.5"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; []; - src = fetchurl { - url = 
"https://pypi.python.org/packages/source/M/MySQL-python/MySQL-python-1.2.5.zip"; - md5 = "654f75b302db6ed8dc5a898c625e030c"; - }; - }; - Paste = super.buildPythonPackage { - name = "Paste-2.0.2"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; [six]; - src = fetchurl { - url = "https://pypi.python.org/packages/source/P/Paste/Paste-2.0.2.tar.gz"; - md5 = "4bfc8a7eaf858f6309d2ac0f40fc951c"; - }; - }; - PasteDeploy = super.buildPythonPackage { - name = "PasteDeploy-1.5.2"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; []; - src = fetchurl { - url = "https://pypi.python.org/packages/source/P/PasteDeploy/PasteDeploy-1.5.2.tar.gz"; - md5 = "352b7205c78c8de4987578d19431af3b"; - }; - }; - PasteScript = super.buildPythonPackage { - name = "PasteScript-1.7.5"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; [Paste PasteDeploy]; - src = fetchurl { - url = "https://pypi.python.org/packages/source/P/PasteScript/PasteScript-1.7.5.tar.gz"; - md5 = "4c72d78dcb6bb993f30536842c16af4d"; - }; - }; - Pygments = super.buildPythonPackage { - name = "Pygments-2.0.2"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; []; - src = fetchurl { - url = "https://pypi.python.org/packages/source/P/Pygments/Pygments-2.0.2.tar.gz"; - md5 = "238587a1370d62405edabd0794b3ec4a"; - }; - }; - Pylons = super.buildPythonPackage { - name = "Pylons-1.0.1"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; [Routes WebHelpers Beaker Paste PasteDeploy PasteScript FormEncode simplejson decorator nose Mako WebError WebTest Tempita MarkupSafe WebOb]; - src = fetchurl { - url = "https://pypi.python.org/packages/source/P/Pylons/Pylons-1.0.1.tar.gz"; - md5 = "6cb880d75fa81213192142b07a6e4915"; - }; - }; - Pyro4 = super.buildPythonPackage { - name = "Pyro4-4.41"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; [serpent]; - src = fetchurl { - url = "https://pypi.python.org/packages/source/P/Pyro4/Pyro4-4.41.tar.gz"; - md5 = "ed69e9bfafa9c06c049a87cb0c4c2b6c"; - }; - }; - RhodeCodeEnterprise = super.buildPythonPackage { - name = "RhodeCodeEnterprise-3.9.0"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; [Babel Beaker FormEncode Mako Markdown MarkupSafe Paste PasteDeploy PasteScript Pygments Pylons Pyro4 Routes SQLAlchemy Tempita URLObject WebError WebHelpers WebOb WebTest Whoosh amqplib anyjson backport-ipaddress celery decorator docutils kombu mercurial packaging pycrypto pyparsing python-dateutil recaptcha-client repoze.lru requests simplejson waitress zope.cachedescriptors py-bcrypt psutil]; - src = ./../..; - }; - Routes = super.buildPythonPackage { - name = "Routes-1.13"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; [repoze.lru]; - src = fetchurl { - url = "https://pypi.python.org/packages/source/R/Routes/Routes-1.13.tar.gz"; - md5 = "d527b0ab7dd9172b1275a41f97448783"; - }; - }; - SQLAlchemy = super.buildPythonPackage { - name = "SQLAlchemy-0.9.9"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; []; - src = fetchurl { - url = "https://pypi.python.org/packages/source/S/SQLAlchemy/SQLAlchemy-0.9.9.tar.gz"; - md5 = "8a10a9bd13ed3336ef7333ac2cc679ff"; - }; - }; - Sphinx = super.buildPythonPackage { - name = "Sphinx-1.2.2"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = 
with self; [Pygments docutils Jinja2]; - src = fetchurl { - url = "https://pypi.python.org/packages/source/S/Sphinx/Sphinx-1.2.2.tar.gz"; - md5 = "3dc73ccaa8d0bfb2d62fb671b1f7e8a4"; - }; - }; - Tempita = super.buildPythonPackage { - name = "Tempita-0.5.2"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; []; - src = fetchurl { - url = "https://pypi.python.org/packages/source/T/Tempita/Tempita-0.5.2.tar.gz"; - md5 = "4c2f17bb9d481821c41b6fbee904cea1"; - }; - }; - URLObject = super.buildPythonPackage { - name = "URLObject-2.4.0"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; []; - src = fetchurl { - url = "https://pypi.python.org/packages/source/U/URLObject/URLObject-2.4.0.tar.gz"; - md5 = "2ed819738a9f0a3051f31dc9924e3065"; - }; - }; - WebError = super.buildPythonPackage { - name = "WebError-0.10.3"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; [WebOb Tempita Pygments Paste]; - src = fetchurl { - url = "https://pypi.python.org/packages/source/W/WebError/WebError-0.10.3.tar.gz"; - md5 = "84b9990b0baae6fd440b1e60cdd06f9a"; - }; - }; - WebHelpers = super.buildPythonPackage { - name = "WebHelpers-1.3"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; [MarkupSafe]; - src = fetchurl { - url = "https://pypi.python.org/packages/source/W/WebHelpers/WebHelpers-1.3.tar.gz"; - md5 = "32749ffadfc40fea51075a7def32588b"; - }; - }; - WebHelpers2 = super.buildPythonPackage { - name = "WebHelpers2-2.0"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; [MarkupSafe six]; - src = fetchurl { - url = "https://pypi.python.org/packages/source/W/WebHelpers2/WebHelpers2-2.0.tar.gz"; - md5 = "0f6b68d70c12ee0aed48c00b24da13d3"; - }; - }; - WebOb = super.buildPythonPackage { - name = "WebOb-1.3.1"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; []; - src = fetchurl { - url = "https://pypi.python.org/packages/source/W/WebOb/WebOb-1.3.1.tar.gz"; - md5 = "20918251c5726956ba8fef22d1556177"; - }; - }; - WebTest = super.buildPythonPackage { - name = "WebTest-1.4.3"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; [WebOb]; - src = fetchurl { - url = "https://pypi.python.org/packages/source/W/WebTest/WebTest-1.4.3.zip"; - md5 = "631ce728bed92c681a4020a36adbc353"; - }; - }; - Whoosh = super.buildPythonPackage { - name = "Whoosh-2.7.0"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; []; - src = fetchurl { - url = "https://pypi.python.org/packages/source/W/Whoosh/Whoosh-2.7.0.tar.gz"; - md5 = "9a0fc2df9335e1d2e81dd84a2c4c416f"; - }; - }; - alembic = super.buildPythonPackage { - name = "alembic-0.8.4"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; [SQLAlchemy Mako python-editor]; - src = fetchurl { - url = "https://pypi.python.org/packages/source/a/alembic/alembic-0.8.4.tar.gz"; - md5 = "5f95d8ee62b443f9b37eb5bee76c582d"; - }; - }; - amqplib = super.buildPythonPackage { - name = "amqplib-1.0.2"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; []; - src = fetchurl { - url = "https://pypi.python.org/packages/source/a/amqplib/amqplib-1.0.2.tgz"; - md5 = "5c92f17fbedd99b2b4a836d4352d1e2f"; - }; - }; - anyjson = super.buildPythonPackage { - name = "anyjson-0.3.3"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with 
self; []; - src = fetchurl { - url = "https://pypi.python.org/packages/source/a/anyjson/anyjson-0.3.3.tar.gz"; - md5 = "2ea28d6ec311aeeebaf993cb3008b27c"; - }; - }; - appenlight-client = super.buildPythonPackage { - name = "appenlight-client-0.6.14"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; [WebOb requests]; - src = fetchurl { - url = "https://pypi.python.org/packages/source/a/appenlight-client/appenlight_client-0.6.14.tar.gz"; - md5 = "578c69b09f4356d898fff1199b98a95c"; - }; - }; - backport-ipaddress = super.buildPythonPackage { - name = "backport-ipaddress-0.1"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; []; - src = fetchurl { - url = "https://pypi.python.org/packages/source/b/backport_ipaddress/backport_ipaddress-0.1.tar.gz"; - md5 = "9c1f45f4361f71b124d7293a60006c05"; - }; - }; - bottle = super.buildPythonPackage { - name = "bottle-0.12.8"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; []; - src = fetchurl { - url = "https://pypi.python.org/packages/source/b/bottle/bottle-0.12.8.tar.gz"; - md5 = "13132c0a8f607bf860810a6ee9064c5b"; - }; - }; - bumpversion = super.buildPythonPackage { - name = "bumpversion-0.5.3"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; []; - src = fetchurl { - url = "https://pypi.python.org/packages/source/b/bumpversion/bumpversion-0.5.3.tar.gz"; - md5 = "c66a3492eafcf5ad4b024be9fca29820"; - }; - }; - celery = super.buildPythonPackage { - name = "celery-2.2.10"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; [python-dateutil anyjson kombu pyparsing]; - src = fetchurl { - url = "https://pypi.python.org/packages/source/c/celery/celery-2.2.10.tar.gz"; - md5 = "898bc87e54f278055b561316ba73e222"; - }; - }; - channelstream = super.buildPythonPackage { - name = "channelstream-0.4.2"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; [gevent gevent-websocket pyramid pyramid-jinja2 itsdangerous]; - src = fetchurl { - url = "https://pypi.python.org/packages/source/c/channelstream/channelstream-0.4.2.tar.gz"; - md5 = "5857cc2b1cef993088817ccc31285254"; - }; - }; - click = super.buildPythonPackage { - name = "click-4.0"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; []; - src = fetchurl { - url = "https://pypi.python.org/packages/source/c/click/click-4.0.tar.gz"; - md5 = "79b475a1dbd566d8ce7daba5e6c1aaa7"; - }; - }; - colander = super.buildPythonPackage { - name = "colander-1.2"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; [translationstring iso8601]; - src = fetchurl { - url = "https://pypi.python.org/packages/source/c/colander/colander-1.2.tar.gz"; - md5 = "83db21b07936a0726e588dae1914b9ed"; - }; - }; - configobj = super.buildPythonPackage { - name = "configobj-5.0.6"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; [six]; - src = fetchurl { - url = "https://pypi.python.org/packages/source/c/configobj/configobj-5.0.6.tar.gz"; - md5 = "e472a3a1c2a67bb0ec9b5d54c13a47d6"; - }; - }; - cov-core = super.buildPythonPackage { - name = "cov-core-1.15.0"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; [coverage]; - src = fetchurl { - url = "https://pypi.python.org/packages/source/c/cov-core/cov-core-1.15.0.tar.gz"; - md5 = "f519d4cb4c4e52856afb14af52919fe6"; - }; - }; - coverage 
= super.buildPythonPackage { - name = "coverage-3.7.1"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; []; - src = fetchurl { - url = "https://pypi.python.org/packages/source/c/coverage/coverage-3.7.1.tar.gz"; - md5 = "c47b36ceb17eaff3ecfab3bcd347d0df"; - }; - }; - cssselect = super.buildPythonPackage { - name = "cssselect-0.9.1"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; []; - src = fetchurl { - url = "https://pypi.python.org/packages/source/c/cssselect/cssselect-0.9.1.tar.gz"; - md5 = "c74f45966277dc7a0f768b9b0f3522ac"; - }; - }; - decorator = super.buildPythonPackage { - name = "decorator-3.4.2"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; []; - src = fetchurl { - url = "https://pypi.python.org/packages/source/d/decorator/decorator-3.4.2.tar.gz"; - md5 = "9e0536870d2b83ae27d58dbf22582f4d"; - }; - }; - docutils = super.buildPythonPackage { - name = "docutils-0.12"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; []; - src = fetchurl { - url = "https://pypi.python.org/packages/source/d/docutils/docutils-0.12.tar.gz"; - md5 = "4622263b62c5c771c03502afa3157768"; - }; - }; - dogpile.cache = super.buildPythonPackage { - name = "dogpile.cache-0.5.7"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; [dogpile.core]; - src = fetchurl { - url = "https://pypi.python.org/packages/source/d/dogpile.cache/dogpile.cache-0.5.7.tar.gz"; - md5 = "3e58ce41af574aab41d78e9c4190f194"; - }; - }; - dogpile.core = super.buildPythonPackage { - name = "dogpile.core-0.4.1"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; []; - src = fetchurl { - url = "https://pypi.python.org/packages/source/d/dogpile.core/dogpile.core-0.4.1.tar.gz"; - md5 = "01cb19f52bba3e95c9b560f39341f045"; - }; - }; - dulwich = super.buildPythonPackage { - name = "dulwich-0.12.0"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; []; - src = fetchurl { - url = "https://pypi.python.org/packages/source/d/dulwich/dulwich-0.12.0.tar.gz"; - md5 = "f3a8a12bd9f9dd8c233e18f3d49436fa"; - }; - }; - ecdsa = super.buildPythonPackage { - name = "ecdsa-0.11"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; []; - src = fetchurl { - url = "https://pypi.python.org/packages/source/e/ecdsa/ecdsa-0.11.tar.gz"; - md5 = "8ef586fe4dbb156697d756900cb41d7c"; - }; - }; - flake8 = super.buildPythonPackage { - name = "flake8-2.4.1"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; [pyflakes pep8 mccabe]; - src = fetchurl { - url = "https://pypi.python.org/packages/source/f/flake8/flake8-2.4.1.tar.gz"; - md5 = "ed45d3db81a3b7c88bd63c6e37ca1d65"; - }; - }; - future = super.buildPythonPackage { - name = "future-0.14.3"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; []; - src = fetchurl { - url = "https://pypi.python.org/packages/source/f/future/future-0.14.3.tar.gz"; - md5 = "e94079b0bd1fc054929e8769fc0f6083"; - }; - }; - futures = super.buildPythonPackage { - name = "futures-3.0.2"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; []; - src = fetchurl { - url = "https://pypi.python.org/packages/source/f/futures/futures-3.0.2.tar.gz"; - md5 = "42aaf1e4de48d6e871d77dc1f9d96d5a"; - }; - }; - gevent = super.buildPythonPackage { - name = 
"gevent-1.0.2"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; [greenlet]; - src = fetchurl { - url = "https://pypi.python.org/packages/source/g/gevent/gevent-1.0.2.tar.gz"; - md5 = "117f135d57ca7416203fba3720bf71c1"; - }; - }; - gevent-websocket = super.buildPythonPackage { - name = "gevent-websocket-0.9.5"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; [gevent]; - src = fetchurl { - url = "https://pypi.python.org/packages/source/g/gevent-websocket/gevent-websocket-0.9.5.tar.gz"; - md5 = "03a8473b9a61426b0ef6094319141389"; - }; - }; - gnureadline = super.buildPythonPackage { - name = "gnureadline-6.3.3"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; []; - src = fetchurl { - url = "https://pypi.python.org/packages/source/g/gnureadline/gnureadline-6.3.3.tar.gz"; - md5 = "c4af83c9a3fbeac8f2da9b5a7c60e51c"; - }; - }; - gprof2dot = super.buildPythonPackage { - name = "gprof2dot-2015.12.1"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; []; - src = fetchurl { - url = "https://pypi.python.org/packages/source/g/gprof2dot/gprof2dot-2015.12.1.tar.gz"; - md5 = "e23bf4e2f94db032750c193384b4165b"; - }; - }; - greenlet = super.buildPythonPackage { - name = "greenlet-0.4.7"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; []; - src = fetchurl { - url = "https://pypi.python.org/packages/source/g/greenlet/greenlet-0.4.7.zip"; - md5 = "c2333a8ff30fa75c5d5ec0e67b461086"; - }; - }; - gunicorn = super.buildPythonPackage { - name = "gunicorn-19.3.0"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; []; - src = fetchurl { - url = "https://pypi.python.org/packages/source/g/gunicorn/gunicorn-19.3.0.tar.gz"; - md5 = "faa3e80661efd67e5e06bba32699af20"; - }; - }; - infrae.cache = super.buildPythonPackage { - name = "infrae.cache-1.0.1"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; [Beaker repoze.lru]; - src = fetchurl { - url = "https://pypi.python.org/packages/source/i/infrae.cache/infrae.cache-1.0.1.tar.gz"; - md5 = "b09076a766747e6ed2a755cc62088e32"; - }; - }; - invoke = super.buildPythonPackage { - name = "invoke-0.11.1"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; []; - src = fetchurl { - url = "https://pypi.python.org/packages/source/i/invoke/invoke-0.11.1.tar.gz"; - md5 = "3d4ecbe26779ceef1046ecf702c9c4a8"; - }; - }; - ipdb = super.buildPythonPackage { - name = "ipdb-0.8"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; [ipython]; - src = fetchurl { - url = "https://pypi.python.org/packages/source/i/ipdb/ipdb-0.8.zip"; - md5 = "96dca0712efa01aa5eaf6b22071dd3ed"; - }; - }; - ipython = super.buildPythonPackage { - name = "ipython-3.1.0"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; [gnureadline]; - src = fetchurl { - url = "https://pypi.python.org/packages/source/i/ipython/ipython-3.1.0.tar.gz"; - md5 = "a749d90c16068687b0ec45a27e72ef8f"; - }; - }; - iso8601 = super.buildPythonPackage { - name = "iso8601-0.1.11"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; []; - src = fetchurl { - url = "https://pypi.python.org/packages/source/i/iso8601/iso8601-0.1.11.tar.gz"; - md5 = "b06d11cd14a64096f907086044f0fe38"; - }; - }; - itsdangerous = super.buildPythonPackage { - name = 
"itsdangerous-0.24"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; []; - src = fetchurl { - url = "https://pypi.python.org/packages/source/i/itsdangerous/itsdangerous-0.24.tar.gz"; - md5 = "a3d55aa79369aef5345c036a8a26307f"; - }; - }; - kombu = super.buildPythonPackage { - name = "kombu-1.5.1"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; [anyjson amqplib]; - src = fetchurl { - url = "https://pypi.python.org/packages/source/k/kombu/kombu-1.5.1.tar.gz"; - md5 = "50662f3c7e9395b3d0721fb75d100b63"; - }; - }; - lxml = super.buildPythonPackage { - name = "lxml-3.4.4"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; []; - src = fetchurl { - url = "https://pypi.python.org/packages/source/l/lxml/lxml-3.4.4.tar.gz"; - md5 = "a9a65972afc173ec7a39c585f4eea69c"; - }; - }; - mccabe = super.buildPythonPackage { - name = "mccabe-0.3"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; []; - src = fetchurl { - url = "https://pypi.python.org/packages/source/m/mccabe/mccabe-0.3.tar.gz"; - md5 = "81640948ff226f8c12b3277059489157"; - }; - }; - meld3 = super.buildPythonPackage { - name = "meld3-1.0.2"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; []; - src = fetchurl { - url = "https://pypi.python.org/packages/source/m/meld3/meld3-1.0.2.tar.gz"; - md5 = "3ccc78cd79cffd63a751ad7684c02c91"; - }; - }; - mercurial = super.buildPythonPackage { - name = "mercurial-3.3.3"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; []; - src = fetchurl { - url = "https://pypi.python.org/packages/source/M/Mercurial/mercurial-3.3.3.tar.gz"; - md5 = "8648a6980fc12a5a424abe809ab4c6e5"; - }; - }; - mock = super.buildPythonPackage { - name = "mock-1.0.1"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; []; - src = fetchurl { - url = "https://pypi.python.org/packages/source/m/mock/mock-1.0.1.tar.gz"; - md5 = "c3971991738caa55ec7c356bbc154ee2"; - }; - }; - msgpack-python = super.buildPythonPackage { - name = "msgpack-python-0.4.6"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; []; - src = fetchurl { - url = "https://pypi.python.org/packages/source/m/msgpack-python/msgpack-python-0.4.6.tar.gz"; - md5 = "8b317669314cf1bc881716cccdaccb30"; - }; - }; - nose = super.buildPythonPackage { - name = "nose-1.3.6"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; []; - src = fetchurl { - url = "https://pypi.python.org/packages/source/n/nose/nose-1.3.6.tar.gz"; - md5 = "0ca546d81ca8309080fc80cb389e7a16"; - }; - }; - objgraph = super.buildPythonPackage { - name = "objgraph-2.0.0"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; []; - src = fetchurl { - url = "https://pypi.python.org/packages/source/o/objgraph/objgraph-2.0.0.tar.gz"; - md5 = "25b0d5e5adc74aa63ead15699614159c"; - }; - }; - packaging = super.buildPythonPackage { - name = "packaging-15.2"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; []; - src = fetchurl { - url = "https://pypi.python.org/packages/source/p/packaging/packaging-15.2.tar.gz"; - md5 = "c16093476f6ced42128bf610e5db3784"; - }; - }; - paramiko = super.buildPythonPackage { - name = "paramiko-1.15.1"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; [pycrypto 
ecdsa]; - src = fetchurl { - url = "https://pypi.python.org/packages/source/p/paramiko/paramiko-1.15.1.tar.gz"; - md5 = "48c274c3f9b1282932567b21f6acf3b5"; - }; - }; - pep8 = super.buildPythonPackage { - name = "pep8-1.5.7"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; []; - src = fetchurl { - url = "https://pypi.python.org/packages/source/p/pep8/pep8-1.5.7.tar.gz"; - md5 = "f6adbdd69365ecca20513c709f9b7c93"; - }; - }; - psutil = super.buildPythonPackage { - name = "psutil-2.2.1"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; []; - src = fetchurl { - url = "https://pypi.python.org/packages/source/p/psutil/psutil-2.2.1.tar.gz"; - md5 = "1a2b58cd9e3a53528bb6148f0c4d5244"; - }; - }; - psycopg2 = super.buildPythonPackage { - name = "psycopg2-2.6"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; []; - src = fetchurl { - url = "https://pypi.python.org/packages/source/p/psycopg2/psycopg2-2.6.tar.gz"; - md5 = "fbbb039a8765d561a1c04969bbae7c74"; - }; - }; - py = super.buildPythonPackage { - name = "py-1.4.29"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; []; - src = fetchurl { - url = "https://pypi.python.org/packages/source/p/py/py-1.4.29.tar.gz"; - md5 = "c28e0accba523a29b35a48bb703fb96c"; - }; - }; - py-bcrypt = super.buildPythonPackage { - name = "py-bcrypt-0.4"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; []; - src = fetchurl { - url = "https://pypi.python.org/packages/source/p/py-bcrypt/py-bcrypt-0.4.tar.gz"; - md5 = "dd8b367d6b716a2ea2e72392525f4e36"; - }; - }; - pycrypto = super.buildPythonPackage { - name = "pycrypto-2.6.1"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; []; - src = fetchurl { - url = "https://pypi.python.org/packages/source/p/pycrypto/pycrypto-2.6.1.tar.gz"; - md5 = "55a61a054aa66812daf5161a0d5d7eda"; - }; - }; - pycurl = super.buildPythonPackage { - name = "pycurl-7.19.5"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; []; - src = fetchurl { - url = "https://pypi.python.org/packages/source/p/pycurl/pycurl-7.19.5.tar.gz"; - md5 = "47b4eac84118e2606658122104e62072"; - }; - }; - pyflakes = super.buildPythonPackage { - name = "pyflakes-0.8.1"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; []; - src = fetchurl { - url = "https://pypi.python.org/packages/source/p/pyflakes/pyflakes-0.8.1.tar.gz"; - md5 = "905fe91ad14b912807e8fdc2ac2e2c23"; - }; - }; - pyparsing = super.buildPythonPackage { - name = "pyparsing-1.5.7"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; []; - src = fetchurl { - url = "https://pypi.python.org/packages/source/p/pyparsing/pyparsing-1.5.7.tar.gz"; - md5 = "9be0fcdcc595199c646ab317c1d9a709"; - }; - }; - pyramid = super.buildPythonPackage { - name = "pyramid-1.5.7"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; [setuptools WebOb repoze.lru zope.interface zope.deprecation venusian translationstring PasteDeploy]; - src = fetchurl { - url = "https://pypi.python.org/packages/source/p/pyramid/pyramid-1.5.7.tar.gz"; - md5 = "179437d1c331e720df50fb4e7428ed6b"; - }; - }; - pyramid-jinja2 = super.buildPythonPackage { - name = "pyramid-jinja2-2.5"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; [pyramid zope.deprecation Jinja2 
MarkupSafe]; - src = fetchurl { - url = "https://pypi.python.org/packages/source/p/pyramid_jinja2/pyramid_jinja2-2.5.tar.gz"; - md5 = "07cb6547204ac5e6f0b22a954ccee928"; - }; - }; - pyramid-mako = super.buildPythonPackage { - name = "pyramid-mako-1.0.2"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; [pyramid Mako]; - src = fetchurl { - url = "https://pypi.python.org/packages/source/p/pyramid_mako/pyramid_mako-1.0.2.tar.gz"; - md5 = "ee25343a97eb76bd90abdc2a774eb48a"; - }; - }; - pysqlite = super.buildPythonPackage { - name = "pysqlite-2.6.3"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; []; - src = fetchurl { - url = "https://pypi.python.org/packages/source/p/pysqlite/pysqlite-2.6.3.tar.gz"; - md5 = "7ff1cedee74646b50117acff87aa1cfa"; - }; - }; - pytest = super.buildPythonPackage { - name = "pytest-2.8.5"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; [py]; - src = fetchurl { - url = "https://pypi.python.org/packages/source/p/pytest/pytest-2.8.5.zip"; - md5 = "8493b06f700862f1294298d6c1b715a9"; - }; - }; - pytest-catchlog = super.buildPythonPackage { - name = "pytest-catchlog-1.2.2"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; [py pytest]; - src = fetchurl { - url = "https://pypi.python.org/packages/source/p/pytest-catchlog/pytest-catchlog-1.2.2.zip"; - md5 = "09d890c54c7456c818102b7ff8c182c8"; - }; - }; - pytest-cov = super.buildPythonPackage { - name = "pytest-cov-1.8.1"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; [py pytest coverage cov-core]; - src = fetchurl { - url = "https://pypi.python.org/packages/source/p/pytest-cov/pytest-cov-1.8.1.tar.gz"; - md5 = "76c778afa2494088270348be42d759fc"; - }; - }; - pytest-profiling = super.buildPythonPackage { - name = "pytest-profiling-1.0.1"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; [six pytest gprof2dot]; - src = fetchurl { - url = "https://pypi.python.org/packages/source/p/pytest-profiling/pytest-profiling-1.0.1.tar.gz"; - md5 = "354404eb5b3fd4dc5eb7fffbb3d9b68b"; - }; - }; - pytest-timeout = super.buildPythonPackage { - name = "pytest-timeout-0.4"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; [pytest]; - src = fetchurl { - url = "https://pypi.python.org/packages/source/p/pytest-timeout/pytest-timeout-0.4.tar.gz"; - md5 = "03b28aff69cbbfb959ed35ade5fde262"; - }; - }; - python-dateutil = super.buildPythonPackage { - name = "python-dateutil-1.5"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; []; - src = fetchurl { - url = "https://pypi.python.org/packages/source/p/python-dateutil/python-dateutil-1.5.tar.gz"; - md5 = "0dcb1de5e5cad69490a3b6ab63f0cfa5"; - }; - }; - python-editor = super.buildPythonPackage { - name = "python-editor-0.5"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; []; - src = fetchurl { - url = "https://pypi.python.org/packages/source/p/python-editor/python-editor-0.5.tar.gz"; - md5 = "ece4f1848d93286d58df88e3fcb37704"; - }; - }; - python-ldap = super.buildPythonPackage { - name = "python-ldap-2.4.19"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; [setuptools]; - src = fetchurl { - url = "https://pypi.python.org/packages/source/p/python-ldap/python-ldap-2.4.19.tar.gz"; - md5 = "b941bf31d09739492aa19ef679e94ae3"; - }; 
- }; - pytz = super.buildPythonPackage { - name = "pytz-2015.4"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; []; - src = fetchurl { - url = "https://pypi.python.org/packages/source/p/pytz/pytz-2015.4.tar.bz2"; - md5 = "39f7375c4b1fa34cdcb4b4765d08f817"; - }; - }; - pyzmq = super.buildPythonPackage { - name = "pyzmq-14.6.0"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; []; - src = fetchurl { - url = "https://pypi.python.org/packages/source/p/pyzmq/pyzmq-14.6.0.tar.gz"; - md5 = "395b5de95a931afa5b14c9349a5b8024"; - }; - }; - recaptcha-client = super.buildPythonPackage { - name = "recaptcha-client-1.0.6"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; []; - src = fetchurl { - url = "https://pypi.python.org/packages/source/r/recaptcha-client/recaptcha-client-1.0.6.tar.gz"; - md5 = "74228180f7e1fb76c4d7089160b0d919"; - }; - }; - repoze.lru = super.buildPythonPackage { - name = "repoze.lru-0.6"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; []; - src = fetchurl { - url = "https://pypi.python.org/packages/source/r/repoze.lru/repoze.lru-0.6.tar.gz"; - md5 = "2c3b64b17a8e18b405f55d46173e14dd"; - }; - }; - requests = super.buildPythonPackage { - name = "requests-2.9.1"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; []; - src = fetchurl { - url = "https://pypi.python.org/packages/source/r/requests/requests-2.9.1.tar.gz"; - md5 = "0b7f480d19012ec52bab78292efd976d"; - }; - }; - serpent = super.buildPythonPackage { - name = "serpent-1.11"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; []; - src = fetchurl { - url = "https://pypi.python.org/packages/source/s/serpent/serpent-1.11.tar.gz"; - md5 = "8d72e90f84631b3ffcb665d74b99a78f"; - }; - }; - setproctitle = super.buildPythonPackage { - name = "setproctitle-1.1.8"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; []; - src = fetchurl { - url = "https://pypi.python.org/packages/source/s/setproctitle/setproctitle-1.1.8.tar.gz"; - md5 = "728f4c8c6031bbe56083a48594027edd"; - }; - }; - setuptools = super.buildPythonPackage { - name = "setuptools-20.1.1"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; []; - src = fetchurl { - url = "https://pypi.python.org/packages/source/s/setuptools/setuptools-20.1.1.tar.gz"; - md5 = "10a0f4feb9f2ea99acf634c8d7136d6d"; - }; - }; - simplejson = super.buildPythonPackage { - name = "simplejson-3.7.2"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; []; - src = fetchurl { - url = "https://pypi.python.org/packages/source/s/simplejson/simplejson-3.7.2.tar.gz"; - md5 = "a5fc7d05d4cb38492285553def5d4b46"; - }; - }; - six = super.buildPythonPackage { - name = "six-1.9.0"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; []; - src = fetchurl { - url = "https://pypi.python.org/packages/source/s/six/six-1.9.0.tar.gz"; - md5 = "476881ef4012262dfc8adc645ee786c4"; - }; - }; - subprocess32 = super.buildPythonPackage { - name = "subprocess32-3.2.6"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; []; - src = fetchurl { - url = "https://pypi.python.org/packages/source/s/subprocess32/subprocess32-3.2.6.tar.gz"; - md5 = "754c5ab9f533e764f931136974b618f1"; - }; - }; - supervisor = super.buildPythonPackage { - 
name = "supervisor-3.1.3"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; [meld3]; - src = fetchurl { - url = "https://pypi.python.org/packages/source/s/supervisor/supervisor-3.1.3.tar.gz"; - md5 = "aad263c4fbc070de63dd354864d5e552"; - }; - }; - transifex-client = super.buildPythonPackage { - name = "transifex-client-0.10"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; []; - src = fetchurl { - url = "https://pypi.python.org/packages/source/t/transifex-client/transifex-client-0.10.tar.gz"; - md5 = "5549538d84b8eede6b254cd81ae024fa"; - }; - }; - translationstring = super.buildPythonPackage { - name = "translationstring-1.1"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; []; - src = fetchurl { - url = "https://pypi.python.org/packages/source/t/translationstring/translationstring-1.1.tar.gz"; - md5 = "0979b46d8f0f852810c8ec4be5c26cf2"; - }; - }; - trollius = super.buildPythonPackage { - name = "trollius-1.0.4"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; [futures]; - src = fetchurl { - url = "https://pypi.python.org/packages/source/t/trollius/trollius-1.0.4.tar.gz"; - md5 = "3631a464d49d0cbfd30ab2918ef2b783"; - }; - }; - uWSGI = super.buildPythonPackage { - name = "uWSGI-2.0.11.2"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; []; - src = fetchurl { - url = "https://pypi.python.org/packages/source/u/uWSGI/uwsgi-2.0.11.2.tar.gz"; - md5 = "1f02dcbee7f6f61de4b1fd68350cf16f"; - }; - }; - venusian = super.buildPythonPackage { - name = "venusian-1.0"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; []; - src = fetchurl { - url = "https://pypi.python.org/packages/source/v/venusian/venusian-1.0.tar.gz"; - md5 = "dccf2eafb7113759d60c86faf5538756"; - }; - }; - waitress = super.buildPythonPackage { - name = "waitress-0.8.9"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; [setuptools]; - src = fetchurl { - url = "https://pypi.python.org/packages/source/w/waitress/waitress-0.8.9.tar.gz"; - md5 = "da3f2e62b3676be5dd630703a68e2a04"; - }; - }; - wsgiref = super.buildPythonPackage { - name = "wsgiref-0.1.2"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; []; - src = fetchurl { - url = "https://pypi.python.org/packages/source/w/wsgiref/wsgiref-0.1.2.zip"; - md5 = "29b146e6ebd0f9fb119fe321f7bcf6cb"; - }; - }; - zope.cachedescriptors = super.buildPythonPackage { - name = "zope.cachedescriptors-4.0.0"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; [setuptools]; - src = fetchurl { - url = "https://pypi.python.org/packages/source/z/zope.cachedescriptors/zope.cachedescriptors-4.0.0.tar.gz"; - md5 = "8d308de8c936792c8e758058fcb7d0f0"; - }; - }; - zope.deprecation = super.buildPythonPackage { - name = "zope.deprecation-4.1.1"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; [setuptools]; - src = fetchurl { - url = "https://pypi.python.org/packages/source/z/zope.deprecation/zope.deprecation-4.1.1.tar.gz"; - md5 = "ce261b9384066f7e13b63525778430cb"; - }; - }; - zope.event = super.buildPythonPackage { - name = "zope.event-4.0.3"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; [setuptools]; - src = fetchurl { - url = 
"https://pypi.python.org/packages/source/z/zope.event/zope.event-4.0.3.tar.gz"; - md5 = "9a3780916332b18b8b85f522bcc3e249"; - }; - }; - zope.interface = super.buildPythonPackage { - name = "zope.interface-4.1.1"; - buildInputs = with self; []; - doCheck = false; - propagatedBuildInputs = with self; [setuptools]; - src = fetchurl { - url = "https://pypi.python.org/packages/source/z/zope.interface/zope.interface-4.1.1.tar.gz"; - md5 = "edcd5f719c5eb2e18894c4d06e29b6c6"; - }; - }; - -### Test requirements - - -} diff --git a/setup.py b/setup.py --- a/setup.py +++ b/setup.py @@ -214,10 +214,12 @@ setup( entry_points={ 'enterprise.plugins1': [ 'crowd=rhodecode.authentication.plugins.auth_crowd:plugin_factory', + 'headers=rhodecode.authentication.plugins.auth_headers:plugin_factory', 'jasig_cas=rhodecode.authentication.plugins.auth_jasig_cas:plugin_factory', 'ldap=rhodecode.authentication.plugins.auth_ldap:plugin_factory', 'pam=rhodecode.authentication.plugins.auth_pam:plugin_factory', 'rhodecode=rhodecode.authentication.plugins.auth_rhodecode:plugin_factory', + 'token=rhodecode.authentication.plugins.auth_token:plugin_factory', ], 'paste.app_factory': [ 'main=rhodecode.config.middleware:make_pyramid_app',