diff --git a/configs/development.ini b/configs/development.ini --- a/configs/development.ini +++ b/configs/development.ini @@ -657,6 +657,10 @@ vcs.methods.cache = true ; Legacy available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible #vcs.svn.compatible_version = 1.8 +; Redis connection settings for svn integrations logic +; This connection string needs to be the same on ce and vcsserver +vcs.svn.redis_conn = redis://redis:6379/0 + ; Enable SVN proxy of requests over HTTP vcs.svn.proxy.enabled = true diff --git a/configs/production.ini b/configs/production.ini --- a/configs/production.ini +++ b/configs/production.ini @@ -625,6 +625,10 @@ vcs.methods.cache = true ; Legacy available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible #vcs.svn.compatible_version = 1.8 +; Redis connection settings for svn integrations logic +; This connection string needs to be the same on ce and vcsserver +vcs.svn.redis_conn = redis://redis:6379/0 + ; Enable SVN proxy of requests over HTTP vcs.svn.proxy.enabled = true diff --git a/rhodecode/config/config_maker.py b/rhodecode/config/config_maker.py --- a/rhodecode/config/config_maker.py +++ b/rhodecode/config/config_maker.py @@ -103,6 +103,7 @@ def sanitize_settings_and_apply_defaults settings_maker.make_setting('statsd.statsd_ipv6', False, parser='bool') settings_maker.make_setting('vcs.svn.compatible_version', '') + settings_maker.make_setting('vcs.svn.redis_conn', 'redis://redis:6379/0') settings_maker.make_setting('vcs.svn.proxy.enabled', True, parser='bool') settings_maker.make_setting('vcs.svn.proxy.host', 'http://svn:8090', parser='string') settings_maker.make_setting('vcs.hooks.protocol', 'http') diff --git a/rhodecode/lib/action_parser.py b/rhodecode/lib/action_parser.py --- a/rhodecode/lib/action_parser.py +++ b/rhodecode/lib/action_parser.py @@ -258,8 +258,7 @@ class ActionParser(object): commit = 
repo.get_commit(commit_id=commit_id) commits.append(commit) except CommitDoesNotExistError: - log.error( - 'cannot find commit id %s in this repository', + log.error('cannot find commit id %s in this repository', commit_id) commits.append(commit_id) continue diff --git a/rhodecode/lib/hook_daemon/base.py b/rhodecode/lib/hook_daemon/base.py --- a/rhodecode/lib/hook_daemon/base.py +++ b/rhodecode/lib/hook_daemon/base.py @@ -15,13 +15,14 @@ # This program is dual-licensed. If you wish to learn more about the # RhodeCode Enterprise Edition, including its added features, Support services, # and proprietary license terms, please see https://rhodecode.com/licenses/ + import os import time import logging -import tempfile from rhodecode.lib.config_utils import get_config -from rhodecode.lib.ext_json import json + +from rhodecode.lib.svn_txn_utils import get_txn_id_from_store log = logging.getLogger(__name__) @@ -47,49 +48,22 @@ class HooksModuleCallbackDaemon(BaseHook super().__init__() self.hooks_module = module - -def get_txn_id_data_path(txn_id): - import rhodecode - - root = rhodecode.CONFIG.get('cache_dir') or tempfile.gettempdir() - final_dir = os.path.join(root, 'svn_txn_id') - - if not os.path.isdir(final_dir): - os.makedirs(final_dir) - return os.path.join(final_dir, 'rc_txn_id_{}'.format(txn_id)) - - -def store_txn_id_data(txn_id, data_dict): - if not txn_id: - log.warning('Cannot store txn_id because it is empty') - return - - path = get_txn_id_data_path(txn_id) - try: - with open(path, 'wb') as f: - f.write(json.dumps(data_dict)) - except Exception: - log.exception('Failed to write txn_id metadata') - - -def get_txn_id_from_store(txn_id): - """ - Reads txn_id from store and if present returns the data for callback manager - """ - path = get_txn_id_data_path(txn_id) - try: - with open(path, 'rb') as f: - return json.loads(f.read()) - except Exception: - return {} + def __repr__(self): + return f'HooksModuleCallbackDaemon(hooks_module={self.hooks_module})' def 
prepare_callback_daemon(extras, protocol, host, txn_id=None): - txn_details = get_txn_id_from_store(txn_id) - port = txn_details.get('port', 0) + match protocol: case 'http': from rhodecode.lib.hook_daemon.http_hooks_deamon import HttpHooksCallbackDaemon + port = 0 + if txn_id: + # read txn-id to re-use the PORT for callback daemon + repo_path = os.path.join(extras['repo_store'], extras['repository']) + txn_details = get_txn_id_from_store(repo_path, txn_id) + port = txn_details.get('port', 0) + callback_daemon = HttpHooksCallbackDaemon( txn_id=txn_id, host=host, port=port) case 'celery': diff --git a/rhodecode/lib/hook_daemon/celery_hooks_deamon.py b/rhodecode/lib/hook_daemon/celery_hooks_deamon.py --- a/rhodecode/lib/hook_daemon/celery_hooks_deamon.py +++ b/rhodecode/lib/hook_daemon/celery_hooks_deamon.py @@ -28,3 +28,6 @@ class CeleryHooksCallbackDaemon(BaseHook # TODO: replace this with settings bootstrapped... self.task_queue = config.get('app:main', 'celery.broker_url') self.task_backend = config.get('app:main', 'celery.result_backend') + + def __repr__(self): + return f'CeleryHooksCallbackDaemon(task_queue={self.task_queue}, task_backend={self.task_backend})' diff --git a/rhodecode/lib/hook_daemon/http_hooks_deamon.py b/rhodecode/lib/hook_daemon/http_hooks_deamon.py --- a/rhodecode/lib/hook_daemon/http_hooks_deamon.py +++ b/rhodecode/lib/hook_daemon/http_hooks_deamon.py @@ -30,7 +30,7 @@ from socketserver import TCPServer from rhodecode.model import meta from rhodecode.lib.ext_json import json from rhodecode.lib import rc_cache -from rhodecode.lib.hook_daemon.base import get_txn_id_data_path +from rhodecode.lib.svn_txn_utils import get_txn_id_data_key from rhodecode.lib.hook_daemon.hook_module import Hooks log = logging.getLogger(__name__) @@ -185,9 +185,12 @@ class HttpHooksCallbackDaemon(ThreadedHo use_gevent = False + def __repr__(self): + return f'HttpHooksCallbackDaemon(hooks_uri={self.hooks_uri})' + @property def _hook_prefix(self): - return 'HOOKS: {} 
'.format(self.hooks_uri) + return f'HOOKS: {self.hooks_uri} ' def get_hostname(self): return socket.gethostname() or '127.0.0.1' @@ -205,7 +208,7 @@ class HttpHooksCallbackDaemon(ThreadedHo port = self.get_available_port() server_address = (host, port) - self.hooks_uri = '{}:{}'.format(host, port) + self.hooks_uri = f'{host}:{port}' self.txn_id = txn_id self._done = False @@ -249,7 +252,9 @@ class HttpHooksCallbackDaemon(ThreadedHo self._daemon = None self._callback_thread = None if self.txn_id: - txn_id_file = get_txn_id_data_path(self.txn_id) + #TODO: figure out the repo_path... + repo_path = '' + txn_id_file = get_txn_id_data_key(repo_path, self.txn_id) log.debug('Cleaning up TXN ID %s', txn_id_file) if os.path.isfile(txn_id_file): os.remove(txn_id_file) @@ -272,7 +277,9 @@ class HttpHooksCallbackDaemon(ThreadedHo self._callback_greenlet = None if self.txn_id: - txn_id_file = get_txn_id_data_path(self.txn_id) + #TODO: figure out the repo_path... + repo_path = '' + txn_id_file = get_txn_id_data_key(repo_path, self.txn_id) log.debug('Cleaning up TXN ID %s', txn_id_file) if os.path.isfile(txn_id_file): os.remove(txn_id_file) diff --git a/rhodecode/lib/middleware/simplesvn.py b/rhodecode/lib/middleware/simplesvn.py --- a/rhodecode/lib/middleware/simplesvn.py +++ b/rhodecode/lib/middleware/simplesvn.py @@ -17,7 +17,8 @@ # RhodeCode Enterprise Edition, including its added features, Support services, # and proprietary license terms, please see https://rhodecode.com/licenses/ -import base64 +import re +import os import logging import urllib.request import urllib.parse @@ -28,14 +29,10 @@ import requests from pyramid.httpexceptions import HTTPNotAcceptable from rhodecode import ConfigGet -from rhodecode.lib import rc_cache from rhodecode.lib.middleware import simplevcs from rhodecode.lib.middleware.utils import get_path_info from rhodecode.lib.utils import is_valid_repo -from rhodecode.lib.str_utils import safe_str, safe_int, safe_bytes -from rhodecode.lib.type_utils 
import str2bool -from rhodecode.lib.ext_json import json -from rhodecode.lib.hook_daemon.base import store_txn_id_data +from rhodecode.lib.str_utils import safe_str log = logging.getLogger(__name__) @@ -63,28 +60,11 @@ class SimpleSvnApp(object): # stream control flag, based on request and content type... stream = False - if req_method in ['MKCOL'] or has_content_length: - data_processed = False - # read chunk to check if we have txn-with-props - initial_data: bytes = data_io.read(1024) - if initial_data.startswith(b'(create-txn-with-props'): - data_io = initial_data + data_io.read() - # store on-the-fly our rc_extra using svn revision properties - # those can be read later on in hooks executed so we have a way - # to pass in the data into svn hooks - rc_data = base64.urlsafe_b64encode(json.dumps(self.rc_extras)) - rc_data_len = str(len(rc_data)) - # header defines data length, and serialized data - skel = b' rc-scm-extras %b %b' % (safe_bytes(rc_data_len), safe_bytes(rc_data)) - data_io = data_io[:-2] + skel + b'))' - data_processed = True - - if not data_processed: - # NOTE(johbo): Avoid that we end up with sending the request in chunked - # transfer encoding (mainly on Gunicorn). If we know the content - # length, then we should transfer the payload in one request. - data_io = initial_data + data_io.read() + # NOTE(johbo): Avoid that we end up with sending the request in chunked + # transfer encoding (mainly on Gunicorn). If we know the content + # length, then we should transfer the payload in one request. 
+ data_io = data_io.read() if req_method in ['GET', 'PUT'] or transfer_encoding == 'chunked': # NOTE(marcink): when getting/uploading files, we want to STREAM content @@ -101,6 +81,7 @@ class SimpleSvnApp(object): stream=stream ) if req_method in ['HEAD', 'DELETE']: + # NOTE(marcink): HEAD might be deprecated for SVN 1.14+ protocol del call_kwargs['data'] try: @@ -120,14 +101,6 @@ class SimpleSvnApp(object): log.debug('got response code: %s', response.status_code) response_headers = self._get_response_headers(response.headers) - - if response.headers.get('SVN-Txn-name'): - svn_tx_id = response.headers.get('SVN-Txn-name') - txn_id = rc_cache.utils.compute_key_from_params( - self.config['repository'], svn_tx_id) - port = safe_int(self.rc_extras['hooks_uri'].split(':')[-1]) - store_txn_id_data(txn_id, {'port': port}) - start_response(f'{response.status_code} {response.reason}', response_headers) return response.iter_content(chunk_size=1024) @@ -137,6 +110,20 @@ class SimpleSvnApp(object): url_path = urllib.parse.quote(url_path, safe="/:=~+!$,;'") return url_path + def _get_txn_id(self, environ): + url = environ['RAW_URI'] + + # Define the regex pattern + pattern = r'/txr/([^/]+)/' + + # Search for the pattern in the URL + match = re.search(pattern, url) + + # Check if a match is found and extract the captured group + if match: + txn_id = match.group(1) + return txn_id + def _get_request_headers(self, environ): headers = {} whitelist = { @@ -182,10 +169,39 @@ class DisabledSimpleSvnApp(object): class SimpleSvn(simplevcs.SimpleVCS): + """ + details: https://svn.apache.org/repos/asf/subversion/trunk/notes/http-and-webdav/webdav-protocol + + Read Commands : (OPTIONS, PROPFIND, GET, REPORT) + + GET: fetch info about resources + PROPFIND: Used to retrieve properties of resources. + REPORT: Used for specialized queries to the repository. E.g History etc... 
+ OPTIONS: request is sent to an SVN server, the server responds with information about the available HTTP + methods and other server capabilities. + + Write Commands : (MKACTIVITY, PROPPATCH, PUT, CHECKOUT, MKCOL, MOVE, + -------------- COPY, DELETE, LOCK, UNLOCK, MERGE) + + With the exception of LOCK/UNLOCK, every write command performs some + sort of DeltaV commit operation. In DeltaV, a commit always starts + by creating a transaction (MKACTIVITY), applies a log message + (PROPPATCH), does some other write methods, and then ends by + committing the transaction (MERGE). If the MERGE fails, the client + may try to remove the transaction with a DELETE. + + PROPPATCH: Used to set and/or remove properties on resources. + MKCOL: Creates a new collection (directory). + DELETE: Removes a resource. + COPY and MOVE: Used for copying and moving resources. + MERGE: Used to merge changes from different branches. + CHECKOUT, CHECKIN, UNCHECKOUT: DeltaV methods for managing working resources and versions. + """ SCM = 'svn' READ_ONLY_COMMANDS = ('OPTIONS', 'PROPFIND', 'GET', 'REPORT') - DEFAULT_HTTP_SERVER = 'http://localhost:8090' + WRITE_COMMANDS = ('MERGE', 'POST', 'PUT', 'COPY', 'MOVE', 'DELETE', 'MKCOL') + DEFAULT_HTTP_SERVER = 'http://svn:8090' def _get_repository_name(self, environ): """ @@ -218,10 +234,10 @@ class SimpleSvn(simplevcs.SimpleVCS): else 'push') def _should_use_callback_daemon(self, extras, environ, action): - # only MERGE command triggers hooks, so we don't want to start + # only PUT & MERGE command triggers hooks, so we don't want to start # hooks server too many times. 
POST however starts the svn transaction # so we also need to run the init of callback daemon of POST - if environ['REQUEST_METHOD'] in ['MERGE', 'POST']: + if environ['REQUEST_METHOD'] not in self.READ_ONLY_COMMANDS: return True return False diff --git a/rhodecode/lib/middleware/simplevcs.py b/rhodecode/lib/middleware/simplevcs.py --- a/rhodecode/lib/middleware/simplevcs.py +++ b/rhodecode/lib/middleware/simplevcs.py @@ -25,11 +25,9 @@ It's implemented with basic auth functio import os import re -import io import logging import importlib from functools import wraps -from lxml import etree import time from paste.httpheaders import REMOTE_USER, AUTH_TYPE @@ -41,6 +39,7 @@ from zope.cachedescriptors.property impo import rhodecode from rhodecode.authentication.base import authenticate, VCS_TYPE, loadplugin from rhodecode.lib import rc_cache +from rhodecode.lib.svn_txn_utils import store_txn_id_data from rhodecode.lib.auth import AuthUser, HasPermissionAnyMiddleware from rhodecode.lib.base import ( BasicAuth, get_ip_addr, get_user_agent, vcs_operation_context) @@ -48,7 +47,7 @@ from rhodecode.lib.exceptions import (Us from rhodecode.lib.hook_daemon.base import prepare_callback_daemon from rhodecode.lib.middleware import appenlight from rhodecode.lib.middleware.utils import scm_app_http -from rhodecode.lib.str_utils import safe_bytes +from rhodecode.lib.str_utils import safe_bytes, safe_int from rhodecode.lib.utils import is_valid_repo, SLUG_RE from rhodecode.lib.utils2 import safe_str, fix_PATH, str2bool from rhodecode.lib.vcs.conf import settings as vcs_settings @@ -63,29 +62,6 @@ from rhodecode.model.settings import Set log = logging.getLogger(__name__) -def extract_svn_txn_id(acl_repo_name, data: bytes): - """ - Helper method for extraction of svn txn_id from submitted XML data during - POST operations - """ - - try: - root = etree.fromstring(data) - pat = re.compile(r'/txn/(?P.*)') - for el in root: - if el.tag == '{DAV:}source': - for sub_el in el: - if sub_el.tag 
== '{DAV:}href': - match = pat.search(sub_el.text) - if match: - svn_tx_id = match.groupdict()['txn_id'] - txn_id = rc_cache.utils.compute_key_from_params( - acl_repo_name, svn_tx_id) - return txn_id - except Exception: - log.exception('Failed to extract txn_id') - - def initialize_generator(factory): """ Initializes the returned generator by draining its first element. @@ -468,7 +444,6 @@ class SimpleVCS(object): log.debug('Not enough credentials to access repo: `%s` ' 'repository as anonymous user', self.acl_repo_name) - username = None # ============================================================== # DEFAULT PERM FAILED OR ANONYMOUS ACCESS IS DISABLED SO WE @@ -582,6 +557,24 @@ class SimpleVCS(object): return self._generate_vcs_response( environ, start_response, repo_path, extras, action) + def _get_txn_id(self, environ): + + for k in ['RAW_URI', 'HTTP_DESTINATION']: + url = environ.get(k) + if not url: + continue + + # regex to search for svn-txn-id + pattern = r'/!svn/txr/([^/]+)/' + + # Search for the pattern in the URL + match = re.search(pattern, url) + + # Check if a match is found and extract the captured group + if match: + txn_id = match.group(1) + return txn_id + @initialize_generator def _generate_vcs_response( self, environ, start_response, repo_path, extras, action): @@ -593,28 +586,23 @@ class SimpleVCS(object): also handles the locking exceptions which will be triggered when the first chunk is produced by the underlying WSGI application. 
""" - - txn_id = '' - if 'CONTENT_LENGTH' in environ and environ['REQUEST_METHOD'] == 'MERGE': - # case for SVN, we want to re-use the callback daemon port - # so we use the txn_id, for this we peek the body, and still save - # it as wsgi.input - - stream = environ['wsgi.input'] - - if isinstance(stream, io.BytesIO): - data: bytes = stream.getvalue() - elif hasattr(stream, 'buf'): # most likely gunicorn.http.body.Body - data: bytes = stream.buf.getvalue() - else: - # fallback to the crudest way, copy the iterator - data = safe_bytes(stream.read()) - environ['wsgi.input'] = io.BytesIO(data) - - txn_id = extract_svn_txn_id(self.acl_repo_name, data) + svn_txn_id = '' + if action == 'push': + svn_txn_id = self._get_txn_id(environ) callback_daemon, extras = self._prepare_callback_daemon( - extras, environ, action, txn_id=txn_id) + extras, environ, action, txn_id=svn_txn_id) + + if svn_txn_id: + + port = safe_int(extras['hooks_uri'].split(':')[-1]) + txn_id_data = extras.copy() + txn_id_data.update({'port': port}) + txn_id_data.update({'req_method': environ['REQUEST_METHOD']}) + + full_repo_path = repo_path + store_txn_id_data(full_repo_path, svn_txn_id, txn_id_data) + log.debug('HOOKS extras is %s', extras) http_scheme = self._get_http_scheme(environ) @@ -677,6 +665,7 @@ class SimpleVCS(object): def _prepare_callback_daemon(self, extras, environ, action, txn_id=None): protocol = vcs_settings.HOOKS_PROTOCOL + if not self._should_use_callback_daemon(extras, environ, action): # disable callback daemon for actions that don't require it protocol = 'local' diff --git a/rhodecode/lib/svn_txn_utils.py b/rhodecode/lib/svn_txn_utils.py new file mode 100644 --- /dev/null +++ b/rhodecode/lib/svn_txn_utils.py @@ -0,0 +1,132 @@ +# Copyright (C) 2010-2023 RhodeCode GmbH +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License, version 3 +# (only), as published by the Free Software Foundation. 
+# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . +# +# This program is dual-licensed. If you wish to learn more about the +# RhodeCode Enterprise Edition, including its added features, Support services, +# and proprietary license terms, please see https://rhodecode.com/licenses/ + +import logging +import redis + +from ..lib import rc_cache +from ..lib.ext_json import json + + +log = logging.getLogger(__name__) + +redis_client = None + + +class RedisTxnClient: + + def __init__(self, url): + self.url = url + self._create_client(url) + + def _create_client(self, url): + connection_pool = redis.ConnectionPool.from_url(url) + self.writer_client = redis.StrictRedis( + connection_pool=connection_pool + ) + self.reader_client = self.writer_client + + def set(self, key, value, expire=24 * 60000): + self.writer_client.set(key, value, ex=expire) + + def get(self, key): + return self.reader_client.get(key) + + def delete(self, key): + self.writer_client.delete(key) + + +def get_redis_client(url=''): + + global redis_client + if redis_client is not None: + return redis_client + if not url: + from rhodecode import CONFIG + url = CONFIG['vcs.svn.redis_conn'] + redis_client = RedisTxnClient(url) + return redis_client + + +def extract_svn_txn_id(data: bytes): + """ + Helper method for extraction of svn txn_id from submitted XML data during + POST operations + """ + import re + from lxml import etree + + try: + root = etree.fromstring(data) + pat = re.compile(r'/txn/(?P.*)') + for el in root: + if el.tag == '{DAV:}source': + for sub_el in el: + if sub_el.tag == '{DAV:}href': + match = pat.search(sub_el.text) + if match: + svn_tx_id = match.groupdict()['txn_id'] + 
return svn_tx_id + except Exception: + log.exception('Failed to extract txn_id') + + +def get_txn_id_data_key(repo_path, svn_txn_id): + log.debug('svn-txn-id: %s, obtaining data path', svn_txn_id) + repo_key = rc_cache.utils.compute_key_from_params(repo_path) + final_key = f'{repo_key}.{svn_txn_id}.svn_txn_id' + log.debug('computed final key: %s', final_key) + + return final_key + + +def store_txn_id_data(repo_path, svn_txn_id, data_dict): + log.debug('svn-txn-id: %s, storing data', svn_txn_id) + + if not svn_txn_id: + log.warning('Cannot store txn_id because it is empty') + return + + redis_conn = get_redis_client() + + store_key = get_txn_id_data_key(repo_path, svn_txn_id) + store_data = json.dumps(data_dict) + redis_conn.set(store_key, store_data) + + +def get_txn_id_from_store(repo_path, svn_txn_id, rm_on_read=False): + """ + Reads txn_id from store and if present returns the data for callback manager + """ + log.debug('svn-txn-id: %s, retrieving data', svn_txn_id) + redis_conn = get_redis_client() + + store_key = get_txn_id_data_key(repo_path, svn_txn_id) + data = {} + redis_conn.get(store_key) + try: + raw_data = redis_conn.get(store_key) + data = json.loads(raw_data) + except Exception: + log.exception('Failed to get txn_id metadata') + + if rm_on_read: + log.debug('Cleaning up txn_id at %s', store_key) + redis_conn.delete(store_key) + + return data diff --git a/rhodecode/public/js/rhodecode/routes.js b/rhodecode/public/js/rhodecode/routes.js --- a/rhodecode/public/js/rhodecode/routes.js +++ b/rhodecode/public/js/rhodecode/routes.js @@ -12,16 +12,14 @@ ******************************************************************************/ function registerRCRoutes() { // routes registration - pyroutes.register('admin_artifacts', '/_admin/artifacts', []); - pyroutes.register('admin_artifacts_data', '/_admin/artifacts-data', []); - pyroutes.register('admin_artifacts_delete', '/_admin/artifacts/%(uid)s/delete', ['uid']); - pyroutes.register('admin_artifacts_show_all', 
'/_admin/artifacts', []); - pyroutes.register('admin_artifacts_show_info', '/_admin/artifacts/%(uid)s', ['uid']); - pyroutes.register('admin_artifacts_update', '/_admin/artifacts/%(uid)s/update', ['uid']); + pyroutes.register('admin_artifacts', '/_admin/_admin/artifacts', []); + pyroutes.register('admin_artifacts_delete', '/_admin/_admin/artifacts/%(uid)s/delete', ['uid']); + pyroutes.register('admin_artifacts_show_all', '/_admin/_admin/artifacts', []); + pyroutes.register('admin_artifacts_show_info', '/_admin/_admin/artifacts/%(uid)s', ['uid']); + pyroutes.register('admin_artifacts_update', '/_admin/_admin/artifacts/%(uid)s/update', ['uid']); pyroutes.register('admin_audit_log_entry', '/_admin/audit_logs/%(audit_log_id)s', ['audit_log_id']); pyroutes.register('admin_audit_logs', '/_admin/audit_logs', []); - pyroutes.register('admin_automation', '/_admin/automation', []); - pyroutes.register('admin_automation_update', '/_admin/automation/%(entry_id)s/update', ['entry_id']); + pyroutes.register('admin_automation', '/_admin/_admin/automation', []); pyroutes.register('admin_defaults_repositories', '/_admin/defaults/repositories', []); pyroutes.register('admin_defaults_repositories_update', '/_admin/defaults/repositories/update', []); pyroutes.register('admin_home', '/_admin', []); @@ -29,7 +27,6 @@ function registerRCRoutes() { pyroutes.register('admin_permissions_application_update', '/_admin/permissions/application/update', []); pyroutes.register('admin_permissions_auth_token_access', '/_admin/permissions/auth_token_access', []); pyroutes.register('admin_permissions_branch', '/_admin/permissions/branch', []); - pyroutes.register('admin_permissions_branch_update', '/_admin/permissions/branch/update', []); pyroutes.register('admin_permissions_global', '/_admin/permissions/global', []); pyroutes.register('admin_permissions_global_update', '/_admin/permissions/global/update', []); pyroutes.register('admin_permissions_ips', '/_admin/permissions/ips', []); @@ -39,8 +36,7 
@@ function registerRCRoutes() { pyroutes.register('admin_permissions_ssh_keys', '/_admin/permissions/ssh_keys', []); pyroutes.register('admin_permissions_ssh_keys_data', '/_admin/permissions/ssh_keys/data', []); pyroutes.register('admin_permissions_ssh_keys_update', '/_admin/permissions/ssh_keys/update', []); - pyroutes.register('admin_scheduler', '/_admin/scheduler', []); - pyroutes.register('admin_scheduler_show_tasks', '/_admin/scheduler/_tasks', []); + pyroutes.register('admin_scheduler', '/_admin/_admin/scheduler', []); pyroutes.register('admin_settings', '/_admin/settings', []); pyroutes.register('admin_settings_email', '/_admin/settings/email', []); pyroutes.register('admin_settings_email_update', '/_admin/settings/email/update', []); @@ -59,8 +55,6 @@ function registerRCRoutes() { pyroutes.register('admin_settings_issuetracker_update', '/_admin/settings/issue-tracker/update', []); pyroutes.register('admin_settings_labs', '/_admin/settings/labs', []); pyroutes.register('admin_settings_labs_update', '/_admin/settings/labs/update', []); - pyroutes.register('admin_settings_license', '/_admin/settings/license', []); - pyroutes.register('admin_settings_license_unlock', '/_admin/settings/license_unlock', []); pyroutes.register('admin_settings_mapping', '/_admin/settings/mapping', []); pyroutes.register('admin_settings_mapping_update', '/_admin/settings/mapping/update', []); pyroutes.register('admin_settings_open_source', '/_admin/settings/open_source', []); @@ -68,12 +62,6 @@ function registerRCRoutes() { pyroutes.register('admin_settings_process_management_data', '/_admin/settings/process_management/data', []); pyroutes.register('admin_settings_process_management_master_signal', '/_admin/settings/process_management/master_signal', []); pyroutes.register('admin_settings_process_management_signal', '/_admin/settings/process_management/signal', []); - pyroutes.register('admin_settings_scheduler_create', '/_admin/scheduler/create', []); - 
pyroutes.register('admin_settings_scheduler_delete', '/_admin/scheduler/%(schedule_id)s/delete', ['schedule_id']); - pyroutes.register('admin_settings_scheduler_edit', '/_admin/scheduler/%(schedule_id)s', ['schedule_id']); - pyroutes.register('admin_settings_scheduler_execute', '/_admin/scheduler/%(schedule_id)s/execute', ['schedule_id']); - pyroutes.register('admin_settings_scheduler_new', '/_admin/scheduler/new', []); - pyroutes.register('admin_settings_scheduler_update', '/_admin/scheduler/%(schedule_id)s/update', ['schedule_id']); pyroutes.register('admin_settings_search', '/_admin/settings/search', []); pyroutes.register('admin_settings_sessions', '/_admin/settings/sessions', []); pyroutes.register('admin_settings_sessions_cleanup', '/_admin/settings/sessions/cleanup', []); @@ -97,7 +85,6 @@ function registerRCRoutes() { pyroutes.register('channelstream_proxy', '/_channelstream', []); pyroutes.register('channelstream_subscribe', '/_admin/channelstream/subscribe', []); pyroutes.register('check_2fa', '/_admin/check_2fa', []); - pyroutes.register('commit_draft_comments_submit', '/%(repo_name)s/changeset/%(commit_id)s/draft_comments_submit', ['repo_name', 'commit_id']); pyroutes.register('debug_style_email', '/_admin/debug_style/email/%(email_id)s', ['email_id']); pyroutes.register('debug_style_email_plain_rendered', '/_admin/debug_style/email-rendered/%(email_id)s', ['email_id']); pyroutes.register('debug_style_home', '/_admin/debug_style', []); @@ -222,8 +209,6 @@ function registerRCRoutes() { pyroutes.register('my_account_emails', '/_admin/my_account/emails', []); pyroutes.register('my_account_emails_add', '/_admin/my_account/emails/new', []); pyroutes.register('my_account_emails_delete', '/_admin/my_account/emails/delete', []); - pyroutes.register('my_account_external_identity', '/_admin/my_account/external-identity', []); - pyroutes.register('my_account_external_identity_delete', '/_admin/my_account/external-identity/delete', []); 
pyroutes.register('my_account_goto_bookmark', '/_admin/my_account/bookmark/%(bookmark_id)s', ['bookmark_id']); pyroutes.register('my_account_notifications', '/_admin/my_account/notifications', []); pyroutes.register('my_account_notifications_test_channelstream', '/_admin/my_account/test_channelstream', []); @@ -254,7 +239,6 @@ function registerRCRoutes() { pyroutes.register('ops_healthcheck', '/_admin/ops/status', []); pyroutes.register('ops_ping', '/_admin/ops/ping', []); pyroutes.register('ops_redirect_test', '/_admin/ops/redirect', []); - pyroutes.register('plugin_admin_chat', '/_admin/plugin_admin_chat/%(action)s', ['action']); pyroutes.register('pull_requests_global', '/_admin/pull-request/%(pull_request_id)s', ['pull_request_id']); pyroutes.register('pull_requests_global_0', '/_admin/pull_requests/%(pull_request_id)s', ['pull_request_id']); pyroutes.register('pull_requests_global_1', '/_admin/pull-requests/%(pull_request_id)s', ['pull_request_id']); @@ -264,7 +248,6 @@ function registerRCRoutes() { pyroutes.register('pullrequest_comments', '/%(repo_name)s/pull-request/%(pull_request_id)s/comments', ['repo_name', 'pull_request_id']); pyroutes.register('pullrequest_create', '/%(repo_name)s/pull-request/create', ['repo_name']); pyroutes.register('pullrequest_delete', '/%(repo_name)s/pull-request/%(pull_request_id)s/delete', ['repo_name', 'pull_request_id']); - pyroutes.register('pullrequest_draft_comments_submit', '/%(repo_name)s/pull-request/%(pull_request_id)s/draft_comments_submit', ['repo_name', 'pull_request_id']); pyroutes.register('pullrequest_drafts', '/%(repo_name)s/pull-request/%(pull_request_id)s/drafts', ['repo_name', 'pull_request_id']); pyroutes.register('pullrequest_merge', '/%(repo_name)s/pull-request/%(pull_request_id)s/merge', ['repo_name', 'pull_request_id']); pyroutes.register('pullrequest_new', '/%(repo_name)s/pull-request/new', ['repo_name']); @@ -277,18 +260,8 @@ function registerRCRoutes() { pyroutes.register('pullrequest_update', 
'/%(repo_name)s/pull-request/%(pull_request_id)s/update', ['repo_name', 'pull_request_id']); pyroutes.register('register', '/_admin/register', []); pyroutes.register('repo_archivefile', '/%(repo_name)s/archive/%(fname)s', ['repo_name', 'fname']); - pyroutes.register('repo_artifacts_data', '/%(repo_name)s/artifacts_data', ['repo_name']); - pyroutes.register('repo_artifacts_delete', '/%(repo_name)s/artifacts/delete/%(uid)s', ['repo_name', 'uid']); - pyroutes.register('repo_artifacts_get', '/%(repo_name)s/artifacts/download/%(uid)s', ['repo_name', 'uid']); - pyroutes.register('repo_artifacts_info', '/%(repo_name)s/artifacts/info/%(uid)s', ['repo_name', 'uid']); pyroutes.register('repo_artifacts_list', '/%(repo_name)s/artifacts', ['repo_name']); - pyroutes.register('repo_artifacts_new', '/%(repo_name)s/artifacts/new', ['repo_name']); - pyroutes.register('repo_artifacts_store', '/%(repo_name)s/artifacts/store', ['repo_name']); - pyroutes.register('repo_artifacts_stream_script', '/_file_store/stream-upload-script', []); - pyroutes.register('repo_artifacts_stream_store', '/_file_store/stream-upload', []); - pyroutes.register('repo_artifacts_update', '/%(repo_name)s/artifacts/update/%(uid)s', ['repo_name', 'uid']); pyroutes.register('repo_automation', '/%(repo_name)s/settings/automation', ['repo_name']); - pyroutes.register('repo_automation_update', '/%(repo_name)s/settings/automation/%(entry_id)s/update', ['repo_name', 'entry_id']); pyroutes.register('repo_changelog', '/%(repo_name)s/changelog', ['repo_name']); pyroutes.register('repo_changelog_file', '/%(repo_name)s/changelog/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); pyroutes.register('repo_commit', '/%(repo_name)s/changeset/%(commit_id)s', ['repo_name', 'commit_id']); @@ -366,9 +339,6 @@ function registerRCRoutes() { pyroutes.register('repo_refs_changelog_data', '/%(repo_name)s/refs-data-changelog', ['repo_name']); pyroutes.register('repo_refs_data', '/%(repo_name)s/refs-data', ['repo_name']); 
pyroutes.register('repo_reviewers', '/%(repo_name)s/settings/review/rules', ['repo_name']); - pyroutes.register('repo_reviewers_review_rule_delete', '/%(repo_name)s/settings/review/rules/%(rule_id)s/delete', ['repo_name', 'rule_id']); - pyroutes.register('repo_reviewers_review_rule_edit', '/%(repo_name)s/settings/review/rules/%(rule_id)s', ['repo_name', 'rule_id']); - pyroutes.register('repo_reviewers_review_rule_new', '/%(repo_name)s/settings/review/rules/new', ['repo_name']); pyroutes.register('repo_settings_quick_actions', '/%(repo_name)s/settings/quick-action', ['repo_name']); pyroutes.register('repo_stats', '/%(repo_name)s/repo_stats/%(commit_id)s', ['repo_name', 'commit_id']); pyroutes.register('repo_summary', '/%(repo_name)s', ['repo_name']); diff --git a/rhodecode/templates/base/issue_tracker_settings.mako b/rhodecode/templates/base/issue_tracker_settings.mako --- a/rhodecode/templates/base/issue_tracker_settings.mako +++ b/rhodecode/templates/base/issue_tracker_settings.mako @@ -17,7 +17,7 @@ examples = [ ( 'Tickets with #123 (Redmine etc)', - '(?\d+)', + '(?[0-9]+)', 'https://myissueserver.com/${repo}/issue/${issue_id}', '' ), @@ -60,7 +60,7 @@ examples = [ ( 'Pivotal Tracker', - '(?:pivot-)(?P\d+)-(?P\d+)', + '(?:pivot-)(?P\d+)-(?P[0-9]+)', 'https://www.pivotaltracker.com/s/projects/${project_id}/stories/${story}', 'PIV-', ), diff --git a/rhodecode/templates/base/vcs_settings.mako b/rhodecode/templates/base/vcs_settings.mako --- a/rhodecode/templates/base/vcs_settings.mako +++ b/rhodecode/templates/base/vcs_settings.mako @@ -332,7 +332,6 @@ POST request to trigger the (re)generation of the mod_dav_svn config. 
*/ $('#vcs_svn_generate_cfg').on('click', function(event) { event.preventDefault(); - alert('i cliked it !!') var url = "${h.route_path('admin_settings_vcs_svn_generate_cfg')}"; var jqxhr = $.post(url, {'csrf_token': CSRF_TOKEN}); jqxhr.done(function(data) { diff --git a/rhodecode/tests/fixture_mods/fixture_pyramid.py b/rhodecode/tests/fixture_mods/fixture_pyramid.py --- a/rhodecode/tests/fixture_mods/fixture_pyramid.py +++ b/rhodecode/tests/fixture_mods/fixture_pyramid.py @@ -161,7 +161,7 @@ def vcsserver_port(request): @pytest.fixture(scope='session') -def available_port_factory(): +def available_port_factory() -> get_available_port: """ Returns a callable which returns free port numbers. """ diff --git a/rhodecode/tests/lib/test_hooks_daemon.py b/rhodecode/tests/lib/test_hooks_daemon.py --- a/rhodecode/tests/lib/test_hooks_daemon.py +++ b/rhodecode/tests/lib/test_hooks_daemon.py @@ -304,7 +304,8 @@ class TestPrepareHooksDaemon(object): 'txn_id': 'txnid2', 'hooks_protocol': protocol.lower(), 'task_backend': '', - 'task_queue': '' + 'task_queue': '', + 'repo_store': '/var/opt/rhodecode_repo_store' } callback, extras = hook_base.prepare_callback_daemon( expected_extras.copy(), protocol=protocol, host='127.0.0.1', diff --git a/rhodecode/tests/server_utils.py b/rhodecode/tests/server_utils.py --- a/rhodecode/tests/server_utils.py +++ b/rhodecode/tests/server_utils.py @@ -148,7 +148,7 @@ class RcVCSServer(ServerBase): self._args = [ 'gunicorn', '--bind', self.bind_addr, - '--worker-class', 'gevent', + '--worker-class', 'gthread', '--backlog', '16', '--timeout', '300', '--workers', workers, @@ -185,7 +185,7 @@ class RcWebServer(ServerBase): self._args = [ 'gunicorn', '--bind', self.bind_addr, - '--worker-class', 'gevent', + '--worker-class', 'gthread', '--backlog', '16', '--timeout', '300', '--workers', workers, @@ -219,3 +219,11 @@ class RcWebServer(ServerBase): params.update(**kwargs) _url = 
f"http://{params['user']}:{params['passwd']}@{params['host']}/{params['cloned_repo']}" return _url + + def repo_clone_credentials(self, **kwargs): + params = { + 'user': TEST_USER_ADMIN_LOGIN, + 'passwd': TEST_USER_ADMIN_PASS, + } + params.update(**kwargs) + return params['user'], params['passwd'] diff --git a/rhodecode/tests/vcs_operations/__init__.py b/rhodecode/tests/vcs_operations/__init__.py --- a/rhodecode/tests/vcs_operations/__init__.py +++ b/rhodecode/tests/vcs_operations/__init__.py @@ -26,20 +26,21 @@ Base for test suite for making push/pull to redirect things to stderr instead of stdout. """ -from os.path import join as jn -from subprocess import Popen, PIPE + import logging import os import tempfile +import subprocess from rhodecode.lib.str_utils import safe_str -from rhodecode.tests import GIT_REPO, HG_REPO +from rhodecode.tests import GIT_REPO, HG_REPO, SVN_REPO DEBUG = True RC_LOG = os.path.join(tempfile.gettempdir(), 'rc.log') REPO_GROUP = 'a_repo_group' -HG_REPO_WITH_GROUP = '%s/%s' % (REPO_GROUP, HG_REPO) -GIT_REPO_WITH_GROUP = '%s/%s' % (REPO_GROUP, GIT_REPO) +HG_REPO_WITH_GROUP = f'{REPO_GROUP}/{HG_REPO}' +GIT_REPO_WITH_GROUP = f'{REPO_GROUP}/{GIT_REPO}' +SVN_REPO_WITH_GROUP = f'{REPO_GROUP}/{SVN_REPO}' log = logging.getLogger(__name__) @@ -65,8 +66,9 @@ class Command(object): if key.startswith('COV_CORE_'): del env[key] - self.process = Popen(command, shell=True, stdout=PIPE, stderr=PIPE, - cwd=self.cwd, env=env) + self.process = subprocess.Popen( + command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, + cwd=self.cwd, env=env) stdout, stderr = self.process.communicate() stdout = safe_str(stdout) @@ -85,12 +87,14 @@ def _add_files(vcs, dest, clone_url=None full_name = 'Marcin Kuźminski' email = 'me@email.com' git_ident = f"git config user.name {full_name} && git config user.email {email}" - cwd = path = jn(dest) + cwd = path = os.path.join(dest) tags = tags or [] - added_file = jn(path, 
'{}_setup.py'.format(next(tempfile._RandomNameSequence()))) - Command(cwd).execute('touch %s' % added_file) - Command(cwd).execute('%s add %s' % (vcs, added_file)) + name_sequence = next(tempfile._RandomNameSequence()) + added_file = os.path.join(path, f'{name_sequence}_setup.py') + + Command(cwd).execute(f'touch {added_file}') + Command(cwd).execute(f'{vcs} add {added_file}') author_str = 'Marcin Kuźminski ' for i in range(kwargs.get('files_no', 3)): @@ -128,7 +132,7 @@ def _add_files_and_push(vcs, dest, clone vcs is git or hg and defines what VCS we want to make those files for """ git_ident = "git config user.name Marcin Kuźminski && git config user.email me@email.com" - cwd = jn(dest) + cwd = os.path.join(dest) # commit some stuff into this repo _add_files(vcs, dest, clone_url, tags, target_branch, new_branch, **kwargs) @@ -147,12 +151,15 @@ def _add_files_and_push(vcs, dest, clone if new_branch: maybe_new_branch = '--new-branch' stdout, stderr = Command(cwd).execute( - 'hg push --traceback --verbose {} -r {} {}'.format(maybe_new_branch, target_branch, clone_url) + f'hg push --traceback --verbose {maybe_new_branch} -r {target_branch} {clone_url}' ) elif vcs == 'git': stdout, stderr = Command(cwd).execute( - """{} && - git push --verbose --tags {} {}""".format(git_ident, clone_url, target_branch) + f'{git_ident} && git push --verbose --tags {clone_url} {target_branch}' + ) + elif vcs == 'svn': + stdout, stderr = Command(cwd).execute( + f'svn ci -m "pushing to {target_branch}"' ) return stdout, stderr @@ -179,6 +186,13 @@ def _check_proper_hg_push(stdout, stderr assert 'abort:' not in stderr +def _check_proper_svn_push(stdout, stderr): + assert 'pushing to' in stdout + assert 'searching for changes' in stdout + + assert 'abort:' not in stderr + + def _check_proper_clone(stdout, stderr, vcs): if vcs == 'hg': assert 'requesting all changes' in stdout @@ -193,3 +207,8 @@ def _check_proper_clone(stdout, stderr, assert 'Cloning into' in stderr assert 'abort:' not in 
stderr assert 'fatal:' not in stderr + + if vcs == 'svn': + assert 'dupa' in stdout + + diff --git a/rhodecode/tests/vcs_operations/conftest.py b/rhodecode/tests/vcs_operations/conftest.py --- a/rhodecode/tests/vcs_operations/conftest.py +++ b/rhodecode/tests/vcs_operations/conftest.py @@ -42,7 +42,7 @@ from rhodecode.model.db import Repositor from rhodecode.model.meta import Session from rhodecode.integrations.types.webhook import WebhookIntegrationType -from rhodecode.tests import GIT_REPO, HG_REPO +from rhodecode.tests import GIT_REPO, HG_REPO, SVN_REPO from rhodecode.tests.conftest import HTTPBIN_DOMAIN, HTTPBIN_POST from rhodecode.tests.fixture import Fixture from rhodecode.tests.server_utils import RcWebServer @@ -51,13 +51,15 @@ from rhodecode.tests.server_utils import REPO_GROUP = 'a_repo_group' HG_REPO_WITH_GROUP = f'{REPO_GROUP}/{HG_REPO}' GIT_REPO_WITH_GROUP = f'{REPO_GROUP}/{GIT_REPO}' +SVN_REPO_WITH_GROUP = f'{REPO_GROUP}/{SVN_REPO}' log = logging.getLogger(__name__) def check_httpbin_connection(): + log.debug('Checking if HTTPBIN_DOMAIN: %s is available', HTTPBIN_DOMAIN) try: - response = requests.get(HTTPBIN_DOMAIN) + response = requests.get(HTTPBIN_DOMAIN, timeout=5) return response.status_code == 200 except Exception as e: print(e) @@ -102,11 +104,15 @@ def repos(request, db_connection): fixture.create_fork(GIT_REPO, GIT_REPO, repo_name_full=GIT_REPO_WITH_GROUP, repo_group=repo_group_id) + fixture.create_fork(SVN_REPO, SVN_REPO, + repo_name_full=SVN_REPO_WITH_GROUP, + repo_group=repo_group_id) @request.addfinalizer def cleanup(): fixture.destroy_repo(HG_REPO_WITH_GROUP) fixture.destroy_repo(GIT_REPO_WITH_GROUP) + fixture.destroy_repo(SVN_REPO_WITH_GROUP) fixture.destroy_repo_group(repo_group_id) @@ -139,11 +145,11 @@ def rc_web_server( """ Run the web server as a subprocess. 
with its own instance of vcsserver """ - rcweb_port = available_port_factory() - log.info('Using rcweb ops test port {}'.format(rcweb_port)) + rcweb_port: int = available_port_factory() + log.info('Using rcweb ops test port %s', rcweb_port) - vcsserver_port = available_port_factory() - log.info('Using vcsserver ops test port {}'.format(vcsserver_port)) + vcsserver_port: int = available_port_factory() + log.info('Using vcsserver ops test port %s', vcsserver_port) vcs_log = os.path.join(tempfile.gettempdir(), 'rc_op_vcs.log') vcsserver_factory( @@ -303,5 +309,3 @@ def branch_permission_setter(request): Session().commit() return _branch_permissions_setter - - diff --git a/rhodecode/tests/vcs_operations/test_vcs_calls_small_post_buffer.py b/rhodecode/tests/vcs_operations/test_vcs_calls_small_post_buffer.py --- a/rhodecode/tests/vcs_operations/test_vcs_calls_small_post_buffer.py +++ b/rhodecode/tests/vcs_operations/test_vcs_calls_small_post_buffer.py @@ -32,7 +32,7 @@ from rhodecode.lib.vcs.backends.git.repo from rhodecode.lib.vcs.nodes import FileNode from rhodecode.tests import GIT_REPO from rhodecode.tests.vcs_operations import Command -from .test_vcs_operations import _check_proper_clone, _check_proper_git_push +from .test_vcs_operations_git import _check_proper_clone, _check_proper_git_push def test_git_clone_with_small_push_buffer(backend_git, rc_web_server, tmpdir): diff --git a/rhodecode/tests/vcs_operations/test_vcs_operations.py b/rhodecode/tests/vcs_operations/test_vcs_operations_git.py rename from rhodecode/tests/vcs_operations/test_vcs_operations.py rename to rhodecode/tests/vcs_operations/test_vcs_operations_git.py --- a/rhodecode/tests/vcs_operations/test_vcs_operations.py +++ b/rhodecode/tests/vcs_operations/test_vcs_operations_git.py @@ -28,47 +28,23 @@ Test suite for making push/pull operatio import time -import logging - import pytest -from rhodecode.lib import rc_cache -from rhodecode.model.auth_token import AuthTokenModel -from rhodecode.model.db 
import Repository, UserIpMap, CacheKey +from rhodecode.model.db import Repository, UserIpMap from rhodecode.model.meta import Session from rhodecode.model.repo import RepoModel from rhodecode.model.user import UserModel -from rhodecode.tests import (GIT_REPO, HG_REPO, TEST_USER_ADMIN_LOGIN) -from rhodecode.tests.utils import assert_message_in_log +from rhodecode.tests import (GIT_REPO, TEST_USER_ADMIN_LOGIN) + from rhodecode.tests.vcs_operations import ( Command, _check_proper_clone, _check_proper_git_push, - _add_files_and_push, HG_REPO_WITH_GROUP, GIT_REPO_WITH_GROUP) + _add_files_and_push, GIT_REPO_WITH_GROUP) @pytest.mark.usefixtures("disable_locking", "disable_anonymous_user") class TestVCSOperations(object): - def test_clone_hg_repo_by_admin(self, rc_web_server, tmpdir): - clone_url = rc_web_server.repo_clone_url(HG_REPO) - stdout, stderr = Command('/tmp').execute( - 'hg clone', clone_url, tmpdir.strpath) - _check_proper_clone(stdout, stderr, 'hg') - - def test_clone_hg_repo_by_admin_pull_protocol(self, rc_web_server, tmpdir): - clone_url = rc_web_server.repo_clone_url(HG_REPO) - stdout, stderr = Command('/tmp').execute( - 'hg clone --pull', clone_url, tmpdir.strpath) - _check_proper_clone(stdout, stderr, 'hg') - - def test_clone_hg_repo_by_admin_pull_stream_protocol(self, rc_web_server, tmpdir): - clone_url = rc_web_server.repo_clone_url(HG_REPO) - stdout, stderr = Command('/tmp').execute( - 'hg clone --pull --stream', clone_url, tmpdir.strpath) - assert 'files to transfer,' in stdout - assert 'transferred 1.' 
in stdout - assert '114 files updated,' in stdout - def test_clone_git_repo_by_admin(self, rc_web_server, tmpdir): clone_url = rc_web_server.repo_clone_url(GIT_REPO) cmd = Command('/tmp') @@ -83,13 +59,6 @@ class TestVCSOperations(object): _check_proper_clone(stdout, stderr, 'git') cmd.assert_returncode_success() - def test_clone_hg_repo_by_id_by_admin(self, rc_web_server, tmpdir): - repo_id = Repository.get_by_repo_name(HG_REPO).repo_id - clone_url = rc_web_server.repo_clone_url('_%s' % repo_id) - stdout, stderr = Command('/tmp').execute( - 'hg clone', clone_url, tmpdir.strpath) - _check_proper_clone(stdout, stderr, 'hg') - def test_clone_git_repo_by_id_by_admin(self, rc_web_server, tmpdir): repo_id = Repository.get_by_repo_name(GIT_REPO).repo_id clone_url = rc_web_server.repo_clone_url('_%s' % repo_id) @@ -98,12 +67,6 @@ class TestVCSOperations(object): _check_proper_clone(stdout, stderr, 'git') cmd.assert_returncode_success() - def test_clone_hg_repo_with_group_by_admin(self, rc_web_server, tmpdir): - clone_url = rc_web_server.repo_clone_url(HG_REPO_WITH_GROUP) - stdout, stderr = Command('/tmp').execute( - 'hg clone', clone_url, tmpdir.strpath) - _check_proper_clone(stdout, stderr, 'hg') - def test_clone_git_repo_with_group_by_admin(self, rc_web_server, tmpdir): clone_url = rc_web_server.repo_clone_url(GIT_REPO_WITH_GROUP) cmd = Command('/tmp') @@ -121,11 +84,6 @@ class TestVCSOperations(object): assert 'Cloning into' in stderr cmd.assert_returncode_success() - def test_clone_wrong_credentials_hg(self, rc_web_server, tmpdir): - clone_url = rc_web_server.repo_clone_url(HG_REPO, passwd='bad!') - stdout, stderr = Command('/tmp').execute( - 'hg clone', clone_url, tmpdir.strpath) - assert 'abort: authorization failed' in stderr def test_clone_wrong_credentials_git(self, rc_web_server, tmpdir): clone_url = rc_web_server.repo_clone_url(GIT_REPO, passwd='bad!') @@ -139,12 +97,6 @@ class TestVCSOperations(object): 'hg clone', clone_url, tmpdir.strpath) assert 'HTTP Error 
404: Not Found' in stderr - def test_clone_hg_repo_as_git(self, rc_web_server, tmpdir): - clone_url = rc_web_server.repo_clone_url(HG_REPO) - stdout, stderr = Command('/tmp').execute( - 'git clone', clone_url, tmpdir.strpath) - assert 'not found' in stderr - def test_clone_non_existing_path_hg(self, rc_web_server, tmpdir): clone_url = rc_web_server.repo_clone_url('trololo') stdout, stderr = Command('/tmp').execute( @@ -156,25 +108,11 @@ class TestVCSOperations(object): stdout, stderr = Command('/tmp').execute('git clone', clone_url) assert 'not found' in stderr - def test_clone_hg_with_slashes(self, rc_web_server, tmpdir): - clone_url = rc_web_server.repo_clone_url('//' + HG_REPO) - stdout, stderr = Command('/tmp').execute('hg clone', clone_url, tmpdir.strpath) - assert 'HTTP Error 404: Not Found' in stderr - def test_clone_git_with_slashes(self, rc_web_server, tmpdir): clone_url = rc_web_server.repo_clone_url('//' + GIT_REPO) stdout, stderr = Command('/tmp').execute('git clone', clone_url) assert 'not found' in stderr - def test_clone_existing_path_hg_not_in_database( - self, rc_web_server, tmpdir, fs_repo_only): - - db_name = fs_repo_only('not-in-db-hg', repo_type='hg') - clone_url = rc_web_server.repo_clone_url(db_name) - stdout, stderr = Command('/tmp').execute( - 'hg clone', clone_url, tmpdir.strpath) - assert 'HTTP Error 404: Not Found' in stderr - def test_clone_existing_path_git_not_in_database( self, rc_web_server, tmpdir, fs_repo_only): db_name = fs_repo_only('not-in-db-git', repo_type='git') @@ -183,14 +121,6 @@ class TestVCSOperations(object): 'git clone', clone_url, tmpdir.strpath) assert 'not found' in stderr - def test_clone_existing_path_hg_not_in_database_different_scm( - self, rc_web_server, tmpdir, fs_repo_only): - db_name = fs_repo_only('not-in-db-git', repo_type='git') - clone_url = rc_web_server.repo_clone_url(db_name) - stdout, stderr = Command('/tmp').execute( - 'hg clone', clone_url, tmpdir.strpath) - assert 'HTTP Error 404: Not Found' in 
stderr - def test_clone_existing_path_git_not_in_database_different_scm( self, rc_web_server, tmpdir, fs_repo_only): db_name = fs_repo_only('not-in-db-hg', repo_type='hg') @@ -199,17 +129,6 @@ class TestVCSOperations(object): 'git clone', clone_url, tmpdir.strpath) assert 'not found' in stderr - def test_clone_non_existing_store_path_hg(self, rc_web_server, tmpdir, user_util): - repo = user_util.create_repo() - clone_url = rc_web_server.repo_clone_url(repo.repo_name) - - # Damage repo by removing it's folder - RepoModel()._delete_filesystem_repo(repo) - - stdout, stderr = Command('/tmp').execute( - 'hg clone', clone_url, tmpdir.strpath) - assert 'HTTP Error 404: Not Found' in stderr - def test_clone_non_existing_store_path_git(self, rc_web_server, tmpdir, user_util): repo = user_util.create_repo(repo_type='git') clone_url = rc_web_server.repo_clone_url(repo.repo_name) @@ -221,17 +140,6 @@ class TestVCSOperations(object): 'git clone', clone_url, tmpdir.strpath) assert 'not found' in stderr - def test_push_new_file_hg(self, rc_web_server, tmpdir): - clone_url = rc_web_server.repo_clone_url(HG_REPO) - stdout, stderr = Command('/tmp').execute( - 'hg clone', clone_url, tmpdir.strpath) - - stdout, stderr = _add_files_and_push( - 'hg', tmpdir.strpath, clone_url=clone_url) - - assert 'pushing to' in stdout - assert 'size summary' in stdout - def test_push_new_file_git(self, rc_web_server, tmpdir): clone_url = rc_web_server.repo_clone_url(GIT_REPO) stdout, stderr = Command('/tmp').execute( @@ -243,58 +151,6 @@ class TestVCSOperations(object): _check_proper_git_push(stdout, stderr) - def test_push_invalidates_cache(self, rc_web_server, tmpdir): - hg_repo = Repository.get_by_repo_name(HG_REPO) - - # init cache objects - CacheKey.delete_all_cache() - - repo_namespace_key = CacheKey.REPO_INVALIDATION_NAMESPACE.format(repo_id=hg_repo.repo_id) - - inv_context_manager = rc_cache.InvalidationContext(key=repo_namespace_key) - - with inv_context_manager as invalidation_context: - # 
__enter__ will create and register cache objects - pass - - cache_keys = hg_repo.cache_keys - assert cache_keys != [] - old_ids = [x.cache_state_uid for x in cache_keys] - - # clone to init cache - clone_url = rc_web_server.repo_clone_url(hg_repo.repo_name) - stdout, stderr = Command('/tmp').execute( - 'hg clone', clone_url, tmpdir.strpath) - - cache_keys = hg_repo.cache_keys - assert cache_keys != [] - for key in cache_keys: - assert key.cache_active is True - - # PUSH that should trigger invalidation cache - stdout, stderr = _add_files_and_push( - 'hg', tmpdir.strpath, clone_url=clone_url, files_no=1) - - # flush... - Session().commit() - hg_repo = Repository.get_by_repo_name(HG_REPO) - cache_keys = hg_repo.cache_keys - assert cache_keys != [] - new_ids = [x.cache_state_uid for x in cache_keys] - assert new_ids != old_ids - - def test_push_wrong_credentials_hg(self, rc_web_server, tmpdir): - clone_url = rc_web_server.repo_clone_url(HG_REPO) - stdout, stderr = Command('/tmp').execute( - 'hg clone', clone_url, tmpdir.strpath) - - push_url = rc_web_server.repo_clone_url( - HG_REPO, user='bad', passwd='name') - stdout, stderr = _add_files_and_push( - 'hg', tmpdir.strpath, clone_url=push_url) - - assert 'abort: authorization failed' in stderr - def test_push_wrong_credentials_git(self, rc_web_server, tmpdir): clone_url = rc_web_server.repo_clone_url(GIT_REPO) stdout, stderr = Command('/tmp').execute( @@ -307,17 +163,6 @@ class TestVCSOperations(object): assert 'fatal: Authentication failed' in stderr - def test_push_back_to_wrong_url_hg(self, rc_web_server, tmpdir): - clone_url = rc_web_server.repo_clone_url(HG_REPO) - stdout, stderr = Command('/tmp').execute( - 'hg clone', clone_url, tmpdir.strpath) - - stdout, stderr = _add_files_and_push( - 'hg', tmpdir.strpath, - clone_url=rc_web_server.repo_clone_url('not-existing')) - - assert 'HTTP Error 404: Not Found' in stderr - def test_push_back_to_wrong_url_git(self, rc_web_server, tmpdir): clone_url = 
rc_web_server.repo_clone_url(GIT_REPO) stdout, stderr = Command('/tmp').execute( @@ -329,28 +174,6 @@ class TestVCSOperations(object): assert 'not found' in stderr - def test_ip_restriction_hg(self, rc_web_server, tmpdir): - user_model = UserModel() - try: - user_model.add_extra_ip(TEST_USER_ADMIN_LOGIN, '10.10.10.10/32') - Session().commit() - time.sleep(2) - clone_url = rc_web_server.repo_clone_url(HG_REPO) - stdout, stderr = Command('/tmp').execute( - 'hg clone', clone_url, tmpdir.strpath) - assert 'abort: HTTP Error 403: Forbidden' in stderr - finally: - # release IP restrictions - for ip in UserIpMap.getAll(): - UserIpMap.delete(ip.ip_id) - Session().commit() - - time.sleep(2) - - stdout, stderr = Command('/tmp').execute( - 'hg clone', clone_url, tmpdir.strpath) - _check_proper_clone(stdout, stderr, 'hg') - def test_ip_restriction_git(self, rc_web_server, tmpdir): user_model = UserModel() try: diff --git a/rhodecode/tests/vcs_operations/test_vcs_operations_hg.py b/rhodecode/tests/vcs_operations/test_vcs_operations_hg.py new file mode 100644 --- /dev/null +++ b/rhodecode/tests/vcs_operations/test_vcs_operations_hg.py @@ -0,0 +1,226 @@ + +# Copyright (C) 2010-2023 RhodeCode GmbH +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License, version 3 +# (only), as published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . +# +# This program is dual-licensed. 
If you wish to learn more about the +# RhodeCode Enterprise Edition, including its added features, Support services, +# and proprietary license terms, please see https://rhodecode.com/licenses/ + +""" +Test suite for making push/pull operations, on specially modified INI files + +.. important:: + + You must have git >= 1.8.5 for tests to work fine. With 68b939b git started + to redirect things to stderr instead of stdout. +""" + + +import time + +import pytest + +from rhodecode.lib import rc_cache +from rhodecode.model.db import Repository, UserIpMap, CacheKey +from rhodecode.model.meta import Session +from rhodecode.model.repo import RepoModel +from rhodecode.model.user import UserModel +from rhodecode.tests import (GIT_REPO, HG_REPO, TEST_USER_ADMIN_LOGIN) + +from rhodecode.tests.vcs_operations import ( + Command, _check_proper_clone, _add_files_and_push, HG_REPO_WITH_GROUP) + + +@pytest.mark.usefixtures("disable_locking", "disable_anonymous_user") +class TestVCSOperations(object): + + def test_clone_hg_repo_by_admin(self, rc_web_server, tmpdir): + clone_url = rc_web_server.repo_clone_url(HG_REPO) + stdout, stderr = Command('/tmp').execute( + 'hg clone', clone_url, tmpdir.strpath) + _check_proper_clone(stdout, stderr, 'hg') + + def test_clone_hg_repo_by_admin_pull_protocol(self, rc_web_server, tmpdir): + clone_url = rc_web_server.repo_clone_url(HG_REPO) + stdout, stderr = Command('/tmp').execute( + 'hg clone --pull', clone_url, tmpdir.strpath) + _check_proper_clone(stdout, stderr, 'hg') + + def test_clone_hg_repo_by_admin_pull_stream_protocol(self, rc_web_server, tmpdir): + clone_url = rc_web_server.repo_clone_url(HG_REPO) + stdout, stderr = Command('/tmp').execute( + 'hg clone --pull --stream', clone_url, tmpdir.strpath) + assert 'files to transfer,' in stdout + assert 'transferred 1.' 
in stdout + assert '114 files updated,' in stdout + + def test_clone_hg_repo_by_id_by_admin(self, rc_web_server, tmpdir): + repo_id = Repository.get_by_repo_name(HG_REPO).repo_id + clone_url = rc_web_server.repo_clone_url('_%s' % repo_id) + stdout, stderr = Command('/tmp').execute( + 'hg clone', clone_url, tmpdir.strpath) + _check_proper_clone(stdout, stderr, 'hg') + + def test_clone_hg_repo_with_group_by_admin(self, rc_web_server, tmpdir): + clone_url = rc_web_server.repo_clone_url(HG_REPO_WITH_GROUP) + stdout, stderr = Command('/tmp').execute( + 'hg clone', clone_url, tmpdir.strpath) + _check_proper_clone(stdout, stderr, 'hg') + + def test_clone_wrong_credentials_hg(self, rc_web_server, tmpdir): + clone_url = rc_web_server.repo_clone_url(HG_REPO, passwd='bad!') + stdout, stderr = Command('/tmp').execute( + 'hg clone', clone_url, tmpdir.strpath) + assert 'abort: authorization failed' in stderr + + def test_clone_git_dir_as_hg(self, rc_web_server, tmpdir): + clone_url = rc_web_server.repo_clone_url(GIT_REPO) + stdout, stderr = Command('/tmp').execute( + 'hg clone', clone_url, tmpdir.strpath) + assert 'HTTP Error 404: Not Found' in stderr + + def test_clone_non_existing_path_hg(self, rc_web_server, tmpdir): + clone_url = rc_web_server.repo_clone_url('trololo') + stdout, stderr = Command('/tmp').execute( + 'hg clone', clone_url, tmpdir.strpath) + assert 'HTTP Error 404: Not Found' in stderr + + def test_clone_hg_with_slashes(self, rc_web_server, tmpdir): + clone_url = rc_web_server.repo_clone_url('//' + HG_REPO) + stdout, stderr = Command('/tmp').execute('hg clone', clone_url, tmpdir.strpath) + assert 'HTTP Error 404: Not Found' in stderr + + def test_clone_existing_path_hg_not_in_database( + self, rc_web_server, tmpdir, fs_repo_only): + + db_name = fs_repo_only('not-in-db-hg', repo_type='hg') + clone_url = rc_web_server.repo_clone_url(db_name) + stdout, stderr = Command('/tmp').execute( + 'hg clone', clone_url, tmpdir.strpath) + assert 'HTTP Error 404: Not Found' in 
stderr + + def test_clone_existing_path_hg_not_in_database_different_scm( + self, rc_web_server, tmpdir, fs_repo_only): + db_name = fs_repo_only('not-in-db-git', repo_type='git') + clone_url = rc_web_server.repo_clone_url(db_name) + stdout, stderr = Command('/tmp').execute( + 'hg clone', clone_url, tmpdir.strpath) + assert 'HTTP Error 404: Not Found' in stderr + + def test_clone_non_existing_store_path_hg(self, rc_web_server, tmpdir, user_util): + repo = user_util.create_repo() + clone_url = rc_web_server.repo_clone_url(repo.repo_name) + + # Damage repo by removing it's folder + RepoModel()._delete_filesystem_repo(repo) + + stdout, stderr = Command('/tmp').execute( + 'hg clone', clone_url, tmpdir.strpath) + assert 'HTTP Error 404: Not Found' in stderr + + def test_push_new_file_hg(self, rc_web_server, tmpdir): + clone_url = rc_web_server.repo_clone_url(HG_REPO) + stdout, stderr = Command('/tmp').execute( + 'hg clone', clone_url, tmpdir.strpath) + + stdout, stderr = _add_files_and_push( + 'hg', tmpdir.strpath, clone_url=clone_url) + + assert 'pushing to' in stdout + assert 'size summary' in stdout + + def test_push_invalidates_cache(self, rc_web_server, tmpdir): + hg_repo = Repository.get_by_repo_name(HG_REPO) + + # init cache objects + CacheKey.delete_all_cache() + + repo_namespace_key = CacheKey.REPO_INVALIDATION_NAMESPACE.format(repo_id=hg_repo.repo_id) + + inv_context_manager = rc_cache.InvalidationContext(key=repo_namespace_key) + + with inv_context_manager as invalidation_context: + # __enter__ will create and register cache objects + pass + + cache_keys = hg_repo.cache_keys + assert cache_keys != [] + old_ids = [x.cache_state_uid for x in cache_keys] + + # clone to init cache + clone_url = rc_web_server.repo_clone_url(hg_repo.repo_name) + stdout, stderr = Command('/tmp').execute( + 'hg clone', clone_url, tmpdir.strpath) + + cache_keys = hg_repo.cache_keys + assert cache_keys != [] + for key in cache_keys: + assert key.cache_active is True + + # PUSH that 
should trigger invalidation cache + stdout, stderr = _add_files_and_push( + 'hg', tmpdir.strpath, clone_url=clone_url, files_no=1) + + # flush... + Session().commit() + hg_repo = Repository.get_by_repo_name(HG_REPO) + cache_keys = hg_repo.cache_keys + assert cache_keys != [] + new_ids = [x.cache_state_uid for x in cache_keys] + assert new_ids != old_ids + + def test_push_wrong_credentials_hg(self, rc_web_server, tmpdir): + clone_url = rc_web_server.repo_clone_url(HG_REPO) + stdout, stderr = Command('/tmp').execute( + 'hg clone', clone_url, tmpdir.strpath) + + push_url = rc_web_server.repo_clone_url( + HG_REPO, user='bad', passwd='name') + stdout, stderr = _add_files_and_push( + 'hg', tmpdir.strpath, clone_url=push_url) + + assert 'abort: authorization failed' in stderr + + def test_push_back_to_wrong_url_hg(self, rc_web_server, tmpdir): + clone_url = rc_web_server.repo_clone_url(HG_REPO) + stdout, stderr = Command('/tmp').execute( + 'hg clone', clone_url, tmpdir.strpath) + + stdout, stderr = _add_files_and_push( + 'hg', tmpdir.strpath, + clone_url=rc_web_server.repo_clone_url('not-existing')) + + assert 'HTTP Error 404: Not Found' in stderr + + def test_ip_restriction_hg(self, rc_web_server, tmpdir): + user_model = UserModel() + try: + user_model.add_extra_ip(TEST_USER_ADMIN_LOGIN, '10.10.10.10/32') + Session().commit() + time.sleep(2) + clone_url = rc_web_server.repo_clone_url(HG_REPO) + stdout, stderr = Command('/tmp').execute( + 'hg clone', clone_url, tmpdir.strpath) + assert 'abort: HTTP Error 403: Forbidden' in stderr + finally: + # release IP restrictions + for ip in UserIpMap.getAll(): + UserIpMap.delete(ip.ip_id) + Session().commit() + + time.sleep(2) + + stdout, stderr = Command('/tmp').execute( + 'hg clone', clone_url, tmpdir.strpath) + _check_proper_clone(stdout, stderr, 'hg') diff --git a/rhodecode/tests/vcs_operations/test_vcs_operations_svn.py b/rhodecode/tests/vcs_operations/test_vcs_operations_svn.py new file mode 100644 --- /dev/null +++ 
b/rhodecode/tests/vcs_operations/test_vcs_operations_svn.py @@ -0,0 +1,197 @@ +# Copyright (C) 2010-2023 RhodeCode GmbH +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License, version 3 +# (only), as published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . +# +# This program is dual-licensed. If you wish to learn more about the +# RhodeCode Enterprise Edition, including its added features, Support services, +# and proprietary license terms, please see https://rhodecode.com/licenses/ + +""" +Test suite for making push/pull operations, on specially modified INI files + +.. important:: + + You must have git >= 1.8.5 for tests to work fine. With 68b939b git started + to redirect things to stderr instead of stdout. 
+""" + + +import time +import pytest + +from rhodecode.model.db import Repository, UserIpMap +from rhodecode.model.meta import Session +from rhodecode.model.repo import RepoModel +from rhodecode.model.user import UserModel +from rhodecode.tests import (SVN_REPO, TEST_USER_ADMIN_LOGIN) + + +from rhodecode.tests.vcs_operations import ( + Command, _check_proper_clone, _check_proper_svn_push, + _add_files_and_push, SVN_REPO_WITH_GROUP) + + +@pytest.mark.usefixtures("disable_locking", "disable_anonymous_user") +class TestVCSOperations(object): + + def test_clone_svn_repo_by_admin(self, rc_web_server, tmpdir): + clone_url = rc_web_server.repo_clone_url(SVN_REPO) + username, password = rc_web_server.repo_clone_credentials() + + cmd = Command('/tmp') + + auth = f'--non-interactive --username={username} --password={password}' + stdout, stderr = cmd.execute(f'svn checkout {auth}', clone_url, tmpdir.strpath) + _check_proper_clone(stdout, stderr, 'svn') + cmd.assert_returncode_success() + + def test_clone_svn_repo_by_id_by_admin(self, rc_web_server, tmpdir): + repo_id = Repository.get_by_repo_name(SVN_REPO).repo_id + username, password = rc_web_server.repo_clone_credentials() + + clone_url = rc_web_server.repo_clone_url('_%s' % repo_id) + cmd = Command('/tmp') + auth = f'--non-interactive --username={username} --password={password}' + stdout, stderr = cmd.execute(f'svn checkout {auth}', clone_url, tmpdir.strpath) + _check_proper_clone(stdout, stderr, 'svn') + cmd.assert_returncode_success() + + def test_clone_svn_repo_with_group_by_admin(self, rc_web_server, tmpdir): + clone_url = rc_web_server.repo_clone_url(SVN_REPO_WITH_GROUP) + username, password = rc_web_server.repo_clone_credentials() + + cmd = Command('/tmp') + auth = f'--non-interactive --username={username} --password={password}' + stdout, stderr = cmd.execute(f'svn checkout {auth}', clone_url, tmpdir.strpath) + _check_proper_clone(stdout, stderr, 'svn') + cmd.assert_returncode_success() + + def 
test_clone_wrong_credentials_svn(self, rc_web_server, tmpdir): + clone_url = rc_web_server.repo_clone_url(SVN_REPO) + username, password = rc_web_server.repo_clone_credentials() + password = 'bad-password' + + auth = f'--non-interactive --username={username} --password={password}' + stdout, stderr = Command('/tmp').execute( + f'svn checkout {auth}', clone_url, tmpdir.strpath) + assert 'fatal: Authentication failed' in stderr + + def test_clone_svn_with_slashes(self, rc_web_server, tmpdir): + clone_url = rc_web_server.repo_clone_url('//' + SVN_REPO) + stdout, stderr = Command('/tmp').execute('svn checkout', clone_url) + assert 'not found' in stderr + + def test_clone_existing_path_svn_not_in_database( + self, rc_web_server, tmpdir, fs_repo_only): + db_name = fs_repo_only('not-in-db-git', repo_type='git') + clone_url = rc_web_server.repo_clone_url(db_name) + username, password = '', '' + auth = f'--non-interactive --username={username} --password={password}' + + stdout, stderr = Command('/tmp').execute( + f'svn checkout {auth}', clone_url, tmpdir.strpath) + assert 'not found' in stderr + + def test_clone_existing_path_svn_not_in_database_different_scm( + self, rc_web_server, tmpdir, fs_repo_only): + db_name = fs_repo_only('not-in-db-hg', repo_type='hg') + clone_url = rc_web_server.repo_clone_url(db_name) + + username, password = '', '' + auth = f'--non-interactive --username={username} --password={password}' + stdout, stderr = Command('/tmp').execute( + f'svn checkout {auth}', clone_url, tmpdir.strpath) + assert 'not found' in stderr + + def test_clone_non_existing_store_path_svn(self, rc_web_server, tmpdir, user_util): + repo = user_util.create_repo(repo_type='git') + clone_url = rc_web_server.repo_clone_url(repo.repo_name) + + # Damage repo by removing it's folder + RepoModel()._delete_filesystem_repo(repo) + + username, password = '', '' + auth = f'--non-interactive --username={username} --password={password}' + stdout, stderr = Command('/tmp').execute( + f'svn 
checkout {auth}', clone_url, tmpdir.strpath) + assert 'not found' in stderr + + def test_push_new_file_svn(self, rc_web_server, tmpdir): + clone_url = rc_web_server.repo_clone_url(SVN_REPO) + username, password = '', '' + auth = f'--non-interactive --username={username} --password={password}' + + stdout, stderr = Command('/tmp').execute( + f'svn checkout {auth}', clone_url, tmpdir.strpath) + + # commit some stuff into this repo + stdout, stderr = _add_files_and_push( + 'svn', tmpdir.strpath, clone_url=clone_url) + + _check_proper_svn_push(stdout, stderr) + + def test_push_wrong_credentials_svn(self, rc_web_server, tmpdir): + clone_url = rc_web_server.repo_clone_url(SVN_REPO) + + username, password = '', '' + auth = f'--non-interactive --username={username} --password={password}' + stdout, stderr = Command('/tmp').execute( + f'svn checkout {auth}', clone_url, tmpdir.strpath) + + push_url = rc_web_server.repo_clone_url( + SVN_REPO, user='bad', passwd='name') + stdout, stderr = _add_files_and_push( + 'svn', tmpdir.strpath, clone_url=push_url) + + assert 'fatal: Authentication failed' in stderr + + def test_push_back_to_wrong_url_svn(self, rc_web_server, tmpdir): + clone_url = rc_web_server.repo_clone_url(SVN_REPO) + username, password = '', '' + auth = f'--non-interactive --username={username} --password={password}' + Command('/tmp').execute( + f'svn checkout {auth}', clone_url, tmpdir.strpath) + + stdout, stderr = _add_files_and_push( + 'svn', tmpdir.strpath, + clone_url=rc_web_server.repo_clone_url('not-existing')) + + assert 'not found' in stderr + + def test_ip_restriction_svn(self, rc_web_server, tmpdir): + user_model = UserModel() + username, password = '', '' + auth = f'--non-interactive --username={username} --password={password}' + + try: + user_model.add_extra_ip(TEST_USER_ADMIN_LOGIN, '10.10.10.10/32') + Session().commit() + time.sleep(2) + clone_url = rc_web_server.repo_clone_url(SVN_REPO) + + stdout, stderr = Command('/tmp').execute( + f'svn checkout 
{auth}', clone_url, tmpdir.strpath) + msg = "The requested URL returned error: 403" + assert msg in stderr + finally: + # release IP restrictions + for ip in UserIpMap.getAll(): + UserIpMap.delete(ip.ip_id) + Session().commit() + + time.sleep(2) + + cmd = Command('/tmp') + stdout, stderr = cmd.execute(f'svn checkout {auth}', clone_url, tmpdir.strpath) + cmd.assert_returncode_success() + _check_proper_clone(stdout, stderr, 'svn') diff --git a/rhodecode/tests/vcs_operations/test_vcs_operations_tag_push.py b/rhodecode/tests/vcs_operations/test_vcs_operations_tag_push.py --- a/rhodecode/tests/vcs_operations/test_vcs_operations_tag_push.py +++ b/rhodecode/tests/vcs_operations/test_vcs_operations_tag_push.py @@ -42,6 +42,7 @@ connection_available = pytest.mark.skipi "enable_webhook_push_integration") class TestVCSOperationsOnCustomIniConfig(object): + @connection_available def test_push_tag_with_commit_hg(self, rc_web_server, tmpdir): clone_url = rc_web_server.repo_clone_url(HG_REPO) stdout, stderr = Command('/tmp').execute( @@ -56,6 +57,7 @@ class TestVCSOperationsOnCustomIniConfig assert 'ERROR' not in rc_log assert "{'name': 'v1.0.0'," in rc_log + @connection_available def test_push_tag_with_commit_git( self, rc_web_server, tmpdir): clone_url = rc_web_server.repo_clone_url(GIT_REPO) @@ -71,6 +73,7 @@ class TestVCSOperationsOnCustomIniConfig assert 'ERROR' not in rc_log assert "{'name': 'v1.0.0'," in rc_log + @connection_available def test_push_tag_with_no_commit_git( self, rc_web_server, tmpdir): clone_url = rc_web_server.repo_clone_url(GIT_REPO) diff --git a/rhodecode/tests/vcsserver_http.ini b/rhodecode/tests/vcsserver_http.ini --- a/rhodecode/tests/vcsserver_http.ini +++ b/rhodecode/tests/vcsserver_http.ini @@ -7,7 +7,7 @@ [server:main] ; COMMON HOST/IP CONFIG host = 127.0.0.1 -port = 9900 +port = 10010 ; ########################### @@ -22,6 +22,17 @@ use = egg:gunicorn#main [app:main] ; The %(here)s variable will be replaced with the absolute path of parent 
directory ; of this file +; Each option in the app:main can be overridden by an environment variable +; +;To override an option: +; +;RC_<variable_name> +;Everything should be uppercase, . and - should be replaced by _. +;For example, if you have these configuration settings: +;rc_cache.repo_object.backend = foo +;can be overridden by +;export RC_CACHE_REPO_OBJECT_BACKEND=foo + use = egg:rhodecode-vcsserver ; Pyramid default locales, we need this to be set @@ -30,11 +41,15 @@ pyramid.default_locale_name = en ; default locale used by VCS systems locale = en_US.UTF-8 -; path to binaries for vcsserver, it should be set by the installer -; at installation time, e.g /home/user/vcsserver-1/profile/bin -; it can also be a path to nix-build output in case of development +; path to binaries (hg,git,svn) for vcsserver, it should be set by the installer +; at installation time, e.g /home/user/.rccontrol/vcsserver-1/profile/bin +; or /usr/local/bin/rhodecode_bin/vcs_bin core.binary_dir = +; Redis connection settings for svn integrations logic +; This connection string needs to be the same on ce and vcsserver +vcs.svn.redis_conn = redis://redis:6379/0 + ; Custom exception store path, defaults to TMPDIR ; This is used to store exception from RhodeCode in shared directory #exception_tracker.store_path = @@ -52,14 +67,14 @@ cache_dir = %(here)s/data ; *************************************** ; `repo_object` cache settings for vcs methods for repositories -rc_cache.repo_object.backend = dogpile.cache.rc.memory_lru +#rc_cache.repo_object.backend = dogpile.cache.rc.file_namespace ; cache auto-expires after N seconds ; Examples: 86400 (1Day), 604800 (7Days), 1209600 (14Days), 2592000 (30days), 7776000 (90Days) -rc_cache.repo_object.expiration_time = 2592000 +#rc_cache.repo_object.expiration_time = 2592000 ; file cache store path. 
Defaults to `cache_dir =` value or tempdir if both values are not set -#rc_cache.repo_object.arguments.filename = /tmp/vcsserver_cache.db +#rc_cache.repo_object.arguments.filename = /tmp/vcsserver_cache_repo_object.db ; *********************************************************** ; `repo_object` cache with redis backend @@ -83,19 +98,32 @@ rc_cache.repo_object.expiration_time = 2 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends #rc_cache.repo_object.arguments.distributed_lock = true -# legacy cache regions, please don't change -beaker.cache.regions = repo_object -beaker.cache.repo_object.type = memorylru -beaker.cache.repo_object.max_items = 100 -# cache auto-expires after N seconds -beaker.cache.repo_object.expire = 300 -beaker.cache.repo_object.enabled = true +; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen +#rc_cache.repo_object.arguments.lock_auto_renewal = true + +; Statsd client config, this is used to send metrics to statsd +; We recommend setting statsd_exported and scrape them using Prometheus +#statsd.enabled = false +#statsd.statsd_host = 0.0.0.0 +#statsd.statsd_port = 8125 +#statsd.statsd_prefix = +#statsd.statsd_ipv6 = false +; configure logging automatically at server startup set to false +; to use the below custom logging config. 
+; RC_LOGGING_FORMATTER +; RC_LOGGING_LEVEL +; env variables can control the settings for logging in case of autoconfigure +#logging.autoconfigure = true + +; specify your own custom logging config file to configure logging +#logging.logging_conf_file = /path/to/custom_logging.ini ; ##################### ; LOGGING CONFIGURATION ; ##################### + [loggers] keys = root, vcsserver @@ -103,7 +131,7 @@ keys = root, vcsserver keys = console [formatters] -keys = generic +keys = generic, json ; ####### ; LOGGERS @@ -113,12 +141,11 @@ level = NOTSET handlers = console [logger_vcsserver] -level = DEBUG +level = INFO handlers = qualname = vcsserver propagate = 1 - ; ######## ; HANDLERS ; ######## @@ -127,6 +154,8 @@ propagate = 1 class = StreamHandler args = (sys.stderr, ) level = DEBUG +; To enable JSON formatted logs replace 'generic' with 'json' +; This allows sending properly formatted logs to grafana loki or elasticsearch formatter = generic ; ########## @@ -136,3 +165,7 @@ formatter = generic [formatter_generic] format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s datefmt = %Y-%m-%d %H:%M:%S + +[formatter_json] +format = %(timestamp)s %(levelname)s %(name)s %(message)s %(req_id)s +class = vcsserver.lib._vendor.jsonlogger.JsonFormatter