diff --git a/.bumpversion.cfg b/.bumpversion.cfg
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,6 +1,5 @@
 [bumpversion]
-current_version = 4.20.1
+current_version = 4.21.0
 message = release: Bump version {current_version} to {new_version}
 
 [bumpversion:file:rhodecode/VERSION]
-
diff --git a/.release.cfg b/.release.cfg
--- a/.release.cfg
+++ b/.release.cfg
@@ -5,25 +5,20 @@ done = false
 done = true
 
 [task:rc_tools_pinned]
-done = true
 
 [task:fixes_on_stable]
-done = true
 
 [task:pip2nix_generated]
-done = true
 
 [task:changelog_updated]
-done = true
 
 [task:generate_api_docs]
-done = true
+
+[task:updated_translation]
 
 [release]
-state = prepared
-version = 4.20.1
-
-[task:updated_translation]
+state = in_progress
+version = 4.21.0
 
 [task:generate_js_routes]
diff --git a/docs/admin/system_admin/nginx/nginx-config-example.rst b/docs/admin/system_admin/nginx/nginx-config-example.rst
--- a/docs/admin/system_admin/nginx/nginx-config-example.rst
+++ b/docs/admin/system_admin/nginx/nginx-config-example.rst
@@ -147,12 +147,13 @@ Use the following example to configure N
 
     ## Special Cache for file store, make sure you enable this intentionally as
     ## it could bypass upload files permissions
-    # location /_file_store/download {
+    # location /_file_store/download/gravatars {
 
     #  # proxy_cache cache_zone;
     #  # ignore Set-Cookie
     #  proxy_ignore_headers Set-Cookie;
-    #  proxy_ignore_headers Cookie;
+    #  # ignore cache-control
+    #  proxy_ignore_headers Cache-Control;
 
     #  # proxy_cache_key $host$uri$is_args$args;
     #  proxy_cache_methods GET;
diff --git a/pkgs/patches/beaker/patch-beaker-improved-redis-2.diff b/pkgs/patches/beaker/patch-beaker-improved-redis-2.diff
new file mode 100644
--- /dev/null
+++ b/pkgs/patches/beaker/patch-beaker-improved-redis-2.diff
@@ -0,0 +1,18 @@
+diff -rup Beaker-1.9.1-orig/beaker/session.py Beaker-1.9.1/beaker/session.py
+--- Beaker-1.9.1-orig/beaker/session.py 2020-04-10 10:23:04.000000000 +0200
++++ Beaker-1.9.1/beaker/session.py 2020-04-10 10:23:34.000000000 +0200
+@@ -156,6 +156,14 @@ def __init__(self, request, id=None, invalidate_corrupt=False,
+         if timeout and not save_accessed_time:
+             raise BeakerException("timeout requires save_accessed_time")
+         self.timeout = timeout
++        # We want to pass the timeout param to the redis backend to support expiration of keys
++        # In the future, I believe, we can use this param for memcached and mongo as well
++        if self.timeout is not None and self.type == 'ext:redis':
++            # The backend expiration should always be a bit longer (I decided to use 2 minutes) than the
++            # session expiration itself to prevent the case where the backend data expires while
++            # the session is being read (PR#153)
++            self.namespace_args['timeout'] = self.timeout + 60 * 2
++
+         self.save_atime = save_accessed_time
+         self.use_cookies = use_cookies
+         self.cookie_expires = cookie_expires
\ No newline at end of file
diff --git a/pkgs/patches/beaker/patch-beaker-improved-redis.diff b/pkgs/patches/beaker/patch-beaker-improved-redis.diff
new file mode 100644
--- /dev/null
+++ b/pkgs/patches/beaker/patch-beaker-improved-redis.diff
@@ -0,0 +1,26 @@
+diff -rup Beaker-1.9.1-orig/beaker/ext/redisnm.py Beaker-1.9.1/beaker/ext/redisnm.py
+--- Beaker-1.9.1-orig/beaker/ext/redisnm.py 2018-04-10 10:23:04.000000000 +0200
++++ Beaker-1.9.1/beaker/ext/redisnm.py 2018-04-10 10:23:34.000000000 +0200
+@@ -30,9 +30,10 @@ class RedisNamespaceManager(NamespaceManager):
+ 
+     clients = SyncDict()
+ 
+-    def __init__(self, namespace, url, **kw):
++    def __init__(self, namespace, url, timeout=None, **kw):
+
super(RedisNamespaceManager, self).__init__(namespace) + self.lock_dir = None # Redis uses redis itself for locking. ++ self.timeout = timeout + + if redis is None: + raise RuntimeError('redis is not available') +@@ -68,6 +69,8 @@ def has_key(self, key): + + def set_value(self, key, value, expiretime=None): + value = pickle.dumps(value) ++ if expiretime is None and self.timeout is not None: ++ expiretime = self.timeout + if expiretime is not None: + self.client.setex(self._format_key(key), int(expiretime), value) + else: + + diff --git a/pkgs/python-packages-overrides.nix b/pkgs/python-packages-overrides.nix --- a/pkgs/python-packages-overrides.nix +++ b/pkgs/python-packages-overrides.nix @@ -32,6 +32,8 @@ self: super: { patches = [ ./patches/beaker/patch-beaker-lock-func-debug.diff ./patches/beaker/patch-beaker-metadata-reuse.diff + ./patches/beaker/patch-beaker-improved-redis.diff + ./patches/beaker/patch-beaker-improved-redis-2.diff ]; }); diff --git a/pkgs/python-packages.nix b/pkgs/python-packages.nix --- a/pkgs/python-packages.nix +++ b/pkgs/python-packages.nix @@ -35,6 +35,20 @@ self: super: { license = [ pkgs.lib.licenses.bsdOriginal ]; }; }; + "apispec" = super.buildPythonPackage { + name = "apispec-1.0.0"; + doCheck = false; + propagatedBuildInputs = [ + self."PyYAML" + ]; + src = fetchurl { + url = "https://files.pythonhosted.org/packages/67/15/346c04988dd67d36007e28145504c520491930c878b1f484a97b27a8f497/apispec-1.0.0.tar.gz"; + sha256 = "1712w1anvqrvadjjpvai84vbaygaxabd3zz5lxihdzwzs4gvi9sp"; + }; + meta = { + license = [ pkgs.lib.licenses.mit ]; + }; + }; "appenlight-client" = super.buildPythonPackage { name = "appenlight-client-0.6.26"; doCheck = false; @@ -236,20 +250,23 @@ self: super: { }; }; "channelstream" = super.buildPythonPackage { - name = "channelstream-0.5.2"; + name = "channelstream-0.6.14"; doCheck = false; propagatedBuildInputs = [ self."gevent" self."ws4py" + self."marshmallow" + self."python-dateutil" self."pyramid" self."pyramid-jinja2" + self."pyramid-apispec" self."itsdangerous" self."requests" self."six" ]; src = fetchurl { - url = "https://files.pythonhosted.org/packages/2b/31/29a8e085cf5bf97fa88e7b947adabfc581a18a3463adf77fb6dada34a65f/channelstream-0.5.2.tar.gz"; - sha256 = "1qbm4xdl5hfkja683x546bncg3rqq8qv79w1m1a1wd48cqqzb6rm"; + url = "https://files.pythonhosted.org/packages/d4/2d/86d6757ccd06ce673ee224123471da3d45251d061da7c580bfc259bad853/channelstream-0.6.14.tar.gz"; + sha256 = "0qgy5j3rj6c8cslzidh32glhkrhbbdxjc008y69v8a0y3zyaz2d3"; }; meta = { license = [ pkgs.lib.licenses.bsdOriginal ]; @@ -862,11 +879,11 @@ self: super: { }; }; "itsdangerous" = super.buildPythonPackage { - name = "itsdangerous-0.24"; + name = "itsdangerous-1.1.0"; doCheck = false; src = fetchurl { - url = "https://files.pythonhosted.org/packages/dc/b4/a60bcdba945c00f6d608d8975131ab3f25b22f2bcfe1dab221165194b2d4/itsdangerous-0.24.tar.gz"; - sha256 = "06856q6x675ly542ig0plbqcyab6ksfzijlyf1hzhgg3sgwgrcyb"; + url = "https://files.pythonhosted.org/packages/68/1a/f27de07a8a304ad5fa817bbe383d1238ac4396da447fa11ed937039fa04b/itsdangerous-1.1.0.tar.gz"; + sha256 = "068zpbksq5q2z4dckh2k1zbcq43ay74ylqn77rni797j0wyh66rj"; }; meta = { license = [ pkgs.lib.licenses.bsdOriginal ]; @@ -993,6 +1010,17 @@ self: super: { license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.bsd3 ]; }; }; + "marshmallow" = super.buildPythonPackage { + name = "marshmallow-2.18.0"; + doCheck = false; + src = fetchurl { + url = 
"https://files.pythonhosted.org/packages/ad/0b/5799965d1c6d5f608d684e2c0dce8a828e0309a3bfe8327d9418a89f591c/marshmallow-2.18.0.tar.gz"; + sha256 = "1g0aafpjn7yaxq06yndy8c7rs9n42adxkqq1ayhlr869pr06d3lm"; + }; + meta = { + license = [ pkgs.lib.licenses.mit ]; + }; + }; "mistune" = super.buildPythonPackage { name = "mistune-0.8.4"; doCheck = false; @@ -1522,6 +1550,20 @@ self: super: { license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ]; }; }; + "pyramid-apispec" = super.buildPythonPackage { + name = "pyramid-apispec-0.3.2"; + doCheck = false; + propagatedBuildInputs = [ + self."apispec" + ]; + src = fetchurl { + url = "https://files.pythonhosted.org/packages/2a/30/1dea5d81ea635449572ba60ec3148310d75ae4530c3c695f54b0991bb8c7/pyramid_apispec-0.3.2.tar.gz"; + sha256 = "0ffrcqp9dkykivhfcq0v9lgy6w0qhwl6x78925vfjmayly9r8da0"; + }; + meta = { + license = [ pkgs.lib.licenses.bsdOriginal ]; + }; + }; "pyramid-mailer" = super.buildPythonPackage { name = "pyramid-mailer-0.15.1"; doCheck = false; @@ -1763,6 +1805,17 @@ self: super: { license = [ pkgs.lib.licenses.bsdOriginal { fullName = "LGPL+BSD"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ]; }; }; + "PyYAML" = super.buildPythonPackage { + name = "PyYAML-5.3.1"; + doCheck = false; + src = fetchurl { + url = "https://files.pythonhosted.org/packages/64/c2/b80047c7ac2478f9501676c988a5411ed5572f35d1beff9cae07d321512c/PyYAML-5.3.1.tar.gz"; + sha256 = "0pb4zvkfxfijkpgd1b86xjsqql97ssf1knbd1v53wkg1qm9cgsmq"; + }; + meta = { + license = [ pkgs.lib.licenses.mit ]; + }; + }; "redis" = super.buildPythonPackage { name = "redis-3.4.1"; doCheck = false; @@ -1819,7 +1872,7 @@ self: super: { }; }; "rhodecode-enterprise-ce" = super.buildPythonPackage { - name = "rhodecode-enterprise-ce-4.20.1"; + name = "rhodecode-enterprise-ce-4.20.0"; buildInputs = [ self."pytest" self."py" diff --git a/requirements.txt b/requirements.txt --- a/requirements.txt +++ b/requirements.txt @@ -5,7 +5,7 @@ babel==1.3 beaker==1.9.1 bleach==3.1.3 celery==4.3.0 -channelstream==0.5.2 +channelstream==0.6.14 click==7.0 colander==1.7.0 # our custom configobj @@ -22,7 +22,7 @@ future==0.14.3 futures==3.0.2 infrae.cache==1.0.1 iso8601==0.1.12 -itsdangerous==0.24 +itsdangerous==1.1.0 kombu==4.6.6 lxml==4.2.5 mako==1.1.0 diff --git a/requirements_pinned.txt b/requirements_pinned.txt --- a/requirements_pinned.txt +++ b/requirements_pinned.txt @@ -18,10 +18,11 @@ jsonschema==2.6.0 pluggy==0.13.1 pyasn1-modules==0.2.6 pyramid-jinja2==2.7 +pyramid-apispec==0.3.2 scandir==1.10.0 setproctitle==1.1.10 tempita==0.5.2 testpath==0.4.4 transaction==2.4.0 vine==1.3.0 -wcwidth==0.1.9 +wcwidth==0.1.9 \ No newline at end of file diff --git a/rhodecode/VERSION b/rhodecode/VERSION --- a/rhodecode/VERSION +++ b/rhodecode/VERSION @@ -1,1 +1,1 @@ -4.20.1 \ No newline at end of file +4.21.0 \ No newline at end of file diff --git a/rhodecode/__init__.py b/rhodecode/__init__.py --- a/rhodecode/__init__.py +++ b/rhodecode/__init__.py @@ -48,7 +48,7 @@ PYRAMID_SETTINGS = {} EXTENSIONS = {} __version__ = ('.'.join((str(each) for each in VERSION[:3]))) -__dbversion__ = 108 # defines current db version for migrations +__dbversion__ = 109 # defines current db version for migrations __platform__ = platform.system() __license__ = 'AGPLv3, and Commercial License' __author__ = 'RhodeCode GmbH' diff --git a/rhodecode/api/utils.py b/rhodecode/api/utils.py --- a/rhodecode/api/utils.py +++ b/rhodecode/api/utils.py @@ -170,8 +170,7 @@ def 
validate_repo_permissions(apiuser, r """ if not HasRepoPermissionAnyApi(*perms)( user=apiuser, repo_name=repo.repo_name): - raise JSONRPCError( - 'repository `%s` does not exist' % repoid) + raise JSONRPCError('repository `%s` does not exist' % repoid) return True diff --git a/rhodecode/api/views/repo_api.py b/rhodecode/api/views/repo_api.py --- a/rhodecode/api/views/repo_api.py +++ b/rhodecode/api/views/repo_api.py @@ -307,8 +307,7 @@ def get_repo_changeset(request, apiuser, """ repo = get_repo_or_error(repoid) if not has_superadmin_permission(apiuser): - _perms = ( - 'repository.admin', 'repository.write', 'repository.read',) + _perms = ('repository.admin', 'repository.write', 'repository.read',) validate_repo_permissions(apiuser, repoid, repo, _perms) changes_details = Optional.extract(details) @@ -366,8 +365,7 @@ def get_repo_changesets(request, apiuser """ repo = get_repo_or_error(repoid) if not has_superadmin_permission(apiuser): - _perms = ( - 'repository.admin', 'repository.write', 'repository.read',) + _perms = ('repository.admin', 'repository.write', 'repository.read',) validate_repo_permissions(apiuser, repoid, repo, _perms) changes_details = Optional.extract(details) @@ -1021,7 +1019,8 @@ def update_repo( include_secrets = False if not has_superadmin_permission(apiuser): - validate_repo_permissions(apiuser, repoid, repo, ('repository.admin',)) + _perms = ('repository.admin',) + validate_repo_permissions(apiuser, repoid, repo, _perms) else: include_secrets = True @@ -1208,8 +1207,7 @@ def fork_repo(request, apiuser, repoid, if not has_superadmin_permission(apiuser): # check if we have at least read permission for # this repo that we fork ! - _perms = ( - 'repository.admin', 'repository.write', 'repository.read') + _perms = ('repository.admin', 'repository.write', 'repository.read') validate_repo_permissions(apiuser, repoid, repo, _perms) # check if the regular user has at least fork permissions as well @@ -2370,12 +2368,13 @@ def get_repo_settings(request, apiuser, } """ - # Restrict access to this api method to admins only. + # Restrict access to this api method to super-admins, and repo admins only. + repo = get_repo_or_error(repoid) if not has_superadmin_permission(apiuser): - raise JSONRPCForbidden() + _perms = ('repository.admin',) + validate_repo_permissions(apiuser, repoid, repo, _perms) try: - repo = get_repo_or_error(repoid) settings_model = VcsSettingsModel(repo=repo) settings = settings_model.get_global_settings() settings.update(settings_model.get_repo_settings()) @@ -2414,9 +2413,11 @@ def set_repo_settings(request, apiuser, "result": true } """ - # Restrict access to this api method to admins only. + # Restrict access to this api method to super-admins, and repo admins only. 
+ repo = get_repo_or_error(repoid) if not has_superadmin_permission(apiuser): - raise JSONRPCForbidden() + _perms = ('repository.admin',) + validate_repo_permissions(apiuser, repoid, repo, _perms) if type(settings) is not dict: raise JSONRPCError('Settings have to be a JSON Object.') diff --git a/rhodecode/apps/channelstream/views.py b/rhodecode/apps/channelstream/views.py --- a/rhodecode/apps/channelstream/views.py +++ b/rhodecode/apps/channelstream/views.py @@ -34,7 +34,7 @@ from rhodecode.lib.channelstream import get_user_data, parse_channels_info, update_history_from_logs, - STATE_PUBLIC_KEYS) + USER_STATE_PUBLIC_KEYS) from rhodecode.lib.auth import NotAnonymous @@ -86,14 +86,16 @@ class ChannelstreamView(BaseAppView): 'display_name': None, 'display_link': None, } - user_data['permissions'] = self._rhodecode_user.permissions_safe + + #user_data['permissions'] = self._rhodecode_user.permissions_safe + payload = { 'username': user.username, 'user_state': user_data, 'conn_id': str(uuid.uuid4()), 'channels': channels, 'channel_configs': {}, - 'state_public_keys': STATE_PUBLIC_KEYS, + 'state_public_keys': USER_STATE_PUBLIC_KEYS, 'info': { 'exclude_channels': ['broadcast'] } @@ -118,10 +120,13 @@ class ChannelstreamView(BaseAppView): 'Channelstream service at {} is down'.format(channelstream_url)) return HTTPBadGateway() + channel_info = connect_result.get('channels_info') + if not channel_info: + raise HTTPBadRequest() + connect_result['channels'] = channels connect_result['channels_info'] = parse_channels_info( - connect_result['channels_info'], - include_channel_info=filtered_channels) + channel_info, include_channel_info=filtered_channels) update_history_from_logs(self.channelstream_config, filtered_channels, connect_result) return connect_result @@ -167,10 +172,15 @@ class ChannelstreamView(BaseAppView): log.exception( 'Channelstream service at {} is down'.format(channelstream_url)) return HTTPBadGateway() + + channel_info = connect_result.get('channels_info') + if not channel_info: + raise HTTPBadRequest() + # include_channel_info will limit history only to new channel # to not overwrite histories on other channels in client connect_result['channels_info'] = parse_channels_info( - connect_result['channels_info'], + channel_info, include_channel_info=filtered_channels) update_history_from_logs( self.channelstream_config, filtered_channels, connect_result) diff --git a/rhodecode/apps/file_store/__init__.py b/rhodecode/apps/file_store/__init__.py --- a/rhodecode/apps/file_store/__init__.py +++ b/rhodecode/apps/file_store/__init__.py @@ -43,10 +43,10 @@ def includeme(config): pattern='/_file_store/upload') config.add_route( name='download_file', - pattern='/_file_store/download/{fid}') + pattern='/_file_store/download/{fid:.*}') config.add_route( name='download_file_by_token', - pattern='/_file_store/token-download/{_auth_token}/{fid}') + pattern='/_file_store/token-download/{_auth_token}/{fid:.*}') # Scan module for configuration decorators. 
config.scan('.views', ignore='.tests') diff --git a/rhodecode/apps/file_store/backends/local_store.py b/rhodecode/apps/file_store/backends/local_store.py --- a/rhodecode/apps/file_store/backends/local_store.py +++ b/rhodecode/apps/file_store/backends/local_store.py @@ -20,6 +20,7 @@ import os import time +import errno import shutil import hashlib @@ -32,9 +33,24 @@ from rhodecode.apps.file_store.exception METADATA_VER = 'v1' +def safe_make_dirs(dir_path): + if not os.path.exists(dir_path): + try: + os.makedirs(dir_path) + except OSError as e: + if e.errno != errno.EEXIST: + raise + return + + class LocalFileStorage(object): @classmethod + def apply_counter(cls, counter, filename): + name_counted = '%d-%s' % (counter, filename) + return name_counted + + @classmethod def resolve_name(cls, name, directory): """ Resolves a unique name and the correct path. If a filename @@ -47,17 +63,16 @@ class LocalFileStorage(object): counter = 0 while True: - name = '%d-%s' % (counter, name) + name_counted = cls.apply_counter(counter, name) # sub_store prefix to optimize disk usage, e.g some_path/ab/final_file - sub_store = cls._sub_store_from_filename(name) + sub_store = cls._sub_store_from_filename(name_counted) sub_store_path = os.path.join(directory, sub_store) - if not os.path.exists(sub_store_path): - os.makedirs(sub_store_path) + safe_make_dirs(sub_store_path) - path = os.path.join(sub_store_path, name) + path = os.path.join(sub_store_path, name_counted) if not os.path.exists(path): - return name, path + return name_counted, path counter += 1 @classmethod @@ -102,8 +117,13 @@ class LocalFileStorage(object): :param filename: base name of file """ - sub_store = self._sub_store_from_filename(filename) - return os.path.join(self.base_path, sub_store, filename) + prefix_dir = '' + if '/' in filename: + prefix_dir, filename = filename.split('/') + sub_store = self._sub_store_from_filename(filename) + else: + sub_store = self._sub_store_from_filename(filename) + return os.path.join(self.base_path, prefix_dir, sub_store, filename) def delete(self, filename): """ @@ -123,7 +143,7 @@ class LocalFileStorage(object): Checks if file exists. Resolves filename's absolute path based on base_path. - :param filename: base name of file + :param filename: file_uid name of file, e.g 0-f62b2b2d-9708-4079-a071-ec3f958448d4.svg """ return os.path.exists(self.store_path(filename)) @@ -158,7 +178,7 @@ class LocalFileStorage(object): return ext in [normalize_ext(x) for x in extensions] def save_file(self, file_obj, filename, directory=None, extensions=None, - extra_metadata=None, max_filesize=None, **kwargs): + extra_metadata=None, max_filesize=None, randomized_name=True, **kwargs): """ Saves a file object to the uploads location. Returns the resolved filename, i.e. 
the directory + @@ -169,6 +189,7 @@ class LocalFileStorage(object): :param directory: relative path of sub-directory :param extensions: iterable of allowed extensions, if not default :param max_filesize: maximum size of file that should be allowed + :param randomized_name: generate random generated UID or fixed based on the filename :param extra_metadata: extra JSON metadata to store next to the file with .meta suffix """ @@ -183,13 +204,12 @@ class LocalFileStorage(object): else: dest_directory = self.base_path - if not os.path.exists(dest_directory): - os.makedirs(dest_directory) + safe_make_dirs(dest_directory) - filename = utils.uid_filename(filename) + uid_filename = utils.uid_filename(filename, randomized=randomized_name) # resolve also produces special sub-dir for file optimized store - filename, path = self.resolve_name(filename, dest_directory) + filename, path = self.resolve_name(uid_filename, dest_directory) stored_file_dir = os.path.dirname(path) file_obj.seek(0) @@ -210,12 +230,13 @@ class LocalFileStorage(object): file_hash = self.calculate_path_hash(path) - metadata.update( - {"filename": filename, + metadata.update({ + "filename": filename, "size": size, "time": time.time(), "sha256": file_hash, - "meta_ver": METADATA_VER}) + "meta_ver": METADATA_VER + }) filename_meta = filename + '.meta' with open(os.path.join(stored_file_dir, filename_meta), "wb") as dest_meta: diff --git a/rhodecode/apps/file_store/utils.py b/rhodecode/apps/file_store/utils.py --- a/rhodecode/apps/file_store/utils.py +++ b/rhodecode/apps/file_store/utils.py @@ -20,7 +20,7 @@ import uuid - +import StringIO import pathlib2 @@ -52,3 +52,7 @@ def uid_filename(filename, randomized=Tr hash_key = '{}.{}'.format(filename, 'store') uid = uuid.uuid5(uuid.NAMESPACE_URL, hash_key) return str(uid) + ext.lower() + + +def bytes_to_file_obj(bytes_data): + return StringIO.StringIO(bytes_data) diff --git a/rhodecode/apps/file_store/views.py b/rhodecode/apps/file_store/views.py --- a/rhodecode/apps/file_store/views.py +++ b/rhodecode/apps/file_store/views.py @@ -64,7 +64,7 @@ class FileStoreView(BaseAppView): file_uid, store_path) raise HTTPNotFound() - db_obj = FileStore().query().filter(FileStore.file_uid == file_uid).scalar() + db_obj = FileStore.get_by_store_uid(file_uid, safe=True) if not db_obj: raise HTTPNotFound() diff --git a/rhodecode/apps/repository/__init__.py b/rhodecode/apps/repository/__init__.py --- a/rhodecode/apps/repository/__init__.py +++ b/rhodecode/apps/repository/__init__.py @@ -345,6 +345,16 @@ def includeme(config): pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/comment/{comment_id}/delete', repo_route=True, repo_accepted_types=['hg', 'git']) + config.add_route( + name='pullrequest_comments', + pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/comments', + repo_route=True) + + config.add_route( + name='pullrequest_todos', + pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/todos', + repo_route=True) + # Artifacts, (EE feature) config.add_route( name='repo_artifacts_list', diff --git a/rhodecode/apps/repository/tests/test_repo_commit_comments.py b/rhodecode/apps/repository/tests/test_repo_commit_comments.py --- a/rhodecode/apps/repository/tests/test_repo_commit_comments.py +++ b/rhodecode/apps/repository/tests/test_repo_commit_comments.py @@ -485,23 +485,10 @@ class TestRepoCommitCommentsView(TestCon def assert_comment_links(response, comments, inline_comments): - if comments == 1: - comments_text = "%d General" % comments - else: - comments_text = 
"%d General" % comments - - if inline_comments == 1: - inline_comments_text = "%d Inline" % inline_comments - else: - inline_comments_text = "%d Inline" % inline_comments + response.mustcontain( + ''.format(comments)) + response.mustcontain( + ''.format(inline_comments)) - if comments: - response.mustcontain('%s,' % comments_text) - else: - response.mustcontain(comments_text) - if inline_comments: - response.mustcontain( - 'id="inline-comments-counter">%s' % inline_comments_text) - else: - response.mustcontain(inline_comments_text) + diff --git a/rhodecode/apps/repository/tests/test_repo_compare.py b/rhodecode/apps/repository/tests/test_repo_compare.py --- a/rhodecode/apps/repository/tests/test_repo_compare.py +++ b/rhodecode/apps/repository/tests/test_repo_compare.py @@ -619,7 +619,12 @@ class ComparePage(AssertResponse): self.contains_one_anchor(file_id) diffblock = doc.cssselect('[data-f-path="%s"]' % filename) assert len(diffblock) == 2 - assert len(diffblock[0].cssselect('a[href="#%s"]' % file_id)) == 1 + for lnk in diffblock[0].cssselect('a'): + if 'permalink' in lnk.text: + assert '#{}'.format(file_id) in lnk.attrib['href'] + break + else: + pytest.fail('Unable to find permalink') def contains_change_summary(self, files_changed, inserted, deleted): template = ( diff --git a/rhodecode/apps/repository/tests/test_repo_pullrequests.py b/rhodecode/apps/repository/tests/test_repo_pullrequests.py --- a/rhodecode/apps/repository/tests/test_repo_pullrequests.py +++ b/rhodecode/apps/repository/tests/test_repo_pullrequests.py @@ -150,9 +150,9 @@ class TestPullrequestsView(object): response = self.app.post( route_path('pullrequest_create', repo_name=source.repo_name), [ - ('source_repo', source.repo_name), + ('source_repo', source_repo_name), ('source_ref', source_ref), - ('target_repo', target.repo_name), + ('target_repo', target_repo_name), ('target_ref', target_ref), ('common_ancestor', commit_ids['initial-commit']), ('pullrequest_title', 'Title'), @@ -1110,16 +1110,17 @@ class TestPullrequestsView(object): # source has ancestor - change - change-2 backend.pull_heads(source, heads=['change-2']) + target_repo_name = target.repo_name # update PR self.app.post( route_path('pullrequest_update', - repo_name=target.repo_name, pull_request_id=pull_request_id), + repo_name=target_repo_name, pull_request_id=pull_request_id), params={'update_commits': 'true', 'csrf_token': csrf_token}) response = self.app.get( route_path('pullrequest_show', - repo_name=target.repo_name, + repo_name=target_repo_name, pull_request_id=pull_request.pull_request_id)) assert response.status_int == 200 @@ -1166,10 +1167,11 @@ class TestPullrequestsView(object): # source has ancestor - ancestor-new - change-rebased backend.pull_heads(target, heads=['ancestor-new']) backend.pull_heads(source, heads=['change-rebased']) + target_repo_name = target.repo_name # update PR url = route_path('pullrequest_update', - repo_name=target.repo_name, + repo_name=target_repo_name, pull_request_id=pull_request_id) self.app.post(url, params={'update_commits': 'true', 'csrf_token': csrf_token}, @@ -1183,7 +1185,7 @@ class TestPullrequestsView(object): response = self.app.get( route_path('pullrequest_show', - repo_name=target.repo_name, + repo_name=target_repo_name, pull_request_id=pull_request.pull_request_id)) assert response.status_int == 200 response.mustcontain('Pull request updated to') @@ -1232,16 +1234,17 @@ class TestPullrequestsView(object): vcsrepo = target.scm_instance() vcsrepo.config.clear_section('hooks') 
vcsrepo.run_git_command(['reset', '--soft', 'HEAD~2']) + target_repo_name = target.repo_name # update PR url = route_path('pullrequest_update', - repo_name=target.repo_name, + repo_name=target_repo_name, pull_request_id=pull_request_id) self.app.post(url, params={'update_commits': 'true', 'csrf_token': csrf_token}, status=200) - response = self.app.get(route_path('pullrequest_new', repo_name=target.repo_name)) + response = self.app.get(route_path('pullrequest_new', repo_name=target_repo_name)) assert response.status_int == 200 response.mustcontain('Pull request updated to') response.mustcontain('with 0 added, 0 removed commits.') @@ -1280,11 +1283,12 @@ class TestPullrequestsView(object): # source has ancestor - ancestor-new - change-rebased backend.pull_heads(target, heads=['ancestor-new']) backend.pull_heads(source, heads=['change-rebased']) + target_repo_name = target.repo_name # update PR self.app.post( route_path('pullrequest_update', - repo_name=target.repo_name, pull_request_id=pull_request_id), + repo_name=target_repo_name, pull_request_id=pull_request_id), params={'update_commits': 'true', 'csrf_token': csrf_token}, status=200) @@ -1389,6 +1393,8 @@ class TestPullrequestsView(object): pull_request = pr_util.create_pull_request( commits, target_head='old-feature', source_head='new-feature', revisions=['new-feature'], mergeable=True) + pr_id = pull_request.pull_request_id + target_repo_name = pull_request.target_repo.repo_name vcs = pr_util.source_repository.scm_instance() if backend.alias == 'git': @@ -1397,8 +1403,8 @@ class TestPullrequestsView(object): vcs.strip(pr_util.commit_ids['new-feature']) url = route_path('pullrequest_update', - repo_name=pull_request.target_repo.repo_name, - pull_request_id=pull_request.pull_request_id) + repo_name=target_repo_name, + pull_request_id=pr_id) response = self.app.post(url, params={'update_commits': 'true', 'csrf_token': csrf_token}) @@ -1409,8 +1415,8 @@ class TestPullrequestsView(object): # Make sure that after update, it won't raise 500 errors response = self.app.get(route_path( 'pullrequest_show', - repo_name=pr_util.target_repository.repo_name, - pull_request_id=pull_request.pull_request_id)) + repo_name=target_repo_name, + pull_request_id=pr_id)) assert response.status_int == 200 response.assert_response().element_contains( diff --git a/rhodecode/apps/repository/views/repo_commits.py b/rhodecode/apps/repository/views/repo_commits.py --- a/rhodecode/apps/repository/views/repo_commits.py +++ b/rhodecode/apps/repository/views/repo_commits.py @@ -18,8 +18,8 @@ # RhodeCode Enterprise Edition, including its added features, Support services, # and proprietary license terms, please see https://rhodecode.com/licenses/ - import logging +import collections from pyramid.httpexceptions import ( HTTPNotFound, HTTPBadRequest, HTTPFound, HTTPForbidden, HTTPConflict) @@ -34,14 +34,14 @@ from rhodecode.apps.file_store.exception from rhodecode.lib import diffs, codeblocks from rhodecode.lib.auth import ( LoginRequired, HasRepoPermissionAnyDecorator, NotAnonymous, CSRFRequired) - +from rhodecode.lib.ext_json import json from rhodecode.lib.compat import OrderedDict from rhodecode.lib.diffs import ( cache_diff, load_cached_diff, diff_cache_exist, get_diff_context, get_diff_whitespace_flag) from rhodecode.lib.exceptions import StatusChangeOnClosedPullRequestError, CommentVersionMismatch import rhodecode.lib.helpers as h -from rhodecode.lib.utils2 import safe_unicode, str2bool +from rhodecode.lib.utils2 import safe_unicode, str2bool, StrictAttributeDict from 
rhodecode.lib.vcs.backends.base import EmptyCommit
 from rhodecode.lib.vcs.exceptions import (
     RepositoryError, CommitDoesNotExistError)
@@ -115,6 +115,7 @@ class RepoCommitsView(RepoAppView):
         except Exception:
             log.exception("General failure")
             raise HTTPNotFound()
+        single_commit = len(c.commit_ranges) == 1
 
         c.changes = OrderedDict()
         c.lines_added = 0
@@ -128,23 +129,48 @@ class RepoCommitsView(RepoAppView):
         c.inline_comments = []
         c.files = []
 
-        c.statuses = []
         c.comments = []
         c.unresolved_comments = []
         c.resolved_comments = []
-        if len(c.commit_ranges) == 1:
+
+        # Single commit
+        if single_commit:
             commit = c.commit_ranges[0]
             c.comments = CommentsModel().get_comments(
                 self.db_repo.repo_id, revision=commit.raw_id)
-            c.statuses.append(ChangesetStatusModel().get_status(
-                self.db_repo.repo_id, commit.raw_id))
+
+            # comments from PR
             statuses = ChangesetStatusModel().get_statuses(
                 self.db_repo.repo_id, commit.raw_id,
                 with_revisions=True)
-            prs = set(st.pull_request for st in statuses
-                      if st.pull_request is not None)
+
+            prs = set()
+            reviewers = list()
+            reviewers_duplicates = set()  # to not have duplicates from multiple votes
+            for c_status in statuses:
+
+                # extract associated pull-requests from votes
+                if c_status.pull_request:
+                    prs.add(c_status.pull_request)
+
+                # extract reviewers
+                _user_id = c_status.author.user_id
+                if _user_id not in reviewers_duplicates:
+                    reviewers.append(
+                        StrictAttributeDict({
+                            'user': c_status.author,
+
+                            # fake attributes for the commit page that we don't have,
+                            # but we share the display with the PR page
+                            'mandatory': False,
+                            'reasons': [],
+                            'rule_user_group_data': lambda: None
+                        })
+                    )
+                    reviewers_duplicates.add(_user_id)
+
+            c.allowed_reviewers = reviewers
             # from associated statuses, check the pull requests, and
             # show comments from them
             for pr in prs:
@@ -155,6 +181,37 @@ class RepoCommitsView(RepoAppView):
             c.resolved_comments = CommentsModel()\
                 .get_commit_resolved_todos(commit.raw_id)
 
+            c.inline_comments_flat = CommentsModel()\
+                .get_commit_inline_comments(commit.raw_id)
+
+            review_statuses = ChangesetStatusModel().aggregate_votes_by_user(
+                statuses, reviewers)
+
+            c.commit_review_status = ChangesetStatus.STATUS_NOT_REVIEWED
+
+            c.commit_set_reviewers_data_json = collections.OrderedDict({'reviewers': []})
+
+            for review_obj, member, reasons, mandatory, status in review_statuses:
+                member_reviewer = h.reviewer_as_json(
+                    member, reasons=reasons, mandatory=mandatory,
+                    user_group=None
+                )
+
+                current_review_status = status[0][1].status if status else ChangesetStatus.STATUS_NOT_REVIEWED
+                member_reviewer['review_status'] = current_review_status
+                member_reviewer['review_status_label'] = h.commit_status_lbl(current_review_status)
+                member_reviewer['allowed_to_update'] = False
+                c.commit_set_reviewers_data_json['reviewers'].append(member_reviewer)
+
+            c.commit_set_reviewers_data_json = json.dumps(c.commit_set_reviewers_data_json)
+
+            # NOTE(marcink): this uses the same voting logic as in pull-requests
+            c.commit_review_status = ChangesetStatusModel().calculate_status(review_statuses)
+            c.commit_broadcast_channel = u'/repo${}$/commit/{}'.format(
+                c.repo_name,
+                commit.raw_id
+            )
+
         diff = None
         # Iterate over ranges (default commit view is always one commit)
         for commit in c.commit_ranges:
@@ -166,8 +223,8 @@ class RepoCommitsView(RepoAppView):
             if method == 'show':
                 inline_comments = CommentsModel().get_inline_comments(
                     self.db_repo.repo_id, revision=commit.raw_id)
-                c.inline_cnt = CommentsModel().get_inline_comments_count(
-                    inline_comments)
+                c.inline_cnt =
len(CommentsModel().get_inline_comments_as_list( + inline_comments)) c.inline_comments = inline_comments cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path( @@ -226,6 +283,7 @@ class RepoCommitsView(RepoAppView): # sort comments by how they were generated c.comments = sorted(c.comments, key=lambda x: x.comment_id) + c.at_version_num = None if len(c.commit_ranges) == 1: c.commit = c.commit_ranges[0] @@ -395,6 +453,7 @@ class RepoCommitsView(RepoAppView): } if comment: c.co = comment + c.at_version_num = 0 rendered_comment = render( 'rhodecode:templates/changeset/changeset_comment_block.mako', self._get_template_context(c), self.request) @@ -427,7 +486,6 @@ class RepoCommitsView(RepoAppView): return '' @LoginRequired() - @NotAnonymous() @HasRepoPermissionAnyDecorator( 'repository.read', 'repository.write', 'repository.admin') @CSRFRequired() diff --git a/rhodecode/apps/repository/views/repo_pull_requests.py b/rhodecode/apps/repository/views/repo_pull_requests.py --- a/rhodecode/apps/repository/views/repo_pull_requests.py +++ b/rhodecode/apps/repository/views/repo_pull_requests.py @@ -39,7 +39,7 @@ from rhodecode.lib.ext_json import json from rhodecode.lib.auth import ( LoginRequired, HasRepoPermissionAny, HasRepoPermissionAnyDecorator, NotAnonymous, CSRFRequired) -from rhodecode.lib.utils2 import str2bool, safe_str, safe_unicode +from rhodecode.lib.utils2 import str2bool, safe_str, safe_unicode, safe_int from rhodecode.lib.vcs.backends.base import EmptyCommit, UpdateFailureReason from rhodecode.lib.vcs.exceptions import ( CommitDoesNotExistError, RepositoryRequirementError, EmptyRepositoryError) @@ -265,6 +265,36 @@ class RepoPullRequestsView(RepoAppView, return diffset + def register_comments_vars(self, c, pull_request, versions): + comments_model = CommentsModel() + + # GENERAL COMMENTS with versions # + q = comments_model._all_general_comments_of_pull_request(pull_request) + q = q.order_by(ChangesetComment.comment_id.asc()) + general_comments = q + + # pick comments we want to render at current version + c.comment_versions = comments_model.aggregate_comments( + general_comments, versions, c.at_version_num) + + # INLINE COMMENTS with versions # + q = comments_model._all_inline_comments_of_pull_request(pull_request) + q = q.order_by(ChangesetComment.comment_id.asc()) + inline_comments = q + + c.inline_versions = comments_model.aggregate_comments( + inline_comments, versions, c.at_version_num, inline=True) + + # Comments inline+general + if c.at_version: + c.inline_comments_flat = c.inline_versions[c.at_version_num]['display'] + c.comments = c.comment_versions[c.at_version_num]['display'] + else: + c.inline_comments_flat = c.inline_versions[c.at_version_num]['until'] + c.comments = c.comment_versions[c.at_version_num]['until'] + + return general_comments, inline_comments + @LoginRequired() @HasRepoPermissionAnyDecorator( 'repository.read', 'repository.write', 'repository.admin') @@ -280,6 +310,8 @@ class RepoPullRequestsView(RepoAppView, pull_request_id = pull_request.pull_request_id c.state_progressing = pull_request.is_state_changing() + c.pr_broadcast_channel = '/repo${}$/pr/{}'.format( + pull_request.target_repo.repo_name, pull_request.pull_request_id) _new_state = { 'created': PullRequest.STATE_CREATED, @@ -300,22 +332,23 @@ class RepoPullRequestsView(RepoAppView, from_version = self.request.GET.get('from_version') or version merge_checks = self.request.GET.get('merge_checks') c.fulldiff = str2bool(self.request.GET.get('fulldiff')) + force_refresh = 
str2bool(self.request.GET.get('force_refresh')) + c.range_diff_on = self.request.GET.get('range-diff') == "1" # fetch global flags of ignore ws or context lines diff_context = diffs.get_diff_context(self.request) hide_whitespace_changes = diffs.get_diff_whitespace_flag(self.request) - force_refresh = str2bool(self.request.GET.get('force_refresh')) - (pull_request_latest, pull_request_at_ver, pull_request_display_obj, at_version) = PullRequestModel().get_pr_version( pull_request_id, version=version) + pr_closed = pull_request_latest.is_closed() if pr_closed and (version or from_version): - # not allow to browse versions + # not allow to browse versions for closed PR raise HTTPFound(h.route_path( 'pullrequest_show', repo_name=self.db_repo_name, pull_request_id=pull_request_id)) @@ -323,13 +356,13 @@ class RepoPullRequestsView(RepoAppView, versions = pull_request_display_obj.versions() # used to store per-commit range diffs c.changes = collections.OrderedDict() - c.range_diff_on = self.request.GET.get('range-diff') == "1" c.at_version = at_version c.at_version_num = (at_version - if at_version and at_version != 'latest' + if at_version and at_version != PullRequest.LATEST_VER else None) - c.at_version_pos = ChangesetComment.get_index_from_version( + + c.at_version_index = ChangesetComment.get_index_from_version( c.at_version_num, versions) (prev_pull_request_latest, @@ -340,9 +373,9 @@ class RepoPullRequestsView(RepoAppView, c.from_version = prev_at_version c.from_version_num = (prev_at_version - if prev_at_version and prev_at_version != 'latest' + if prev_at_version and prev_at_version != PullRequest.LATEST_VER else None) - c.from_version_pos = ChangesetComment.get_index_from_version( + c.from_version_index = ChangesetComment.get_index_from_version( c.from_version_num, versions) # define if we're in COMPARE mode or VIEW at version mode @@ -351,16 +384,21 @@ class RepoPullRequestsView(RepoAppView, # pull_requests repo_name we opened it against # ie. 
target_repo must match if self.db_repo_name != pull_request_at_ver.target_repo.repo_name: + log.warning('Mismatch between the current repo: %s, and target %s', + self.db_repo_name, pull_request_at_ver.target_repo.repo_name) raise HTTPNotFound() - c.shadow_clone_url = PullRequestModel().get_shadow_clone_url( - pull_request_at_ver) + c.shadow_clone_url = PullRequestModel().get_shadow_clone_url(pull_request_at_ver) c.pull_request = pull_request_display_obj c.renderer = pull_request_at_ver.description_renderer or c.renderer c.pull_request_latest = pull_request_latest - if compare or (at_version and not at_version == 'latest'): + # inject latest version + latest_ver = PullRequest.get_pr_display_object(pull_request_latest, pull_request_latest) + c.versions = versions + [latest_ver] + + if compare or (at_version and not at_version == PullRequest.LATEST_VER): c.allowed_to_change_status = False c.allowed_to_update = False c.allowed_to_merge = False @@ -389,12 +427,9 @@ class RepoPullRequestsView(RepoAppView, 'rules' in pull_request_latest.reviewer_data: rules = pull_request_latest.reviewer_data['rules'] or {} try: - c.forbid_adding_reviewers = rules.get( - 'forbid_adding_reviewers') - c.forbid_author_to_review = rules.get( - 'forbid_author_to_review') - c.forbid_commit_author_to_review = rules.get( - 'forbid_commit_author_to_review') + c.forbid_adding_reviewers = rules.get('forbid_adding_reviewers') + c.forbid_author_to_review = rules.get('forbid_author_to_review') + c.forbid_commit_author_to_review = rules.get('forbid_commit_author_to_review') except Exception: pass @@ -419,41 +454,34 @@ class RepoPullRequestsView(RepoAppView, 'rhodecode:templates/pullrequests/pullrequest_merge_checks.mako' return self._get_template_context(c) - comments_model = CommentsModel() + c.allowed_reviewers = [obj.user_id for obj in pull_request.reviewers if obj.user] # reviewers and statuses - c.pull_request_reviewers = pull_request_at_ver.reviewers_statuses() - allowed_reviewers = [x[0].user_id for x in c.pull_request_reviewers] + c.pull_request_default_reviewers_data_json = json.dumps(pull_request.reviewer_data) + c.pull_request_set_reviewers_data_json = collections.OrderedDict({'reviewers': []}) - # GENERAL COMMENTS with versions # - q = comments_model._all_general_comments_of_pull_request(pull_request_latest) - q = q.order_by(ChangesetComment.comment_id.asc()) - general_comments = q + for review_obj, member, reasons, mandatory, status in pull_request_at_ver.reviewers_statuses(): + member_reviewer = h.reviewer_as_json( + member, reasons=reasons, mandatory=mandatory, + user_group=review_obj.rule_user_group_data() + ) - # pick comments we want to render at current version - c.comment_versions = comments_model.aggregate_comments( - general_comments, versions, c.at_version_num) - c.comments = c.comment_versions[c.at_version_num]['until'] + current_review_status = status[0][1].status if status else ChangesetStatus.STATUS_NOT_REVIEWED + member_reviewer['review_status'] = current_review_status + member_reviewer['review_status_label'] = h.commit_status_lbl(current_review_status) + member_reviewer['allowed_to_update'] = c.allowed_to_update + c.pull_request_set_reviewers_data_json['reviewers'].append(member_reviewer) - # INLINE COMMENTS with versions # - q = comments_model._all_inline_comments_of_pull_request(pull_request_latest) - q = q.order_by(ChangesetComment.comment_id.asc()) - inline_comments = q + c.pull_request_set_reviewers_data_json = json.dumps(c.pull_request_set_reviewers_data_json) - c.inline_versions = 
comments_model.aggregate_comments( - inline_comments, versions, c.at_version_num, inline=True) + general_comments, inline_comments = \ + self.register_comments_vars(c, pull_request_latest, versions) # TODOs c.unresolved_comments = CommentsModel() \ - .get_pull_request_unresolved_todos(pull_request) + .get_pull_request_unresolved_todos(pull_request_latest) c.resolved_comments = CommentsModel() \ - .get_pull_request_resolved_todos(pull_request) - - # inject latest version - latest_ver = PullRequest.get_pr_display_object( - pull_request_latest, pull_request_latest) - - c.versions = versions + [latest_ver] + .get_pull_request_resolved_todos(pull_request_latest) # if we use version, then do not show later comments # than current version @@ -520,8 +548,8 @@ class RepoPullRequestsView(RepoAppView, # empty version means latest, so we keep this to prevent # double caching - version_normalized = version or 'latest' - from_version_normalized = from_version or 'latest' + version_normalized = version or PullRequest.LATEST_VER + from_version_normalized = from_version or PullRequest.LATEST_VER cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path(target_repo) cache_file_path = diff_cache_exist( @@ -613,7 +641,7 @@ class RepoPullRequestsView(RepoAppView, diff_limit, file_limit, c.fulldiff, hide_whitespace_changes, diff_context, use_ancestor=use_ancestor - ) + ) # save cached diff if caching_enabled: @@ -717,7 +745,7 @@ class RepoPullRequestsView(RepoAppView, # current user review statuses for each version c.review_versions = {} - if self._rhodecode_user.user_id in allowed_reviewers: + if self._rhodecode_user.user_id in c.allowed_reviewers: for co in general_comments: if co.author.user_id == self._rhodecode_user.user_id: status = co.status_change @@ -937,6 +965,90 @@ class RepoPullRequestsView(RepoAppView, @NotAnonymous() @HasRepoPermissionAnyDecorator( 'repository.read', 'repository.write', 'repository.admin') + @view_config( + route_name='pullrequest_comments', request_method='POST', + renderer='string', xhr=True) + def pullrequest_comments(self): + self.load_default_context() + + pull_request = PullRequest.get_or_404( + self.request.matchdict['pull_request_id']) + pull_request_id = pull_request.pull_request_id + version = self.request.GET.get('version') + + _render = self.request.get_partial_renderer( + 'rhodecode:templates/base/sidebar.mako') + c = _render.get_call_context() + + (pull_request_latest, + pull_request_at_ver, + pull_request_display_obj, + at_version) = PullRequestModel().get_pr_version( + pull_request_id, version=version) + versions = pull_request_display_obj.versions() + latest_ver = PullRequest.get_pr_display_object(pull_request_latest, pull_request_latest) + c.versions = versions + [latest_ver] + + c.at_version = at_version + c.at_version_num = (at_version + if at_version and at_version != PullRequest.LATEST_VER + else None) + + self.register_comments_vars(c, pull_request_latest, versions) + all_comments = c.inline_comments_flat + c.comments + + existing_ids = filter( + lambda e: e, map(safe_int, self.request.POST.getall('comments[]'))) + return _render('comments_table', all_comments, len(all_comments), + existing_ids=existing_ids) + + @LoginRequired() + @NotAnonymous() + @HasRepoPermissionAnyDecorator( + 'repository.read', 'repository.write', 'repository.admin') + @view_config( + route_name='pullrequest_todos', request_method='POST', + renderer='string', xhr=True) + def pullrequest_todos(self): + self.load_default_context() + + pull_request = PullRequest.get_or_404( + 
self.request.matchdict['pull_request_id']) + pull_request_id = pull_request.pull_request_id + version = self.request.GET.get('version') + + _render = self.request.get_partial_renderer( + 'rhodecode:templates/base/sidebar.mako') + c = _render.get_call_context() + (pull_request_latest, + pull_request_at_ver, + pull_request_display_obj, + at_version) = PullRequestModel().get_pr_version( + pull_request_id, version=version) + versions = pull_request_display_obj.versions() + latest_ver = PullRequest.get_pr_display_object(pull_request_latest, pull_request_latest) + c.versions = versions + [latest_ver] + + c.at_version = at_version + c.at_version_num = (at_version + if at_version and at_version != PullRequest.LATEST_VER + else None) + + c.unresolved_comments = CommentsModel() \ + .get_pull_request_unresolved_todos(pull_request) + c.resolved_comments = CommentsModel() \ + .get_pull_request_resolved_todos(pull_request) + + all_comments = c.unresolved_comments + c.resolved_comments + existing_ids = filter( + lambda e: e, map(safe_int, self.request.POST.getall('comments[]'))) + return _render('comments_table', all_comments, len(c.unresolved_comments), + todo_comments=True, existing_ids=existing_ids) + + @LoginRequired() + @NotAnonymous() + @HasRepoPermissionAnyDecorator( + 'repository.read', 'repository.write', 'repository.admin') @CSRFRequired() @view_config( route_name='pullrequest_create', request_method='POST', @@ -1098,7 +1210,7 @@ class RepoPullRequestsView(RepoAppView, self.request.matchdict['pull_request_id']) _ = self.request.translate - self.load_default_context() + c = self.load_default_context() redirect_url = None if pull_request.is_closed(): @@ -1109,6 +1221,8 @@ class RepoPullRequestsView(RepoAppView, 'redirect_url': redirect_url} is_state_changing = pull_request.is_state_changing() + c.pr_broadcast_channel = '/repo${}$/pr/{}'.format( + pull_request.target_repo.repo_name, pull_request.pull_request_id) # only owner or admin can update it allowed_to_update = PullRequestModel().check_user_update( @@ -1132,7 +1246,7 @@ class RepoPullRequestsView(RepoAppView, return {'response': True, 'redirect_url': redirect_url} - self._update_commits(pull_request) + self._update_commits(c, pull_request) if force_refresh: redirect_url = h.route_path( 'pullrequest_show', repo_name=self.db_repo_name, @@ -1168,7 +1282,7 @@ class RepoPullRequestsView(RepoAppView, h.flash(msg, category='success') return - def _update_commits(self, pull_request): + def _update_commits(self, c, pull_request): _ = self.request.translate with pull_request.set_state(PullRequest.STATE_UPDATING): @@ -1196,13 +1310,18 @@ class RepoPullRequestsView(RepoAppView, change_source=changed) h.flash(msg, category='success') - channel = '/repo${}$/pr/{}'.format( - pull_request.target_repo.repo_name, pull_request.pull_request_id) message = msg + ( ' - ' '{}'.format(_('Reload page'))) + + message_obj = { + 'message': message, + 'level': 'success', + 'topic': '/notifications' + } + channelstream.post_message( - channel, message, self._rhodecode_user.username, + c.pr_broadcast_channel, message_obj, self._rhodecode_user.username, registry=self.request.registry) else: msg = PullRequestModel.UPDATE_STATUS_MESSAGES[resp.reason] @@ -1472,6 +1591,7 @@ class RepoPullRequestsView(RepoAppView, } if comment: c.co = comment + c.at_version_num = None rendered_comment = render( 'rhodecode:templates/changeset/changeset_comment_block.mako', self._get_template_context(c), self.request) diff --git a/rhodecode/config/licenses.json b/rhodecode/config/licenses.json --- 
a/rhodecode/config/licenses.json +++ b/rhodecode/config/licenses.json @@ -1890,7 +1890,7 @@ "url": "http://spdx.org/licenses/BSD-4-Clause.html" } ], - "name": "python2.7-channelstream-0.5.2" + "name": "python2.7-channelstream-0.6.14" }, { "license": [ diff --git a/rhodecode/config/middleware.py b/rhodecode/config/middleware.py --- a/rhodecode/config/middleware.py +++ b/rhodecode/config/middleware.py @@ -53,7 +53,7 @@ from rhodecode.lib.utils2 import aslist from rhodecode.lib.exc_tracking import store_exception from rhodecode.subscribers import ( scan_repositories_if_enabled, write_js_routes_if_enabled, - write_metadata_if_needed, inject_app_settings) + write_metadata_if_needed, write_usage_data, inject_app_settings) log = logging.getLogger(__name__) @@ -316,6 +316,8 @@ def includeme(config): pyramid.events.ApplicationCreated) config.add_subscriber(write_metadata_if_needed, pyramid.events.ApplicationCreated) + config.add_subscriber(write_usage_data, + pyramid.events.ApplicationCreated) config.add_subscriber(write_js_routes_if_enabled, pyramid.events.ApplicationCreated) diff --git a/rhodecode/events/pullrequest.py b/rhodecode/events/pullrequest.py --- a/rhodecode/events/pullrequest.py +++ b/rhodecode/events/pullrequest.py @@ -145,7 +145,7 @@ class PullRequestCommentEvent(PullReques status = None if self.comment.status_change: - status = self.comment.status_change[0].status + status = self.comment.review_status data.update({ 'comment': { @@ -184,7 +184,7 @@ class PullRequestCommentEditEvent(PullRe status = None if self.comment.status_change: - status = self.comment.status_change[0].status + status = self.comment.review_status data.update({ 'comment': { diff --git a/rhodecode/lib/channelstream.py b/rhodecode/lib/channelstream.py --- a/rhodecode/lib/channelstream.py +++ b/rhodecode/lib/channelstream.py @@ -37,8 +37,9 @@ log = logging.getLogger(__name__) LOCK = ReadWriteMutex() -STATE_PUBLIC_KEYS = ['id', 'username', 'first_name', 'last_name', - 'icon_link', 'display_name', 'display_link'] +USER_STATE_PUBLIC_KEYS = [ + 'id', 'username', 'first_name', 'last_name', + 'icon_link', 'display_name', 'display_link'] class ChannelstreamException(Exception): @@ -64,6 +65,8 @@ def channelstream_request(config, payloa 'x-channelstream-endpoint': endpoint, 'Content-Type': 'application/json'} req_url = get_channelstream_server_url(config, endpoint) + + log.debug('Sending a channelstream request to endpoint: `%s`', req_url) response = None try: response = requests.post(req_url, data=json.dumps(payload), @@ -76,6 +79,7 @@ def channelstream_request(config, payloa log.exception('Exception related to Channelstream happened') if raise_exc: raise ChannelstreamConnectionException() + log.debug('Got channelstream response: %s', response) return response @@ -154,7 +158,7 @@ def parse_channels_info(info_result, inc for userinfo in info_result['users']: user_state_dict[userinfo['user']] = { k: v for k, v in userinfo['state'].items() - if k in STATE_PUBLIC_KEYS + if k in USER_STATE_PUBLIC_KEYS } channels_info = {} @@ -163,10 +167,10 @@ def parse_channels_info(info_result, inc if c_name not in include_channel_info: continue connected_list = [] - for userinfo in c_info['users']: + for username in c_info['users']: connected_list.append({ - 'user': userinfo['user'], - 'state': user_state_dict[userinfo['user']] + 'user': username, + 'state': user_state_dict[username] }) channels_info[c_name] = {'users': connected_list, 'history': c_info['history']} @@ -230,6 +234,14 @@ def get_connection_validators(registry): def 
post_message(channel, message, username, registry=None):
+    message_obj = message
+    if isinstance(message, basestring):
+        message_obj = {
+            'message': message,
+            'level': 'success',
+            'topic': '/notifications'
+        }
+
     if not registry:
         registry = get_current_registry()
@@ -243,11 +255,7 @@ def post_message(channel, message, usern
         'user': 'system',
         'exclude_users': [username],
         'channel': channel,
-        'message': {
-            'message': message,
-            'level': 'success',
-            'topic': '/notifications'
-        }
+        'message': message_obj
     }
 
     try:
diff --git a/rhodecode/lib/dbmigrate/schema/db_4_20_0_0.py b/rhodecode/lib/dbmigrate/schema/db_4_20_0_0.py
new file mode 100644
--- /dev/null
+++ b/rhodecode/lib/dbmigrate/schema/db_4_20_0_0.py
@@ -0,0 +1,5689 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (C) 2010-2020 RhodeCode GmbH
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License, version 3
+# (only), as published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+#
+# This program is dual-licensed. If you wish to learn more about the
+# RhodeCode Enterprise Edition, including its added features, Support services,
+# and proprietary license terms, please see https://rhodecode.com/licenses/
+
+"""
+Database Models for RhodeCode Enterprise
+"""
+
+import re
+import os
+import time
+import string
+import hashlib
+import logging
+import datetime
+import uuid
+import warnings
+import ipaddress
+import functools
+import traceback
+import collections
+
+from sqlalchemy import (
+    or_, and_, not_, func, cast, TypeDecorator, event,
+    Index, Sequence, UniqueConstraint, ForeignKey, CheckConstraint, Column,
+    Boolean, String, Unicode, UnicodeText, DateTime, Integer, LargeBinary,
+    Text, Float, PickleType, BigInteger)
+from sqlalchemy.sql.expression import true, false, case
+from sqlalchemy.sql.functions import coalesce, count  # pragma: no cover
+from sqlalchemy.orm import (
+    relationship, joinedload, class_mapper, validates, aliased)
+from sqlalchemy.ext.declarative import declared_attr
+from sqlalchemy.ext.hybrid import hybrid_property
+from sqlalchemy.exc import IntegrityError  # pragma: no cover
+from sqlalchemy.dialects.mysql import LONGTEXT
+from zope.cachedescriptors.property import Lazy as LazyProperty
+from pyramid import compat
+from pyramid.threadlocal import get_current_request
+from webhelpers2.text import remove_formatting
+
+from rhodecode.translation import _
+from rhodecode.lib.vcs import get_vcs_instance, VCSError
+from rhodecode.lib.vcs.backends.base import EmptyCommit, Reference
+from rhodecode.lib.utils2 import (
+    str2bool, safe_str, get_commit_safe, safe_unicode, sha1_safe,
+    time_to_datetime, aslist, Optional, safe_int, get_clone_url, AttributeDict,
+    glob2re, StrictAttributeDict, cleaned_uri, datetime_to_time, OrderedDefaultDict)
+from rhodecode.lib.jsonalchemy import MutationObj, MutationList, JsonType, \
+    JsonRaw
+from rhodecode.lib.ext_json import json
+from rhodecode.lib.caching_query import FromCache
+from rhodecode.lib.encrypt import AESCipher, validate_and_get_enc_data
+from rhodecode.lib.encrypt2 import Encryptor
+from rhodecode.lib.exceptions import (
ArtifactMetadataDuplicate, ArtifactMetadataBadValueType) +from rhodecode.model.meta import Base, Session + +URL_SEP = '/' +log = logging.getLogger(__name__) + +# ============================================================================= +# BASE CLASSES +# ============================================================================= + +# this is propagated from .ini file rhodecode.encrypted_values.secret or +# beaker.session.secret if first is not set. +# and initialized at environment.py +ENCRYPTION_KEY = None + +# used to sort permissions by types, '#' used here is not allowed to be in +# usernames, and it's very early in sorted string.printable table. +PERMISSION_TYPE_SORT = { + 'admin': '####', + 'write': '###', + 'read': '##', + 'none': '#', +} + + +def display_user_sort(obj): + """ + Sort function used to sort permissions in .permissions() function of + Repository, RepoGroup, UserGroup. Also it put the default user in front + of all other resources + """ + + if obj.username == User.DEFAULT_USER: + return '#####' + prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '') + extra_sort_num = '1' # default + + # NOTE(dan): inactive duplicates goes last + if getattr(obj, 'duplicate_perm', None): + extra_sort_num = '9' + return prefix + extra_sort_num + obj.username + + +def display_user_group_sort(obj): + """ + Sort function used to sort permissions in .permissions() function of + Repository, RepoGroup, UserGroup. Also it put the default user in front + of all other resources + """ + + prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '') + return prefix + obj.users_group_name + + +def _hash_key(k): + return sha1_safe(k) + + +def in_filter_generator(qry, items, limit=500): + """ + Splits IN() into multiple with OR + e.g.:: + cnt = Repository.query().filter( + or_( + *in_filter_generator(Repository.repo_id, range(100000)) + )).count() + """ + if not items: + # empty list will cause empty query which might cause security issues + # this can lead to hidden unpleasant results + items = [-1] + + parts = [] + for chunk in xrange(0, len(items), limit): + parts.append( + qry.in_(items[chunk: chunk + limit]) + ) + + return parts + + +base_table_args = { + 'extend_existing': True, + 'mysql_engine': 'InnoDB', + 'mysql_charset': 'utf8', + 'sqlite_autoincrement': True +} + + +class EncryptedTextValue(TypeDecorator): + """ + Special column for encrypted long text data, use like:: + + value = Column("encrypted_value", EncryptedValue(), nullable=False) + + This column is intelligent so if value is in unencrypted form it return + unencrypted form, but on save it always encrypts + """ + impl = Text + + def process_bind_param(self, value, dialect): + """ + Setter for storing value + """ + import rhodecode + if not value: + return value + + # protect against double encrypting if values is already encrypted + if value.startswith('enc$aes$') \ + or value.startswith('enc$aes_hmac$') \ + or value.startswith('enc2$'): + raise ValueError('value needs to be in unencrypted format, ' + 'ie. 
not starting with enc$ or enc2$')
+
+        algo = rhodecode.CONFIG.get('rhodecode.encrypted_values.algorithm') or 'aes'
+        if algo == 'aes':
+            return 'enc$aes_hmac$%s' % AESCipher(ENCRYPTION_KEY, hmac=True).encrypt(value)
+        elif algo == 'fernet':
+            return Encryptor(ENCRYPTION_KEY).encrypt(value)
+        else:
+            raise ValueError('Bad encryption algorithm, should be fernet or aes, got: {}'.format(algo))
+
+    def process_result_value(self, value, dialect):
+        """
+        Getter for retrieving value
+        """
+
+        import rhodecode
+        if not value:
+            return value
+
+        algo = rhodecode.CONFIG.get('rhodecode.encrypted_values.algorithm') or 'aes'
+        enc_strict_mode = str2bool(rhodecode.CONFIG.get('rhodecode.encrypted_values.strict') or True)
+        if algo == 'aes':
+            decrypted_data = validate_and_get_enc_data(value, ENCRYPTION_KEY, enc_strict_mode)
+        elif algo == 'fernet':
+            return Encryptor(ENCRYPTION_KEY).decrypt(value)
+        else:
+            raise ValueError('Bad encryption algorithm, should be fernet or aes, got: {}'.format(algo))
+        return decrypted_data
+
+
+class BaseModel(object):
+    """
+    Base Model for all classes
+    """
+
+    @classmethod
+    def _get_keys(cls):
+        """return column names for this model """
+        return class_mapper(cls).c.keys()
+
+    def get_dict(self):
+        """
+        return dict with keys and values corresponding
+        to this model data """
+
+        d = {}
+        for k in self._get_keys():
+            d[k] = getattr(self, k)
+
+        # also use __json__() if present to get additional fields
+        _json_attr = getattr(self, '__json__', None)
+        if _json_attr:
+            # update with attributes from __json__
+            if callable(_json_attr):
+                _json_attr = _json_attr()
+            for k, val in _json_attr.iteritems():
+                d[k] = val
+        return d
+
+    def get_appstruct(self):
+        """return list with keys and values tuples corresponding
+        to this model data """
+
+        lst = []
+        for k in self._get_keys():
+            lst.append((k, getattr(self, k),))
+        return lst
+
+    def populate_obj(self, populate_dict):
+        """populate model with data from given populate_dict"""
+
+        for k in self._get_keys():
+            if k in populate_dict:
+                setattr(self, k, populate_dict[k])
+
+    @classmethod
+    def query(cls):
+        return Session().query(cls)
+
+    @classmethod
+    def get(cls, id_):
+        if id_:
+            return cls.query().get(id_)
+
+    @classmethod
+    def get_or_404(cls, id_):
+        from pyramid.httpexceptions import HTTPNotFound
+
+        try:
+            id_ = int(id_)
+        except (TypeError, ValueError):
+            raise HTTPNotFound()
+
+        res = cls.query().get(id_)
+        if not res:
+            raise HTTPNotFound()
+        return res
+
+    @classmethod
+    def getAll(cls):
+        # deprecated and left for backward compatibility
+        return cls.get_all()
+
+    @classmethod
+    def get_all(cls):
+        return cls.query().all()
+
+    @classmethod
+    def delete(cls, id_):
+        obj = cls.query().get(id_)
+        Session().delete(obj)
+
+    @classmethod
+    def identity_cache(cls, session, attr_name, value):
+        exist_in_session = []
+        for (item_cls, pkey), instance in session.identity_map.items():
+            if cls == item_cls and getattr(instance, attr_name) == value:
+                exist_in_session.append(instance)
+        if exist_in_session:
+            if len(exist_in_session) == 1:
+                return exist_in_session[0]
+            log.exception(
+                'multiple objects with attr %s and '
+                'value %s found with same name: %r',
+                attr_name, value, exist_in_session)
+
+    def __repr__(self):
+        if hasattr(self, '__unicode__'):
+            # python repr needs to return str
+            try:
+                return safe_str(self.__unicode__())
+            except UnicodeDecodeError:
+                pass
+        return '<DB:%s>' % (self.__class__.__name__)
+
+
+class RhodeCodeSetting(Base, BaseModel):
+    __tablename__ = 'rhodecode_settings'
+    __table_args__ = (
UniqueConstraint('app_settings_name'), + base_table_args + ) + + SETTINGS_TYPES = { + 'str': safe_str, + 'int': safe_int, + 'unicode': safe_unicode, + 'bool': str2bool, + 'list': functools.partial(aslist, sep=',') + } + DEFAULT_UPDATE_URL = 'https://rhodecode.com/api/v1/info/versions' + GLOBAL_CONF_KEY = 'app_settings' + + app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) + app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None) + _app_settings_value = Column("app_settings_value", String(4096), nullable=True, unique=None, default=None) + _app_settings_type = Column("app_settings_type", String(255), nullable=True, unique=None, default=None) + + def __init__(self, key='', val='', type='unicode'): + self.app_settings_name = key + self.app_settings_type = type + self.app_settings_value = val + + @validates('_app_settings_value') + def validate_settings_value(self, key, val): + assert type(val) == unicode + return val + + @hybrid_property + def app_settings_value(self): + v = self._app_settings_value + _type = self.app_settings_type + if _type: + _type = self.app_settings_type.split('.')[0] + # decode the encrypted value + if 'encrypted' in self.app_settings_type: + cipher = EncryptedTextValue() + v = safe_unicode(cipher.process_result_value(v, None)) + + converter = self.SETTINGS_TYPES.get(_type) or \ + self.SETTINGS_TYPES['unicode'] + return converter(v) + + @app_settings_value.setter + def app_settings_value(self, val): + """ + Setter that will always make sure we use unicode in app_settings_value + + :param val: + """ + val = safe_unicode(val) + # encode the encrypted value + if 'encrypted' in self.app_settings_type: + cipher = EncryptedTextValue() + val = safe_unicode(cipher.process_bind_param(val, None)) + self._app_settings_value = val + + @hybrid_property + def app_settings_type(self): + return self._app_settings_type + + @app_settings_type.setter + def app_settings_type(self, val): + if val.split('.')[0] not in self.SETTINGS_TYPES: + raise Exception('type must be one of %s got %s' + % (self.SETTINGS_TYPES.keys(), val)) + self._app_settings_type = val + + @classmethod + def get_by_prefix(cls, prefix): + return RhodeCodeSetting.query()\ + .filter(RhodeCodeSetting.app_settings_name.startswith(prefix))\ + .all() + + def __unicode__(self): + return u"<%s('%s:%s[%s]')>" % ( + self.__class__.__name__, + self.app_settings_name, self.app_settings_value, + self.app_settings_type + ) + + +class RhodeCodeUi(Base, BaseModel): + __tablename__ = 'rhodecode_ui' + __table_args__ = ( + UniqueConstraint('ui_key'), + base_table_args + ) + + HOOK_REPO_SIZE = 'changegroup.repo_size' + # HG + HOOK_PRE_PULL = 'preoutgoing.pre_pull' + HOOK_PULL = 'outgoing.pull_logger' + HOOK_PRE_PUSH = 'prechangegroup.pre_push' + HOOK_PRETX_PUSH = 'pretxnchangegroup.pre_push' + HOOK_PUSH = 'changegroup.push_logger' + HOOK_PUSH_KEY = 'pushkey.key_push' + + HOOKS_BUILTIN = [ + HOOK_PRE_PULL, + HOOK_PULL, + HOOK_PRE_PUSH, + HOOK_PRETX_PUSH, + HOOK_PUSH, + HOOK_PUSH_KEY, + ] + + # TODO: johbo: Unify way how hooks are configured for git and hg, + # git part is currently hardcoded. 
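+    # NOTE: illustrative sketch only (added for clarity) -- the HOOKS_BUILTIN
+    # keys above are literal Mercurial hook names, so in a generated hgrc they
+    # would surface as entries of the [hooks] section, e.g.:
+    #
+    #   [hooks]
+    #   changegroup.repo_size = <internal repo-size callable>
+    #   outgoing.pull_logger = <internal pull-logger callable>
+    #
+    # the right-hand side callables are resolved by the hook machinery and are
+    # not part of this schema module.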
+ + # SVN PATTERNS + SVN_BRANCH_ID = 'vcs_svn_branch' + SVN_TAG_ID = 'vcs_svn_tag' + + ui_id = Column( + "ui_id", Integer(), nullable=False, unique=True, default=None, + primary_key=True) + ui_section = Column( + "ui_section", String(255), nullable=True, unique=None, default=None) + ui_key = Column( + "ui_key", String(255), nullable=True, unique=None, default=None) + ui_value = Column( + "ui_value", String(255), nullable=True, unique=None, default=None) + ui_active = Column( + "ui_active", Boolean(), nullable=True, unique=None, default=True) + + def __repr__(self): + return '<%s[%s]%s=>%s]>' % (self.__class__.__name__, self.ui_section, + self.ui_key, self.ui_value) + + +class RepoRhodeCodeSetting(Base, BaseModel): + __tablename__ = 'repo_rhodecode_settings' + __table_args__ = ( + UniqueConstraint( + 'app_settings_name', 'repository_id', + name='uq_repo_rhodecode_setting_name_repo_id'), + base_table_args + ) + + repository_id = Column( + "repository_id", Integer(), ForeignKey('repositories.repo_id'), + nullable=False) + app_settings_id = Column( + "app_settings_id", Integer(), nullable=False, unique=True, + default=None, primary_key=True) + app_settings_name = Column( + "app_settings_name", String(255), nullable=True, unique=None, + default=None) + _app_settings_value = Column( + "app_settings_value", String(4096), nullable=True, unique=None, + default=None) + _app_settings_type = Column( + "app_settings_type", String(255), nullable=True, unique=None, + default=None) + + repository = relationship('Repository') + + def __init__(self, repository_id, key='', val='', type='unicode'): + self.repository_id = repository_id + self.app_settings_name = key + self.app_settings_type = type + self.app_settings_value = val + + @validates('_app_settings_value') + def validate_settings_value(self, key, val): + assert type(val) == unicode + return val + + @hybrid_property + def app_settings_value(self): + v = self._app_settings_value + type_ = self.app_settings_type + SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES + converter = SETTINGS_TYPES.get(type_) or SETTINGS_TYPES['unicode'] + return converter(v) + + @app_settings_value.setter + def app_settings_value(self, val): + """ + Setter that will always make sure we use unicode in app_settings_value + + :param val: + """ + self._app_settings_value = safe_unicode(val) + + @hybrid_property + def app_settings_type(self): + return self._app_settings_type + + @app_settings_type.setter + def app_settings_type(self, val): + SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES + if val not in SETTINGS_TYPES: + raise Exception('type must be one of %s got %s' + % (SETTINGS_TYPES.keys(), val)) + self._app_settings_type = val + + def __unicode__(self): + return u"<%s('%s:%s:%s[%s]')>" % ( + self.__class__.__name__, self.repository.repo_name, + self.app_settings_name, self.app_settings_value, + self.app_settings_type + ) + + +class RepoRhodeCodeUi(Base, BaseModel): + __tablename__ = 'repo_rhodecode_ui' + __table_args__ = ( + UniqueConstraint( + 'repository_id', 'ui_section', 'ui_key', + name='uq_repo_rhodecode_ui_repository_id_section_key'), + base_table_args + ) + + repository_id = Column( + "repository_id", Integer(), ForeignKey('repositories.repo_id'), + nullable=False) + ui_id = Column( + "ui_id", Integer(), nullable=False, unique=True, default=None, + primary_key=True) + ui_section = Column( + "ui_section", String(255), nullable=True, unique=None, default=None) + ui_key = Column( + "ui_key", String(255), nullable=True, unique=None, default=None) + ui_value = Column( + 
"ui_value", String(255), nullable=True, unique=None, default=None) + ui_active = Column( + "ui_active", Boolean(), nullable=True, unique=None, default=True) + + repository = relationship('Repository') + + def __repr__(self): + return '<%s[%s:%s]%s=>%s]>' % ( + self.__class__.__name__, self.repository.repo_name, + self.ui_section, self.ui_key, self.ui_value) + + +class User(Base, BaseModel): + __tablename__ = 'users' + __table_args__ = ( + UniqueConstraint('username'), UniqueConstraint('email'), + Index('u_username_idx', 'username'), + Index('u_email_idx', 'email'), + base_table_args + ) + + DEFAULT_USER = 'default' + DEFAULT_USER_EMAIL = 'anonymous@rhodecode.org' + DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}' + + user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) + username = Column("username", String(255), nullable=True, unique=None, default=None) + password = Column("password", String(255), nullable=True, unique=None, default=None) + active = Column("active", Boolean(), nullable=True, unique=None, default=True) + admin = Column("admin", Boolean(), nullable=True, unique=None, default=False) + name = Column("firstname", String(255), nullable=True, unique=None, default=None) + lastname = Column("lastname", String(255), nullable=True, unique=None, default=None) + _email = Column("email", String(255), nullable=True, unique=None, default=None) + last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None) + last_activity = Column('last_activity', DateTime(timezone=False), nullable=True, unique=None, default=None) + description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql')) + + extern_type = Column("extern_type", String(255), nullable=True, unique=None, default=None) + extern_name = Column("extern_name", String(255), nullable=True, unique=None, default=None) + _api_key = Column("api_key", String(255), nullable=True, unique=None, default=None) + inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True) + created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) + _user_data = Column("user_data", LargeBinary(), nullable=True) # JSON data + + user_log = relationship('UserLog') + user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all, delete-orphan') + + repositories = relationship('Repository') + repository_groups = relationship('RepoGroup') + user_groups = relationship('UserGroup') + + user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all') + followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all') + + repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all, delete-orphan') + repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all, delete-orphan') + user_group_to_perm = relationship('UserUserGroupToPerm', primaryjoin='UserUserGroupToPerm.user_id==User.user_id', cascade='all, delete-orphan') + + group_member = relationship('UserGroupMember', cascade='all') + + notifications = relationship('UserNotification', cascade='all') + # notifications assigned to this user + user_created_notifications = relationship('Notification', cascade='all') 
+ # comments created by this user + user_comments = relationship('ChangesetComment', cascade='all') + # user profile extra info + user_emails = relationship('UserEmailMap', cascade='all') + user_ip_map = relationship('UserIpMap', cascade='all') + user_auth_tokens = relationship('UserApiKeys', cascade='all') + user_ssh_keys = relationship('UserSshKeys', cascade='all') + + # gists + user_gists = relationship('Gist', cascade='all') + # user pull requests + user_pull_requests = relationship('PullRequest', cascade='all') + + # external identities + external_identities = relationship( + 'ExternalIdentity', + primaryjoin="User.user_id==ExternalIdentity.local_user_id", + cascade='all') + # review rules + user_review_rules = relationship('RepoReviewRuleUser', cascade='all') + + # artifacts owned + artifacts = relationship('FileStore', primaryjoin='FileStore.user_id==User.user_id') + + # no cascade, set NULL + scope_artifacts = relationship('FileStore', primaryjoin='FileStore.scope_user_id==User.user_id') + + def __unicode__(self): + return u"<%s('id:%s:%s')>" % (self.__class__.__name__, + self.user_id, self.username) + + @hybrid_property + def email(self): + return self._email + + @email.setter + def email(self, val): + self._email = val.lower() if val else None + + @hybrid_property + def first_name(self): + from rhodecode.lib import helpers as h + if self.name: + return h.escape(self.name) + return self.name + + @hybrid_property + def last_name(self): + from rhodecode.lib import helpers as h + if self.lastname: + return h.escape(self.lastname) + return self.lastname + + @hybrid_property + def api_key(self): + """ + Fetch if exist an auth-token with role ALL connected to this user + """ + user_auth_token = UserApiKeys.query()\ + .filter(UserApiKeys.user_id == self.user_id)\ + .filter(or_(UserApiKeys.expires == -1, + UserApiKeys.expires >= time.time()))\ + .filter(UserApiKeys.role == UserApiKeys.ROLE_ALL).first() + if user_auth_token: + user_auth_token = user_auth_token.api_key + + return user_auth_token + + @api_key.setter + def api_key(self, val): + # don't allow to set API key this is deprecated for now + self._api_key = None + + @property + def reviewer_pull_requests(self): + return PullRequestReviewers.query() \ + .options(joinedload(PullRequestReviewers.pull_request)) \ + .filter(PullRequestReviewers.user_id == self.user_id) \ + .all() + + @property + def firstname(self): + # alias for future + return self.name + + @property + def emails(self): + other = UserEmailMap.query()\ + .filter(UserEmailMap.user == self) \ + .order_by(UserEmailMap.email_id.asc()) \ + .all() + return [self.email] + [x.email for x in other] + + def emails_cached(self): + emails = UserEmailMap.query()\ + .filter(UserEmailMap.user == self) \ + .order_by(UserEmailMap.email_id.asc()) + + emails = emails.options( + FromCache("sql_cache_short", "get_user_{}_emails".format(self.user_id)) + ) + + return [self.email] + [x.email for x in emails] + + @property + def auth_tokens(self): + auth_tokens = self.get_auth_tokens() + return [x.api_key for x in auth_tokens] + + def get_auth_tokens(self): + return UserApiKeys.query()\ + .filter(UserApiKeys.user == self)\ + .order_by(UserApiKeys.user_api_key_id.asc())\ + .all() + + @LazyProperty + def feed_token(self): + return self.get_feed_token() + + def get_feed_token(self, cache=True): + feed_tokens = UserApiKeys.query()\ + .filter(UserApiKeys.user == self)\ + .filter(UserApiKeys.role == UserApiKeys.ROLE_FEED) + if cache: + feed_tokens = feed_tokens.options( + FromCache("sql_cache_short", 
"get_user_feed_token_%s" % self.user_id)) + + feed_tokens = feed_tokens.all() + if feed_tokens: + return feed_tokens[0].api_key + return 'NO_FEED_TOKEN_AVAILABLE' + + @LazyProperty + def artifact_token(self): + return self.get_artifact_token() + + def get_artifact_token(self, cache=True): + artifacts_tokens = UserApiKeys.query()\ + .filter(UserApiKeys.user == self)\ + .filter(UserApiKeys.role == UserApiKeys.ROLE_ARTIFACT_DOWNLOAD) + if cache: + artifacts_tokens = artifacts_tokens.options( + FromCache("sql_cache_short", "get_user_artifact_token_%s" % self.user_id)) + + artifacts_tokens = artifacts_tokens.all() + if artifacts_tokens: + return artifacts_tokens[0].api_key + return 'NO_ARTIFACT_TOKEN_AVAILABLE' + + @classmethod + def get(cls, user_id, cache=False): + if not user_id: + return + + user = cls.query() + if cache: + user = user.options( + FromCache("sql_cache_short", "get_users_%s" % user_id)) + return user.get(user_id) + + @classmethod + def extra_valid_auth_tokens(cls, user, role=None): + tokens = UserApiKeys.query().filter(UserApiKeys.user == user)\ + .filter(or_(UserApiKeys.expires == -1, + UserApiKeys.expires >= time.time())) + if role: + tokens = tokens.filter(or_(UserApiKeys.role == role, + UserApiKeys.role == UserApiKeys.ROLE_ALL)) + return tokens.all() + + def authenticate_by_token(self, auth_token, roles=None, scope_repo_id=None): + from rhodecode.lib import auth + + log.debug('Trying to authenticate user: %s via auth-token, ' + 'and roles: %s', self, roles) + + if not auth_token: + return False + + roles = (roles or []) + [UserApiKeys.ROLE_ALL] + tokens_q = UserApiKeys.query()\ + .filter(UserApiKeys.user_id == self.user_id)\ + .filter(or_(UserApiKeys.expires == -1, + UserApiKeys.expires >= time.time())) + + tokens_q = tokens_q.filter(UserApiKeys.role.in_(roles)) + + crypto_backend = auth.crypto_backend() + enc_token_map = {} + plain_token_map = {} + for token in tokens_q: + if token.api_key.startswith(crypto_backend.ENC_PREF): + enc_token_map[token.api_key] = token + else: + plain_token_map[token.api_key] = token + log.debug( + 'Found %s plain and %s encrypted tokens to check for authentication for this user', + len(plain_token_map), len(enc_token_map)) + + # plain token match comes first + match = plain_token_map.get(auth_token) + + # check encrypted tokens now + if not match: + for token_hash, token in enc_token_map.items(): + # NOTE(marcink): this is expensive to calculate, but most secure + if crypto_backend.hash_check(auth_token, token_hash): + match = token + break + + if match: + log.debug('Found matching token %s', match) + if match.repo_id: + log.debug('Found scope, checking for scope match of token %s', match) + if match.repo_id == scope_repo_id: + return True + else: + log.debug( + 'AUTH_TOKEN: scope mismatch, token has a set repo scope: %s, ' + 'and calling scope is:%s, skipping further checks', + match.repo, scope_repo_id) + return False + else: + return True + + return False + + @property + def ip_addresses(self): + ret = UserIpMap.query().filter(UserIpMap.user == self).all() + return [x.ip_addr for x in ret] + + @property + def username_and_name(self): + return '%s (%s %s)' % (self.username, self.first_name, self.last_name) + + @property + def username_or_name_or_email(self): + full_name = self.full_name if self.full_name is not ' ' else None + return self.username or full_name or self.email + + @property + def full_name(self): + return '%s %s' % (self.first_name, self.last_name) + + @property + def full_name_or_username(self): + return ('%s %s' % 
(self.first_name, self.last_name) + if (self.first_name and self.last_name) else self.username) + + @property + def full_contact(self): + return '%s %s <%s>' % (self.first_name, self.last_name, self.email) + + @property + def short_contact(self): + return '%s %s' % (self.first_name, self.last_name) + + @property + def is_admin(self): + return self.admin + + @property + def language(self): + return self.user_data.get('language') + + def AuthUser(self, **kwargs): + """ + Returns instance of AuthUser for this user + """ + from rhodecode.lib.auth import AuthUser + return AuthUser(user_id=self.user_id, username=self.username, **kwargs) + + @hybrid_property + def user_data(self): + if not self._user_data: + return {} + + try: + return json.loads(self._user_data) + except TypeError: + return {} + + @user_data.setter + def user_data(self, val): + if not isinstance(val, dict): + raise Exception('user_data must be dict, got %s' % type(val)) + try: + self._user_data = json.dumps(val) + except Exception: + log.error(traceback.format_exc()) + + @classmethod + def get_by_username(cls, username, case_insensitive=False, + cache=False, identity_cache=False): + session = Session() + + if case_insensitive: + q = cls.query().filter( + func.lower(cls.username) == func.lower(username)) + else: + q = cls.query().filter(cls.username == username) + + if cache: + if identity_cache: + val = cls.identity_cache(session, 'username', username) + if val: + return val + else: + cache_key = "get_user_by_name_%s" % _hash_key(username) + q = q.options( + FromCache("sql_cache_short", cache_key)) + + return q.scalar() + + @classmethod + def get_by_auth_token(cls, auth_token, cache=False): + q = UserApiKeys.query()\ + .filter(UserApiKeys.api_key == auth_token)\ + .filter(or_(UserApiKeys.expires == -1, + UserApiKeys.expires >= time.time())) + if cache: + q = q.options( + FromCache("sql_cache_short", "get_auth_token_%s" % auth_token)) + + match = q.first() + if match: + return match.user + + @classmethod + def get_by_email(cls, email, case_insensitive=False, cache=False): + + if case_insensitive: + q = cls.query().filter(func.lower(cls.email) == func.lower(email)) + + else: + q = cls.query().filter(cls.email == email) + + email_key = _hash_key(email) + if cache: + q = q.options( + FromCache("sql_cache_short", "get_email_key_%s" % email_key)) + + ret = q.scalar() + if ret is None: + q = UserEmailMap.query() + # try fetching in alternate email map + if case_insensitive: + q = q.filter(func.lower(UserEmailMap.email) == func.lower(email)) + else: + q = q.filter(UserEmailMap.email == email) + q = q.options(joinedload(UserEmailMap.user)) + if cache: + q = q.options( + FromCache("sql_cache_short", "get_email_map_key_%s" % email_key)) + ret = getattr(q.scalar(), 'user', None) + + return ret + + @classmethod + def get_from_cs_author(cls, author): + """ + Tries to get User objects out of commit author string + + :param author: + """ + from rhodecode.lib.helpers import email, author_name + # Valid email in the attribute passed, see if they're in the system + _email = email(author) + if _email: + user = cls.get_by_email(_email, case_insensitive=True) + if user: + return user + # Maybe we can match by username? 
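+        # (illustrative) for an author string like "Joe Doe <joe@example.com>"
+        # the email() helper yields "joe@example.com" and author_name() yields
+        # "Joe Doe"; the email lookup above runs first, username is the fallback.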
+ _author = author_name(author) + user = cls.get_by_username(_author, case_insensitive=True) + if user: + return user + + def update_userdata(self, **kwargs): + usr = self + old = usr.user_data + old.update(**kwargs) + usr.user_data = old + Session().add(usr) + log.debug('updated userdata with %s', kwargs) + + def update_lastlogin(self): + """Update user lastlogin""" + self.last_login = datetime.datetime.now() + Session().add(self) + log.debug('updated user %s lastlogin', self.username) + + def update_password(self, new_password): + from rhodecode.lib.auth import get_crypt_password + + self.password = get_crypt_password(new_password) + Session().add(self) + + @classmethod + def get_first_super_admin(cls): + user = User.query()\ + .filter(User.admin == true()) \ + .order_by(User.user_id.asc()) \ + .first() + + if user is None: + raise Exception('FATAL: Missing administrative account!') + return user + + @classmethod + def get_all_super_admins(cls, only_active=False): + """ + Returns all admin accounts sorted by username + """ + qry = User.query().filter(User.admin == true()).order_by(User.username.asc()) + if only_active: + qry = qry.filter(User.active == true()) + return qry.all() + + @classmethod + def get_all_user_ids(cls, only_active=True): + """ + Returns all users IDs + """ + qry = Session().query(User.user_id) + + if only_active: + qry = qry.filter(User.active == true()) + return [x.user_id for x in qry] + + @classmethod + def get_default_user(cls, cache=False, refresh=False): + user = User.get_by_username(User.DEFAULT_USER, cache=cache) + if user is None: + raise Exception('FATAL: Missing default account!') + if refresh: + # The default user might be based on outdated state which + # has been loaded from the cache. + # A call to refresh() ensures that the + # latest state from the database is used. + Session().refresh(user) + return user + + @classmethod + def get_default_user_id(cls): + import rhodecode + return rhodecode.CONFIG['default_user_id'] + + def _get_default_perms(self, user, suffix=''): + from rhodecode.model.permission import PermissionModel + return PermissionModel().get_default_perms(user.user_perms, suffix) + + def get_default_perms(self, suffix=''): + return self._get_default_perms(self, suffix) + + def get_api_data(self, include_secrets=False, details='full'): + """ + Common function for generating user related data for API + + :param include_secrets: By default secrets in the API data will be replaced + by a placeholder value to prevent exposing this data by accident. In case + this data shall be exposed, set this flag to ``True``. + + :param details: details can be 'basic|full' basic gives only a subset of + the available user information that includes user_id, name and emails. 
+ """ + user = self + user_data = self.user_data + data = { + 'user_id': user.user_id, + 'username': user.username, + 'firstname': user.name, + 'lastname': user.lastname, + 'description': user.description, + 'email': user.email, + 'emails': user.emails, + } + if details == 'basic': + return data + + auth_token_length = 40 + auth_token_replacement = '*' * auth_token_length + + extras = { + 'auth_tokens': [auth_token_replacement], + 'active': user.active, + 'admin': user.admin, + 'extern_type': user.extern_type, + 'extern_name': user.extern_name, + 'last_login': user.last_login, + 'last_activity': user.last_activity, + 'ip_addresses': user.ip_addresses, + 'language': user_data.get('language') + } + data.update(extras) + + if include_secrets: + data['auth_tokens'] = user.auth_tokens + return data + + def __json__(self): + data = { + 'full_name': self.full_name, + 'full_name_or_username': self.full_name_or_username, + 'short_contact': self.short_contact, + 'full_contact': self.full_contact, + } + data.update(self.get_api_data()) + return data + + +class UserApiKeys(Base, BaseModel): + __tablename__ = 'user_api_keys' + __table_args__ = ( + Index('uak_api_key_idx', 'api_key'), + Index('uak_api_key_expires_idx', 'api_key', 'expires'), + base_table_args + ) + __mapper_args__ = {} + + # ApiKey role + ROLE_ALL = 'token_role_all' + ROLE_VCS = 'token_role_vcs' + ROLE_API = 'token_role_api' + ROLE_HTTP = 'token_role_http' + ROLE_FEED = 'token_role_feed' + ROLE_ARTIFACT_DOWNLOAD = 'role_artifact_download' + # The last one is ignored in the list as we only + # use it for one action, and cannot be created by users + ROLE_PASSWORD_RESET = 'token_password_reset' + + ROLES = [ROLE_ALL, ROLE_VCS, ROLE_API, ROLE_HTTP, ROLE_FEED, ROLE_ARTIFACT_DOWNLOAD] + + user_api_key_id = Column("user_api_key_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) + user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None) + api_key = Column("api_key", String(255), nullable=False, unique=True) + description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql')) + expires = Column('expires', Float(53), nullable=False) + role = Column('role', String(255), nullable=True) + created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) + + # scope columns + repo_id = Column( + 'repo_id', Integer(), ForeignKey('repositories.repo_id'), + nullable=True, unique=None, default=None) + repo = relationship('Repository', lazy='joined') + + repo_group_id = Column( + 'repo_group_id', Integer(), ForeignKey('groups.group_id'), + nullable=True, unique=None, default=None) + repo_group = relationship('RepoGroup', lazy='joined') + + user = relationship('User', lazy='joined') + + def __unicode__(self): + return u"<%s('%s')>" % (self.__class__.__name__, self.role) + + def __json__(self): + data = { + 'auth_token': self.api_key, + 'role': self.role, + 'scope': self.scope_humanized, + 'expired': self.expired + } + return data + + def get_api_data(self, include_secrets=False): + data = self.__json__() + if include_secrets: + return data + else: + data['auth_token'] = self.token_obfuscated + return data + + @hybrid_property + def description_safe(self): + from rhodecode.lib import helpers as h + return h.escape(self.description) + + @property + def expired(self): + if self.expires == -1: + return False + return time.time() > self.expires + + @classmethod + def _get_role_name(cls, role): + return { + 
cls.ROLE_ALL: _('all'), + cls.ROLE_HTTP: _('http/web interface'), + cls.ROLE_VCS: _('vcs (git/hg/svn protocol)'), + cls.ROLE_API: _('api calls'), + cls.ROLE_FEED: _('feed access'), + cls.ROLE_ARTIFACT_DOWNLOAD: _('artifacts downloads'), + }.get(role, role) + + @classmethod + def _get_role_description(cls, role): + return { + cls.ROLE_ALL: _('Token for all actions.'), + cls.ROLE_HTTP: _('Token to access RhodeCode pages via web interface without ' + 'login using `api_access_controllers_whitelist` functionality.'), + cls.ROLE_VCS: _('Token to interact over git/hg/svn protocols. ' + 'Requires auth_token authentication plugin to be active.
'
+                        'Such a token should then be used instead of a password to '
+                        'interact with a repository, and additionally can be '
+                        'limited to a single repository using the repo scope.'),
+            cls.ROLE_API: _('Token limited to api calls.'),
+            cls.ROLE_FEED: _('Token to read RSS/ATOM feed.'),
+            cls.ROLE_ARTIFACT_DOWNLOAD: _('Token for artifacts downloads.'),
+        }.get(role, role)
+
+    @property
+    def role_humanized(self):
+        return self._get_role_name(self.role)
+
+    def _get_scope(self):
+        if self.repo:
+            return 'Repository: {}'.format(self.repo.repo_name)
+        if self.repo_group:
+            return 'RepositoryGroup: {} (recursive)'.format(self.repo_group.group_name)
+        return 'Global'
+
+    @property
+    def scope_humanized(self):
+        return self._get_scope()
+
+    @property
+    def token_obfuscated(self):
+        if self.api_key:
+            return self.api_key[:4] + "****"
+
+
+class UserEmailMap(Base, BaseModel):
+    __tablename__ = 'user_email_map'
+    __table_args__ = (
+        Index('uem_email_idx', 'email'),
+        UniqueConstraint('email'),
+        base_table_args
+    )
+    __mapper_args__ = {}
+
+    email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
+    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
+    _email = Column("email", String(255), nullable=True, unique=False, default=None)
+    user = relationship('User', lazy='joined')
+
+    @validates('_email')
+    def validate_email(self, key, email):
+        # check if this email is not main one
+        main_email = Session().query(User).filter(User.email == email).scalar()
+        if main_email is not None:
+            raise AttributeError('email %s is present in user table' % email)
+        return email
+
+    @hybrid_property
+    def email(self):
+        return self._email
+
+    @email.setter
+    def email(self, val):
+        self._email = val.lower() if val else None
+
+
+class UserIpMap(Base, BaseModel):
+    __tablename__ = 'user_ip_map'
+    __table_args__ = (
+        UniqueConstraint('user_id', 'ip_addr'),
+        base_table_args
+    )
+    __mapper_args__ = {}
+
+    ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
+    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
+    ip_addr = Column("ip_addr", String(255), nullable=True, unique=False, default=None)
+    active = Column("active", Boolean(), nullable=True, unique=None, default=True)
+    description = Column("description", String(10000), nullable=True, unique=None, default=None)
+    user = relationship('User', lazy='joined')
+
+    @hybrid_property
+    def description_safe(self):
+        from rhodecode.lib import helpers as h
+        return h.escape(self.description)
+
+    @classmethod
+    def _get_ip_range(cls, ip_addr):
+        net = ipaddress.ip_network(safe_unicode(ip_addr), strict=False)
+        return [str(net.network_address), str(net.broadcast_address)]
+
+    def __json__(self):
+        return {
+            'ip_addr': self.ip_addr,
+            'ip_range': self._get_ip_range(self.ip_addr),
+        }
+
+    def __unicode__(self):
+        return u"<%s('user_id:%s=>%s')>" % (self.__class__.__name__,
+                                            self.user_id, self.ip_addr)
+
+
+class UserSshKeys(Base, BaseModel):
+    __tablename__ = 'user_ssh_keys'
+    __table_args__ = (
+        Index('usk_ssh_key_fingerprint_idx', 'ssh_key_fingerprint'),
+
+        UniqueConstraint('ssh_key_fingerprint'),
+
+        base_table_args
+    )
+    __mapper_args__ = {}
+
+    ssh_key_id = Column('ssh_key_id', Integer(), nullable=False, unique=True, default=None, primary_key=True)
+    ssh_key_data = Column('ssh_key_data', String(10240), nullable=False, unique=None, default=None)
+    ssh_key_fingerprint =
Column('ssh_key_fingerprint', String(255), nullable=False, unique=None, default=None) + + description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql')) + + created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) + accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True, default=None) + user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None) + + user = relationship('User', lazy='joined') + + def __json__(self): + data = { + 'ssh_fingerprint': self.ssh_key_fingerprint, + 'description': self.description, + 'created_on': self.created_on + } + return data + + def get_api_data(self): + data = self.__json__() + return data + + +class UserLog(Base, BaseModel): + __tablename__ = 'user_logs' + __table_args__ = ( + base_table_args, + ) + + VERSION_1 = 'v1' + VERSION_2 = 'v2' + VERSIONS = [VERSION_1, VERSION_2] + + user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) + user_id = Column("user_id", Integer(), ForeignKey('users.user_id',ondelete='SET NULL'), nullable=True, unique=None, default=None) + username = Column("username", String(255), nullable=True, unique=None, default=None) + repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id', ondelete='SET NULL'), nullable=True, unique=None, default=None) + repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None) + user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None) + action = Column("action", Text().with_variant(Text(1200000), 'mysql'), nullable=True, unique=None, default=None) + action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None) + + version = Column("version", String(255), nullable=True, default=VERSION_1) + user_data = Column('user_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT())))) + action_data = Column('action_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT())))) + + def __unicode__(self): + return u"<%s('id:%s:%s')>" % ( + self.__class__.__name__, self.repository_name, self.action) + + def __json__(self): + return { + 'user_id': self.user_id, + 'username': self.username, + 'repository_id': self.repository_id, + 'repository_name': self.repository_name, + 'user_ip': self.user_ip, + 'action_date': self.action_date, + 'action': self.action, + } + + @hybrid_property + def entry_id(self): + return self.user_log_id + + @property + def action_as_day(self): + return datetime.date(*self.action_date.timetuple()[:3]) + + user = relationship('User') + repository = relationship('Repository', cascade='') + + +class UserGroup(Base, BaseModel): + __tablename__ = 'users_groups' + __table_args__ = ( + base_table_args, + ) + + users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) + users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None) + user_group_description = Column("user_group_description", String(10000), nullable=True, unique=None, default=None) + users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None) + inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True) + user_id = Column("user_id", Integer(), 
ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
+    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
+    _group_data = Column("group_data", LargeBinary(), nullable=True)  # JSON data
+
+    members = relationship('UserGroupMember', cascade="all, delete-orphan", lazy="joined")
+    users_group_to_perm = relationship('UserGroupToPerm', cascade='all')
+    users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
+    users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
+    user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all')
+    user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all')
+
+    user_group_review_rules = relationship('RepoReviewRuleUserGroup', cascade='all')
+    user = relationship('User', primaryjoin="User.user_id==UserGroup.user_id")
+
+    @classmethod
+    def _load_group_data(cls, column):
+        if not column:
+            return {}
+
+        try:
+            return json.loads(column) or {}
+        except TypeError:
+            return {}
+
+    @hybrid_property
+    def description_safe(self):
+        from rhodecode.lib import helpers as h
+        return h.escape(self.user_group_description)
+
+    @hybrid_property
+    def group_data(self):
+        return self._load_group_data(self._group_data)
+
+    @group_data.expression
+    def group_data(self, **kwargs):
+        return self._group_data
+
+    @group_data.setter
+    def group_data(self, val):
+        try:
+            self._group_data = json.dumps(val)
+        except Exception:
+            log.error(traceback.format_exc())
+
+    @classmethod
+    def _load_sync(cls, group_data):
+        if group_data:
+            return group_data.get('extern_type')
+
+    @property
+    def sync(self):
+        return self._load_sync(self.group_data)
+
+    def __unicode__(self):
+        return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
+                                      self.users_group_id,
+                                      self.users_group_name)
+
+    @classmethod
+    def get_by_group_name(cls, group_name, cache=False,
+                          case_insensitive=False):
+        if case_insensitive:
+            q = cls.query().filter(func.lower(cls.users_group_name) ==
+                                   func.lower(group_name))
+
+        else:
+            q = cls.query().filter(cls.users_group_name == group_name)
+        if cache:
+            q = q.options(
+                FromCache("sql_cache_short", "get_group_%s" % _hash_key(group_name)))
+        return q.scalar()
+
+    @classmethod
+    def get(cls, user_group_id, cache=False):
+        if not user_group_id:
+            return
+
+        user_group = cls.query()
+        if cache:
+            user_group = user_group.options(
+                FromCache("sql_cache_short", "get_users_group_%s" % user_group_id))
+        return user_group.get(user_group_id)
+
+    def permissions(self, with_admins=True, with_owner=True,
+                    expand_from_user_groups=False):
+        """
+        Permissions for user groups
+        """
+        _admin_perm = 'usergroup.admin'
+
+        owner_row = []
+        if with_owner:
+            usr = AttributeDict(self.user.get_dict())
+            usr.owner_row = True
+            usr.permission = _admin_perm
+            owner_row.append(usr)
+
+        super_admin_ids = []
+        super_admin_rows = []
+        if with_admins:
+            for usr in User.get_all_super_admins():
+                super_admin_ids.append(usr.user_id)
+                # if this admin is also owner, don't double the record
+                if usr.user_id == owner_row[0].user_id:
+                    owner_row[0].admin_row = True
+                else:
+                    usr = AttributeDict(usr.get_dict())
+                    usr.admin_row = True
+                    usr.permission = _admin_perm
+                    super_admin_rows.append(usr)
+
+        q = UserUserGroupToPerm.query().filter(UserUserGroupToPerm.user_group == self)
+        q = q.options(joinedload(UserUserGroupToPerm.user_group),
joinedload(UserUserGroupToPerm.user), + joinedload(UserUserGroupToPerm.permission),) + + # get owners and admins and permissions. We do a trick of re-writing + # objects from sqlalchemy to named-tuples due to sqlalchemy session + # has a global reference and changing one object propagates to all + # others. This means if admin is also an owner admin_row that change + # would propagate to both objects + perm_rows = [] + for _usr in q.all(): + usr = AttributeDict(_usr.user.get_dict()) + # if this user is also owner/admin, mark as duplicate record + if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids: + usr.duplicate_perm = True + usr.permission = _usr.permission.permission_name + perm_rows.append(usr) + + # filter the perm rows by 'default' first and then sort them by + # admin,write,read,none permissions sorted again alphabetically in + # each group + perm_rows = sorted(perm_rows, key=display_user_sort) + + user_groups_rows = [] + if expand_from_user_groups: + for ug in self.permission_user_groups(with_members=True): + for user_data in ug.members: + user_groups_rows.append(user_data) + + return super_admin_rows + owner_row + perm_rows + user_groups_rows + + def permission_user_groups(self, with_members=False): + q = UserGroupUserGroupToPerm.query()\ + .filter(UserGroupUserGroupToPerm.target_user_group == self) + q = q.options(joinedload(UserGroupUserGroupToPerm.user_group), + joinedload(UserGroupUserGroupToPerm.target_user_group), + joinedload(UserGroupUserGroupToPerm.permission),) + + perm_rows = [] + for _user_group in q.all(): + entry = AttributeDict(_user_group.user_group.get_dict()) + entry.permission = _user_group.permission.permission_name + if with_members: + entry.members = [x.user.get_dict() + for x in _user_group.user_group.members] + perm_rows.append(entry) + + perm_rows = sorted(perm_rows, key=display_user_group_sort) + return perm_rows + + def _get_default_perms(self, user_group, suffix=''): + from rhodecode.model.permission import PermissionModel + return PermissionModel().get_default_perms(user_group.users_group_to_perm, suffix) + + def get_default_perms(self, suffix=''): + return self._get_default_perms(self, suffix) + + def get_api_data(self, with_group_members=True, include_secrets=False): + """ + :param include_secrets: See :meth:`User.get_api_data`, this parameter is + basically forwarded. 
+ + """ + user_group = self + data = { + 'users_group_id': user_group.users_group_id, + 'group_name': user_group.users_group_name, + 'group_description': user_group.user_group_description, + 'active': user_group.users_group_active, + 'owner': user_group.user.username, + 'sync': user_group.sync, + 'owner_email': user_group.user.email, + } + + if with_group_members: + users = [] + for user in user_group.members: + user = user.user + users.append(user.get_api_data(include_secrets=include_secrets)) + data['users'] = users + + return data + + +class UserGroupMember(Base, BaseModel): + __tablename__ = 'users_groups_members' + __table_args__ = ( + base_table_args, + ) + + users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) + users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None) + user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) + + user = relationship('User', lazy='joined') + users_group = relationship('UserGroup') + + def __init__(self, gr_id='', u_id=''): + self.users_group_id = gr_id + self.user_id = u_id + + +class RepositoryField(Base, BaseModel): + __tablename__ = 'repositories_fields' + __table_args__ = ( + UniqueConstraint('repository_id', 'field_key'), # no-multi field + base_table_args, + ) + + PREFIX = 'ex_' # prefix used in form to not conflict with already existing fields + + repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) + repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None) + field_key = Column("field_key", String(250)) + field_label = Column("field_label", String(1024), nullable=False) + field_value = Column("field_value", String(10000), nullable=False) + field_desc = Column("field_desc", String(1024), nullable=False) + field_type = Column("field_type", String(255), nullable=False, unique=None) + created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) + + repository = relationship('Repository') + + @property + def field_key_prefixed(self): + return 'ex_%s' % self.field_key + + @classmethod + def un_prefix_key(cls, key): + if key.startswith(cls.PREFIX): + return key[len(cls.PREFIX):] + return key + + @classmethod + def get_by_key_name(cls, key, repo): + row = cls.query()\ + .filter(cls.repository == repo)\ + .filter(cls.field_key == key).scalar() + return row + + +class Repository(Base, BaseModel): + __tablename__ = 'repositories' + __table_args__ = ( + Index('r_repo_name_idx', 'repo_name', mysql_length=255), + base_table_args, + ) + DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}' + DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}' + DEFAULT_CLONE_URI_SSH = 'ssh://{sys_user}@{hostname}/{repo}' + + STATE_CREATED = 'repo_state_created' + STATE_PENDING = 'repo_state_pending' + STATE_ERROR = 'repo_state_error' + + LOCK_AUTOMATIC = 'lock_auto' + LOCK_API = 'lock_api' + LOCK_WEB = 'lock_web' + LOCK_PULL = 'lock_pull' + + NAME_SEP = URL_SEP + + repo_id = Column( + "repo_id", Integer(), nullable=False, unique=True, default=None, + primary_key=True) + _repo_name = Column( + "repo_name", Text(), nullable=False, default=None) + repo_name_hash = Column( + "repo_name_hash", String(255), nullable=False, unique=True) + repo_state = Column("repo_state", String(255), 
nullable=True) + + clone_uri = Column( + "clone_uri", EncryptedTextValue(), nullable=True, unique=False, + default=None) + push_uri = Column( + "push_uri", EncryptedTextValue(), nullable=True, unique=False, + default=None) + repo_type = Column( + "repo_type", String(255), nullable=False, unique=False, default=None) + user_id = Column( + "user_id", Integer(), ForeignKey('users.user_id'), nullable=False, + unique=False, default=None) + private = Column( + "private", Boolean(), nullable=True, unique=None, default=None) + archived = Column( + "archived", Boolean(), nullable=True, unique=None, default=None) + enable_statistics = Column( + "statistics", Boolean(), nullable=True, unique=None, default=True) + enable_downloads = Column( + "downloads", Boolean(), nullable=True, unique=None, default=True) + description = Column( + "description", String(10000), nullable=True, unique=None, default=None) + created_on = Column( + 'created_on', DateTime(timezone=False), nullable=True, unique=None, + default=datetime.datetime.now) + updated_on = Column( + 'updated_on', DateTime(timezone=False), nullable=True, unique=None, + default=datetime.datetime.now) + _landing_revision = Column( + "landing_revision", String(255), nullable=False, unique=False, + default=None) + enable_locking = Column( + "enable_locking", Boolean(), nullable=False, unique=None, + default=False) + _locked = Column( + "locked", String(255), nullable=True, unique=False, default=None) + _changeset_cache = Column( + "changeset_cache", LargeBinary(), nullable=True) # JSON data + + fork_id = Column( + "fork_id", Integer(), ForeignKey('repositories.repo_id'), + nullable=True, unique=False, default=None) + group_id = Column( + "group_id", Integer(), ForeignKey('groups.group_id'), nullable=True, + unique=False, default=None) + + user = relationship('User', lazy='joined') + fork = relationship('Repository', remote_side=repo_id, lazy='joined') + group = relationship('RepoGroup', lazy='joined') + repo_to_perm = relationship( + 'UserRepoToPerm', cascade='all', + order_by='UserRepoToPerm.repo_to_perm_id') + users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all') + stats = relationship('Statistics', cascade='all', uselist=False) + + followers = relationship( + 'UserFollowing', + primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id', + cascade='all') + extra_fields = relationship( + 'RepositoryField', cascade="all, delete-orphan") + logs = relationship('UserLog') + comments = relationship( + 'ChangesetComment', cascade="all, delete-orphan") + pull_requests_source = relationship( + 'PullRequest', + primaryjoin='PullRequest.source_repo_id==Repository.repo_id', + cascade="all, delete-orphan") + pull_requests_target = relationship( + 'PullRequest', + primaryjoin='PullRequest.target_repo_id==Repository.repo_id', + cascade="all, delete-orphan") + ui = relationship('RepoRhodeCodeUi', cascade="all") + settings = relationship('RepoRhodeCodeSetting', cascade="all") + integrations = relationship('Integration', cascade="all, delete-orphan") + + scoped_tokens = relationship('UserApiKeys', cascade="all") + + # no cascade, set NULL + artifacts = relationship('FileStore', primaryjoin='FileStore.scope_repo_id==Repository.repo_id') + + def __unicode__(self): + return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id, + safe_unicode(self.repo_name)) + + @hybrid_property + def description_safe(self): + from rhodecode.lib import helpers as h + return h.escape(self.description) + + @hybrid_property + def landing_rev(self): + # always 
should return [rev_type, rev], e.g. ['branch', 'master']
+        if self._landing_revision:
+            _rev_info = self._landing_revision.split(':')
+            if len(_rev_info) < 2:
+                _rev_info.insert(0, 'rev')
+            return [_rev_info[0], _rev_info[1]]
+        return [None, None]
+
+    @property
+    def landing_ref_type(self):
+        return self.landing_rev[0]
+
+    @property
+    def landing_ref_name(self):
+        return self.landing_rev[1]
+
+    @landing_rev.setter
+    def landing_rev(self, val):
+        if ':' not in val:
+            raise ValueError('value must be delimited with `:` and consist '
+                             'of <rev_type>:<rev_name>, got %s instead' % val)
+        self._landing_revision = val
+
+    @hybrid_property
+    def locked(self):
+        if self._locked:
+            user_id, timelocked, reason = self._locked.split(':')
+            lock_values = int(user_id), timelocked, reason
+        else:
+            lock_values = [None, None, None]
+        return lock_values
+
+    @locked.setter
+    def locked(self, val):
+        if val and isinstance(val, (list, tuple)):
+            self._locked = ':'.join(map(str, val))
+        else:
+            self._locked = None
+
+    @classmethod
+    def _load_changeset_cache(cls, repo_id, changeset_cache_raw):
+        from rhodecode.lib.vcs.backends.base import EmptyCommit
+        dummy = EmptyCommit().__json__()
+        if not changeset_cache_raw:
+            dummy['source_repo_id'] = repo_id
+            return json.loads(json.dumps(dummy))
+
+        try:
+            return json.loads(changeset_cache_raw)
+        except TypeError:
+            return dummy
+        except Exception:
+            log.error(traceback.format_exc())
+            return dummy
+
+    @hybrid_property
+    def changeset_cache(self):
+        return self._load_changeset_cache(self.repo_id, self._changeset_cache)
+
+    @changeset_cache.setter
+    def changeset_cache(self, val):
+        try:
+            self._changeset_cache = json.dumps(val)
+        except Exception:
+            log.error(traceback.format_exc())
+
+    @hybrid_property
+    def repo_name(self):
+        return self._repo_name
+
+    @repo_name.setter
+    def repo_name(self, value):
+        self._repo_name = value
+        self.repo_name_hash = hashlib.sha1(safe_str(value)).hexdigest()
+
+    @classmethod
+    def normalize_repo_name(cls, repo_name):
+        """
+        Normalizes os specific repo_name to the format internally stored inside
+        database using URL_SEP
+
+        :param cls:
+        :param repo_name:
+        """
+        return cls.NAME_SEP.join(repo_name.split(os.sep))
+
+    @classmethod
+    def get_by_repo_name(cls, repo_name, cache=False, identity_cache=False):
+        session = Session()
+        q = session.query(cls).filter(cls.repo_name == repo_name)
+
+        if cache:
+            if identity_cache:
+                val = cls.identity_cache(session, 'repo_name', repo_name)
+                if val:
+                    return val
+            else:
+                cache_key = "get_repo_by_name_%s" % _hash_key(repo_name)
+                q = q.options(
+                    FromCache("sql_cache_short", cache_key))
+
+        return q.scalar()
+
+    @classmethod
+    def get_by_id_or_repo_name(cls, repoid):
+        if isinstance(repoid, (int, long)):
+            try:
+                repo = cls.get(repoid)
+            except ValueError:
+                repo = None
+        else:
+            repo = cls.get_by_repo_name(repoid)
+        return repo
+
+    @classmethod
+    def get_by_full_path(cls, repo_full_path):
+        repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
+        repo_name = cls.normalize_repo_name(repo_name)
+        return cls.get_by_repo_name(repo_name.strip(URL_SEP))
+
+    @classmethod
+    def get_repo_forks(cls, repo_id):
+        return cls.query().filter(Repository.fork_id == repo_id)
+
+    @classmethod
+    def base_path(cls):
+        """
+        Returns base path where all repos are stored
+
+        :param cls:
+        """
+        q = Session().query(RhodeCodeUi)\
+            .filter(RhodeCodeUi.ui_key == cls.NAME_SEP)
+        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
+        return q.one().ui_value
+
+    @classmethod
+    def get_all_repos(cls, user_id=Optional(None),
group_id=Optional(None), + case_insensitive=True, archived=False): + q = Repository.query() + + if not archived: + q = q.filter(Repository.archived.isnot(true())) + + if not isinstance(user_id, Optional): + q = q.filter(Repository.user_id == user_id) + + if not isinstance(group_id, Optional): + q = q.filter(Repository.group_id == group_id) + + if case_insensitive: + q = q.order_by(func.lower(Repository.repo_name)) + else: + q = q.order_by(Repository.repo_name) + + return q.all() + + @property + def repo_uid(self): + return '_{}'.format(self.repo_id) + + @property + def forks(self): + """ + Return forks of this repo + """ + return Repository.get_repo_forks(self.repo_id) + + @property + def parent(self): + """ + Returns fork parent + """ + return self.fork + + @property + def just_name(self): + return self.repo_name.split(self.NAME_SEP)[-1] + + @property + def groups_with_parents(self): + groups = [] + if self.group is None: + return groups + + cur_gr = self.group + groups.insert(0, cur_gr) + while 1: + gr = getattr(cur_gr, 'parent_group', None) + cur_gr = cur_gr.parent_group + if gr is None: + break + groups.insert(0, gr) + + return groups + + @property + def groups_and_repo(self): + return self.groups_with_parents, self + + @LazyProperty + def repo_path(self): + """ + Returns base full path for that repository means where it actually + exists on a filesystem + """ + q = Session().query(RhodeCodeUi).filter( + RhodeCodeUi.ui_key == self.NAME_SEP) + q = q.options(FromCache("sql_cache_short", "repository_repo_path")) + return q.one().ui_value + + @property + def repo_full_path(self): + p = [self.repo_path] + # we need to split the name by / since this is how we store the + # names in the database, but that eventually needs to be converted + # into a valid system path + p += self.repo_name.split(self.NAME_SEP) + return os.path.join(*map(safe_unicode, p)) + + @property + def cache_keys(self): + """ + Returns associated cache keys for that repo + """ + invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format( + repo_id=self.repo_id) + return CacheKey.query()\ + .filter(CacheKey.cache_args == invalidation_namespace)\ + .order_by(CacheKey.cache_key)\ + .all() + + @property + def cached_diffs_relative_dir(self): + """ + Return a relative to the repository store path of cached diffs + used for safe display for users, who shouldn't know the absolute store + path + """ + return os.path.join( + os.path.dirname(self.repo_name), + self.cached_diffs_dir.split(os.path.sep)[-1]) + + @property + def cached_diffs_dir(self): + path = self.repo_full_path + return os.path.join( + os.path.dirname(path), + '.__shadow_diff_cache_repo_{}'.format(self.repo_id)) + + def cached_diffs(self): + diff_cache_dir = self.cached_diffs_dir + if os.path.isdir(diff_cache_dir): + return os.listdir(diff_cache_dir) + return [] + + def shadow_repos(self): + shadow_repos_pattern = '.__shadow_repo_{}'.format(self.repo_id) + return [ + x for x in os.listdir(os.path.dirname(self.repo_full_path)) + if x.startswith(shadow_repos_pattern)] + + def get_new_name(self, repo_name): + """ + returns new full repository name based on assigned group and new new + + :param group_name: + """ + path_prefix = self.group.full_path_splitted if self.group else [] + return self.NAME_SEP.join(path_prefix + [repo_name]) + + @property + def _config(self): + """ + Returns db based config object. 
+ """ + from rhodecode.lib.utils import make_db_config + return make_db_config(clear_session=False, repo=self) + + def permissions(self, with_admins=True, with_owner=True, + expand_from_user_groups=False): + """ + Permissions for repositories + """ + _admin_perm = 'repository.admin' + + owner_row = [] + if with_owner: + usr = AttributeDict(self.user.get_dict()) + usr.owner_row = True + usr.permission = _admin_perm + usr.permission_id = None + owner_row.append(usr) + + super_admin_ids = [] + super_admin_rows = [] + if with_admins: + for usr in User.get_all_super_admins(): + super_admin_ids.append(usr.user_id) + # if this admin is also owner, don't double the record + if usr.user_id == owner_row[0].user_id: + owner_row[0].admin_row = True + else: + usr = AttributeDict(usr.get_dict()) + usr.admin_row = True + usr.permission = _admin_perm + usr.permission_id = None + super_admin_rows.append(usr) + + q = UserRepoToPerm.query().filter(UserRepoToPerm.repository == self) + q = q.options(joinedload(UserRepoToPerm.repository), + joinedload(UserRepoToPerm.user), + joinedload(UserRepoToPerm.permission),) + + # get owners and admins and permissions. We do a trick of re-writing + # objects from sqlalchemy to named-tuples due to sqlalchemy session + # has a global reference and changing one object propagates to all + # others. This means if admin is also an owner admin_row that change + # would propagate to both objects + perm_rows = [] + for _usr in q.all(): + usr = AttributeDict(_usr.user.get_dict()) + # if this user is also owner/admin, mark as duplicate record + if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids: + usr.duplicate_perm = True + # also check if this permission is maybe used by branch_permissions + if _usr.branch_perm_entry: + usr.branch_rules = [x.branch_rule_id for x in _usr.branch_perm_entry] + + usr.permission = _usr.permission.permission_name + usr.permission_id = _usr.repo_to_perm_id + perm_rows.append(usr) + + # filter the perm rows by 'default' first and then sort them by + # admin,write,read,none permissions sorted again alphabetically in + # each group + perm_rows = sorted(perm_rows, key=display_user_sort) + + user_groups_rows = [] + if expand_from_user_groups: + for ug in self.permission_user_groups(with_members=True): + for user_data in ug.members: + user_groups_rows.append(user_data) + + return super_admin_rows + owner_row + perm_rows + user_groups_rows + + def permission_user_groups(self, with_members=True): + q = UserGroupRepoToPerm.query()\ + .filter(UserGroupRepoToPerm.repository == self) + q = q.options(joinedload(UserGroupRepoToPerm.repository), + joinedload(UserGroupRepoToPerm.users_group), + joinedload(UserGroupRepoToPerm.permission),) + + perm_rows = [] + for _user_group in q.all(): + entry = AttributeDict(_user_group.users_group.get_dict()) + entry.permission = _user_group.permission.permission_name + if with_members: + entry.members = [x.user.get_dict() + for x in _user_group.users_group.members] + perm_rows.append(entry) + + perm_rows = sorted(perm_rows, key=display_user_group_sort) + return perm_rows + + def get_api_data(self, include_secrets=False): + """ + Common function for generating repo api data + + :param include_secrets: See :meth:`User.get_api_data`. + + """ + # TODO: mikhail: Here there is an anti-pattern, we probably need to + # move this methods on models level. 
+ from rhodecode.model.settings import SettingsModel + from rhodecode.model.repo import RepoModel + + repo = self + _user_id, _time, _reason = self.locked + + data = { + 'repo_id': repo.repo_id, + 'repo_name': repo.repo_name, + 'repo_type': repo.repo_type, + 'clone_uri': repo.clone_uri or '', + 'push_uri': repo.push_uri or '', + 'url': RepoModel().get_url(self), + 'private': repo.private, + 'created_on': repo.created_on, + 'description': repo.description_safe, + 'landing_rev': repo.landing_rev, + 'owner': repo.user.username, + 'fork_of': repo.fork.repo_name if repo.fork else None, + 'fork_of_id': repo.fork.repo_id if repo.fork else None, + 'enable_statistics': repo.enable_statistics, + 'enable_locking': repo.enable_locking, + 'enable_downloads': repo.enable_downloads, + 'last_changeset': repo.changeset_cache, + 'locked_by': User.get(_user_id).get_api_data( + include_secrets=include_secrets) if _user_id else None, + 'locked_date': time_to_datetime(_time) if _time else None, + 'lock_reason': _reason if _reason else None, + } + + # TODO: mikhail: should be per-repo settings here + rc_config = SettingsModel().get_all_settings() + repository_fields = str2bool( + rc_config.get('rhodecode_repository_fields')) + if repository_fields: + for f in self.extra_fields: + data[f.field_key_prefixed] = f.field_value + + return data + + @classmethod + def lock(cls, repo, user_id, lock_time=None, lock_reason=None): + if not lock_time: + lock_time = time.time() + if not lock_reason: + lock_reason = cls.LOCK_AUTOMATIC + repo.locked = [user_id, lock_time, lock_reason] + Session().add(repo) + Session().commit() + + @classmethod + def unlock(cls, repo): + repo.locked = None + Session().add(repo) + Session().commit() + + @classmethod + def getlock(cls, repo): + return repo.locked + + def is_user_lock(self, user_id): + if self.lock[0]: + lock_user_id = safe_int(self.lock[0]) + user_id = safe_int(user_id) + # both are ints, and they are equal + return all([lock_user_id, user_id]) and lock_user_id == user_id + + return False + + def get_locking_state(self, action, user_id, only_when_enabled=True): + """ + Checks locking on this repository, if locking is enabled and lock is + present returns a tuple of make_lock, locked, locked_by. + make_lock can have 3 states None (do nothing) True, make lock + False release lock, This value is later propagated to hooks, which + do the locking. Think about this as signals passed to hooks what to do. + + """ + # TODO: johbo: This is part of the business logic and should be moved + # into the RepositoryModel. + + if action not in ('push', 'pull'): + raise ValueError("Invalid action value: %s" % repr(action)) + + # defines if locked error should be thrown to user + currently_locked = False + # defines if new lock should be made, tri-state + make_lock = None + repo = self + user = User.get(user_id) + + lock_info = repo.locked + + if repo and (repo.enable_locking or not only_when_enabled): + if action == 'push': + # check if it's already locked !, if it is compare users + locked_by_user_id = lock_info[0] + if user.user_id == locked_by_user_id: + log.debug( + 'Got `push` action from user %s, now unlocking', user) + # unlock if we have push from user who locked + make_lock = False + else: + # we're not the same user who locked, ban with + # code defined in settings (default is 423 HTTP Locked) ! 
+ log.debug('Repo %s is currently locked by %s', repo, user) + currently_locked = True + elif action == 'pull': + # [0] user [1] date + if lock_info[0] and lock_info[1]: + log.debug('Repo %s is currently locked by %s', repo, user) + currently_locked = True + else: + log.debug('Setting lock on repo %s by %s', repo, user) + make_lock = True + + else: + log.debug('Repository %s do not have locking enabled', repo) + + log.debug('FINAL locking values make_lock:%s,locked:%s,locked_by:%s', + make_lock, currently_locked, lock_info) + + from rhodecode.lib.auth import HasRepoPermissionAny + perm_check = HasRepoPermissionAny('repository.write', 'repository.admin') + if make_lock and not perm_check(repo_name=repo.repo_name, user=user): + # if we don't have at least write permission we cannot make a lock + log.debug('lock state reset back to FALSE due to lack ' + 'of at least read permission') + make_lock = False + + return make_lock, currently_locked, lock_info + + @property + def last_commit_cache_update_diff(self): + return time.time() - (safe_int(self.changeset_cache.get('updated_on')) or 0) + + @classmethod + def _load_commit_change(cls, last_commit_cache): + from rhodecode.lib.vcs.utils.helpers import parse_datetime + empty_date = datetime.datetime.fromtimestamp(0) + date_latest = last_commit_cache.get('date', empty_date) + try: + return parse_datetime(date_latest) + except Exception: + return empty_date + + @property + def last_commit_change(self): + return self._load_commit_change(self.changeset_cache) + + @property + def last_db_change(self): + return self.updated_on + + @property + def clone_uri_hidden(self): + clone_uri = self.clone_uri + if clone_uri: + import urlobject + url_obj = urlobject.URLObject(cleaned_uri(clone_uri)) + if url_obj.password: + clone_uri = url_obj.with_password('*****') + return clone_uri + + @property + def push_uri_hidden(self): + push_uri = self.push_uri + if push_uri: + import urlobject + url_obj = urlobject.URLObject(cleaned_uri(push_uri)) + if url_obj.password: + push_uri = url_obj.with_password('*****') + return push_uri + + def clone_url(self, **override): + from rhodecode.model.settings import SettingsModel + + uri_tmpl = None + if 'with_id' in override: + uri_tmpl = self.DEFAULT_CLONE_URI_ID + del override['with_id'] + + if 'uri_tmpl' in override: + uri_tmpl = override['uri_tmpl'] + del override['uri_tmpl'] + + ssh = False + if 'ssh' in override: + ssh = True + del override['ssh'] + + # we didn't override our tmpl from **overrides + request = get_current_request() + if not uri_tmpl: + if hasattr(request, 'call_context') and hasattr(request.call_context, 'rc_config'): + rc_config = request.call_context.rc_config + else: + rc_config = SettingsModel().get_all_settings(cache=True) + + if ssh: + uri_tmpl = rc_config.get( + 'rhodecode_clone_uri_ssh_tmpl') or self.DEFAULT_CLONE_URI_SSH + + else: + uri_tmpl = rc_config.get( + 'rhodecode_clone_uri_tmpl') or self.DEFAULT_CLONE_URI + + return get_clone_url(request=request, + uri_tmpl=uri_tmpl, + repo_name=self.repo_name, + repo_id=self.repo_id, + repo_type=self.repo_type, + **override) + + def set_state(self, state): + self.repo_state = state + Session().add(self) + #========================================================================== + # SCM PROPERTIES + #========================================================================== + + def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, maybe_unreachable=False): + return get_commit_safe( + self.scm_instance(), commit_id, commit_idx, 
pre_load=pre_load, + maybe_unreachable=maybe_unreachable) + + def get_changeset(self, rev=None, pre_load=None): + warnings.warn("Use get_commit", DeprecationWarning) + commit_id = None + commit_idx = None + if isinstance(rev, compat.string_types): + commit_id = rev + else: + commit_idx = rev + return self.get_commit(commit_id=commit_id, commit_idx=commit_idx, + pre_load=pre_load) + + def get_landing_commit(self): + """ + Returns landing commit, or if that doesn't exist returns the tip + """ + _rev_type, _rev = self.landing_rev + commit = self.get_commit(_rev) + if isinstance(commit, EmptyCommit): + return self.get_commit() + return commit + + def flush_commit_cache(self): + self.update_commit_cache(cs_cache={'raw_id':'0'}) + self.update_commit_cache() + + def update_commit_cache(self, cs_cache=None, config=None): + """ + Update cache of last commit for repository + cache_keys should be:: + + source_repo_id + short_id + raw_id + revision + parents + message + date + author + updated_on + + """ + from rhodecode.lib.vcs.backends.base import BaseChangeset + from rhodecode.lib.vcs.utils.helpers import parse_datetime + empty_date = datetime.datetime.fromtimestamp(0) + + if cs_cache is None: + # use no-cache version here + try: + scm_repo = self.scm_instance(cache=False, config=config) + except VCSError: + scm_repo = None + empty = scm_repo is None or scm_repo.is_empty() + + if not empty: + cs_cache = scm_repo.get_commit( + pre_load=["author", "date", "message", "parents", "branch"]) + else: + cs_cache = EmptyCommit() + + if isinstance(cs_cache, BaseChangeset): + cs_cache = cs_cache.__json__() + + def is_outdated(new_cs_cache): + if (new_cs_cache['raw_id'] != self.changeset_cache['raw_id'] or + new_cs_cache['revision'] != self.changeset_cache['revision']): + return True + return False + + # check if we have maybe already latest cached revision + if is_outdated(cs_cache) or not self.changeset_cache: + _current_datetime = datetime.datetime.utcnow() + last_change = cs_cache.get('date') or _current_datetime + # we check if last update is newer than the new value + # if yes, we use the current timestamp instead. Imagine you get + # old commit pushed 1y ago, we'd set last update 1y to ago. 
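The comment above motivates the timestamp guard that follows; here is the idea in isolation, assuming datetime_to_time() is a plain UTC-timestamp conversion (an assumption made for this sketch)::

    import calendar
    import datetime

    def datetime_to_time(dt):
        return calendar.timegm(dt.timetuple())

    now = datetime.datetime.utcnow()
    year_old_commit_date = now - datetime.timedelta(days=365)

    # the commit is older than the push moment, so the repository's
    # last-update stamp should stay at "now" instead of jumping back a year
    if datetime_to_time(year_old_commit_date) > datetime_to_time(now):
        last_update = year_old_commit_date
    else:
        last_update = now
    assert last_update == now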
last_change_timestamp = datetime_to_time(last_change) + current_timestamp = datetime_to_time(_current_datetime) + if last_change_timestamp > current_timestamp and not empty: + cs_cache['date'] = _current_datetime + + _date_latest = parse_datetime(cs_cache.get('date') or empty_date) + cs_cache['updated_on'] = time.time() + self.changeset_cache = cs_cache + self.updated_on = last_change + Session().add(self) + Session().commit() + + else: + if empty: + cs_cache = EmptyCommit().__json__() + else: + cs_cache = self.changeset_cache + + _date_latest = parse_datetime(cs_cache.get('date') or empty_date) + + cs_cache['updated_on'] = time.time() + self.changeset_cache = cs_cache + self.updated_on = _date_latest + Session().add(self) + Session().commit() + + log.debug('updated repo `%s` with new commit cache %s, and last update_date: %s', + self.repo_name, cs_cache, _date_latest) + + @property + def tip(self): + return self.get_commit('tip') + + @property + def author(self): + return self.tip.author + + @property + def last_change(self): + return self.scm_instance().last_change + + def get_comments(self, revisions=None): + """ + Returns comments for this repository grouped by revisions + + :param revisions: filter query by revisions only + """ + cmts = ChangesetComment.query()\ + .filter(ChangesetComment.repo == self) + if revisions: + cmts = cmts.filter(ChangesetComment.revision.in_(revisions)) + grouped = collections.defaultdict(list) + for cmt in cmts.all(): + grouped[cmt.revision].append(cmt) + return grouped + + def statuses(self, revisions=None): + """ + Returns statuses for this repository + + :param revisions: list of revisions to get statuses for + """ + statuses = ChangesetStatus.query()\ + .filter(ChangesetStatus.repo == self)\ + .filter(ChangesetStatus.version == 0) + + if revisions: + # Try doing the filtering in chunks to avoid hitting limits + size = 500 + status_results = [] + for chunk in xrange(0, len(revisions), size): + status_results += statuses.filter( + ChangesetStatus.revision.in_( + revisions[chunk: chunk+size]) + ).all() + else: + status_results = statuses.all() + + grouped = {} + + # maybe we have an open pull request without a status yet?
+ stat = ChangesetStatus.STATUS_UNDER_REVIEW + status_lbl = ChangesetStatus.get_status_lbl(stat) + for pr in PullRequest.query().filter(PullRequest.source_repo == self).all(): + for rev in pr.revisions: + pr_id = pr.pull_request_id + pr_repo = pr.target_repo.repo_name + grouped[rev] = [stat, status_lbl, pr_id, pr_repo] + + for stat in status_results: + pr_id = pr_repo = None + if stat.pull_request: + pr_id = stat.pull_request.pull_request_id + pr_repo = stat.pull_request.target_repo.repo_name + grouped[stat.revision] = [str(stat.status), stat.status_lbl, + pr_id, pr_repo] + return grouped + + # ========================================================================== + # SCM CACHE INSTANCE + # ========================================================================== + + def scm_instance(self, **kwargs): + import rhodecode + + # Passing a config will not hit the cache currently only used + # for repo2dbmapper + config = kwargs.pop('config', None) + cache = kwargs.pop('cache', None) + vcs_full_cache = kwargs.pop('vcs_full_cache', None) + if vcs_full_cache is not None: + # allows override global config + full_cache = vcs_full_cache + else: + full_cache = str2bool(rhodecode.CONFIG.get('vcs_full_cache')) + # if cache is NOT defined use default global, else we have a full + # control over cache behaviour + if cache is None and full_cache and not config: + log.debug('Initializing pure cached instance for %s', self.repo_path) + return self._get_instance_cached() + + # cache here is sent to the "vcs server" + return self._get_instance(cache=bool(cache), config=config) + + def _get_instance_cached(self): + from rhodecode.lib import rc_cache + + cache_namespace_uid = 'cache_repo_instance.{}'.format(self.repo_id) + invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format( + repo_id=self.repo_id) + region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid) + + @region.conditional_cache_on_arguments(namespace=cache_namespace_uid) + def get_instance_cached(repo_id, context_id, _cache_state_uid): + return self._get_instance(repo_state_uid=_cache_state_uid) + + # we must use thread scoped cache here, + # because each thread of gevent needs it's own not shared connection and cache + # we also alter `args` so the cache key is individual for every green thread. 
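What "individual for every green thread" means in practice, sketched with stdlib pieces only (the helper name and key layout are hypothetical, not the rc_cache API)::

    import threading

    def thread_scoped_key(base_key):
        # mixing the current thread/greenlet identifier into the cache key
        # gives each gevent worker its own slot, so no two workers share a
        # cached connection object
        return '{}|thread:{}'.format(base_key, threading.current_thread().ident)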
+ inv_context_manager = rc_cache.InvalidationContext( + uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace, + thread_scoped=True) + with inv_context_manager as invalidation_context: + cache_state_uid = invalidation_context.cache_data['cache_state_uid'] + args = (self.repo_id, inv_context_manager.cache_key, cache_state_uid) + + # re-compute and store cache if we get invalidate signal + if invalidation_context.should_invalidate(): + instance = get_instance_cached.refresh(*args) + else: + instance = get_instance_cached(*args) + + log.debug('Repo instance fetched in %.4fs', inv_context_manager.compute_time) + return instance + + def _get_instance(self, cache=True, config=None, repo_state_uid=None): + log.debug('Initializing %s instance `%s` with cache flag set to: %s', + self.repo_type, self.repo_path, cache) + config = config or self._config + custom_wire = { + 'cache': cache, # controls the vcs.remote cache + 'repo_state_uid': repo_state_uid + } + repo = get_vcs_instance( + repo_path=safe_str(self.repo_full_path), + config=config, + with_wire=custom_wire, + create=False, + _vcs_alias=self.repo_type) + if repo is not None: + repo.count() # cache rebuild + return repo + + def get_shadow_repository_path(self, workspace_id): + from rhodecode.lib.vcs.backends.base import BaseRepository + shadow_repo_path = BaseRepository._get_shadow_repository_path( + self.repo_full_path, self.repo_id, workspace_id) + return shadow_repo_path + + def __json__(self): + return {'landing_rev': self.landing_rev} + + def get_dict(self): + + # Since we transformed `repo_name` to a hybrid property, we need to + # keep compatibility with the code which uses `repo_name` field. + + result = super(Repository, self).get_dict() + result['repo_name'] = result.pop('_repo_name', None) + return result + + +class RepoGroup(Base, BaseModel): + __tablename__ = 'groups' + __table_args__ = ( + UniqueConstraint('group_name', 'group_parent_id'), + base_table_args, + ) + __mapper_args__ = {'order_by': 'group_name'} + + CHOICES_SEPARATOR = '/' # used to generate select2 choices for nested groups + + group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) + _group_name = Column("group_name", String(255), nullable=False, unique=True, default=None) + group_name_hash = Column("repo_group_name_hash", String(1024), nullable=False, unique=False) + group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None) + group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None) + enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False) + user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None) + created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) + updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now) + personal = Column('personal', Boolean(), nullable=True, unique=None, default=None) + _changeset_cache = Column("changeset_cache", LargeBinary(), nullable=True) # JSON data + + repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id') + users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all') + parent_group = relationship('RepoGroup', remote_side=group_id) + user = relationship('User') + 
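A usage sketch for the self-referential parent_group relationship defined above (the group name is hypothetical and a configured database session is assumed)::

    # climb from a nested group to the root; the `parents` property below
    # does the same walk with a recursion limit
    group = RepoGroup.get_by_group_name(u'company/team/project')
    chain = []
    while group is not None:
        chain.insert(0, group.group_name)
        group = group.parent_group
    # `chain` now lists group names from root to leaf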
integrations = relationship('Integration', cascade="all, delete-orphan") + + # no cascade, set NULL + scope_artifacts = relationship('FileStore', primaryjoin='FileStore.scope_repo_group_id==RepoGroup.group_id') + + def __init__(self, group_name='', parent_group=None): + self.group_name = group_name + self.parent_group = parent_group + + def __unicode__(self): + return u"<%s('id:%s:%s')>" % ( + self.__class__.__name__, self.group_id, self.group_name) + + @hybrid_property + def group_name(self): + return self._group_name + + @group_name.setter + def group_name(self, value): + self._group_name = value + self.group_name_hash = self.hash_repo_group_name(value) + + @classmethod + def _load_changeset_cache(cls, repo_id, changeset_cache_raw): + from rhodecode.lib.vcs.backends.base import EmptyCommit + dummy = EmptyCommit().__json__() + if not changeset_cache_raw: + dummy['source_repo_id'] = repo_id + return json.loads(json.dumps(dummy)) + + try: + return json.loads(changeset_cache_raw) + except TypeError: + return dummy + except Exception: + log.error(traceback.format_exc()) + return dummy + + @hybrid_property + def changeset_cache(self): + return self._load_changeset_cache('', self._changeset_cache) + + @changeset_cache.setter + def changeset_cache(self, val): + try: + self._changeset_cache = json.dumps(val) + except Exception: + log.error(traceback.format_exc()) + + @validates('group_parent_id') + def validate_group_parent_id(self, key, val): + """ + Check cycle references for a parent group to self + """ + if self.group_id and val: + assert val != self.group_id + + return val + + @hybrid_property + def description_safe(self): + from rhodecode.lib import helpers as h + return h.escape(self.group_description) + + @classmethod + def hash_repo_group_name(cls, repo_group_name): + val = remove_formatting(repo_group_name) + val = safe_str(val).lower() + chars = [] + for c in val: + if c not in string.ascii_letters: + c = str(ord(c)) + chars.append(c) + + return ''.join(chars) + + @classmethod + def _generate_choice(cls, repo_group): + from webhelpers2.html import literal as _literal + _name = lambda k: _literal(cls.CHOICES_SEPARATOR.join(k)) + return repo_group.group_id, _name(repo_group.full_path_splitted) + + @classmethod + def groups_choices(cls, groups=None, show_empty_group=True): + if not groups: + groups = cls.query().all() + + repo_groups = [] + if show_empty_group: + repo_groups = [(-1, u'-- %s --' % _('No parent'))] + + repo_groups.extend([cls._generate_choice(x) for x in groups]) + + repo_groups = sorted( + repo_groups, key=lambda t: t[1].split(cls.CHOICES_SEPARATOR)[0]) + return repo_groups + + @classmethod + def url_sep(cls): + return URL_SEP + + @classmethod + def get_by_group_name(cls, group_name, cache=False, case_insensitive=False): + if case_insensitive: + gr = cls.query().filter(func.lower(cls.group_name) + == func.lower(group_name)) + else: + gr = cls.query().filter(cls.group_name == group_name) + if cache: + name_key = _hash_key(group_name) + gr = gr.options( + FromCache("sql_cache_short", "get_group_%s" % name_key)) + return gr.scalar() + + @classmethod + def get_user_personal_repo_group(cls, user_id): + user = User.get(user_id) + if user.username == User.DEFAULT_USER: + return None + + return cls.query()\ + .filter(cls.personal == true()) \ + .filter(cls.user == user) \ + .order_by(cls.group_id.asc()) \ + .first() + + @classmethod + def get_all_repo_groups(cls, user_id=Optional(None), group_id=Optional(None), + case_insensitive=True): + q = RepoGroup.query() + + if not 
isinstance(user_id, Optional): + q = q.filter(RepoGroup.user_id == user_id) + + if not isinstance(group_id, Optional): + q = q.filter(RepoGroup.group_parent_id == group_id) + + if case_insensitive: + q = q.order_by(func.lower(RepoGroup.group_name)) + else: + q = q.order_by(RepoGroup.group_name) + return q.all() + + @property + def parents(self, parents_recursion_limit=10): + groups = [] + if self.parent_group is None: + return groups + cur_gr = self.parent_group + groups.insert(0, cur_gr) + cnt = 0 + while 1: + cnt += 1 + gr = getattr(cur_gr, 'parent_group', None) + cur_gr = cur_gr.parent_group + if gr is None: + break + if cnt == parents_recursion_limit: + # this will prevent accidental infinit loops + log.error('more than %s parents found for group %s, stopping ' + 'recursive parent fetching', parents_recursion_limit, self) + break + + groups.insert(0, gr) + return groups + + @property + def last_commit_cache_update_diff(self): + return time.time() - (safe_int(self.changeset_cache.get('updated_on')) or 0) + + @classmethod + def _load_commit_change(cls, last_commit_cache): + from rhodecode.lib.vcs.utils.helpers import parse_datetime + empty_date = datetime.datetime.fromtimestamp(0) + date_latest = last_commit_cache.get('date', empty_date) + try: + return parse_datetime(date_latest) + except Exception: + return empty_date + + @property + def last_commit_change(self): + return self._load_commit_change(self.changeset_cache) + + @property + def last_db_change(self): + return self.updated_on + + @property + def children(self): + return RepoGroup.query().filter(RepoGroup.parent_group == self) + + @property + def name(self): + return self.group_name.split(RepoGroup.url_sep())[-1] + + @property + def full_path(self): + return self.group_name + + @property + def full_path_splitted(self): + return self.group_name.split(RepoGroup.url_sep()) + + @property + def repositories(self): + return Repository.query()\ + .filter(Repository.group == self)\ + .order_by(Repository.repo_name) + + @property + def repositories_recursive_count(self): + cnt = self.repositories.count() + + def children_count(group): + cnt = 0 + for child in group.children: + cnt += child.repositories.count() + cnt += children_count(child) + return cnt + + return cnt + children_count(self) + + def _recursive_objects(self, include_repos=True, include_groups=True): + all_ = [] + + def _get_members(root_gr): + if include_repos: + for r in root_gr.repositories: + all_.append(r) + childs = root_gr.children.all() + if childs: + for gr in childs: + if include_groups: + all_.append(gr) + _get_members(gr) + + root_group = [] + if include_groups: + root_group = [self] + + _get_members(self) + return root_group + all_ + + def recursive_groups_and_repos(self): + """ + Recursive return all groups, with repositories in those groups + """ + return self._recursive_objects() + + def recursive_groups(self): + """ + Returns all children groups for this group including children of children + """ + return self._recursive_objects(include_repos=False) + + def recursive_repos(self): + """ + Returns all children repositories for this group + """ + return self._recursive_objects(include_groups=False) + + def get_new_name(self, group_name): + """ + returns new full group name based on parent and new name + + :param group_name: + """ + path_prefix = (self.parent_group.full_path_splitted if + self.parent_group else []) + return RepoGroup.url_sep().join(path_prefix + [group_name]) + + def update_commit_cache(self, config=None): + """ + Update cache of last commit 
for newest repository inside this repository group. + cache_keys should be:: + + source_repo_id + short_id + raw_id + revision + parents + message + date + author + + """ + from rhodecode.lib.vcs.utils.helpers import parse_datetime + empty_date = datetime.datetime.fromtimestamp(0) + + def repo_groups_and_repos(root_gr): + for _repo in root_gr.repositories: + yield _repo + for child_group in root_gr.children.all(): + yield child_group + + latest_repo_cs_cache = {} + for obj in repo_groups_and_repos(self): + repo_cs_cache = obj.changeset_cache + date_latest = latest_repo_cs_cache.get('date', empty_date) + date_current = repo_cs_cache.get('date', empty_date) + current_timestamp = datetime_to_time(parse_datetime(date_latest)) + if current_timestamp < datetime_to_time(parse_datetime(date_current)): + latest_repo_cs_cache = repo_cs_cache + if hasattr(obj, 'repo_id'): + latest_repo_cs_cache['source_repo_id'] = obj.repo_id + else: + latest_repo_cs_cache['source_repo_id'] = repo_cs_cache.get('source_repo_id') + + _date_latest = parse_datetime(latest_repo_cs_cache.get('date') or empty_date) + + latest_repo_cs_cache['updated_on'] = time.time() + self.changeset_cache = latest_repo_cs_cache + self.updated_on = _date_latest + Session().add(self) + Session().commit() + + log.debug('updated repo group `%s` with new commit cache %s, and last update_date: %s', + self.group_name, latest_repo_cs_cache, _date_latest) + + def permissions(self, with_admins=True, with_owner=True, + expand_from_user_groups=False): + """ + Permissions for repository groups + """ + _admin_perm = 'group.admin' + + owner_row = [] + if with_owner: + usr = AttributeDict(self.user.get_dict()) + usr.owner_row = True + usr.permission = _admin_perm + owner_row.append(usr) + + super_admin_ids = [] + super_admin_rows = [] + if with_admins: + for usr in User.get_all_super_admins(): + super_admin_ids.append(usr.user_id) + # if this admin is also owner, don't double the record + if usr.user_id == owner_row[0].user_id: + owner_row[0].admin_row = True + else: + usr = AttributeDict(usr.get_dict()) + usr.admin_row = True + usr.permission = _admin_perm + super_admin_rows.append(usr) + + q = UserRepoGroupToPerm.query().filter(UserRepoGroupToPerm.group == self) + q = q.options(joinedload(UserRepoGroupToPerm.group), + joinedload(UserRepoGroupToPerm.user), + joinedload(UserRepoGroupToPerm.permission),) + + # get owners and admins and permissions. We do a trick of re-writing + # objects from sqlalchemy to named-tuples due to sqlalchemy session + # has a global reference and changing one object propagates to all + # others. 
This means if admin is also an owner admin_row that change + # would propagate to both objects + perm_rows = [] + for _usr in q.all(): + usr = AttributeDict(_usr.user.get_dict()) + # if this user is also owner/admin, mark as duplicate record + if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids: + usr.duplicate_perm = True + usr.permission = _usr.permission.permission_name + perm_rows.append(usr) + + # filter the perm rows by 'default' first and then sort them by + # admin,write,read,none permissions sorted again alphabetically in + # each group + perm_rows = sorted(perm_rows, key=display_user_sort) + + user_groups_rows = [] + if expand_from_user_groups: + for ug in self.permission_user_groups(with_members=True): + for user_data in ug.members: + user_groups_rows.append(user_data) + + return super_admin_rows + owner_row + perm_rows + user_groups_rows + + def permission_user_groups(self, with_members=False): + q = UserGroupRepoGroupToPerm.query()\ + .filter(UserGroupRepoGroupToPerm.group == self) + q = q.options(joinedload(UserGroupRepoGroupToPerm.group), + joinedload(UserGroupRepoGroupToPerm.users_group), + joinedload(UserGroupRepoGroupToPerm.permission),) + + perm_rows = [] + for _user_group in q.all(): + entry = AttributeDict(_user_group.users_group.get_dict()) + entry.permission = _user_group.permission.permission_name + if with_members: + entry.members = [x.user.get_dict() + for x in _user_group.users_group.members] + perm_rows.append(entry) + + perm_rows = sorted(perm_rows, key=display_user_group_sort) + return perm_rows + + def get_api_data(self): + """ + Common function for generating api data + + """ + group = self + data = { + 'group_id': group.group_id, + 'group_name': group.group_name, + 'group_description': group.description_safe, + 'parent_group': group.parent_group.group_name if group.parent_group else None, + 'repositories': [x.repo_name for x in group.repositories], + 'owner': group.user.username, + } + return data + + def get_dict(self): + # Since we transformed `group_name` to a hybrid property, we need to + # keep compatibility with the code which uses `group_name` field. 
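The compatibility shim this comment introduces, isolated on plain dicts (a sketch, not the model code)::

    row = {'_group_name': u'docs/manuals', 'group_id': 7}  # raw column key
    row['group_name'] = row.pop('_group_name', None)       # public field name
    assert row == {'group_name': u'docs/manuals', 'group_id': 7}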
+ result = super(RepoGroup, self).get_dict() + result['group_name'] = result.pop('_group_name', None) + return result + + +class Permission(Base, BaseModel): + __tablename__ = 'permissions' + __table_args__ = ( + Index('p_perm_name_idx', 'permission_name'), + base_table_args, + ) + + PERMS = [ + ('hg.admin', _('RhodeCode Super Administrator')), + + ('repository.none', _('Repository no access')), + ('repository.read', _('Repository read access')), + ('repository.write', _('Repository write access')), + ('repository.admin', _('Repository admin access')), + + ('group.none', _('Repository group no access')), + ('group.read', _('Repository group read access')), + ('group.write', _('Repository group write access')), + ('group.admin', _('Repository group admin access')), + + ('usergroup.none', _('User group no access')), + ('usergroup.read', _('User group read access')), + ('usergroup.write', _('User group write access')), + ('usergroup.admin', _('User group admin access')), + + ('branch.none', _('Branch no permissions')), + ('branch.merge', _('Branch access by web merge')), + ('branch.push', _('Branch access by push')), + ('branch.push_force', _('Branch access by push with force')), + + ('hg.repogroup.create.false', _('Repository Group creation disabled')), + ('hg.repogroup.create.true', _('Repository Group creation enabled')), + + ('hg.usergroup.create.false', _('User Group creation disabled')), + ('hg.usergroup.create.true', _('User Group creation enabled')), + + ('hg.create.none', _('Repository creation disabled')), + ('hg.create.repository', _('Repository creation enabled')), + ('hg.create.write_on_repogroup.true', _('Repository creation enabled with write permission to a repository group')), + ('hg.create.write_on_repogroup.false', _('Repository creation disabled with write permission to a repository group')), + + ('hg.fork.none', _('Repository forking disabled')), + ('hg.fork.repository', _('Repository forking enabled')), + + ('hg.register.none', _('Registration disabled')), + ('hg.register.manual_activate', _('User Registration with manual account activation')), + ('hg.register.auto_activate', _('User Registration with automatic account activation')), + + ('hg.password_reset.enabled', _('Password reset enabled')), + ('hg.password_reset.hidden', _('Password reset hidden')), + ('hg.password_reset.disabled', _('Password reset disabled')), + + ('hg.extern_activate.manual', _('Manual activation of external account')), + ('hg.extern_activate.auto', _('Automatic activation of external account')), + + ('hg.inherit_default_perms.false', _('Inherit object permissions from default user disabled')), + ('hg.inherit_default_perms.true', _('Inherit object permissions from default user enabled')), + ] + + # definition of system default permissions for DEFAULT user, created on + # system setup + DEFAULT_USER_PERMISSIONS = [ + # object perms + 'repository.read', + 'group.read', + 'usergroup.read', + # branch, for backward compat we need same value as before so forced pushed + 'branch.push_force', + # global + 'hg.create.repository', + 'hg.repogroup.create.false', + 'hg.usergroup.create.false', + 'hg.create.write_on_repogroup.true', + 'hg.fork.repository', + 'hg.register.manual_activate', + 'hg.password_reset.enabled', + 'hg.extern_activate.auto', + 'hg.inherit_default_perms.true', + ] + + # defines which permissions are more important higher the more important + # Weight defines which permissions are more important. + # The higher number the more important. 
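Before the weight table itself, a hedged sketch of how such weights can resolve the effective permission when several rules match the same object (the real resolver lives in the permission calculation layer, not here)::

    weights = {'repository.none': 0, 'repository.read': 1,
               'repository.write': 3, 'repository.admin': 4}

    granted = ['repository.read', 'repository.write']
    # the highest-weighted permission wins
    assert max(granted, key=weights.get) == 'repository.write'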
+ PERM_WEIGHTS = { + 'repository.none': 0, + 'repository.read': 1, + 'repository.write': 3, + 'repository.admin': 4, + + 'group.none': 0, + 'group.read': 1, + 'group.write': 3, + 'group.admin': 4, + + 'usergroup.none': 0, + 'usergroup.read': 1, + 'usergroup.write': 3, + 'usergroup.admin': 4, + + 'branch.none': 0, + 'branch.merge': 1, + 'branch.push': 3, + 'branch.push_force': 4, + + 'hg.repogroup.create.false': 0, + 'hg.repogroup.create.true': 1, + + 'hg.usergroup.create.false': 0, + 'hg.usergroup.create.true': 1, + + 'hg.fork.none': 0, + 'hg.fork.repository': 1, + 'hg.create.none': 0, + 'hg.create.repository': 1 + } + + permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) + permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None) + permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None) + + def __unicode__(self): + return u"<%s('%s:%s')>" % ( + self.__class__.__name__, self.permission_id, self.permission_name + ) + + @classmethod + def get_by_key(cls, key): + return cls.query().filter(cls.permission_name == key).scalar() + + @classmethod + def get_default_repo_perms(cls, user_id, repo_id=None): + q = Session().query(UserRepoToPerm, Repository, Permission)\ + .join((Permission, UserRepoToPerm.permission_id == Permission.permission_id))\ + .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\ + .filter(UserRepoToPerm.user_id == user_id) + if repo_id: + q = q.filter(UserRepoToPerm.repository_id == repo_id) + return q.all() + + @classmethod + def get_default_repo_branch_perms(cls, user_id, repo_id=None): + q = Session().query(UserToRepoBranchPermission, UserRepoToPerm, Permission) \ + .join( + Permission, + UserToRepoBranchPermission.permission_id == Permission.permission_id) \ + .join( + UserRepoToPerm, + UserToRepoBranchPermission.rule_to_perm_id == UserRepoToPerm.repo_to_perm_id) \ + .filter(UserRepoToPerm.user_id == user_id) + + if repo_id: + q = q.filter(UserToRepoBranchPermission.repository_id == repo_id) + return q.order_by(UserToRepoBranchPermission.rule_order).all() + + @classmethod + def get_default_repo_perms_from_user_group(cls, user_id, repo_id=None): + q = Session().query(UserGroupRepoToPerm, Repository, Permission)\ + .join( + Permission, + UserGroupRepoToPerm.permission_id == Permission.permission_id)\ + .join( + Repository, + UserGroupRepoToPerm.repository_id == Repository.repo_id)\ + .join( + UserGroup, + UserGroupRepoToPerm.users_group_id == + UserGroup.users_group_id)\ + .join( + UserGroupMember, + UserGroupRepoToPerm.users_group_id == + UserGroupMember.users_group_id)\ + .filter( + UserGroupMember.user_id == user_id, + UserGroup.users_group_active == true()) + if repo_id: + q = q.filter(UserGroupRepoToPerm.repository_id == repo_id) + return q.all() + + @classmethod + def get_default_repo_branch_perms_from_user_group(cls, user_id, repo_id=None): + q = Session().query(UserGroupToRepoBranchPermission, UserGroupRepoToPerm, Permission) \ + .join( + Permission, + UserGroupToRepoBranchPermission.permission_id == Permission.permission_id) \ + .join( + UserGroupRepoToPerm, + UserGroupToRepoBranchPermission.rule_to_perm_id == UserGroupRepoToPerm.users_group_to_perm_id) \ + .join( + UserGroup, + UserGroupRepoToPerm.users_group_id == UserGroup.users_group_id) \ + .join( + UserGroupMember, + UserGroupRepoToPerm.users_group_id == UserGroupMember.users_group_id) \ + .filter( + UserGroupMember.user_id == user_id, + 
UserGroup.users_group_active == true()) + + if repo_id: + q = q.filter(UserGroupToRepoBranchPermission.repository_id == repo_id) + return q.order_by(UserGroupToRepoBranchPermission.rule_order).all() + + @classmethod + def get_default_group_perms(cls, user_id, repo_group_id=None): + q = Session().query(UserRepoGroupToPerm, RepoGroup, Permission)\ + .join( + Permission, + UserRepoGroupToPerm.permission_id == Permission.permission_id)\ + .join( + RepoGroup, + UserRepoGroupToPerm.group_id == RepoGroup.group_id)\ + .filter(UserRepoGroupToPerm.user_id == user_id) + if repo_group_id: + q = q.filter(UserRepoGroupToPerm.group_id == repo_group_id) + return q.all() + + @classmethod + def get_default_group_perms_from_user_group( + cls, user_id, repo_group_id=None): + q = Session().query(UserGroupRepoGroupToPerm, RepoGroup, Permission)\ + .join( + Permission, + UserGroupRepoGroupToPerm.permission_id == + Permission.permission_id)\ + .join( + RepoGroup, + UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id)\ + .join( + UserGroup, + UserGroupRepoGroupToPerm.users_group_id == + UserGroup.users_group_id)\ + .join( + UserGroupMember, + UserGroupRepoGroupToPerm.users_group_id == + UserGroupMember.users_group_id)\ + .filter( + UserGroupMember.user_id == user_id, + UserGroup.users_group_active == true()) + if repo_group_id: + q = q.filter(UserGroupRepoGroupToPerm.group_id == repo_group_id) + return q.all() + + @classmethod + def get_default_user_group_perms(cls, user_id, user_group_id=None): + q = Session().query(UserUserGroupToPerm, UserGroup, Permission)\ + .join((Permission, UserUserGroupToPerm.permission_id == Permission.permission_id))\ + .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id))\ + .filter(UserUserGroupToPerm.user_id == user_id) + if user_group_id: + q = q.filter(UserUserGroupToPerm.user_group_id == user_group_id) + return q.all() + + @classmethod + def get_default_user_group_perms_from_user_group( + cls, user_id, user_group_id=None): + TargetUserGroup = aliased(UserGroup, name='target_user_group') + q = Session().query(UserGroupUserGroupToPerm, UserGroup, Permission)\ + .join( + Permission, + UserGroupUserGroupToPerm.permission_id == + Permission.permission_id)\ + .join( + TargetUserGroup, + UserGroupUserGroupToPerm.target_user_group_id == + TargetUserGroup.users_group_id)\ + .join( + UserGroup, + UserGroupUserGroupToPerm.user_group_id == + UserGroup.users_group_id)\ + .join( + UserGroupMember, + UserGroupUserGroupToPerm.user_group_id == + UserGroupMember.users_group_id)\ + .filter( + UserGroupMember.user_id == user_id, + UserGroup.users_group_active == true()) + if user_group_id: + q = q.filter( + UserGroupUserGroupToPerm.user_group_id == user_group_id) + + return q.all() + + +class UserRepoToPerm(Base, BaseModel): + __tablename__ = 'repo_to_perm' + __table_args__ = ( + UniqueConstraint('user_id', 'repository_id', 'permission_id'), + base_table_args + ) + + repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) + user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) + permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) + repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None) + + user = relationship('User') + repository = relationship('Repository') + permission = relationship('Permission') 
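The get_default_*_perms helpers above return bare join tuples rather than single model objects; a consumption sketch (assumes a populated database and configured Session)::

    # each row is a (UserRepoToPerm, Repository, Permission) triple
    for to_perm, repo, perm in Permission.get_default_repo_perms(user_id=2):
        print('%s -> %s' % (repo.repo_name, perm.permission_name))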
+ + branch_perm_entry = relationship('UserToRepoBranchPermission', cascade="all, delete-orphan", lazy='joined') + + @classmethod + def create(cls, user, repository, permission): + n = cls() + n.user = user + n.repository = repository + n.permission = permission + Session().add(n) + return n + + def __unicode__(self): + return u'<%s => %s >' % (self.user, self.repository) + + +class UserUserGroupToPerm(Base, BaseModel): + __tablename__ = 'user_user_group_to_perm' + __table_args__ = ( + UniqueConstraint('user_id', 'user_group_id', 'permission_id'), + base_table_args + ) + + user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) + user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) + permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) + user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None) + + user = relationship('User') + user_group = relationship('UserGroup') + permission = relationship('Permission') + + @classmethod + def create(cls, user, user_group, permission): + n = cls() + n.user = user + n.user_group = user_group + n.permission = permission + Session().add(n) + return n + + def __unicode__(self): + return u'<%s => %s >' % (self.user, self.user_group) + + +class UserToPerm(Base, BaseModel): + __tablename__ = 'user_to_perm' + __table_args__ = ( + UniqueConstraint('user_id', 'permission_id'), + base_table_args + ) + + user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) + user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) + permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) + + user = relationship('User') + permission = relationship('Permission', lazy='joined') + + def __unicode__(self): + return u'<%s => %s >' % (self.user, self.permission) + + +class UserGroupRepoToPerm(Base, BaseModel): + __tablename__ = 'users_group_repo_to_perm' + __table_args__ = ( + UniqueConstraint('repository_id', 'users_group_id', 'permission_id'), + base_table_args + ) + + users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) + users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None) + permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) + repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None) + + users_group = relationship('UserGroup') + permission = relationship('Permission') + repository = relationship('Repository') + user_group_branch_perms = relationship('UserGroupToRepoBranchPermission', cascade='all') + + @classmethod + def create(cls, users_group, repository, permission): + n = cls() + n.users_group = users_group + n.repository = repository + n.permission = permission + Session().add(n) + return n + + def __unicode__(self): + return u'<%s => %s >' % (self.users_group, self.repository) + + +class UserGroupUserGroupToPerm(Base, BaseModel): + __tablename__ = 'user_group_user_group_to_perm' +
__table_args__ = ( + UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'), + CheckConstraint('target_user_group_id != user_group_id'), + base_table_args + ) + + user_group_user_group_to_perm_id = Column("user_group_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) + target_user_group_id = Column("target_user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None) + permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) + user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None) + + target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id') + user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id') + permission = relationship('Permission') + + @classmethod + def create(cls, target_user_group, user_group, permission): + n = cls() + n.target_user_group = target_user_group + n.user_group = user_group + n.permission = permission + Session().add(n) + return n + + def __unicode__(self): + return u'<%s => %s >' % (self.target_user_group, self.user_group) + + +class UserGroupToPerm(Base, BaseModel): + __tablename__ = 'users_group_to_perm' + __table_args__ = ( + UniqueConstraint('users_group_id', 'permission_id',), + base_table_args + ) + + users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) + users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None) + permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) + + users_group = relationship('UserGroup') + permission = relationship('Permission') + + +class UserRepoGroupToPerm(Base, BaseModel): + __tablename__ = 'user_repo_group_to_perm' + __table_args__ = ( + UniqueConstraint('user_id', 'group_id', 'permission_id'), + base_table_args + ) + + group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) + user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) + group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None) + permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) + + user = relationship('User') + group = relationship('RepoGroup') + permission = relationship('Permission') + + @classmethod + def create(cls, user, repository_group, permission): + n = cls() + n.user = user + n.group = repository_group + n.permission = permission + Session().add(n) + return n + + +class UserGroupRepoGroupToPerm(Base, BaseModel): + __tablename__ = 'users_group_repo_group_to_perm' + __table_args__ = ( + UniqueConstraint('users_group_id', 'group_id'), + base_table_args + ) + + users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) + users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None) + group_id =
Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None) + permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) + + users_group = relationship('UserGroup') + permission = relationship('Permission') + group = relationship('RepoGroup') + + @classmethod + def create(cls, user_group, repository_group, permission): + n = cls() + n.users_group = user_group + n.group = repository_group + n.permission = permission + Session().add(n) + return n + + def __unicode__(self): + return u' %s >' % (self.users_group, self.group) + + +class Statistics(Base, BaseModel): + __tablename__ = 'statistics' + __table_args__ = ( + base_table_args + ) + + stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) + repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None) + stat_on_revision = Column("stat_on_revision", Integer(), nullable=False) + commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)#JSON data + commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)#JSON data + languages = Column("languages", LargeBinary(1000000), nullable=False)#JSON data + + repository = relationship('Repository', single_parent=True) + + +class UserFollowing(Base, BaseModel): + __tablename__ = 'user_followings' + __table_args__ = ( + UniqueConstraint('user_id', 'follows_repository_id'), + UniqueConstraint('user_id', 'follows_user_id'), + base_table_args + ) + + user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) + user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) + follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None) + follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None) + follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now) + + user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id') + + follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id') + follows_repository = relationship('Repository', order_by='Repository.repo_name') + + @classmethod + def get_repo_followers(cls, repo_id): + return cls.query().filter(cls.follows_repo_id == repo_id) + + +class CacheKey(Base, BaseModel): + __tablename__ = 'cache_invalidation' + __table_args__ = ( + UniqueConstraint('cache_key'), + Index('key_idx', 'cache_key'), + base_table_args, + ) + + CACHE_TYPE_FEED = 'FEED' + + # namespaces used to register process/thread aware caches + REPO_INVALIDATION_NAMESPACE = 'repo_cache:{repo_id}' + SETTINGS_INVALIDATION_NAMESPACE = 'system_settings' + + cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) + cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None) + cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None) + cache_state_uid = Column("cache_state_uid", String(255), nullable=True, unique=None, default=None) + cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False) + + def __init__(self, 
cache_key, cache_args='', cache_state_uid=None): + self.cache_key = cache_key + self.cache_args = cache_args + self.cache_active = False + # first key should be same for all entries, since all workers should share it + self.cache_state_uid = cache_state_uid or self.generate_new_state_uid() + + def __unicode__(self): + return u"<%s('%s:%s[%s]')>" % ( + self.__class__.__name__, + self.cache_id, self.cache_key, self.cache_active) + + def _cache_key_partition(self): + prefix, repo_name, suffix = self.cache_key.partition(self.cache_args) + return prefix, repo_name, suffix + + def get_prefix(self): + """ + Try to extract prefix from existing cache key. The key could consist + of prefix, repo_name, suffix + """ + # this returns prefix, repo_name, suffix + return self._cache_key_partition()[0] + + def get_suffix(self): + """ + get suffix that might have been used in _get_cache_key to + generate self.cache_key. Only used for informational purposes + in repo_edit.mako. + """ + # prefix, repo_name, suffix + return self._cache_key_partition()[2] + + @classmethod + def generate_new_state_uid(cls, based_on=None): + if based_on: + return str(uuid.uuid5(uuid.NAMESPACE_URL, safe_str(based_on))) + else: + return str(uuid.uuid4()) + + @classmethod + def delete_all_cache(cls): + """ + Delete all cache keys from database. + Should only be run when all instances are down and all entries + thus stale. + """ + cls.query().delete() + Session().commit() + + @classmethod + def set_invalidate(cls, cache_uid, delete=False): + """ + Mark all caches of a repo as invalid in the database. + """ + + try: + qry = Session().query(cls).filter(cls.cache_args == cache_uid) + if delete: + qry.delete() + log.debug('cache objects deleted for cache args %s', + safe_str(cache_uid)) + else: + qry.update({"cache_active": False, + "cache_state_uid": cls.generate_new_state_uid()}) + log.debug('cache objects marked as invalid for cache args %s', + safe_str(cache_uid)) + + Session().commit() + except Exception: + log.exception( + 'Cache key invalidation failed for cache args %s', + safe_str(cache_uid)) + Session().rollback() + + @classmethod + def get_active_cache(cls, cache_key): + inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar() + if inv_obj: + return inv_obj + return None + + @classmethod + def get_namespace_map(cls, namespace): + return { + x.cache_key: x + for x in cls.query().filter(cls.cache_args == namespace)} + + +class ChangesetComment(Base, BaseModel): + __tablename__ = 'changeset_comments' + __table_args__ = ( + Index('cc_revision_idx', 'revision'), + base_table_args, + ) + + COMMENT_OUTDATED = u'comment_outdated' + COMMENT_TYPE_NOTE = u'note' + COMMENT_TYPE_TODO = u'todo' + COMMENT_TYPES = [COMMENT_TYPE_NOTE, COMMENT_TYPE_TODO] + + OP_IMMUTABLE = u'immutable' + OP_CHANGEABLE = u'changeable' + + comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True) + repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False) + revision = Column('revision', String(40), nullable=True) + pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True) + pull_request_version_id = Column("pull_request_version_id", Integer(), ForeignKey('pull_request_versions.pull_request_version_id'), nullable=True) + line_no = Column('line_no', Unicode(10), nullable=True) + hl_lines = Column('hl_lines', Unicode(512), nullable=True) + f_path = Column('f_path', Unicode(1000), nullable=True) + user_id = Column('user_id', Integer(), 
+
+
+class ChangesetComment(Base, BaseModel):
+    __tablename__ = 'changeset_comments'
+    __table_args__ = (
+        Index('cc_revision_idx', 'revision'),
+        base_table_args,
+    )
+
+    COMMENT_OUTDATED = u'comment_outdated'
+    COMMENT_TYPE_NOTE = u'note'
+    COMMENT_TYPE_TODO = u'todo'
+    COMMENT_TYPES = [COMMENT_TYPE_NOTE, COMMENT_TYPE_TODO]
+
+    OP_IMMUTABLE = u'immutable'
+    OP_CHANGEABLE = u'changeable'
+
+    comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
+    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
+    revision = Column('revision', String(40), nullable=True)
+    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
+    pull_request_version_id = Column("pull_request_version_id", Integer(), ForeignKey('pull_request_versions.pull_request_version_id'), nullable=True)
+    line_no = Column('line_no', Unicode(10), nullable=True)
+    hl_lines = Column('hl_lines', Unicode(512), nullable=True)
+    f_path = Column('f_path', Unicode(1000), nullable=True)
+    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
+    text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
+    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
+    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
+    renderer = Column('renderer', Unicode(64), nullable=True)
+    display_state = Column('display_state', Unicode(128), nullable=True)
+    immutable_state = Column('immutable_state', Unicode(128), nullable=True, default=OP_CHANGEABLE)
+
+    comment_type = Column('comment_type', Unicode(128), nullable=True, default=COMMENT_TYPE_NOTE)
+    resolved_comment_id = Column('resolved_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=True)
+
+    resolved_comment = relationship('ChangesetComment', remote_side=comment_id, back_populates='resolved_by')
+    resolved_by = relationship('ChangesetComment', back_populates='resolved_comment')
+
+    author = relationship('User', lazy='joined')
+    repo = relationship('Repository')
+    status_change = relationship('ChangesetStatus', cascade="all, delete-orphan", lazy='joined')
+    pull_request = relationship('PullRequest', lazy='joined')
+    pull_request_version = relationship('PullRequestVersion')
+    history = relationship('ChangesetCommentHistory', cascade='all, delete-orphan', lazy='joined', order_by='ChangesetCommentHistory.version')
+
+    @classmethod
+    def get_users(cls, revision=None, pull_request_id=None):
+        """
+        Returns users associated with this ChangesetComment, i.e. those
+        who actually commented
+
+        :param cls:
+        :param revision:
+        """
+        q = Session().query(User)\
+            .join(ChangesetComment.author)
+        if revision:
+            q = q.filter(cls.revision == revision)
+        elif pull_request_id:
+            q = q.filter(cls.pull_request_id == pull_request_id)
+        return q.all()
+
+    @classmethod
+    def get_index_from_version(cls, pr_version, versions):
+        num_versions = [x.pull_request_version_id for x in versions]
+        try:
+            return num_versions.index(pr_version) + 1
+        except (IndexError, ValueError):
+            return
+
+    @property
+    def outdated(self):
+        return self.display_state == self.COMMENT_OUTDATED
+
+    @property
+    def immutable(self):
+        return self.immutable_state == self.OP_IMMUTABLE
+
+    def outdated_at_version(self, version):
+        """
+        Checks if comment is outdated for given pull request version
+        """
+        return self.outdated and self.pull_request_version_id != version
+
+    def older_than_version(self, version):
+        """
+        Checks if comment was made in an older version than the given one
+        """
+        if version is None:
+            return self.pull_request_version_id is not None
+
+        return self.pull_request_version_id < version
+
+    @property
+    def commit_id(self):
+        """New style naming to stop using .revision"""
+        return self.revision
+
+    @property
+    def resolved(self):
+        return self.resolved_by[0] if self.resolved_by else None
+
+    @property
+    def is_todo(self):
+        return self.comment_type == self.COMMENT_TYPE_TODO
+
+    @property
+    def is_inline(self):
+        return self.line_no and self.f_path
+
+    @property
+    def last_version(self):
+        version = 0
+        if self.history:
+            version = self.history[-1].version
+        return version
+
+    def get_index_version(self, versions):
+        return self.get_index_from_version(
+            self.pull_request_version_id, versions)
+
+    def __repr__(self):
+        if self.comment_id:
+            return '<DB:Comment #%s>' % self.comment_id
+        else:
+            return '<DB:Comment at %#x>' % id(self)
+
+    def get_api_data(self):
+        comment = self
+
+        data = {
+            'comment_id': comment.comment_id,
+            'comment_type': comment.comment_type,
+            'comment_text': comment.text,
+            'comment_status': comment.status_change,
+            'comment_f_path': comment.f_path,
+            'comment_lineno': comment.line_no,
+            'comment_author': comment.author,
+            'comment_created_on': comment.created_on,
+            'comment_resolved_by': self.resolved,
+            'comment_commit_id': comment.revision,
+            'comment_pull_request_id': comment.pull_request_id,
+            'comment_last_version': self.last_version
+        }
+        return data
+
+    def __json__(self):
+        data = dict()
+        data.update(self.get_api_data())
+        return data
+
+
+class ChangesetCommentHistory(Base, BaseModel):
+    __tablename__ = 'changeset_comments_history'
+    __table_args__ = (
+        Index('cch_comment_id_idx', 'comment_id'),
+        base_table_args,
+    )
+
+    comment_history_id = Column('comment_history_id', Integer(), nullable=False, primary_key=True)
+    comment_id = Column('comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=False)
+    version = Column("version", Integer(), nullable=False, default=0)
+    created_by_user_id = Column('created_by_user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
+    text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
+    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
+    deleted = Column('deleted', Boolean(), default=False)
+
+    author = relationship('User', lazy='joined')
+    comment = relationship('ChangesetComment', cascade="all, delete")
+
+    @classmethod
+    def get_version(cls, comment_id):
+        q = Session().query(ChangesetCommentHistory).filter(
+            ChangesetCommentHistory.comment_id == comment_id).order_by(ChangesetCommentHistory.version.desc())
+        if q.count() == 0:
+            return 1
+        elif q.count() >= q[0].version:
+            return q.count() + 1
+        else:
+            return q[0].version + 1
+
+
+class ChangesetStatus(Base, BaseModel):
+    __tablename__ = 'changeset_statuses'
+    __table_args__ = (
+        Index('cs_revision_idx', 'revision'),
+        Index('cs_version_idx', 'version'),
+        UniqueConstraint('repo_id', 'revision', 'version'),
+        base_table_args
+    )
+
+    STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
+    STATUS_APPROVED = 'approved'
+    STATUS_REJECTED = 'rejected'
+    STATUS_UNDER_REVIEW = 'under_review'
+
+    STATUSES = [
+        (STATUS_NOT_REVIEWED, _("Not Reviewed")),  # (no icon) and default
+        (STATUS_APPROVED, _("Approved")),
+        (STATUS_REJECTED, _("Rejected")),
+        (STATUS_UNDER_REVIEW, _("Under Review")),
+    ]
+
+    changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
+    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
+    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
+    revision = Column('revision', String(40), nullable=False)
+    status = Column('status', String(128), nullable=False, default=DEFAULT)
+    changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
+    modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
+    version = Column('version', Integer(), nullable=False, default=0)
+    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
+
+    author = relationship('User', lazy='joined')
+    repo = relationship('Repository')
+    comment = relationship('ChangesetComment', lazy='joined')
+    pull_request = relationship('PullRequest', lazy='joined')
+
+    def __unicode__(self):
+        return u"<%s('%s[v%s]:%s')>" % (
+            self.__class__.__name__,
+            self.status, self.version, self.author
+        )
+
+    @classmethod
+    def get_status_lbl(cls, value):
+        return dict(cls.STATUSES).get(value)
+
+    @property
+    def status_lbl(self):
+        return ChangesetStatus.get_status_lbl(self.status)
+
+    def get_api_data(self):
+        status = self
+        data = {
+            'status_id': status.changeset_status_id,
+            'status': status.status,
+        }
+        return data
+
+    def __json__(self):
+        data = dict()
+        data.update(self.get_api_data())
+        return data
+
+
+class _SetState(object):
+    """
+    Context manager allowing changing state for sensitive operations such as
+    pull request update or merge
+    """
+
+    def __init__(self, pull_request, pr_state, back_state=None):
+        self._pr = pull_request
+        self._org_state = back_state or pull_request.pull_request_state
+        self._pr_state = pr_state
+        self._current_state = None
+
+    def __enter__(self):
+        log.debug('StateLock: entering set state context of pr %s, setting state to: `%s`',
+                  self._pr, self._pr_state)
+        self.set_pr_state(self._pr_state)
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        if exc_val is not None:
+            log.error(traceback.format_exc(exc_tb))
+            return None
+
+        self.set_pr_state(self._org_state)
+        log.debug('StateLock: exiting set state context of pr %s, setting state to: `%s`',
+                  self._pr, self._org_state)
+
+    @property
+    def state(self):
+        return self._current_state
+
+    def set_pr_state(self, pr_state):
+        try:
+            self._pr.pull_request_state = pr_state
+            Session().add(self._pr)
+            Session().commit()
+            self._current_state = pr_state
+        except Exception:
+            log.exception('Failed to set PullRequest %s state to %s', self._pr, pr_state)
+            raise
+
+
+class _PullRequestBase(BaseModel):
+    """
+    Common attributes of pull request and version entries.
+    """
+
+    # .status values
+    STATUS_NEW = u'new'
+    STATUS_OPEN = u'open'
+    STATUS_CLOSED = u'closed'
+
+    # available states
+    STATE_CREATING = u'creating'
+    STATE_UPDATING = u'updating'
+    STATE_MERGING = u'merging'
+    STATE_CREATED = u'created'
+
+    title = Column('title', Unicode(255), nullable=True)
+    description = Column(
+        'description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'),
+        nullable=True)
+    description_renderer = Column('description_renderer', Unicode(64), nullable=True)
+
+    # new/open/closed status of pull request (not approve/reject/etc)
+    status = Column('status', Unicode(255), nullable=False, default=STATUS_NEW)
+    created_on = Column(
+        'created_on', DateTime(timezone=False), nullable=False,
+        default=datetime.datetime.now)
+    updated_on = Column(
+        'updated_on', DateTime(timezone=False), nullable=False,
+        default=datetime.datetime.now)
+
+    pull_request_state = Column("pull_request_state", String(255), nullable=True)
+
+    @declared_attr
+    def user_id(cls):
+        return Column(
+            "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
+            unique=None)
+
+    # 500 revisions max
+    _revisions = Column(
+        'revisions', UnicodeText().with_variant(UnicodeText(20500), 'mysql'))
+
+    common_ancestor_id = Column('common_ancestor_id', Unicode(255), nullable=True)
+
+    @declared_attr
+    def source_repo_id(cls):
+        # TODO: dan: rename column to source_repo_id
+        return Column(
+            'org_repo_id', Integer(), ForeignKey('repositories.repo_id'),
+            nullable=False)
+
+    _source_ref = Column('org_ref', Unicode(255), nullable=False)
+
+    @hybrid_property
+    def source_ref(self):
+        return self._source_ref
+
+    @source_ref.setter
+    def source_ref(self, val):
+        parts = (val or '').split(':')
+        if len(parts) != 3:
+            raise ValueError(
+                'Invalid reference format given: {}, expected X:Y:Z'.format(val))
+        self._source_ref = safe_unicode(val)
+
+    _target_ref = Column('other_ref', Unicode(255), nullable=False)
+
+    @hybrid_property
+    def target_ref(self):
+        return self._target_ref
+
+    @target_ref.setter
+    def target_ref(self, val):
+        parts = (val or '').split(':')
+        if len(parts) != 3:
+            raise ValueError(
+                'Invalid reference format given: {}, expected X:Y:Z'.format(val))
+        self._target_ref = safe_unicode(val)
+
+    @declared_attr
+    def target_repo_id(cls):
+        # TODO: dan: rename column to target_repo_id
+        return Column(
+            'other_repo_id', Integer(), ForeignKey('repositories.repo_id'),
+            nullable=False)
+
+    _shadow_merge_ref = Column('shadow_merge_ref', Unicode(255), nullable=True)
+
+    # TODO: dan: rename column to last_merge_source_rev
+    _last_merge_source_rev = Column(
+        'last_merge_org_rev', String(40), nullable=True)
+    # TODO: dan: rename column to last_merge_target_rev
+    _last_merge_target_rev = Column(
+        'last_merge_other_rev', String(40), nullable=True)
+    _last_merge_status = Column('merge_status', Integer(), nullable=True)
+    last_merge_metadata = Column(
+        'last_merge_metadata', MutationObj.as_mutable(
+            JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
+
+    merge_rev = Column('merge_rev', String(40), nullable=True)
+
+    reviewer_data = Column(
+        'reviewer_data_json', MutationObj.as_mutable(
+            JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
+
+    @property
+    def reviewer_data_json(self):
+        return json.dumps(self.reviewer_data)
+
+    @property
+    def last_merge_metadata_parsed(self):
+        metadata = {}
+        if not self.last_merge_metadata:
+            return metadata
+
+        if hasattr(self.last_merge_metadata, 'de_coerce'):
+            for k, v in self.last_merge_metadata.de_coerce().items():
+                if k in ['target_ref', 'source_ref']:
+                    metadata[k] = Reference(v['type'], v['name'], v['commit_id'])
+                else:
+                    if hasattr(v, 'de_coerce'):
+                        metadata[k] = v.de_coerce()
+                    else:
+                        metadata[k] = v
+        return metadata
+
+    @property
+    def work_in_progress(self):
+        """checks if pull request is work in progress by checking the title"""
+        title = self.title.upper()
+        if re.match(r'^(\[WIP\]\s*|WIP:\s*|WIP\s+)', title):
+            return True
+        return False
+
+    @hybrid_property
+    def description_safe(self):
+        from rhodecode.lib import helpers as h
+        return h.escape(self.description)
+
+    @hybrid_property
+    def revisions(self):
+        return self._revisions.split(':') if self._revisions else []
+
+    @revisions.setter
+    def revisions(self, val):
+        self._revisions = u':'.join(val)
+
+    @hybrid_property
+    def last_merge_status(self):
+        return safe_int(self._last_merge_status)
+
+    @last_merge_status.setter
+    def last_merge_status(self, val):
+        self._last_merge_status = val
+
+    @declared_attr
+    def author(cls):
+        return relationship('User', lazy='joined')
+
+    @declared_attr
+    def source_repo(cls):
+        return relationship(
+            'Repository',
+            primaryjoin='%s.source_repo_id==Repository.repo_id' % cls.__name__)
+
+    @property
+    def source_ref_parts(self):
+        return self.unicode_to_reference(self.source_ref)
+
+    @declared_attr
+    def target_repo(cls):
+        return relationship(
+            'Repository',
+            primaryjoin='%s.target_repo_id==Repository.repo_id' % cls.__name__)
+
+    @property
+    def target_ref_parts(self):
+        return self.unicode_to_reference(self.target_ref)
+
+    @property
+    def shadow_merge_ref(self):
+        return self.unicode_to_reference(self._shadow_merge_ref)
+
+    @shadow_merge_ref.setter
+    def shadow_merge_ref(self, ref):
+        self._shadow_merge_ref = self.reference_to_unicode(ref)
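+
+    # Reference format sketch (illustrative values): the X:Y:Z triple stored
+    # in org_ref/other_ref is `type:name:commit_id`, e.g.
+    #
+    #     pr.source_ref = 'branch:feature-x:aaaabbbbccccddddeeeeffff0000111122223333'
+    #     pr.source_ref_parts  # -> Reference('branch', 'feature-x', 'aaaa...3333')
+    #
+    # 'feature-x' and the 40-char hash are made-up examples, not fixtures.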
+
+    @staticmethod
+    def unicode_to_reference(raw):
+        """
+        Convert a unicode (or string) to a reference object.
+        If unicode evaluates to False it returns None.
+        """
+        if raw:
+            refs = raw.split(':')
+            return Reference(*refs)
+        else:
+            return None
+
+    @staticmethod
+    def reference_to_unicode(ref):
+        """
+        Convert a reference object to unicode.
+        If reference is None it returns None.
+        """
+        if ref:
+            return u':'.join(ref)
+        else:
+            return None
+
+    def get_api_data(self, with_merge_state=True):
+        from rhodecode.model.pull_request import PullRequestModel
+
+        pull_request = self
+        if with_merge_state:
+            merge_response, merge_status, msg = \
+                PullRequestModel().merge_status(pull_request)
+            merge_state = {
+                'status': merge_status,
+                'message': safe_unicode(msg),
+            }
+        else:
+            merge_state = {'status': 'not_available',
+                           'message': 'not_available'}
+
+        merge_data = {
+            'clone_url': PullRequestModel().get_shadow_clone_url(pull_request),
+            'reference': (
+                pull_request.shadow_merge_ref._asdict()
+                if pull_request.shadow_merge_ref else None),
+        }
+
+        data = {
+            'pull_request_id': pull_request.pull_request_id,
+            'url': PullRequestModel().get_url(pull_request),
+            'title': pull_request.title,
+            'description': pull_request.description,
+            'status': pull_request.status,
+            'state': pull_request.pull_request_state,
+            'created_on': pull_request.created_on,
+            'updated_on': pull_request.updated_on,
+            'commit_ids': pull_request.revisions,
+            'review_status': pull_request.calculated_review_status(),
+            'mergeable': merge_state,
+            'source': {
+                'clone_url': pull_request.source_repo.clone_url(),
+                'repository': pull_request.source_repo.repo_name,
+                'reference': {
+                    'name': pull_request.source_ref_parts.name,
+                    'type': pull_request.source_ref_parts.type,
+                    'commit_id': pull_request.source_ref_parts.commit_id,
+                },
+            },
+            'target': {
+                'clone_url': pull_request.target_repo.clone_url(),
+                'repository': pull_request.target_repo.repo_name,
+                'reference': {
+                    'name': pull_request.target_ref_parts.name,
+                    'type': pull_request.target_ref_parts.type,
+                    'commit_id': pull_request.target_ref_parts.commit_id,
+                },
+            },
+            'merge': merge_data,
+            'author': pull_request.author.get_api_data(include_secrets=False,
+                                                       details='basic'),
+            'reviewers': [
+                {
+                    'user': reviewer.get_api_data(include_secrets=False,
+                                                  details='basic'),
+                    'reasons': reasons,
+                    'review_status': st[0][1].status if st else 'not_reviewed',
+                }
+                for obj, reviewer, reasons, mandatory, st in
+                pull_request.reviewers_statuses()
+            ]
+        }
+
+        return data
+
+    def set_state(self, pull_request_state, final_state=None):
+        """
+        # goes from initial state to updating to initial state.
+        # initial state can be changed by specifying back_state=
+        with pull_request_obj.set_state(PullRequest.STATE_UPDATING):
+            pull_request.merge()
+
+        :param pull_request_state:
+        :param final_state:
+
+        """
+
+        return _SetState(self, pull_request_state, back_state=final_state)
+
+
+class PullRequest(Base, _PullRequestBase):
+    __tablename__ = 'pull_requests'
+    __table_args__ = (
+        base_table_args,
+    )
+
+    pull_request_id = Column(
+        'pull_request_id', Integer(), nullable=False, primary_key=True)
+
+    def __repr__(self):
+        if self.pull_request_id:
+            return '<DB:PullRequest #%s>' % self.pull_request_id
+        else:
+            return '<DB:PullRequest at %#x>' % id(self)
+
+    reviewers = relationship('PullRequestReviewers', cascade="all, delete-orphan")
+    statuses = relationship('ChangesetStatus', cascade="all, delete-orphan")
+    comments = relationship('ChangesetComment', cascade="all, delete-orphan")
+    versions = relationship('PullRequestVersion', cascade="all, delete-orphan",
+                            lazy='dynamic')
+
+    @classmethod
+    def get_pr_display_object(cls, pull_request_obj, org_pull_request_obj,
+                              internal_methods=None):
+
+        class PullRequestDisplay(object):
+            """
+            Special object wrapper for showing PullRequest data via Versions
+            It mimics PR object as close as possible. This is a read-only
+            object, just for display
+            """
+
+            def __init__(self, attrs, internal=None):
+                self.attrs = attrs
+                # internal has priority over the given ones via attrs
+                self.internal = internal or ['versions']
+
+            def __getattr__(self, item):
+                if item in self.internal:
+                    return getattr(self, item)
+                try:
+                    return self.attrs[item]
+                except KeyError:
+                    raise AttributeError(
+                        '%s object has no attribute %s' % (self, item))
+
+            def __repr__(self):
+                return '<DB:PullRequestDisplay #%s>' % self.attrs.get('pull_request_id')
+
+            def versions(self):
+                return pull_request_obj.versions.order_by(
+                    PullRequestVersion.pull_request_version_id).all()
+
+            def is_closed(self):
+                return pull_request_obj.is_closed()
+
+            def is_state_changing(self):
+                return pull_request_obj.is_state_changing()
+
+            @property
+            def pull_request_version_id(self):
+                return getattr(pull_request_obj, 'pull_request_version_id', None)
+
+        attrs = StrictAttributeDict(pull_request_obj.get_api_data(with_merge_state=False))
+
+        attrs.author = StrictAttributeDict(
+            pull_request_obj.author.get_api_data())
+        if pull_request_obj.target_repo:
+            attrs.target_repo = StrictAttributeDict(
+                pull_request_obj.target_repo.get_api_data())
+            attrs.target_repo.clone_url = pull_request_obj.target_repo.clone_url
+
+        if pull_request_obj.source_repo:
+            attrs.source_repo = StrictAttributeDict(
+                pull_request_obj.source_repo.get_api_data())
+            attrs.source_repo.clone_url = pull_request_obj.source_repo.clone_url
+
+        attrs.source_ref_parts = pull_request_obj.source_ref_parts
+        attrs.target_ref_parts = pull_request_obj.target_ref_parts
+        attrs.revisions = pull_request_obj.revisions
+        attrs.common_ancestor_id = pull_request_obj.common_ancestor_id
+        attrs.shadow_merge_ref = org_pull_request_obj.shadow_merge_ref
+        attrs.reviewer_data = org_pull_request_obj.reviewer_data
+        attrs.reviewer_data_json = org_pull_request_obj.reviewer_data_json
+
+        return PullRequestDisplay(attrs, internal=internal_methods)
+
+    def is_closed(self):
+        return self.status == self.STATUS_CLOSED
+
+    def is_state_changing(self):
+        return self.pull_request_state != PullRequest.STATE_CREATED
+
+    def __json__(self):
+        return {
+            'revisions': self.revisions,
+            'versions': self.versions_count
+        }
+
+    def calculated_review_status(self):
+        from rhodecode.model.changeset_status import ChangesetStatusModel
+        return ChangesetStatusModel().calculated_review_status(self)
+
+    def reviewers_statuses(self):
+        from rhodecode.model.changeset_status import ChangesetStatusModel
+        return ChangesetStatusModel().reviewers_statuses(self)
+
+    @property
+    def workspace_id(self):
+        from rhodecode.model.pull_request import PullRequestModel
+        return PullRequestModel()._workspace_id(self)
+
+    def get_shadow_repo(self):
+        workspace_id = self.workspace_id
+        shadow_repository_path = self.target_repo.get_shadow_repository_path(workspace_id)
+        if os.path.isdir(shadow_repository_path):
+            vcs_obj = self.target_repo.scm_instance()
+            return vcs_obj.get_shadow_instance(shadow_repository_path)
+
+    @property
+    def versions_count(self):
+        """
+        return number of versions this PR has, e.g. a PR that has been
+        updated once will have 2 versions
+        """
+        return self.versions.count() + 1
+
+
+class PullRequestVersion(Base, _PullRequestBase):
+    __tablename__ = 'pull_request_versions'
+    __table_args__ = (
+        base_table_args,
+    )
+
+    pull_request_version_id = Column(
+        'pull_request_version_id', Integer(), nullable=False, primary_key=True)
+    pull_request_id = Column(
+        'pull_request_id', Integer(),
+        ForeignKey('pull_requests.pull_request_id'), nullable=False)
+    pull_request = relationship('PullRequest')
+
+    def __repr__(self):
+        if self.pull_request_version_id:
+            return '<DB:PullRequestVersion #%s>' % self.pull_request_version_id
+        else:
+            return '<DB:PullRequestVersion at %#x>' % id(self)
+
+    @property
+    def reviewers(self):
+        return self.pull_request.reviewers
+
+    @property
+    def versions(self):
+        return self.pull_request.versions
+
+    def is_closed(self):
+        # calculate from original
+        return self.pull_request.status == self.STATUS_CLOSED
+
+    def is_state_changing(self):
+        return self.pull_request.pull_request_state != PullRequest.STATE_CREATED
+
+    def calculated_review_status(self):
+        return self.pull_request.calculated_review_status()
+
+    def reviewers_statuses(self):
+        return self.pull_request.reviewers_statuses()
+
+
+class PullRequestReviewers(Base, BaseModel):
+    __tablename__ = 'pull_request_reviewers'
+    __table_args__ = (
+        base_table_args,
+    )
+
+    @hybrid_property
+    def reasons(self):
+        if not self._reasons:
+            return []
+        return self._reasons
+
+    @reasons.setter
+    def reasons(self, val):
+        val = val or []
+        if any(not isinstance(x, compat.string_types) for x in val):
+            raise Exception('invalid reasons type, must be list of strings')
+        self._reasons = val
+
+    pull_requests_reviewers_id = Column(
+        'pull_requests_reviewers_id', Integer(), nullable=False,
+        primary_key=True)
+    pull_request_id = Column(
+        "pull_request_id", Integer(),
+        ForeignKey('pull_requests.pull_request_id'), nullable=False)
+    user_id = Column(
+        "user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
+    _reasons = Column(
+        'reason', MutationList.as_mutable(
+            JsonType('list', dialect_map=dict(mysql=UnicodeText(16384)))))
+
+    mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
+    user = relationship('User')
+    pull_request = relationship('PullRequest')
+
+    rule_data = Column(
+        'rule_data_json',
+        JsonType(dialect_map=dict(mysql=UnicodeText(16384))))
+
+    def rule_user_group_data(self):
+        """
+        Returns the voting user group rule data for this reviewer
+        """
+
+        if self.rule_data and 'vote_rule' in self.rule_data:
+            user_group_data = {}
+            if 'rule_user_group_entry_id' in self.rule_data:
+                # means a group with voting rules!
+                user_group_data['id'] = self.rule_data['rule_user_group_entry_id']
+                user_group_data['name'] = self.rule_data['rule_name']
+                user_group_data['vote_rule'] = self.rule_data['vote_rule']
+
+            return user_group_data
+
+    def __unicode__(self):
+        return u"<%s('id:%s')>" % (self.__class__.__name__,
+                                   self.pull_requests_reviewers_id)
+
+
+class Notification(Base, BaseModel):
+    __tablename__ = 'notifications'
+    __table_args__ = (
+        Index('notification_type_idx', 'type'),
+        base_table_args,
+    )
+
+    TYPE_CHANGESET_COMMENT = u'cs_comment'
+    TYPE_MESSAGE = u'message'
+    TYPE_MENTION = u'mention'
+    TYPE_REGISTRATION = u'registration'
+    TYPE_PULL_REQUEST = u'pull_request'
+    TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'
+    TYPE_PULL_REQUEST_UPDATE = u'pull_request_update'
+
+    notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
+    subject = Column('subject', Unicode(512), nullable=True)
+    body = Column('body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
+    created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
+    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
+    type_ = Column('type', Unicode(255))
+
+    created_by_user = relationship('User')
+    notifications_to_users = relationship('UserNotification', lazy='joined',
+                                          cascade="all, delete-orphan")
+
+    @property
+    def recipients(self):
+        return [x.user for x in UserNotification.query()\
+                .filter(UserNotification.notification == self)\
+                .order_by(UserNotification.user_id.asc()).all()]
+
+    @classmethod
+    def create(cls, created_by, subject, body, recipients, type_=None):
+        if type_ is None:
+            type_ = Notification.TYPE_MESSAGE
+
+        notification = cls()
+        notification.created_by_user = created_by
+        notification.subject = subject
+        notification.body = body
+        notification.type_ = type_
+        notification.created_on = datetime.datetime.now()
+
+        # for each recipient, link the created notification to their account
+        for u in recipients:
+            assoc = UserNotification()
+            assoc.user_id = u.user_id
+            assoc.notification = notification
+
+            # if created_by is among the recipients, mark their copy as read
+            if u.user_id == created_by.user_id:
+                assoc.read = True
+            Session().add(assoc)
+
+        Session().add(notification)
+
+        return notification
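+
+# Usage sketch (illustrative; `admin`, `dev_a` and `dev_b` stand for
+# hypothetical User rows, not fixtures shipped with this schema):
+#
+#     notification = Notification.create(
+#         created_by=admin, subject='deploy window', body='starts at 10:00',
+#         recipients=[admin, dev_a, dev_b], type_=Notification.TYPE_MESSAGE)
+#     Session().commit()
+#
+# Each recipient gets a UserNotification row; the creator's copy is pre-read.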
+
+
+class UserNotification(Base, BaseModel):
+    __tablename__ = 'user_to_notification'
+    __table_args__ = (
+        UniqueConstraint('user_id', 'notification_id'),
+        base_table_args
+    )
+
+    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
+    notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
+    read = Column('read', Boolean, default=False)
+    sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)
+
+    user = relationship('User', lazy="joined")
+    notification = relationship('Notification', lazy="joined",
+                                order_by=lambda: Notification.created_on.desc(),)
+
+    def mark_as_read(self):
+        self.read = True
+        Session().add(self)
+
+
+class UserNotice(Base, BaseModel):
+    __tablename__ = 'user_notices'
+    __table_args__ = (
+        base_table_args
+    )
+
+    NOTIFICATION_TYPE_MESSAGE = 'message'
+    NOTIFICATION_TYPE_NOTICE = 'notice'
+
+    NOTIFICATION_LEVEL_INFO = 'info'
+    NOTIFICATION_LEVEL_WARNING = 'warning'
+    NOTIFICATION_LEVEL_ERROR = 'error'
+
+    # NOTE: the physical column is named 'gist_id' here
+    user_notice_id = Column('gist_id', Integer(), primary_key=True)
+
+    notice_subject = Column('notice_subject', Unicode(512), nullable=True)
+    notice_body = Column('notice_body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
+
+    notice_read = Column('notice_read', Boolean, default=False)
+
+    notification_level = Column('notification_level', String(1024), default=NOTIFICATION_LEVEL_INFO)
+    notification_type = Column('notification_type', String(1024), default=NOTIFICATION_TYPE_NOTICE)
+
+    notice_created_by = Column('notice_created_by', Integer(), ForeignKey('users.user_id'), nullable=True)
+    notice_created_on = Column('notice_created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
+
+    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'))
+    user = relationship('User', lazy="joined", primaryjoin='User.user_id==UserNotice.user_id')
+
+    @classmethod
+    def create_for_user(cls, user, subject, body, notice_level=NOTIFICATION_LEVEL_INFO, allow_duplicate=False):
+
+        if notice_level not in [cls.NOTIFICATION_LEVEL_ERROR,
+                                cls.NOTIFICATION_LEVEL_WARNING,
+                                cls.NOTIFICATION_LEVEL_INFO]:
+            return
+
+        from rhodecode.model.user import UserModel
+        user = UserModel().get_user(user)
+
+        new_notice = UserNotice()
+        if not allow_duplicate:
+            existing_msg = UserNotice().query() \
+                .filter(UserNotice.user == user) \
+                .filter(UserNotice.notice_body == body) \
+                .filter(UserNotice.notice_read == false()) \
+                .scalar()
+            if existing_msg:
+                log.warning('Ignoring duplicate notice for user %s', user)
+                return
+
+        new_notice.user = user
+        new_notice.notice_subject = subject
+        new_notice.notice_body = body
+        new_notice.notification_level = notice_level
+        Session().add(new_notice)
+        Session().commit()
+
+
+class Gist(Base, BaseModel):
+    __tablename__ = 'gists'
+    __table_args__ = (
+        Index('g_gist_access_id_idx', 'gist_access_id'),
+        Index('g_created_on_idx', 'created_on'),
+        base_table_args
+    )
+
+    GIST_PUBLIC = u'public'
+    GIST_PRIVATE = u'private'
+    DEFAULT_FILENAME = u'gistfile1.txt'
+
+    ACL_LEVEL_PUBLIC = u'acl_public'
+    ACL_LEVEL_PRIVATE = u'acl_private'
+
+    gist_id = Column('gist_id', Integer(), primary_key=True)
+    gist_access_id = Column('gist_access_id', Unicode(250))
+    gist_description = Column('gist_description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
+    gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
+    gist_expires = Column('gist_expires', Float(53), nullable=False)
+    gist_type = Column('gist_type', Unicode(128), nullable=False)
+    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
+    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
+    acl_level = Column('acl_level', Unicode(128), nullable=True)
+
+    owner = relationship('User')
+
+    def __repr__(self):
+        return '<Gist:[%s]%s>' % (self.gist_type, self.gist_access_id)
+
+    @hybrid_property
+    def description_safe(self):
+        from rhodecode.lib import helpers as h
+        return h.escape(self.gist_description)
+
+    @classmethod
+    def get_or_404(cls, id_):
+        from pyramid.httpexceptions import HTTPNotFound
+
+        res = cls.query().filter(cls.gist_access_id == id_).scalar()
+        if not res:
+            raise HTTPNotFound()
+        return res
+
+    @classmethod
+    def get_by_access_id(cls, gist_access_id):
+        return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()
+
+    def gist_url(self):
+        from rhodecode.model.gist import GistModel
+        return GistModel().get_url(self)
+
+    @classmethod
+    def base_path(cls):
+        """
+        Returns base path where all gists are stored
+
+        :param cls:
+        """
+        from rhodecode.model.gist import GIST_STORE_LOC
+        q = Session().query(RhodeCodeUi)\
+            .filter(RhodeCodeUi.ui_key == URL_SEP)
+        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
+        return os.path.join(q.one().ui_value, GIST_STORE_LOC)
+
+    def get_api_data(self):
+        """
+        Common function for generating gist related data for API
+        """
+        gist = self
+        data = {
+            'gist_id': gist.gist_id,
+            'type': gist.gist_type,
+            'access_id': gist.gist_access_id,
+            'description': gist.gist_description,
+            'url': gist.gist_url(),
+            'expires': gist.gist_expires,
+            'created_on': gist.created_on,
+            'modified_at': gist.modified_at,
+            'content': None,
+            'acl_level': gist.acl_level,
+        }
+        return data
+
+    def __json__(self):
+        data = dict()
+        data.update(self.get_api_data())
+        return data
+    # SCM functions
+
+    def scm_instance(self, **kwargs):
+        """
+        Get an instance of VCS Repository
+
+        :param kwargs:
+        """
+        from rhodecode.model.gist import GistModel
+        full_repo_path = os.path.join(self.base_path(), self.gist_access_id)
+        return get_vcs_instance(
+            repo_path=safe_str(full_repo_path), create=False,
+            _vcs_alias=GistModel.vcs_backend)
+
+
+class ExternalIdentity(Base, BaseModel):
+    __tablename__ = 'external_identities'
+    __table_args__ = (
+        Index('local_user_id_idx', 'local_user_id'),
+        Index('external_id_idx', 'external_id'),
+        base_table_args
+    )
+
+    external_id = Column('external_id', Unicode(255), default=u'', primary_key=True)
+    external_username = Column('external_username', Unicode(1024), default=u'')
+    local_user_id = Column('local_user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
+    provider_name = Column('provider_name', Unicode(255), default=u'', primary_key=True)
+    access_token = Column('access_token', String(1024), default=u'')
+    alt_token = Column('alt_token', String(1024), default=u'')
+    token_secret = Column('token_secret', String(1024), default=u'')
+
+    @classmethod
+    def by_external_id_and_provider(cls, external_id, provider_name, local_user_id=None):
+        """
+        Returns ExternalIdentity instance based on search params
+
+        :param external_id:
+        :param provider_name:
+        :return: ExternalIdentity
+        """
+        query = cls.query()
+        query = query.filter(cls.external_id == external_id)
+        query = query.filter(cls.provider_name == provider_name)
+        if local_user_id:
+            query = query.filter(cls.local_user_id == local_user_id)
+        return query.first()
+
+    @classmethod
+    def user_by_external_id_and_provider(cls, external_id, provider_name):
+        """
+        Returns User instance based on search params
+
+        :param external_id:
+        :param provider_name:
+        :return: User
+        """
+        query = User.query()
+        query = query.filter(cls.external_id == external_id)
+        query = query.filter(cls.provider_name == provider_name)
+        query = query.filter(User.user_id == cls.local_user_id)
+        return query.first()
+
+    @classmethod
+    def by_local_user_id(cls, local_user_id):
+        """
+        Returns all tokens for user
+
+        :param local_user_id:
+        :return: ExternalIdentity
+        """
+        query = cls.query()
+        query = query.filter(cls.local_user_id == local_user_id)
+        return query
+
+    @classmethod
+    def load_provider_plugin(cls, plugin_id):
+        from rhodecode.authentication.base import loadplugin
+        _plugin_id = 'egg:rhodecode-enterprise-ee#{}'.format(plugin_id)
+        auth_plugin = loadplugin(_plugin_id)
+        return auth_plugin
+
+
+class Integration(Base, BaseModel):
+    __tablename__ = 'integrations'
+    __table_args__ = (
+        base_table_args
+    )
+
+    integration_id = Column('integration_id', Integer(), primary_key=True)
+    integration_type = Column('integration_type', String(255))
+    enabled = Column('enabled', Boolean(), nullable=False)
+    name = Column('name', String(255), nullable=False)
+    child_repos_only = Column('child_repos_only', Boolean(), nullable=False,
+                              default=False)
+
+    settings = Column(
+        'settings_json', MutationObj.as_mutable(
+            JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
+    repo_id = Column(
+        'repo_id', Integer(), ForeignKey('repositories.repo_id'),
+        nullable=True, unique=None, default=None)
+    repo = relationship('Repository', lazy='joined')
+
+    repo_group_id = Column(
+        'repo_group_id', Integer(), ForeignKey('groups.group_id'),
+        nullable=True, unique=None, default=None)
+    repo_group = relationship('RepoGroup', lazy='joined')
+
+    @property
+    def scope(self):
+        if self.repo:
+            return repr(self.repo)
+        if self.repo_group:
+            if self.child_repos_only:
+                return repr(self.repo_group) + ' (child repos only)'
+            else:
+                return repr(self.repo_group) + ' (recursive)'
+        if self.child_repos_only:
+            return 'root_repos'
+        return 'global'
+
+    def __repr__(self):
+        return '<Integration(%r, %r)>' % (self.integration_type, self.scope)
+
+
+class RepoReviewRuleUser(Base, BaseModel):
+    __tablename__ = 'repo_review_rules_users'
+    __table_args__ = (
+        base_table_args
+    )
+
+    repo_review_rule_user_id = Column('repo_review_rule_user_id', Integer(), primary_key=True)
+    repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
+    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False)
+    mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
+    user = relationship('User')
+
+    def rule_data(self):
+        return {
+            'mandatory': self.mandatory
+        }
+
+
+class RepoReviewRuleUserGroup(Base, BaseModel):
+    __tablename__ = 'repo_review_rules_users_groups'
+    __table_args__ = (
+        base_table_args
+    )
+
+    VOTE_RULE_ALL = -1
+
+    repo_review_rule_users_group_id = Column('repo_review_rule_users_group_id', Integer(), primary_key=True)
+    repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
+    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False)
+    mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
+    vote_rule = Column("vote_rule", Integer(), nullable=True, default=VOTE_RULE_ALL)
+    users_group = relationship('UserGroup')
+
+    def rule_data(self):
+        return {
+            'mandatory': self.mandatory,
+            'vote_rule': self.vote_rule
+        }
+
+    @property
+    def vote_rule_label(self):
+        if not self.vote_rule or self.vote_rule == self.VOTE_RULE_ALL:
+            return 'all must vote'
+        else:
+            return 'min. vote {}'.format(self.vote_rule)
+
+
+class RepoReviewRule(Base, BaseModel):
+    __tablename__ = 'repo_review_rules'
+    __table_args__ = (
+        base_table_args
+    )
+
+    repo_review_rule_id = Column(
+        'repo_review_rule_id', Integer(), primary_key=True)
+    repo_id = Column(
+        "repo_id", Integer(), ForeignKey('repositories.repo_id'))
+    repo = relationship('Repository', backref='review_rules')
+
+    review_rule_name = Column('review_rule_name', String(255))
+    _branch_pattern = Column("branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*')  # glob
+    _target_branch_pattern = Column("target_branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*')  # glob
+    _file_pattern = Column("file_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*')  # glob
+
+    use_authors_for_review = Column("use_authors_for_review", Boolean(), nullable=False, default=False)
+    forbid_author_to_review = Column("forbid_author_to_review", Boolean(), nullable=False, default=False)
+    forbid_commit_author_to_review = Column("forbid_commit_author_to_review", Boolean(), nullable=False, default=False)
+    forbid_adding_reviewers = Column("forbid_adding_reviewers", Boolean(), nullable=False, default=False)
+
+    rule_users = relationship('RepoReviewRuleUser')
+    rule_user_groups = relationship('RepoReviewRuleUserGroup')
+
+    def _validate_pattern(self, value):
+        re.compile('^' + glob2re(value) + '$')
+
+    @hybrid_property
+    def source_branch_pattern(self):
+        return self._branch_pattern or '*'
+
+    @source_branch_pattern.setter
+    def source_branch_pattern(self, value):
+        self._validate_pattern(value)
+        self._branch_pattern = value or '*'
+
+    @hybrid_property
+    def target_branch_pattern(self):
+        return self._target_branch_pattern or '*'
+
+    @target_branch_pattern.setter
+    def target_branch_pattern(self, value):
+        self._validate_pattern(value)
+        self._target_branch_pattern = value or '*'
+
+    @hybrid_property
+    def file_pattern(self):
+        return self._file_pattern or '*'
+
+    @file_pattern.setter
+    def file_pattern(self, value):
+        self._validate_pattern(value)
+        self._file_pattern = value or '*'
+
+    def matches(self, source_branch, target_branch, files_changed):
+        """
+        Check if this review rule matches a branch/files in a pull request
+
+        :param source_branch: source branch name for the commit
+        :param target_branch: target branch name for the commit
+        :param files_changed: list of file paths changed in the pull request
+        """
+
+        source_branch = source_branch or ''
+        target_branch = target_branch or ''
+        files_changed = files_changed or []
+
+        branch_matches = True
+        if source_branch or target_branch:
+            if self.source_branch_pattern == '*':
+                source_branch_match = True
+            else:
+                if self.source_branch_pattern.startswith('re:'):
+                    source_pattern = self.source_branch_pattern[3:]
+                else:
+                    source_pattern = '^' + glob2re(self.source_branch_pattern) + '$'
+                source_branch_regex = re.compile(source_pattern)
+                source_branch_match = bool(source_branch_regex.search(source_branch))
+            if self.target_branch_pattern == '*':
+                target_branch_match = True
+            else:
+                if self.target_branch_pattern.startswith('re:'):
+                    target_pattern = self.target_branch_pattern[3:]
+                else:
+                    target_pattern = '^' + glob2re(self.target_branch_pattern) + '$'
+                target_branch_regex = re.compile(target_pattern)
+                target_branch_match = bool(target_branch_regex.search(target_branch))
+
+            branch_matches = source_branch_match and target_branch_match
+
+        files_matches = True
+        if self.file_pattern != '*':
+            files_matches = False
+            if self.file_pattern.startswith('re:'):
+                file_pattern = self.file_pattern[3:]
+            else:
+                file_pattern = glob2re(self.file_pattern)
+            file_regex = re.compile(file_pattern)
+            for file_data in files_changed:
+                filename = file_data.get('filename')
+
+                if file_regex.search(filename):
+                    files_matches = True
+                    break
+
+        return branch_matches and files_matches
+
+    @property
+    def review_users(self):
+        """ Returns the users which this rule applies to """
+
+        users = collections.OrderedDict()
+
+        for rule_user in self.rule_users:
+            if rule_user.user.active:
+                if rule_user.user not in users:
+                    users[rule_user.user.username] = {
+                        'user': rule_user.user,
+                        'source': 'user',
+                        'source_data': {},
+                        'data': rule_user.rule_data()
+                    }
+
+        for rule_user_group in self.rule_user_groups:
+            source_data = {
+                'user_group_id': rule_user_group.users_group.users_group_id,
+                'name': rule_user_group.users_group.users_group_name,
+                'members': len(rule_user_group.users_group.members)
+            }
+            for member in rule_user_group.users_group.members:
+                if member.user.active:
+                    key = member.user.username
+                    if key in users:
+                        # skip this member, as we have them already;
+                        # this prevents overriding the "first" matched user
+                        # with duplicates coming from multiple groups
+                        continue
+
+                    users[key] = {
+                        'user': member.user,
+                        'source': 'user_group',
+                        'source_data': source_data,
+                        'data': rule_user_group.rule_data()
+                    }
+
+        return users
+
+    def user_group_vote_rule(self, user_id):
+
+        rules = []
+        if not self.rule_user_groups:
+            return rules
+
+        for user_group in self.rule_user_groups:
+            user_group_members = [x.user_id for x in user_group.users_group.members]
+            if user_id in user_group_members:
+                rules.append(user_group)
+        return rules
+
+    def __repr__(self):
+        return '<RepoReviewerRule(id=%r, repo=%r)>' % (
+            self.repo_review_rule_id, self.repo)
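+
+# Pattern-matching sketch (illustrative; the rule values are hypothetical):
+# branch and file patterns are globs unless prefixed with 're:', in which case
+# the remainder is used as a raw regular expression:
+#
+#     rule.source_branch_pattern = 'feature/*'           # glob, via glob2re()
+#     rule.target_branch_pattern = 're:^(main|master)$'  # raw regex
+#     rule.matches('feature/login', 'main',
+#                  [{'filename': 'src/auth.py'}])
+#     # should be True, assuming the default '*' file pattern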
+
+
+class ScheduleEntry(Base, BaseModel):
+    __tablename__ = 'schedule_entries'
+    __table_args__ = (
+        UniqueConstraint('schedule_name', name='s_schedule_name_idx'),
+        UniqueConstraint('task_uid', name='s_task_uid_idx'),
+        base_table_args,
+    )
+
+    schedule_types = ['crontab', 'timedelta', 'integer']
+    schedule_entry_id = Column('schedule_entry_id', Integer(), primary_key=True)
+
+    schedule_name = Column("schedule_name", String(255), nullable=False, unique=None, default=None)
+    schedule_description = Column("schedule_description", String(10000), nullable=True, unique=None, default=None)
+    schedule_enabled = Column("schedule_enabled", Boolean(), nullable=False, unique=None, default=True)
+
+    _schedule_type = Column("schedule_type", String(255), nullable=False, unique=None, default=None)
+    schedule_definition = Column('schedule_definition_json', MutationObj.as_mutable(JsonType(default=lambda: "", dialect_map=dict(mysql=LONGTEXT()))))
+
+    schedule_last_run = Column('schedule_last_run', DateTime(timezone=False), nullable=True, unique=None, default=None)
+    schedule_total_run_count = Column('schedule_total_run_count', Integer(), nullable=True, unique=None, default=0)
+
+    # task
+    task_uid = Column("task_uid", String(255), nullable=False, unique=None, default=None)
+    task_dot_notation = Column("task_dot_notation", String(4096), nullable=False, unique=None, default=None)
+    task_args = Column('task_args_json', MutationObj.as_mutable(JsonType(default=list, dialect_map=dict(mysql=LONGTEXT()))))
+    task_kwargs = Column('task_kwargs_json', MutationObj.as_mutable(JsonType(default=dict, dialect_map=dict(mysql=LONGTEXT()))))
+
+    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
+    updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=None)
+
+    @hybrid_property
+    def schedule_type(self):
+        return self._schedule_type
+
+    @schedule_type.setter
+    def schedule_type(self, val):
+        if val not in self.schedule_types:
+            raise ValueError('Value must be one of `{}` and got `{}`'.format(
+                self.schedule_types, val))
+
+        self._schedule_type = val
+
+    @classmethod
+    def get_uid(cls, obj):
+        args = obj.task_args
+        kwargs = obj.task_kwargs
+        if isinstance(args, JsonRaw):
+            try:
+                args = json.loads(args)
+            except ValueError:
+                args = tuple()
+
+        if isinstance(kwargs, JsonRaw):
+            try:
+                kwargs = json.loads(kwargs)
+            except ValueError:
+                kwargs = dict()
+
+        dot_notation = obj.task_dot_notation
+        val = '.'.join(map(safe_str, [
+            sorted(dot_notation), args, sorted(kwargs.items())]))
+        return hashlib.sha1(val).hexdigest()
+
+    @classmethod
+    def get_by_schedule_name(cls, schedule_name):
+        return cls.query().filter(cls.schedule_name == schedule_name).scalar()
+
+    @classmethod
+    def get_by_schedule_id(cls, schedule_id):
+        return cls.query().filter(cls.schedule_entry_id == schedule_id).scalar()
+
+    @property
+    def task(self):
+        return self.task_dot_notation
+
+    @property
+    def schedule(self):
+        from rhodecode.lib.celerylib.utils import raw_2_schedule
+        schedule = raw_2_schedule(self.schedule_definition, self.schedule_type)
+        return schedule
+
+    @property
+    def args(self):
+        try:
+            return list(self.task_args or [])
+        except ValueError:
+            return list()
+
+    @property
+    def kwargs(self):
+        try:
+            return dict(self.task_kwargs or {})
+        except ValueError:
+            return dict()
+
+    def _as_raw(self, val):
+        if hasattr(val, 'de_coerce'):
+            val = val.de_coerce()
+            if val:
+                val = json.dumps(val)
+
+        return val
+
+    @property
+    def schedule_definition_raw(self):
+        return self._as_raw(self.schedule_definition)
+
+    @property
+    def args_raw(self):
+        return self._as_raw(self.task_args)
+
+    @property
+    def kwargs_raw(self):
+        return self._as_raw(self.task_kwargs)
+
+    def __repr__(self):
+        return '<DB:ScheduleEntry({}:{})>'.format(
+            self.schedule_entry_id, self.schedule_name)
+
+
+@event.listens_for(ScheduleEntry, 'before_update')
+def update_task_uid(mapper, connection, target):
+    target.task_uid = ScheduleEntry.get_uid(target)
+
+
+@event.listens_for(ScheduleEntry, 'before_insert')
+def set_task_uid(mapper, connection, target):
+    target.task_uid = ScheduleEntry.get_uid(target)
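+
+# Sketch of the task-uid mechanics above (illustrative; the task path and its
+# arguments are made-up values): the before_insert/before_update listeners
+# derive task_uid from the task definition, so equal definitions hash to the
+# same uid and the s_task_uid_idx constraint prevents duplicates:
+#
+#     entry = ScheduleEntry()
+#     entry.task_dot_notation = 'myapp.tasks.send_report'
+#     entry.task_args = ['admin@example.com']
+#     entry.task_kwargs = {'subject': 'weekly'}
+#     # on flush, set_task_uid() fills entry.task_uid via ScheduleEntry.get_uid()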
+
+
+class _BaseBranchPerms(BaseModel):
+    @classmethod
+    def compute_hash(cls, value):
+        return sha1_safe(value)
+
+    @hybrid_property
+    def branch_pattern(self):
+        return self._branch_pattern or '*'
+
+    @hybrid_property
+    def branch_hash(self):
+        return self._branch_hash
+
+    def _validate_glob(self, value):
+        re.compile('^' + glob2re(value) + '$')
+
+    @branch_pattern.setter
+    def branch_pattern(self, value):
+        self._validate_glob(value)
+        self._branch_pattern = value or '*'
+        # set the hash when setting the branch pattern
+        self._branch_hash = self.compute_hash(self._branch_pattern)
+
+    def matches(self, branch):
+        """
+        Check if the given branch matches this entry's pattern
+
+        :param branch: branch name for the commit
+        """
+
+        branch = branch or ''
+
+        branch_matches = True
+        if branch:
+            branch_regex = re.compile('^' + glob2re(self.branch_pattern) + '$')
+            branch_matches = bool(branch_regex.search(branch))
+
+        return branch_matches
+
+
+class UserToRepoBranchPermission(Base, _BaseBranchPerms):
+    __tablename__ = 'user_to_repo_branch_permissions'
+    __table_args__ = (
+        base_table_args
+    )
+
+    branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True)
+
+    repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
+    repo = relationship('Repository', backref='user_branch_perms')
+
+    permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
+    permission = relationship('Permission')
+
+    rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('repo_to_perm.repo_to_perm_id'), nullable=False, unique=None, default=None)
+    user_repo_to_perm = relationship('UserRepoToPerm')
+
+    rule_order = Column('rule_order', Integer(), nullable=False)
+    _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default=u'*')  # glob
+    _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql'))
+
+    def __unicode__(self):
+        return u'<UserBranchPermission(%s => %r)>' % (
+            self.user_repo_to_perm, self.branch_pattern)
+
+
+class UserGroupToRepoBranchPermission(Base, _BaseBranchPerms):
+    __tablename__ = 'user_group_to_repo_branch_permissions'
+    __table_args__ = (
+        base_table_args
+    )
+
+    branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True)
+
+    repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
+    repo = relationship('Repository', backref='user_group_branch_perms')
+
+    permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
+    permission = relationship('Permission')
+
+    rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('users_group_repo_to_perm.users_group_to_perm_id'), nullable=False, unique=None, default=None)
+    user_group_repo_to_perm = relationship('UserGroupRepoToPerm')
+
+    rule_order = Column('rule_order', Integer(), nullable=False)
+    _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default=u'*')  # glob
+    _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql'))
+
+    def __unicode__(self):
+        return u'<UserGroupBranchPermission(%s => %r)>' % (
+            self.user_group_repo_to_perm, self.branch_pattern)
+
+
+class UserBookmark(Base, BaseModel):
+    __tablename__ = 'user_bookmarks'
+    __table_args__ = (
+        UniqueConstraint('user_id', 'bookmark_repo_id'),
+        UniqueConstraint('user_id', 'bookmark_repo_group_id'),
+        UniqueConstraint('user_id', 'bookmark_position'),
+        base_table_args
+    )
+
+    user_bookmark_id = Column("user_bookmark_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
+    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
+    position = Column("bookmark_position", Integer(), nullable=False)
+    title = Column("bookmark_title", String(255), nullable=True, unique=None, default=None)
+    redirect_url = Column("bookmark_redirect_url", String(10240), nullable=True, unique=None, default=None)
+    created_on = Column("created_on", DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
+
+    bookmark_repo_id = Column("bookmark_repo_id", Integer(), ForeignKey("repositories.repo_id"), nullable=True, unique=None, default=None)
+    bookmark_repo_group_id = Column("bookmark_repo_group_id", Integer(), ForeignKey("groups.group_id"), nullable=True, unique=None, default=None)
+
+    user = relationship("User")
+
+    repository = relationship("Repository")
+    repository_group = relationship("RepoGroup")
+
+    @classmethod
+    def get_by_position_for_user(cls, position, user_id):
+        return cls.query() \
+            .filter(UserBookmark.user_id == user_id) \
+            .filter(UserBookmark.position == position).scalar()
+
+    @classmethod
+    def get_bookmarks_for_user(cls, user_id, cache=True):
+        bookmarks = cls.query() \
+            .filter(UserBookmark.user_id == user_id) \
+            .options(joinedload(UserBookmark.repository)) \
+            .options(joinedload(UserBookmark.repository_group)) \
+            .order_by(UserBookmark.position.asc())
+
+        if cache:
+            bookmarks = bookmarks.options(
+                FromCache("sql_cache_short", "get_user_{}_bookmarks".format(user_id))
+            )
+
+        return bookmarks.all()
+
+    def __unicode__(self):
+        return u'<UserBookmark(%s @ %r)>' % (self.position, self.redirect_url)
+
+
+class FileStore(Base, BaseModel):
+    __tablename__ = 'file_store'
+    __table_args__ = (
+        base_table_args
+    )
+
+    file_store_id = Column('file_store_id', Integer(), primary_key=True)
+    file_uid = Column('file_uid', String(1024), nullable=False)
+    file_display_name = Column('file_display_name', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), nullable=True)
+    file_description = Column('file_description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=True)
+    file_org_name = Column('file_org_name', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=False)
+
+    # sha256 hash
+    file_hash = Column('file_hash', String(512), nullable=False)
+    file_size = Column('file_size', BigInteger(), nullable=False)
+
+    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
+    accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True)
+    accessed_count = Column('accessed_count', Integer(), default=0)
+
+    enabled = Column('enabled', Boolean(), nullable=False, default=True)
+
+    # if repo/repo_group reference is set, check for permissions
+    check_acl = Column('check_acl', Boolean(), nullable=False, default=True)
+
+    # hidden defines an attachment that should be hidden from showing in artifact listing
+    hidden = Column('hidden', Boolean(), nullable=False, default=False)
+
+    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
+    upload_user = relationship('User', lazy='joined', primaryjoin='User.user_id==FileStore.user_id')
+
+    file_metadata = relationship('FileStoreMetadata', lazy='joined')
+
+    # scope limited to user, which requester has access to
+    scope_user_id = Column(
+        'scope_user_id', Integer(), ForeignKey('users.user_id'),
+        nullable=True, unique=None, default=None)
+    user = relationship('User', lazy='joined', primaryjoin='User.user_id==FileStore.scope_user_id')
+
+    # scope limited to user group, which requester has access to
+    scope_user_group_id = Column(
+        'scope_user_group_id', Integer(), ForeignKey('users_groups.users_group_id'),
+        nullable=True, unique=None, default=None)
+    user_group = relationship('UserGroup', lazy='joined')
+
+    # scope limited to repo, which requester has access to
+    scope_repo_id = Column(
+        'scope_repo_id', Integer(), ForeignKey('repositories.repo_id'),
+        nullable=True, unique=None, default=None)
+    repo = relationship('Repository', lazy='joined')
+
+    # scope limited to repo group, which requester has access to
+    scope_repo_group_id = Column(
+        'scope_repo_group_id', Integer(), ForeignKey('groups.group_id'),
+        nullable=True, unique=None, default=None)
+    repo_group = relationship('RepoGroup', lazy='joined')
+
+    @classmethod
+    def get_by_store_uid(cls, file_store_uid):
+        return FileStore.query().filter(FileStore.file_uid == file_store_uid).scalar()
+
+    @classmethod
+    def create(cls, file_uid, filename, file_hash, file_size, file_display_name='',
+               file_description='', enabled=True, hidden=False, check_acl=True,
+               user_id=None, scope_user_id=None, scope_repo_id=None, scope_repo_group_id=None):
+
+        store_entry = FileStore()
+        store_entry.file_uid = file_uid
+        store_entry.file_display_name = file_display_name
+        store_entry.file_org_name = filename
+        store_entry.file_size = file_size
+        store_entry.file_hash = file_hash
+        store_entry.file_description = file_description
+
+        store_entry.check_acl = check_acl
+        store_entry.enabled = enabled
+        store_entry.hidden = hidden
+
+        store_entry.user_id = user_id
+        store_entry.scope_user_id = scope_user_id
+        store_entry.scope_repo_id = scope_repo_id
+        store_entry.scope_repo_group_id = scope_repo_group_id
+
+        return store_entry
+
+    @classmethod
+    def store_metadata(cls, file_store_id, args, commit=True):
+        file_store = FileStore.get(file_store_id)
+        if file_store is None:
+            return
+
+        for section, key, value, value_type in args:
+            has_key = FileStoreMetadata().query() \
+                .filter(FileStoreMetadata.file_store_id == file_store.file_store_id) \
+                .filter(FileStoreMetadata.file_store_meta_section == section) \
+                .filter(FileStoreMetadata.file_store_meta_key == key) \
+                .scalar()
+            if has_key:
+                msg = 'key `{}` already defined under section `{}` for this file.'\
+                    .format(key, section)
+                raise ArtifactMetadataDuplicate(msg, err_section=section, err_key=key)
+
+            # NOTE(marcink): raises ArtifactMetadataBadValueType
+            FileStoreMetadata.valid_value_type(value_type)
+
+            meta_entry = FileStoreMetadata()
+            meta_entry.file_store = file_store
+            meta_entry.file_store_meta_section = section
+            meta_entry.file_store_meta_key = key
+            meta_entry.file_store_meta_value_type = value_type
+            meta_entry.file_store_meta_value = value
+
+            Session().add(meta_entry)
+
+        try:
+            if commit:
+                Session().commit()
+        except IntegrityError:
+            Session().rollback()
+            raise ArtifactMetadataDuplicate('Duplicate section/key found for this file.')
+
+    @classmethod
+    def bump_access_counter(cls, file_uid, commit=True):
+        FileStore().query()\
+            .filter(FileStore.file_uid == file_uid)\
+            .update({FileStore.accessed_count: (FileStore.accessed_count + 1),
+                     FileStore.accessed_on: datetime.datetime.now()})
+        if commit:
+            Session().commit()
+
+    def __json__(self):
+        data = {
+            'filename': self.file_display_name,
+            'filename_org': self.file_org_name,
+            'file_uid': self.file_uid,
+            'description': self.file_description,
+            'hidden': self.hidden,
+            'size': self.file_size,
+            'created_on': self.created_on,
+            'uploaded_by': self.upload_user.get_api_data(details='basic'),
+            'downloaded_times': self.accessed_count,
+            'sha256': self.file_hash,
+            'metadata': self.file_metadata,
+        }
+
+        return data
+
+    def __repr__(self):
+        return '<FileStore({})>'.format(self.file_store_id)
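+
+# Usage sketch for the metadata API above (illustrative; the uid, hash and the
+# ('tags', 'build', ...) tuple are made-up values): `args` is an iterable of
+# (section, key, value, value_type) tuples:
+#
+#     entry = FileStore.create(
+#         file_uid='abcd-1234', filename='report.pdf',
+#         file_hash='e3b0c442...', file_size=1024, user_id=1)
+#     Session().add(entry)
+#     Session().commit()
+#     FileStore.store_metadata(
+#         entry.file_store_id, [('tags', 'build', 'nightly', 'unicode')])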
"file_store_meta_section", UnicodeText().with_variant(UnicodeText(1024), 'mysql'), + nullable=True, unique=None, default=None) + _file_store_meta_section_hash = Column( + "file_store_meta_section_hash", String(255), + nullable=True, unique=None, default=None) + _file_store_meta_key = Column( + "file_store_meta_key", UnicodeText().with_variant(UnicodeText(1024), 'mysql'), + nullable=True, unique=None, default=None) + _file_store_meta_key_hash = Column( + "file_store_meta_key_hash", String(255), nullable=True, unique=None, default=None) + _file_store_meta_value = Column( + "file_store_meta_value", UnicodeText().with_variant(UnicodeText(20480), 'mysql'), + nullable=True, unique=None, default=None) + _file_store_meta_value_type = Column( + "file_store_meta_value_type", String(255), nullable=True, unique=None, + default='unicode') + + file_store_id = Column( + 'file_store_id', Integer(), ForeignKey('file_store.file_store_id'), + nullable=True, unique=None, default=None) + + file_store = relationship('FileStore', lazy='joined') + + @classmethod + def valid_value_type(cls, value): + if value.split('.')[0] not in cls.SETTINGS_TYPES: + raise ArtifactMetadataBadValueType( + 'value_type must be one of %s got %s' % (cls.SETTINGS_TYPES.keys(), value)) + + @hybrid_property + def file_store_meta_section(self): + return self._file_store_meta_section + + @file_store_meta_section.setter + def file_store_meta_section(self, value): + self._file_store_meta_section = value + self._file_store_meta_section_hash = _hash_key(value) + + @hybrid_property + def file_store_meta_key(self): + return self._file_store_meta_key + + @file_store_meta_key.setter + def file_store_meta_key(self, value): + self._file_store_meta_key = value + self._file_store_meta_key_hash = _hash_key(value) + + @hybrid_property + def file_store_meta_value(self): + val = self._file_store_meta_value + + if self._file_store_meta_value_type: + # e.g unicode.encrypted == unicode + _type = self._file_store_meta_value_type.split('.')[0] + # decode the encrypted value if it's encrypted field type + if '.encrypted' in self._file_store_meta_value_type: + cipher = EncryptedTextValue() + val = safe_unicode(cipher.process_result_value(val, None)) + # do final type conversion + converter = self.SETTINGS_TYPES.get(_type) or self.SETTINGS_TYPES['unicode'] + val = converter(val) + + return val + + @file_store_meta_value.setter + def file_store_meta_value(self, val): + val = safe_unicode(val) + # encode the encrypted value + if '.encrypted' in self.file_store_meta_value_type: + cipher = EncryptedTextValue() + val = safe_unicode(cipher.process_bind_param(val, None)) + self._file_store_meta_value = val + + @hybrid_property + def file_store_meta_value_type(self): + return self._file_store_meta_value_type + + @file_store_meta_value_type.setter + def file_store_meta_value_type(self, val): + # e.g unicode.encrypted + self.valid_value_type(val) + self._file_store_meta_value_type = val + + def __json__(self): + data = { + 'artifact': self.file_store.file_uid, + 'section': self.file_store_meta_section, + 'key': self.file_store_meta_key, + 'value': self.file_store_meta_value, + } + + return data + + def __repr__(self): + return '<%s[%s]%s=>%s]>' % (self.__class__.__name__, self.file_store_meta_section, + self.file_store_meta_key, self.file_store_meta_value) + + +class DbMigrateVersion(Base, BaseModel): + __tablename__ = 'db_migrate_version' + __table_args__ = ( + base_table_args, + ) + + repository_id = Column('repository_id', String(250), primary_key=True) + 
+    repository_path = Column('repository_path', Text)
+    version = Column('version', Integer)
+
+    @classmethod
+    def set_version(cls, version):
+        """
+        Helper for forcing a different version, usually for debugging purposes via ishell.
+        """
+        ver = DbMigrateVersion.query().first()
+        ver.version = version
+        Session().commit()
+
+
+class DbSession(Base, BaseModel):
+    __tablename__ = 'db_session'
+    __table_args__ = (
+        base_table_args,
+    )
+
+    def __repr__(self):
+        return '<DB:DbSession({})>'.format(self.id)
+
+    id = Column('id', Integer())
+    namespace = Column('namespace', String(255), primary_key=True)
+    accessed = Column('accessed', DateTime, nullable=False)
+    created = Column('created', DateTime, nullable=False)
+    data = Column('data', PickleType, nullable=False)
diff --git a/rhodecode/lib/dbmigrate/versions/109_version_4_21_1.py b/rhodecode/lib/dbmigrate/versions/109_version_4_21_1.py
new file mode 100644
--- /dev/null
+++ b/rhodecode/lib/dbmigrate/versions/109_version_4_21_1.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+
+import logging
+from sqlalchemy import *
+
+from alembic.migration import MigrationContext
+from alembic.operations import Operations
+
+from rhodecode.lib.dbmigrate.versions import _reset_base
+from rhodecode.model import meta, init_model_encryption
+
+
+log = logging.getLogger(__name__)
+
+
+def upgrade(migrate_engine):
+    """
+    Upgrade operations go here.
+    Don't create your own engine; bind migrate_engine to your metadata
+    """
+    _reset_base(migrate_engine)
+    from rhodecode.lib.dbmigrate.schema import db_4_20_0_0 as db
+
+    init_model_encryption(db)
+
+    context = MigrationContext.configure(migrate_engine.connect())
+    op = Operations(context)
+
+    table = db.PullRequestReviewers.__table__
+    with op.batch_alter_table(table.name) as batch_op:
+        new_column = Column('role', Unicode(255), nullable=True)
+        batch_op.add_column(new_column)
+
+    _fill_reviewers_role(db, op, meta.Session)
+
+
+def downgrade(migrate_engine):
+    meta = MetaData()
+    meta.bind = migrate_engine
+
+
+def fixups(models, _SESSION):
+    pass
+
+
+def _fill_reviewers_role(models, op, session):
+    params = {'role': 'reviewer'}
+    query = text(
+        'UPDATE pull_request_reviewers SET role = :role'
+    ).bindparams(**params)
+    op.execute(query)
+    session().commit()
diff --git a/rhodecode/lib/helpers.py b/rhodecode/lib/helpers.py
--- a/rhodecode/lib/helpers.py
+++ b/rhodecode/lib/helpers.py
@@ -90,7 +90,7 @@ from rhodecode.lib.vcs.conf.settings imp
 from rhodecode.lib.index.search_utils import get_matching_line_offsets
 from rhodecode.config.conf import DATE_FORMAT, DATETIME_FORMAT
 from rhodecode.model.changeset_status import ChangesetStatusModel
-from rhodecode.model.db import Permission, User, Repository, UserApiKeys
+from rhodecode.model.db import Permission, User, Repository, UserApiKeys, FileStore
 from rhodecode.model.repo_group import RepoGroupModel
 from rhodecode.model.settings import IssueTrackerSettingsModel
@@ -810,8 +810,7 @@
 import tzlocal
 local_timezone = tzlocal.get_localzone()
 
-def age_component(datetime_iso, value=None, time_is_local=False, tooltip=True):
-    title = value or format_date(datetime_iso)
+def get_timezone(datetime_iso, time_is_local=False):
     tzinfo = '+00:00'
 
     # detect if we have a timezone info, otherwise, add it
@@ -822,6 +821,12 @@ def age_component(datetime_iso, value=No
         timezone = force_timezone or local_timezone
         offset = timezone.localize(datetime_iso).strftime('%z')
         tzinfo = '{}:{}'.format(offset[:-2], offset[-2:])
+    return tzinfo
+
+
+def age_component(datetime_iso, value=None, time_is_local=False,
tooltip=True): + title = value or format_date(datetime_iso) + tzinfo = get_timezone(datetime_iso, time_is_local=time_is_local) return literal( ''.format( @@ -1357,20 +1362,76 @@ class InitialsGravatar(object): return "data:image/svg+xml;base64,%s" % base64.b64encode(img_data) -def initials_gravatar(email_address, first_name, last_name, size=30): +def initials_gravatar(request, email_address, first_name, last_name, size=30, store_on_disk=False): + svg_type = None if email_address == User.DEFAULT_USER_EMAIL: svg_type = 'default_user' + klass = InitialsGravatar(email_address, first_name, last_name, size) - return klass.generate_svg(svg_type=svg_type) + + if store_on_disk: + from rhodecode.apps.file_store import utils as store_utils + from rhodecode.apps.file_store.exceptions import FileNotAllowedException, \ + FileOverSizeException + from rhodecode.model.db import Session + + image_key = md5_safe(email_address.lower() + + first_name.lower() + last_name.lower()) + + storage = store_utils.get_file_storage(request.registry.settings) + filename = '{}.svg'.format(image_key) + subdir = 'gravatars' + # since final name has a counter, we apply the 0 + uid = storage.apply_counter(0, store_utils.uid_filename(filename, randomized=False)) + store_uid = os.path.join(subdir, uid) + + db_entry = FileStore.get_by_store_uid(store_uid) + if db_entry: + return request.route_path('download_file', fid=store_uid) + + img_data = klass.get_img_data(svg_type=svg_type) + img_file = store_utils.bytes_to_file_obj(img_data) + + try: + store_uid, metadata = storage.save_file( + img_file, filename, directory=subdir, + extensions=['.svg'], randomized_name=False) + except (FileNotAllowedException, FileOverSizeException): + raise + + try: + entry = FileStore.create( + file_uid=store_uid, filename=metadata["filename"], + file_hash=metadata["sha256"], file_size=metadata["size"], + file_display_name=filename, + file_description=u'user gravatar `{}`'.format(safe_unicode(filename)), + hidden=True, check_acl=False, user_id=1 + ) + Session().add(entry) + Session().commit() + log.debug('Stored upload in DB as %s', entry) + except Exception: + raise + + return request.route_path('download_file', fid=store_uid) + + else: + return klass.generate_svg(svg_type=svg_type) + + +def gravatar_external(request, gravatar_url_tmpl, email_address, size=30): + return safe_str(gravatar_url_tmpl)\ + .replace('{email}', email_address) \ + .replace('{md5email}', md5_safe(email_address.lower())) \ + .replace('{netloc}', request.host) \ + .replace('{scheme}', request.scheme) \ + .replace('{size}', safe_str(size)) def gravatar_url(email_address, size=30, request=None): - request = get_current_request() + request = request or get_current_request() _use_gravatar = request.call_context.visual.use_gravatar - _gravatar_url = request.call_context.visual.gravatar_url - - _gravatar_url = _gravatar_url or User.DEFAULT_GRAVATAR_URL email_address = email_address or User.DEFAULT_USER_EMAIL if isinstance(email_address, unicode): @@ -1379,21 +1440,15 @@ def gravatar_url(email_address, size=30, # empty email or default user if not email_address or email_address == User.DEFAULT_USER_EMAIL: - return initials_gravatar(User.DEFAULT_USER_EMAIL, '', '', size=size) + return initials_gravatar(request, User.DEFAULT_USER_EMAIL, '', '', size=size) if _use_gravatar: - # TODO: Disuse pyramid thread locals. Think about another solution to - # get the host and schema here. 
- request = get_current_request() - tmpl = safe_str(_gravatar_url) - tmpl = tmpl.replace('{email}', email_address)\ - .replace('{md5email}', md5_safe(email_address.lower())) \ - .replace('{netloc}', request.host)\ - .replace('{scheme}', request.scheme)\ - .replace('{size}', safe_str(size)) - return tmpl + gravatar_url_tmpl = request.call_context.visual.gravatar_url \ + or User.DEFAULT_GRAVATAR_URL + return gravatar_external(request, gravatar_url_tmpl, email_address, size=size) + else: - return initials_gravatar(email_address, '', '', size=size) + return initials_gravatar(request, email_address, '', '', size=size) def breadcrumb_repo_link(repo): @@ -1560,7 +1615,7 @@ def _process_url_func(match_obj, repo_na # named regex variables named_vars.update(match_obj.groupdict()) _url = string.Template(entry['url']).safe_substitute(**named_vars) - desc = string.Template(entry['desc']).safe_substitute(**named_vars) + desc = string.Template(escape(entry['desc'])).safe_substitute(**named_vars) hovercard_url = string.Template(entry.get('hovercard_url', '')).safe_substitute(**named_vars) def quote_cleaner(input_str): @@ -1600,17 +1655,18 @@ def get_active_pattern_entries(repo_name pr_pattern_re = re.compile(r'(?:(?:^!)|(?: !))(\d+)') +allowed_link_formats = [ + 'html', 'rst', 'markdown', 'html+hovercard', 'rst+hovercard', 'markdown+hovercard'] + def process_patterns(text_string, repo_name, link_format='html', active_entries=None): - allowed_formats = ['html', 'rst', 'markdown', - 'html+hovercard', 'rst+hovercard', 'markdown+hovercard'] - if link_format not in allowed_formats: + if link_format not in allowed_link_formats: raise ValueError('Link format can be only one of:{} got {}'.format( - allowed_formats, link_format)) + allowed_link_formats, link_format)) if active_entries is None: - log.debug('Fetch active patterns for repo: %s', repo_name) + log.debug('Fetch active issue tracker patterns for repo: %s', repo_name) active_entries = get_active_pattern_entries(repo_name) issues_data = [] @@ -1668,7 +1724,8 @@ def process_patterns(text_string, repo_n return new_text, issues_data -def urlify_commit_message(commit_text, repository=None, active_pattern_entries=None): +def urlify_commit_message(commit_text, repository=None, active_pattern_entries=None, + issues_container=None): """ Parses given text message and makes proper links. issues are linked to given issue-server, and rest is a commit link @@ -1691,6 +1748,9 @@ def urlify_commit_message(commit_text, r new_text, issues = process_patterns(new_text, repository or '', active_entries=active_pattern_entries) + if issues_container is not None: + issues_container.extend(issues) + return literal(new_text) @@ -1731,7 +1791,7 @@ def renderer_from_filename(filename, exc def render(source, renderer='rst', mentions=False, relative_urls=None, - repo_name=None, active_pattern_entries=None): + repo_name=None, active_pattern_entries=None, issues_container=None): def maybe_convert_relative_links(html_source): if relative_urls: @@ -1748,6 +1808,8 @@ def render(source, renderer='rst', menti source, issues = process_patterns( source, repo_name, link_format='rst', active_entries=active_pattern_entries) + if issues_container is not None: + issues_container.extend(issues) return literal( '
<div class="rst-block">%s</div>' %
@@ -1760,6 +1822,8 @@ def render(source, renderer='rst', menti
         source, issues = process_patterns(
             source, repo_name, link_format='markdown',
             active_entries=active_pattern_entries)
+        if issues_container is not None:
+            issues_container.extend(issues)
         return literal(
             '<div class="markdown-block">%s</div>' %
diff --git a/rhodecode/lib/middleware/vcs.py b/rhodecode/lib/middleware/vcs.py
--- a/rhodecode/lib/middleware/vcs.py
+++ b/rhodecode/lib/middleware/vcs.py
@@ -139,6 +139,18 @@ def is_vcs_call(environ):
     return False
 
 
+def get_path_elem(route_path):
+    if not route_path:
+        return None
+
+    cleaned_route_path = route_path.lstrip('/')
+    if cleaned_route_path:
+        cleaned_route_path_elems = cleaned_route_path.split('/')
+        if cleaned_route_path_elems:
+            return cleaned_route_path_elems[0]
+    return None
+
+
 def detect_vcs_request(environ, backends):
     checks = {
         'hg': (is_hg, SimpleHg),
@@ -146,6 +158,17 @@ def detect_vcs_request(environ, backends
         'svn': (is_svn, SimpleSvn),
     }
     handler = None
+    # requests whose first path chunk is listed here skip VCS detection entirely
+    white_list = [
+        # e.g /_file_store/download
+        '_file_store'
+    ]
+
+    path_info = environ['PATH_INFO']
+
+    if get_path_elem(path_info) in white_list:
+        log.debug('path `%s` in whitelist, skipping...', path_info)
+        return handler
 
     if VCS_TYPE_KEY in environ:
         raw_type = environ[VCS_TYPE_KEY]
diff --git a/rhodecode/lib/user_sessions.py b/rhodecode/lib/user_sessions.py
--- a/rhodecode/lib/user_sessions.py
+++ b/rhodecode/lib/user_sessions.py
@@ -224,7 +224,10 @@ class RedisAuthSessions(BaseAuthSessions
             data = client.get(key)
             if data:
                 json_data = pickle.loads(data)
-                accessed_time = json_data['_accessed_time']
+                try:
+                    accessed_time = json_data['_accessed_time']
+                except KeyError:
+                    accessed_time = 0
                 if accessed_time < expiry_time:
                     client.delete(key)
                     deleted_keys += 1
diff --git a/rhodecode/model/changeset_status.py b/rhodecode/model/changeset_status.py
--- a/rhodecode/model/changeset_status.py
+++ b/rhodecode/model/changeset_status.py
@@ -212,10 +212,10 @@ class ChangesetStatusModel(BaseModel):
         # TODO(marcink): with group voting, how does rejected work,
         # do we ever get rejected state ?
- if approved_votes_count == reviewers_number: + if approved_votes_count and (approved_votes_count == reviewers_number): return ChangesetStatus.STATUS_APPROVED - if rejected_votes_count == reviewers_number: + if rejected_votes_count and (rejected_votes_count == reviewers_number): return ChangesetStatus.STATUS_REJECTED return ChangesetStatus.STATUS_UNDER_REVIEW @@ -354,34 +354,37 @@ class ChangesetStatusModel(BaseModel): Session().add(new_status) return new_statuses + def aggregate_votes_by_user(self, commit_statuses, reviewers_data): + + commit_statuses_map = collections.defaultdict(list) + for st in commit_statuses: + commit_statuses_map[st.author.username] += [st] + + reviewers = [] + + def version(commit_status): + return commit_status.version + + for obj in reviewers_data: + if not obj.user: + continue + statuses = commit_statuses_map.get(obj.user.username, None) + if statuses: + status_groups = itertools.groupby( + sorted(statuses, key=version), version) + statuses = [(x, list(y)[0]) for x, y in status_groups] + + reviewers.append((obj, obj.user, obj.reasons, obj.mandatory, statuses)) + + return reviewers + def reviewers_statuses(self, pull_request): _commit_statuses = self.get_statuses( pull_request.source_repo, pull_request=pull_request, with_revisions=True) - commit_statuses = collections.defaultdict(list) - for st in _commit_statuses: - commit_statuses[st.author.username] += [st] - - pull_request_reviewers = [] - - def version(commit_status): - return commit_status.version - - for obj in pull_request.reviewers: - if not obj.user: - continue - statuses = commit_statuses.get(obj.user.username, None) - if statuses: - status_groups = itertools.groupby( - sorted(statuses, key=version), version) - statuses = [(x, list(y)[0]) for x, y in status_groups] - - pull_request_reviewers.append( - (obj, obj.user, obj.reasons, obj.mandatory, statuses)) - - return pull_request_reviewers + return self.aggregate_votes_by_user(_commit_statuses, pull_request.reviewers) def calculated_review_status(self, pull_request, reviewers_statuses=None): """ diff --git a/rhodecode/model/comment.py b/rhodecode/model/comment.py --- a/rhodecode/model/comment.py +++ b/rhodecode/model/comment.py @@ -91,8 +91,7 @@ class CommentsModel(BaseModel): # group by versions, and count until, and display objects comment_groups = collections.defaultdict(list) - [comment_groups[ - _co.pull_request_version_id].append(_co) for _co in comments] + [comment_groups[_co.pull_request_version_id].append(_co) for _co in comments] def yield_comments(pos): for co in comment_groups[pos]: @@ -229,6 +228,14 @@ class CommentsModel(BaseModel): return todos + def get_commit_inline_comments(self, commit_id): + inline_comments = Session().query(ChangesetComment) \ + .filter(ChangesetComment.line_no != None) \ + .filter(ChangesetComment.f_path != None) \ + .filter(ChangesetComment.revision == commit_id) + inline_comments = inline_comments.all() + return inline_comments + def _log_audit_action(self, action, action_data, auth_user, comment): audit_logger.store( action=action, @@ -456,38 +463,54 @@ class CommentsModel(BaseModel): else: action = 'repo.commit.comment.create' + comment_id = comment.comment_id comment_data = comment.get_api_data() + self._log_audit_action( action, {'data': comment_data}, auth_user, comment) - msg_url = '' channel = None if commit_obj: - msg_url = commit_comment_url repo_name = repo.repo_name channel = u'/repo${}$/commit/{}'.format( repo_name, commit_obj.raw_id ) elif pull_request_obj: - msg_url = pr_comment_url repo_name = 
pr_target_repo.repo_name channel = u'/repo${}$/pr/{}'.format( repo_name, - pull_request_id + pull_request_obj.pull_request_id ) - message = '{} {} - ' \ - '' \ - '{}' - message = message.format( - user.username, _('made a comment'), msg_url, - _('Show it now')) + if channel: + username = user.username + message = '{} {} #{}, {}' + message = message.format( + username, + _('posted a new comment'), + comment_id, + _('Refresh the page to see new comments.')) - channelstream.post_message( - channel, message, user.username, - registry=get_current_registry()) + message_obj = { + 'message': message, + 'level': 'success', + 'topic': '/notifications' + } + + channelstream.post_message( + channel, message_obj, user.username, + registry=get_current_registry()) + + message_obj = { + 'message': None, + 'user': username, + 'comment_id': comment_id, + 'topic': '/comment' + } + channelstream.post_message( + channel, message_obj, user.username, + registry=get_current_registry()) return comment @@ -641,16 +664,16 @@ class CommentsModel(BaseModel): q = self._get_inline_comments_query(repo_id, revision, pull_request) return self._group_comments_by_path_and_line_number(q) - def get_inline_comments_count(self, inline_comments, skip_outdated=True, - version=None): - inline_cnt = 0 + def get_inline_comments_as_list(self, inline_comments, skip_outdated=True, + version=None): + inline_comms = [] for fname, per_line_comments in inline_comments.iteritems(): for lno, comments in per_line_comments.iteritems(): for comm in comments: if not comm.outdated_at_version(version) and skip_outdated: - inline_cnt += 1 + inline_comms.append(comm) - return inline_cnt + return inline_comms def get_outdated_comments(self, repo_id, pull_request): # TODO: johbo: Remove `repo_id`, it is not needed to find the comments diff --git a/rhodecode/model/db.py b/rhodecode/model/db.py --- a/rhodecode/model/db.py +++ b/rhodecode/model/db.py @@ -3810,6 +3810,10 @@ class ChangesetComment(Base, BaseModel): return self.display_state == self.COMMENT_OUTDATED @property + def outdated_js(self): + return json.dumps(self.display_state == self.COMMENT_OUTDATED) + + @property def immutable(self): return self.immutable_state == self.OP_IMMUTABLE @@ -3817,16 +3821,35 @@ class ChangesetComment(Base, BaseModel): """ Checks if comment is outdated for given pull request version """ - return self.outdated and self.pull_request_version_id != version + def version_check(): + return self.pull_request_version_id and self.pull_request_version_id != version + + if self.is_inline: + return self.outdated and version_check() + else: + # general comments don't have .outdated set, also latest don't have a version + return version_check() + + def outdated_at_version_js(self, version): + """ + Checks if comment is outdated for given pull request version + """ + return json.dumps(self.outdated_at_version(version)) def older_than_version(self, version): """ Checks if comment is made from previous version than given """ if version is None: - return self.pull_request_version_id is not None - - return self.pull_request_version_id < version + return self.pull_request_version != version + + return self.pull_request_version < version + + def older_than_version_js(self, version): + """ + Checks if comment is made from previous version than given + """ + return json.dumps(self.older_than_version(version)) @property def commit_id(self): @@ -3843,7 +3866,9 @@ class ChangesetComment(Base, BaseModel): @property def is_inline(self): - return self.line_no and self.f_path + if self.line_no 
and self.f_path:
+            return True
+        return False
 
     @property
     def last_version(self):
@@ -3856,6 +3881,16 @@
         return self.get_index_from_version(
             self.pull_request_version_id, versions)
 
+    @property
+    def review_status(self):
+        if self.status_change:
+            return self.status_change[0].status
+
+    @property
+    def review_status_lbl(self):
+        if self.status_change:
+            return self.status_change[0].status_lbl
+
     def __repr__(self):
         if self.comment_id:
             return '<DB:Comment #%s>' % self.comment_id
@@ -4134,6 +4169,23 @@ class _PullRequestBase(BaseModel):
         return json.dumps(self.reviewer_data)
 
     @property
+    def last_merge_metadata_parsed(self):
+        metadata = {}
+        if not self.last_merge_metadata:
+            return metadata
+
+        if hasattr(self.last_merge_metadata, 'de_coerce'):
+            for k, v in self.last_merge_metadata.de_coerce().items():
+                if k in ['target_ref', 'source_ref']:
+                    metadata[k] = Reference(v['type'], v['name'], v['commit_id'])
+                else:
+                    if hasattr(v, 'de_coerce'):
+                        metadata[k] = v.de_coerce()
+                    else:
+                        metadata[k] = v
+        return metadata
+
+    @property
     def work_in_progress(self):
         """checks if pull request is work in progress by checking the title"""
         title = self.title.upper()
@@ -4306,6 +4358,7 @@ class PullRequest(Base, _PullRequestBase
     __table_args__ = (
         base_table_args,
     )
+    LATEST_VER = 'latest'
 
     pull_request_id = Column(
         'pull_request_id', Integer(), nullable=False, primary_key=True)
@@ -4364,6 +4417,10 @@ class PullRequest(Base, _PullRequestBase
         def pull_request_version_id(self):
             return getattr(pull_request_obj, 'pull_request_version_id', None)
 
+        @property
+        def pull_request_last_version(self):
+            return pull_request_obj.pull_request_last_version
+
         attrs = StrictAttributeDict(pull_request_obj.get_api_data(with_merge_state=False))
 
         attrs.author = StrictAttributeDict(
@@ -4428,6 +4485,10 @@ class PullRequest(Base, _PullRequestBase
         """
         return self.versions.count() + 1
 
+    @property
+    def pull_request_last_version(self):
+        return self.versions_count
+
 
 class PullRequestVersion(Base, _PullRequestBase):
     __tablename__ = 'pull_request_versions'
@@ -4475,6 +4536,8 @@ class PullRequestReviewers(Base, BaseMod
     __table_args__ = (
        base_table_args,
     )
+    ROLE_REVIEWER = u'reviewer'
+    ROLE_OBSERVER = u'observer'
 
     @hybrid_property
     def reasons(self):
@@ -4502,6 +4565,8 @@ class PullRequestReviewers(Base, BaseMod
             JsonType('list', dialect_map=dict(mysql=UnicodeText(16384)))))
     mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
 
+    role = Column('role', Unicode(255), nullable=True, default=ROLE_REVIEWER)
+
     user = relationship('User')
     pull_request = relationship('PullRequest')
 
@@ -5425,8 +5490,11 @@ class FileStore(Base, BaseModel):
     repo_group = relationship('RepoGroup', lazy='joined')
 
     @classmethod
-    def get_by_store_uid(cls, file_store_uid):
-        return FileStore.query().filter(FileStore.file_uid == file_store_uid).scalar()
+    def get_by_store_uid(cls, file_store_uid, safe=False):
+        if safe:
+            return FileStore.query().filter(FileStore.file_uid == file_store_uid).first()
+        else:
+            return FileStore.query().filter(FileStore.file_uid == file_store_uid).scalar()
 
     @classmethod
     def create(cls, file_uid, filename, file_hash, file_size, file_display_name='',
diff --git a/rhodecode/model/pull_request.py b/rhodecode/model/pull_request.py
--- a/rhodecode/model/pull_request.py
+++ b/rhodecode/model/pull_request.py
@@ -1600,7 +1600,7 @@ class PullRequestModel(BaseModel):
             'source_ref': pull_request.source_ref_parts,
         }
         if pull_request.last_merge_metadata:
-            metadata.update(pull_request.last_merge_metadata)
+
metadata.update(pull_request.last_merge_metadata_parsed) if not possible and target_ref.type == 'branch': # NOTE(marcink): case for mercurial multiple heads on branch diff --git a/rhodecode/public/css/alerts.less b/rhodecode/public/css/alerts.less --- a/rhodecode/public/css/alerts.less +++ b/rhodecode/public/css/alerts.less @@ -55,3 +55,16 @@ margin: 0 auto 35px auto; } } + +.alert-text-success { + color: @alert1; + +} + +.alert-text-error { + color: @alert2; +} + +.alert-text-warning { + color: @alert3; +} diff --git a/rhodecode/public/css/buttons.less b/rhodecode/public/css/buttons.less --- a/rhodecode/public/css/buttons.less +++ b/rhodecode/public/css/buttons.less @@ -254,7 +254,7 @@ input[type="button"] { .btn-group-actions { position: relative; - z-index: 100; + z-index: 50; &:not(.open) .btn-action-switcher-container { display: none; diff --git a/rhodecode/public/css/code-block.less b/rhodecode/public/css/code-block.less --- a/rhodecode/public/css/code-block.less +++ b/rhodecode/public/css/code-block.less @@ -1078,10 +1078,16 @@ input.filediff-collapse-state { background: @color5; color: white; } + &[op="comments"] { /* comments on file */ background: @grey4; color: white; } + + &[op="options"] { /* context menu */ + background: @grey6; + color: black; + } } } diff --git a/rhodecode/public/css/helpers.less b/rhodecode/public/css/helpers.less --- a/rhodecode/public/css/helpers.less +++ b/rhodecode/public/css/helpers.less @@ -31,6 +31,10 @@ a { cursor: pointer; } clear: both; } +.display-none { + display: none; +} + .pull-right { float: right !important; } diff --git a/rhodecode/public/css/legacy_code_styles.less b/rhodecode/public/css/legacy_code_styles.less --- a/rhodecode/public/css/legacy_code_styles.less +++ b/rhodecode/public/css/legacy_code_styles.less @@ -240,14 +240,14 @@ div.markdown-block ol { div.markdown-block ul.checkbox li, div.markdown-block ol.checkbox li { list-style: none !important; - margin: 6px !important; + margin: 0px !important; padding: 0 !important; } div.markdown-block ul li, div.markdown-block ol li { list-style: disc !important; - margin: 6px !important; + margin: 0px !important; padding: 0 !important; } diff --git a/rhodecode/public/css/main.less b/rhodecode/public/css/main.less --- a/rhodecode/public/css/main.less +++ b/rhodecode/public/css/main.less @@ -83,6 +83,11 @@ body { } } +.flex-container { + display: flex; + justify-content: space-between; +} + .action-link{ margin-left: @padding; padding-left: @padding; @@ -482,10 +487,15 @@ ul.auth_plugins { text-align: left; overflow: hidden; white-space: pre-line; -} - -.pr-details-title { - height: 16px + padding-top: 5px +} + +#add_reviewer { + padding-top: 10px; +} + +#add_reviewer_input { + padding-top: 10px } .pr-details-title-author-pref { @@ -1173,9 +1183,12 @@ label { a { color: @grey5 } - @media screen and (max-width: 1200px) { + + // 1024px or smaller + @media screen and (max-width: 1180px) { display: none; } + } img { @@ -1492,26 +1505,17 @@ table.integrations { // Pull Requests .summary-details { - width: 72%; + width: 100%; } .pr-summary { border-bottom: @border-thickness solid @grey5; margin-bottom: @space; } -.reviewers-title { - width: 25%; - min-width: 200px; - - &.first-panel { - margin-top: 34px; - } -} - .reviewers { - width: 25%; - min-width: 200px; -} + width: 98%; +} + .reviewers ul li { position: relative; width: 100%; @@ -1523,18 +1527,14 @@ table.integrations { min-height: 55px; } -.reviewers_member { - width: 100%; - overflow: auto; -} .reviewer_reason { padding-left: 20px; 
line-height: 1.5em; } .reviewer_status { display: inline-block; - width: 25px; - min-width: 25px; + width: 20px; + min-width: 20px; height: 1.2em; line-height: 1em; } @@ -1557,25 +1557,20 @@ table.integrations { } .reviewer_member_mandatory { - position: absolute; - left: 15px; - top: 8px; width: 16px; font-size: 11px; margin: 0; padding: 0; color: black; + opacity: 0.4; } .reviewer_member_mandatory_remove, .reviewer_member_remove { - position: absolute; - right: 0; - top: 0; width: 16px; - margin-bottom: 10px; padding: 0; color: black; + cursor: pointer; } .reviewer_member_mandatory_remove { @@ -1593,6 +1588,9 @@ table.integrations { cursor: pointer; } .pr-details-title { + height: 20px; + line-height: 20px; + padding-bottom: 8px; border-bottom: @border-thickness solid @grey5; @@ -1617,7 +1615,7 @@ table.integrations { text-decoration: line-through; } -.todo-table { +.todo-table, .comments-table { width: 100%; td { @@ -1627,7 +1625,8 @@ table.integrations { .td-todo-number { text-align: left; white-space: nowrap; - width: 15%; + width: 1%; + padding-right: 2px; } .td-todo-gravatar { @@ -1651,10 +1650,13 @@ table.integrations { text-overflow: ellipsis; } +table.group_members { + width: 100% +} + .group_members { margin-top: 0; padding: 0; - list-style: outside none none; img { height: @gravatar-size; @@ -1698,7 +1700,7 @@ table.integrations { } .reviewer_ac .ac-input { - width: 92%; + width: 100%; margin-bottom: 1em; } @@ -2772,7 +2774,7 @@ table.rctable td.td-search-results div { } #help_kb .modal-content{ - max-width: 750px; + max-width: 800px; margin: 10% auto; table{ @@ -3069,4 +3071,141 @@ form.markup-form { .pr-hovercard-title { padding-top: 5px; -} \ No newline at end of file +} + +.action-divider { + opacity: 0.5; +} + +.details-inline-block { + display: inline-block; + position: relative; +} + +.details-inline-block summary { + list-style: none; +} + +details:not([open]) > :not(summary) { + display: none !important; +} + +.details-reset > summary { + list-style: none; +} + +.details-reset > summary::-webkit-details-marker { + display: none; +} + +.details-dropdown { + position: absolute; + top: 100%; + width: 185px; + list-style: none; + background-color: #fff; + background-clip: padding-box; + border: 1px solid @grey5; + box-shadow: 0 8px 24px rgba(149, 157, 165, .2); + left: -150px; + text-align: left; + z-index: 90; +} + +.dropdown-divider { + display: block; + height: 0; + margin: 8px 0; + border-top: 1px solid @grey5; +} + +.dropdown-item { + display: block; + padding: 4px 8px 4px 16px; + overflow: hidden; + text-overflow: ellipsis; + white-space: nowrap; + font-weight: normal; +} + +.right-sidebar { + position: fixed; + top: 0px; + bottom: 0; + right: 0; + + background: #fafafa; + z-index: 50; +} + +.right-sidebar { + border-left: 1px solid @grey5; +} + +.right-sidebar.right-sidebar-expanded { + width: 300px; + overflow: scroll; +} + +.right-sidebar.right-sidebar-collapsed { + width: 40px; + padding: 0; + display: block; + overflow: hidden; +} + +.sidenav { + float: right; + will-change: min-height; + background: #fafafa; + width: 100%; +} + +.sidebar-toggle { + height: 30px; + text-align: center; + margin: 15px 0px 0 0; +} + +.sidebar-toggle a { + +} + +.sidebar-content { + margin-left: 15px; + margin-right: 15px; +} + +.sidebar-heading { + font-size: 1.2em; + font-weight: 700; + margin-top: 10px; +} + +.sidebar-element { + margin-top: 20px; +} + +.right-sidebar-collapsed-state { + display: flex; + flex-direction: column; + justify-content: center; + align-items: center; + 
padding: 0 10px; + cursor: pointer; + font-size: 1.3em; + margin: 0 -15px; +} + +.right-sidebar-collapsed-state:hover { + background-color: @grey5; +} + +.old-comments-marker { + text-align: left; +} + +.old-comments-marker td { + padding-top: 15px; + border-bottom: 1px solid @grey5; +} diff --git a/rhodecode/public/css/navigation.less b/rhodecode/public/css/navigation.less --- a/rhodecode/public/css/navigation.less +++ b/rhodecode/public/css/navigation.less @@ -790,7 +790,7 @@ input { &.main_filter_input { padding: 5px 10px; - min-width: 340px; + color: @grey7; background: @black; min-height: 18px; @@ -800,11 +800,34 @@ input { color: @grey2 !important; background: white !important; } + &:focus { color: @grey2 !important; background: white !important; } + + min-width: 360px; + + @media screen and (max-width: 1600px) { + min-width: 300px; + } + @media screen and (max-width: 1500px) { + min-width: 280px; + } + @media screen and (max-width: 1400px) { + min-width: 260px; + } + @media screen and (max-width: 1300px) { + min-width: 240px; + } + @media screen and (max-width: 1200px) { + min-width: 220px; + } + @media screen and (max-width: 720px) { + min-width: 140px; + } } + } diff --git a/rhodecode/public/css/rcicons.less b/rhodecode/public/css/rcicons.less --- a/rhodecode/public/css/rcicons.less +++ b/rhodecode/public/css/rcicons.less @@ -168,6 +168,7 @@ .icon-remove:before { content: '\e810'; } /* '' */ .icon-fork:before { content: '\e811'; } /* '' */ .icon-more:before { content: '\e812'; } /* '' */ +.icon-options:before { content: '\e812'; } /* '' */ .icon-search:before { content: '\e813'; } /* '' */ .icon-scissors:before { content: '\e814'; } /* '' */ .icon-download:before { content: '\e815'; } /* '' */ @@ -251,6 +252,7 @@ // TRANSFORM .icon-merge:before {transform: rotate(180deg);} .icon-wide-mode:before {transform: rotate(90deg);} +.icon-options:before {transform: rotate(90deg);} // -- END ICON CLASSES -- // diff --git a/rhodecode/public/js/rhodecode/base/keyboard-bindings.js b/rhodecode/public/js/rhodecode/base/keyboard-bindings.js --- a/rhodecode/public/js/rhodecode/base/keyboard-bindings.js +++ b/rhodecode/public/js/rhodecode/base/keyboard-bindings.js @@ -131,6 +131,11 @@ function setRCMouseBindings(repoName, re window.location = pyroutes.url( 'edit_repo_perms', {'repo_name': repoName}); }); + Mousetrap.bind(['t s'], function(e) { + if (window.toggleSidebar !== undefined) { + window.toggleSidebar(); + } + }); } } diff --git a/rhodecode/public/js/rhodecode/routes.js b/rhodecode/public/js/rhodecode/routes.js --- a/rhodecode/public/js/rhodecode/routes.js +++ b/rhodecode/public/js/rhodecode/routes.js @@ -246,6 +246,8 @@ function registerRCRoutes() { pyroutes.register('pullrequest_comment_create', '/%(repo_name)s/pull-request/%(pull_request_id)s/comment', ['repo_name', 'pull_request_id']); pyroutes.register('pullrequest_comment_edit', '/%(repo_name)s/pull-request/%(pull_request_id)s/comment/%(comment_id)s/edit', ['repo_name', 'pull_request_id', 'comment_id']); pyroutes.register('pullrequest_comment_delete', '/%(repo_name)s/pull-request/%(pull_request_id)s/comment/%(comment_id)s/delete', ['repo_name', 'pull_request_id', 'comment_id']); + pyroutes.register('pullrequest_comments', '/%(repo_name)s/pull-request/%(pull_request_id)s/comments', ['repo_name', 'pull_request_id']); + pyroutes.register('pullrequest_todos', '/%(repo_name)s/pull-request/%(pull_request_id)s/todos', ['repo_name', 'pull_request_id']); pyroutes.register('edit_repo', '/%(repo_name)s/settings', ['repo_name']); 
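The pyroutes registrations above are plain %-style URL templates that the client-side pyroutes.url() helper fills in. For illustration, the equivalent substitution written in Python, using the registered spec verbatim and made-up values:

    route_spec = '/%(repo_name)s/pull-request/%(pull_request_id)s/comments'
    url = route_spec % {'repo_name': 'some-repo', 'pull_request_id': 21}
    assert url == '/some-repo/pull-request/21/comments'
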
pyroutes.register('edit_repo_advanced', '/%(repo_name)s/settings/advanced', ['repo_name']); pyroutes.register('edit_repo_advanced_archive', '/%(repo_name)s/settings/advanced/archive', ['repo_name']); diff --git a/rhodecode/public/js/src/components/rhodecode-app/rhodecode-app.js b/rhodecode/public/js/src/components/rhodecode-app/rhodecode-app.js --- a/rhodecode/public/js/src/components/rhodecode-app/rhodecode-app.js +++ b/rhodecode/public/js/src/components/rhodecode-app/rhodecode-app.js @@ -28,9 +28,12 @@ export class RhodecodeApp extends Polyme super.connectedCallback(); ccLog.debug('rhodeCodeApp created'); $.Topic('/notifications').subscribe(this.handleNotifications.bind(this)); + $.Topic('/comment').subscribe(this.handleComment.bind(this)); $.Topic('/favicon/update').subscribe(this.faviconUpdate.bind(this)); $.Topic('/connection_controller/subscribe').subscribe( - this.subscribeToChannelTopic.bind(this)); + this.subscribeToChannelTopic.bind(this) + ); + // this event can be used to coordinate plugins to do their // initialization before channelstream is kicked off $.Topic('/__MAIN_APP__').publish({}); @@ -71,6 +74,14 @@ export class RhodecodeApp extends Polyme } + handleComment(data) { + if (data.message.comment_id) { + if (window.refreshAllComments !== undefined) { + refreshAllComments() + } + } + } + faviconUpdate(data) { this.shadowRoot.querySelector('rhodecode-favicon').counter = data.count; } @@ -95,6 +106,7 @@ export class RhodecodeApp extends Polyme } // append any additional channels registered in other plugins $.Topic('/connection_controller/subscribe').processPrepared(); + channelstreamConnection.connect(); } } @@ -157,8 +169,7 @@ export class RhodecodeApp extends Polyme handleConnected(event) { var channelstreamConnection = this.getChannelStreamConnection(); - channelstreamConnection.set('channelsState', - event.detail.channels_info); + channelstreamConnection.set('channelsState', event.detail.channels_info); channelstreamConnection.set('userState', event.detail.state); channelstreamConnection.set('channels', event.detail.channels); this.propagageChannelsState(); diff --git a/rhodecode/public/js/src/rhodecode.js b/rhodecode/public/js/src/rhodecode.js --- a/rhodecode/public/js/src/rhodecode.js +++ b/rhodecode/public/js/src/rhodecode.js @@ -296,16 +296,25 @@ var tooltipActivate = function () { // we set a variable so the data is only loaded once via Ajax, not every time the tooltip opens if ($origin.data('loaded') !== true) { var hovercardUrl = $origin.data('hovercardUrl'); - var altHovercard =$origin.data('hovercardAlt'); + var altHovercard = $origin.data('hovercardAlt'); if (hovercardUrl !== undefined && hovercardUrl !== "") { - if (hovercardUrl.substr(0,12) === 'pyroutes.url'){ + var urlLoad = true; + if (hovercardUrl.substr(0, 12) === 'pyroutes.url') { hovercardUrl = eval(hovercardUrl) + } else if (hovercardUrl.substr(0, 11) === 'javascript:') { + var jsFunc = hovercardUrl.substr(11); + urlLoad = false; + loaded = true; + instance.content(eval(jsFunc)) } - var loaded = loadHoverCard(hovercardUrl, altHovercard, function (data) { - instance.content(data); - }) + if (urlLoad) { + var loaded = loadHoverCard(hovercardUrl, altHovercard, function (data) { + instance.content(data); + }) + } + } else { if ($origin.data('hovercardAltHtml')) { var data = atob($origin.data('hovercardAltHtml')); @@ -677,7 +686,9 @@ var feedLifetimeOptions = function(query query.callback(data); }; - +/* +* Retrievew via templateContext.session_attrs.key +* */ var storeUserSessionAttr = function (key, val) 
{ var postData = { diff --git a/rhodecode/public/js/src/rhodecode/comments.js b/rhodecode/public/js/src/rhodecode/comments.js --- a/rhodecode/public/js/src/rhodecode/comments.js +++ b/rhodecode/public/js/src/rhodecode/comments.js @@ -558,7 +558,7 @@ var CommentsController = function() { return false; }; - this.showVersion = function (comment_id, comment_history_id) { + this.showVersion = function (comment_id, comment_history_id) { var historyViewUrl = pyroutes.url( 'repo_commit_comment_history_view', @@ -585,7 +585,7 @@ var CommentsController = function() { successRenderCommit, failRenderCommit ); - }; + }; this.getLineNumber = function(node) { var $node = $(node); @@ -670,8 +670,20 @@ var CommentsController = function() { var success = function(response) { $comment.remove(); + + if (window.updateSticky !== undefined) { + // potentially our comments change the active window size, so we + // notify sticky elements + updateSticky() + } + + if (window.refreshAllComments !== undefined) { + // if we have this handler, run it, and refresh all comments boxes + refreshAllComments() + } return false; }; + var failure = function(jqXHR, textStatus, errorThrown) { var prefix = "Error while deleting this comment.\n" var message = formatErrorMessage(jqXHR, textStatus, errorThrown, prefix); @@ -682,6 +694,9 @@ var CommentsController = function() { return false; }; ajaxPOST(url, postData, success, failure); + + + } this.deleteComment = function(node) { @@ -727,6 +742,15 @@ var CommentsController = function() { $filediff.find('.hide-line-comments').removeClass('hide-line-comments'); $filediff.toggleClass('hide-comments'); } + + // since we change the height of the diff container that has anchor points for upper + // sticky header, we need to tell it to re-calculate those + if (window.updateSticky !== undefined) { + // potentially our comments change the active window size, so we + // notify sticky elements + updateSticky() + } + return false; }; @@ -747,7 +771,7 @@ var CommentsController = function() { var cm = commentForm.getCmInstance(); if (resolvesCommentId){ - var placeholderText = _gettext('Leave a resolution comment, or click resolve button to resolve TODO comment #{0}').format(resolvesCommentId); + placeholderText = _gettext('Leave a resolution comment, or click resolve button to resolve TODO comment #{0}').format(resolvesCommentId); } setTimeout(function() { @@ -1077,9 +1101,15 @@ var CommentsController = function() { updateSticky() } + if (window.refreshAllComments !== undefined) { + // if we have this handler, run it, and refresh all comments boxes + refreshAllComments() + } + commentForm.setActionButtonsDisabled(false); }; + var submitFailCallback = function(jqXHR, textStatus, errorThrown) { var prefix = "Error while editing comment.\n" var message = formatErrorMessage(jqXHR, textStatus, errorThrown, prefix); @@ -1209,6 +1239,11 @@ var CommentsController = function() { updateSticky() } + if (window.refreshAllComments !== undefined) { + // if we have this handler, run it, and refresh all comments boxes + refreshAllComments() + } + commentForm.setActionButtonsDisabled(false); }; diff --git a/rhodecode/public/js/src/rhodecode/menus.js b/rhodecode/public/js/src/rhodecode/menus.js --- a/rhodecode/public/js/src/rhodecode/menus.js +++ b/rhodecode/public/js/src/rhodecode/menus.js @@ -35,4 +35,75 @@ var quick_repo_menu = function() { }, function() { hide_quick_repo_menus(); }); -}; \ No newline at end of file +}; + + +window.toggleElement = function (elem, target) { + var $elem = $(elem); + var $target 
= $(target); + + if ($target.is(':visible') || $target.length === 0) { + $target.hide(); + $elem.html($elem.data('toggleOn')) + } else { + $target.show(); + $elem.html($elem.data('toggleOff')) + } + + return false +} + +var marginExpVal = '300' // needs a sync with `.right-sidebar.right-sidebar-expanded` value +var marginColVal = '40' // needs a sync with `.right-sidebar.right-sidebar-collapsed` value + +var marginExpanded = {'margin': '0 {0}px 0 0'.format(marginExpVal)}; +var marginCollapsed = {'margin': '0 {0}px 0 0'.format(marginColVal)}; + +var updateStickyHeader = function () { + if (window.updateSticky !== undefined) { + // potentially our comments change the active window size, so we + // notify sticky elements + updateSticky() + } +} + +var expandSidebar = function () { + var $sideBar = $('.right-sidebar'); + $('.outerwrapper').css(marginExpanded); + $('.sidebar-toggle a').html(''); + $('.right-sidebar-collapsed-state').hide(); + $('.right-sidebar-expanded-state').show(); + $('.branding').addClass('display-none'); + $sideBar.addClass('right-sidebar-expanded') + $sideBar.removeClass('right-sidebar-collapsed') +} + +var collapseSidebar = function () { + var $sideBar = $('.right-sidebar'); + $('.outerwrapper').css(marginCollapsed); + $('.sidebar-toggle a').html(''); + $('.right-sidebar-collapsed-state').show(); + $('.right-sidebar-expanded-state').hide(); + $('.branding').removeClass('display-none'); + $sideBar.removeClass('right-sidebar-expanded') + $sideBar.addClass('right-sidebar-collapsed') +} + +window.toggleSidebar = function () { + var $sideBar = $('.right-sidebar'); + + if ($sideBar.hasClass('right-sidebar-expanded')) { + // expanded -> collapsed transition + collapseSidebar(); + var sidebarState = 'collapsed'; + + } else { + // collapsed -> expanded + expandSidebar(); + var sidebarState = 'expanded'; + } + + // update our other sticky header in same context + updateStickyHeader(); + storeUserSessionAttr('rc_user_session_attr.sidebarState', sidebarState); +} diff --git a/rhodecode/public/js/src/rhodecode/pullrequests.js b/rhodecode/public/js/src/rhodecode/pullrequests.js --- a/rhodecode/public/js/src/rhodecode/pullrequests.js +++ b/rhodecode/public/js/src/rhodecode/pullrequests.js @@ -98,10 +98,13 @@ ReviewersController = function () { var self = this; this.$reviewRulesContainer = $('#review_rules'); this.$rulesList = this.$reviewRulesContainer.find('.pr-reviewer-rules'); + this.$userRule = $('.pr-user-rule-container'); this.forbidReviewUsers = undefined; this.$reviewMembers = $('#review_members'); this.currentRequest = null; this.diffData = null; + this.enabledRules = []; + //dummy handler, we might register our own later this.diffDataHandler = function(data){}; @@ -116,14 +119,17 @@ ReviewersController = function () { this.hideReviewRules = function () { self.$reviewRulesContainer.hide(); + $(self.$userRule.selector).hide(); }; this.showReviewRules = function () { self.$reviewRulesContainer.show(); + $(self.$userRule.selector).show(); }; this.addRule = function (ruleText) { self.showReviewRules(); + self.enabledRules.push(ruleText); return '
<div>- {0}</div>
'.format(ruleText) }; @@ -179,6 +185,7 @@ ReviewersController = function () { _gettext('Reviewers picked from source code changes.')) ) } + if (data.rules.forbid_adding_reviewers) { $('#add_reviewer_input').remove(); self.$rulesList.append( @@ -186,6 +193,7 @@ ReviewersController = function () { _gettext('Adding new reviewers is forbidden.')) ) } + if (data.rules.forbid_author_to_review) { self.forbidReviewUsers.push(data.rules_data.pr_author); self.$rulesList.append( @@ -193,6 +201,7 @@ ReviewersController = function () { _gettext('Author is not allowed to be a reviewer.')) ) } + if (data.rules.forbid_commit_author_to_review) { if (data.rules_data.forbidden_users) { @@ -208,6 +217,12 @@ ReviewersController = function () { ) } + // we don't have any rules set, so we inform users about it + if (self.enabledRules.length === 0) { + self.addRule( + _gettext('No review rules set.')) + } + return self.forbidReviewUsers }; @@ -264,8 +279,11 @@ ReviewersController = function () { $('#user').show(); // show user autocomplete after load var commitElements = data["diff_info"]['commits']; + if (commitElements.length === 0) { - prButtonLock(true, _gettext('no commits'), 'all'); + var noCommitsMsg = '{0}'.format( + _gettext('There are no commits to merge.')); + prButtonLock(true, noCommitsMsg, 'all'); } else { // un-lock PR button, so we cannot send PR before it's calculated @@ -309,7 +327,6 @@ ReviewersController = function () { }; this.addReviewMember = function (reviewer_obj, reasons, mandatory) { - var members = self.$reviewMembers.get(0); var id = reviewer_obj.user_id; var username = reviewer_obj.username; @@ -318,10 +335,10 @@ ReviewersController = function () { // register IDS to check if we don't have this ID already in var currentIds = []; - var _els = self.$reviewMembers.find('li').toArray(); - for (el in _els) { - currentIds.push(_els[el].id) - } + + $.each(self.$reviewMembers.find('.reviewer_entry'), function (index, value) { + currentIds.push($(value).data('reviewerUserId')) + }) var userAllowedReview = function (userId) { var allowed = true; @@ -339,20 +356,23 @@ ReviewersController = function () { alert(_gettext('User `{0}` not allowed to be a reviewer').format(username)); } else { // only add if it's not there - var alreadyReviewer = currentIds.indexOf('reviewer_' + id) != -1; + var alreadyReviewer = currentIds.indexOf(id) != -1; if (alreadyReviewer) { alert(_gettext('User `{0}` already in reviewers').format(username)); } else { - members.innerHTML += renderTemplate('reviewMemberEntry', { + var reviewerEntry = renderTemplate('reviewMemberEntry', { 'member': reviewer_obj, 'mandatory': mandatory, + 'reasons': reasons, 'allowed_to_update': true, 'review_status': 'not_reviewed', 'review_status_label': _gettext('Not Reviewed'), - 'reasons': reasons, - 'create': true - }); + 'user_group': reviewer_obj.user_group, + 'create': true, + 'rule_show': true, + }) + $(self.$reviewMembers.selector).append(reviewerEntry); tooltipActivate(); } } @@ -476,7 +496,7 @@ var ReviewerAutoComplete = function(inpu }; -VersionController = function () { +window.VersionController = function () { var self = this; this.$verSource = $('input[name=ver_source]'); this.$verTarget = $('input[name=ver_target]'); @@ -596,25 +616,10 @@ VersionController = function () { return false }; - this.toggleElement = function (elem, target) { - var $elem = $(elem); - var $target = $(target); - - if ($target.is(':visible')) { - $target.hide(); - $elem.html($elem.data('toggleOn')) - } else { - $target.show(); - 
$elem.html($elem.data('toggleOff')) - } - - return false - } - }; -UpdatePrController = function () { +window.UpdatePrController = function () { var self = this; this.$updateCommits = $('#update_commits'); this.$updateCommitsSwitcher = $('#update_commits_switcher'); @@ -656,4 +661,230 @@ UpdatePrController = function () { templateContext.repo_name, templateContext.pull_request_data.pull_request_id, force); }; -}; \ No newline at end of file +}; + +/** + * Reviewer display panel + */ +window.ReviewersPanel = { + editButton: null, + closeButton: null, + addButton: null, + removeButtons: null, + reviewRules: null, + setReviewers: null, + + setSelectors: function () { + var self = this; + self.editButton = $('#open_edit_reviewers'); + self.closeButton =$('#close_edit_reviewers'); + self.addButton = $('#add_reviewer'); + self.removeButtons = $('.reviewer_member_remove,.reviewer_member_mandatory_remove'); + }, + + init: function (reviewRules, setReviewers) { + var self = this; + self.setSelectors(); + + this.reviewRules = reviewRules; + this.setReviewers = setReviewers; + + this.editButton.on('click', function (e) { + self.edit(); + }); + this.closeButton.on('click', function (e) { + self.close(); + self.renderReviewers(); + }); + + self.renderReviewers(); + + }, + + renderReviewers: function () { + + $('#review_members').html('') + $.each(this.setReviewers.reviewers, function (key, val) { + var member = val; + + var entry = renderTemplate('reviewMemberEntry', { + 'member': member, + 'mandatory': member.mandatory, + 'reasons': member.reasons, + 'allowed_to_update': member.allowed_to_update, + 'review_status': member.review_status, + 'review_status_label': member.review_status_label, + 'user_group': member.user_group, + 'create': false + }); + + $('#review_members').append(entry) + }); + tooltipActivate(); + + }, + + edit: function (event) { + this.editButton.hide(); + this.closeButton.show(); + this.addButton.show(); + $(this.removeButtons.selector).css('visibility', 'visible'); + // review rules + reviewersController.loadReviewRules(this.reviewRules); + }, + + close: function (event) { + this.editButton.show(); + this.closeButton.hide(); + this.addButton.hide(); + $(this.removeButtons.selector).css('visibility', 'hidden'); + // hide review rules + reviewersController.hideReviewRules() + } +}; + + +/** + * OnLine presence using channelstream + */ +window.ReviewerPresenceController = function (channel) { + var self = this; + this.channel = channel; + this.users = {}; + + this.storeUsers = function (users) { + self.users = {} + $.each(users, function (index, value) { + var userId = value.state.id; + self.users[userId] = value.state; + }) + } + + this.render = function () { + $.each($('.reviewer_entry'), function (index, value) { + var userData = $(value).data(); + if (self.users[userData.reviewerUserId] !== undefined) { + $(value).find('.presence-state').show(); + } else { + $(value).find('.presence-state').hide(); + } + }) + }; + + this.handlePresence = function (data) { + if (data.type == 'presence' && data.channel === self.channel) { + this.storeUsers(data.users); + this.render() + } + }; + + this.handleChannelUpdate = function (data) { + if (data.channel === this.channel) { + this.storeUsers(data.state.users); + this.render() + } + + }; + + /* subscribe to the current presence */ + $.Topic('/connection_controller/presence').subscribe(this.handlePresence.bind(this)); + /* subscribe to updates e.g connect/disconnect */ + 
$.Topic('/connection_controller/channel_update').subscribe(this.handleChannelUpdate.bind(this)); + +}; + +window.refreshComments = function (version) { + version = version || templateContext.pull_request_data.pull_request_version || ''; + + // Pull request case + if (templateContext.pull_request_data.pull_request_id !== null) { + var params = { + 'pull_request_id': templateContext.pull_request_data.pull_request_id, + 'repo_name': templateContext.repo_name, + 'version': version, + }; + var loadUrl = pyroutes.url('pullrequest_comments', params); + } // commit case + else { + return + } + + var currentIDs = [] + $.each($('.comment'), function (idx, element) { + currentIDs.push($(element).data('commentId')); + }); + var data = {"comments[]": currentIDs}; + + var $targetElem = $('.comments-content-table'); + $targetElem.css('opacity', 0.3); + $targetElem.load( + loadUrl, data, function (responseText, textStatus, jqXHR) { + if (jqXHR.status !== 200) { + return false; + } + var $counterElem = $('#comments-count'); + var newCount = $(responseText).data('counter'); + if (newCount !== undefined) { + var callback = function () { + $counterElem.animate({'opacity': 1.00}, 200) + $counterElem.html(newCount); + }; + $counterElem.animate({'opacity': 0.15}, 200, callback); + } + + $targetElem.css('opacity', 1); + tooltipActivate(); + } + ); +} + +window.refreshTODOs = function (version) { + version = version || templateContext.pull_request_data.pull_request_version || ''; + // Pull request case + if (templateContext.pull_request_data.pull_request_id !== null) { + var params = { + 'pull_request_id': templateContext.pull_request_data.pull_request_id, + 'repo_name': templateContext.repo_name, + 'version': version, + }; + var loadUrl = pyroutes.url('pullrequest_comments', params); + } // commit case + else { + return + } + + var currentIDs = [] + $.each($('.comment'), function (idx, element) { + currentIDs.push($(element).data('commentId')); + }); + + var data = {"comments[]": currentIDs}; + var $targetElem = $('.todos-content-table'); + $targetElem.css('opacity', 0.3); + $targetElem.load( + loadUrl, data, function (responseText, textStatus, jqXHR) { + if (jqXHR.status !== 200) { + return false; + } + var $counterElem = $('#todos-count') + var newCount = $(responseText).data('counter'); + if (newCount !== undefined) { + var callback = function () { + $counterElem.animate({'opacity': 1.00}, 200) + $counterElem.html(newCount); + }; + $counterElem.animate({'opacity': 0.15}, 200, callback); + } + + $targetElem.css('opacity', 1); + tooltipActivate(); + } + ); +} + +window.refreshAllComments = function (version) { + version = version || templateContext.pull_request_data.pull_request_version || ''; + + refreshComments(version); + refreshTODOs(version); +}; diff --git a/rhodecode/subscribers.py b/rhodecode/subscribers.py --- a/rhodecode/subscribers.py +++ b/rhodecode/subscribers.py @@ -18,6 +18,7 @@ # RhodeCode Enterprise Edition, including its added features, Support services, # and proprietary license terms, please see https://rhodecode.com/licenses/ import io +import math import re import os import datetime @@ -196,6 +197,72 @@ def write_metadata_if_needed(event): pass +def write_usage_data(event): + import rhodecode + from rhodecode.lib import system_info + from rhodecode.lib import ext_json + + settings = event.app.registry.settings + instance_tag = settings.get('metadata.write_usage_tag') + if not settings.get('metadata.write_usage'): + return + + def get_update_age(dest_file): + now = datetime.datetime.utcnow() 
+
+        with open(dest_file, 'rb') as f:
+            data = ext_json.json.loads(f.read())
+            if 'created_on' in data:
+                update_date = parse(data['created_on'])
+                diff = now - update_date
+                return math.ceil(diff.total_seconds() / 60.0)
+
+        return 0
+
+    utc_date = datetime.datetime.utcnow()
+    hour_quarter = int(math.ceil((utc_date.hour + utc_date.minute/60.0) / 6.))
+    fname = '.rc_usage_{date.year}{date.month:02d}{date.day:02d}_{hour}.json'.format(
+        date=utc_date, hour=hour_quarter)
+    ini_loc = os.path.dirname(rhodecode.CONFIG.get('__file__'))
+
+    usage_dir = os.path.join(ini_loc, '.rcusage')
+    if not os.path.isdir(usage_dir):
+        os.makedirs(usage_dir)
+    usage_metadata_destination = os.path.join(usage_dir, fname)
+
+    try:
+        age_in_min = get_update_age(usage_metadata_destination)
+    except Exception:
+        age_in_min = 0
+
+    # write at most once every 6 hours
+    if age_in_min and age_in_min < 60 * 6:
+        log.debug('Usage file created %s minutes ago, skipping (threshold: %s)...',
+                  age_in_min, 60 * 6)
+        return
+
+    def write(dest_file):
+        configuration = system_info.SysInfo(system_info.rhodecode_config)()['value']
+        license_token = configuration['config']['license_token']
+
+        metadata = dict(
+            desc='Usage data',
+            instance_tag=instance_tag,
+            license_token=license_token,
+            created_on=datetime.datetime.utcnow().isoformat(),
+            usage=system_info.SysInfo(system_info.usage_info)()['value'],
+        )
+
+        with open(dest_file, 'wb') as f:
+            f.write(ext_json.json.dumps(metadata, indent=2, sort_keys=True))
+
+    try:
+        log.debug('Writing usage file at: %s', usage_metadata_destination)
+        write(usage_metadata_destination)
+    except Exception:
+        pass
+
+
 def write_js_routes_if_enabled(event):
     registry = event.app.registry
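Note on the scheduling above: the quarter-day bucket feeding the file name is ceil((hour + minute/60) / 6), which is 0 only at exactly midnight and otherwise 1..4, so together with the 6-hour age check at most four usage files are written per UTC day. A standalone sketch of just the naming logic, not part of the diff itself:

import datetime
import math

def usage_file_name(utc_date):
    # same bucketing as write_usage_data() above
    hour_quarter = int(math.ceil((utc_date.hour + utc_date.minute / 60.0) / 6.))
    return '.rc_usage_{date.year}{date.month:02d}{date.day:02d}_{hour}.json'.format(
        date=utc_date, hour=hour_quarter)

print(usage_file_name(datetime.datetime(2020, 10, 5, 7, 30)))   # .rc_usage_20201005_2.json
print(usage_file_name(datetime.datetime(2020, 10, 5, 23, 59)))  # .rc_usage_20201005_4.json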
diff --git a/rhodecode/templates/base/base.mako b/rhodecode/templates/base/base.mako
--- a/rhodecode/templates/base/base.mako
+++ b/rhodecode/templates/base/base.mako
@@ -38,10 +38,12 @@
 ${next.main()}
+
+
+
+
+
diff --git a/rhodecode/templates/base/sidebar.mako b/rhodecode/templates/base/sidebar.mako
new file mode 100644
--- /dev/null
+++ b/rhodecode/templates/base/sidebar.mako
@@ -0,0 +1,142 @@
+## snippet for sidebar elements
+## usage:
+## <%namespace name="sidebar" file="/base/sidebar.mako"/>
+## ${sidebar.comments_table()}
+<%namespace name="base" file="/base/base.mako"/>
+
+<%def name="comments_table(comments, counter_num, todo_comments=False, existing_ids=None, is_pr=True)">
+    <%
+    if todo_comments:
+        cls_ = 'todos-content-table'
+        def sorter(entry):
+            user_id = entry.author.user_id
+            resolved = '1' if entry.resolved else '0'
+            if user_id == c.rhodecode_user.user_id:
+                # own comments first
+                user_id = 0
+            # sort key: author bucket, resolution state, zero-padded comment id
+            return '{}_{}_{}'.format(user_id, resolved, str(entry.comment_id).zfill(10000))
+    else:
+        cls_ = 'comments-content-table'
+        def sorter(entry):
+            return '{}'.format(str(entry.comment_id).zfill(10000))
+
+    existing_ids = existing_ids or []
+    %>
+
+
+    % for loop_obj, comment_obj in h.looper(reversed(sorted(comments, key=sorter))):
+        <%
+        display = ''
+        _cls = ''
+        %>
+
+        <%
+        comment_ver_index = comment_obj.get_index_version(getattr(c, 'versions', []))
+        prev_comment_ver_index = 0
+        if loop_obj.previous:
+            prev_comment_ver_index = loop_obj.previous.get_index_version(getattr(c, 'versions', []))
+
+        ver_info = None
+        if getattr(c, 'versions', []):
+            ver_info = c.versions[comment_ver_index-1] if comment_ver_index else None
+        %>
+        <% hidden_at_ver = comment_obj.outdated_at_version_js(c.at_version_num) %>
+        <% is_from_old_ver = comment_obj.older_than_version_js(c.at_version_num) %>
+        <%
+        if (prev_comment_ver_index > comment_ver_index):
+            comments_ver_divider = comment_ver_index
+        else:
+            comments_ver_divider = None
+        %>
+
+        % if todo_comments:
+            % if comment_obj.resolved:
+                <% _cls = 'resolved-todo' %>
+                <% display = 'none' %>
+            % endif
+        % else:
+            ## SKIP TODOs we display them in other area
+            % if comment_obj.is_todo:
+                <% display = 'none' %>
+            % endif
+            ## Skip outdated comments
+            % if comment_obj.outdated:
+                <% display = 'none' %>
+                <% _cls = 'hidden-comment' %>
+            % endif
+        % endif
+
+        ## version divider row, shown when the loop crosses into an older version
+        % if not todo_comments and comments_ver_divider:
+
+            % if ver_info:
+                v${comments_ver_divider} ${h.age_component(ver_info.created_on, time_is_local=True, tooltip=False)}
+            % else:
+                v${comments_ver_divider}
+            % endif
+
+        % endif
+
+        <%
+        version_info = ''
+        if is_pr:
+            version_info = (' made in older version (v{})'.format(comment_ver_index) if is_from_old_ver == 'true' else ' made in this version')
+        %>
+
+        % if comment_obj.outdated:
+
+        % elif comment_obj.is_inline:
+
+        % else:
+
+        % endif
+
+        ## NEW, since refresh
+        % if existing_ids and comment_obj.comment_id not in existing_ids:
+            NEW
+        % endif
+
+        ${base.gravatar(comment_obj.author.email, 16, user=comment_obj.author, tooltip=True, extra_class=['no-margin'])}
+
+    % endfor
+
\ No newline at end of file
diff --git a/rhodecode/templates/changeset/changeset.mako b/rhodecode/templates/changeset/changeset.mako
--- a/rhodecode/templates/changeset/changeset.mako
+++ b/rhodecode/templates/changeset/changeset.mako
@@ -4,6 +4,8 @@
 <%namespace name="base" file="/base/base.mako"/>
 <%namespace name="diff_block" file="/changeset/diff_block.mako"/>
 <%namespace name="file_base" file="/files/base.mako"/>
+<%namespace name="sidebar" file="/base/sidebar.mako"/>
+
 
 <%def name="title()">
     ${_('{} Commit').format(c.repo_name)} - ${h.show_id(c.commit)}
@@ -100,22 +102,6 @@
         % endif
-        %if c.statuses:
-
- -
${h.commit_status_lbl(c.statuses[0])}
-
- %endif - - - - - - - @@ -160,7 +148,9 @@ <%namespace name="cbdiffs" file="/codeblocks/diffs.mako"/> ${cbdiffs.render_diffset_menu(c.changes[c.commit.raw_id], commit=c.commit)} ${cbdiffs.render_diffset( - c.changes[c.commit.raw_id], commit=c.commit, use_comments=True,inline_comments=c.inline_comments )} + c.changes[c.commit.raw_id], commit=c.commit, use_comments=True, + inline_comments=c.inline_comments, + show_todos=False)} ## template for inline comment form @@ -169,7 +159,7 @@ ## comments heading with count
- ${_('Comments')} ${len(c.comments)} + ${_('General Comments')} ${len(c.comments)}
## render comments @@ -180,123 +170,262 @@ h.commit_status(c.rhodecode_db_repo, c.commit.raw_id))} - ## FORM FOR MAKING JS ACTION AS CHANGESET COMMENTS - + ReviewersPanel.init(null, setReviewersData); + + var channel = '${c.commit_broadcast_channel}'; + new ReviewerPresenceController(channel) + + }) + diff --git a/rhodecode/templates/changeset/changeset_file_comment.mako b/rhodecode/templates/changeset/changeset_file_comment.mako --- a/rhodecode/templates/changeset/changeset_file_comment.mako +++ b/rhodecode/templates/changeset/changeset_file_comment.mako @@ -10,12 +10,18 @@ <%namespace name="base" file="/base/base.mako"/> <%def name="comment_block(comment, inline=False, active_pattern_entries=None)"> - <% pr_index_ver = comment.get_index_version(getattr(c, 'versions', [])) %> + + <% + from rhodecode.model.comment import CommentsModel + comment_model = CommentsModel() + %> + <% comment_ver = comment.get_index_version(getattr(c, 'versions', [])) %> <% latest_ver = len(getattr(c, 'versions', [])) %> + % if inline: - <% outdated_at_ver = comment.outdated_at_version(getattr(c, 'at_version_num', None)) %> + <% outdated_at_ver = comment.outdated_at_version(c.at_version_num) %> % else: - <% outdated_at_ver = comment.older_than_version(getattr(c, 'at_version_num', None)) %> + <% outdated_at_ver = comment.older_than_version(c.at_version_num) %> % endif
+
- ${comment.status_change[0].status_lbl} + ${comment.review_status_lbl}
% else:
@@ -153,69 +159,90 @@
%endif - - diff --git a/rhodecode/templates/changeset/changeset_range.mako b/rhodecode/templates/changeset/changeset_range.mako --- a/rhodecode/templates/changeset/changeset_range.mako +++ b/rhodecode/templates/changeset/changeset_range.mako @@ -102,6 +102,11 @@ <%namespace name="diff_block" file="/changeset/diff_block.mako"/> %for commit in c.commit_ranges: + ## commit range header for each individual diff +

+ ${('r%s:%s' % (commit.idx, h.short_id(commit.raw_id)))} +

+ ${cbdiffs.render_diffset_menu(c.changes[commit.raw_id])} ${cbdiffs.render_diffset( diffset=c.changes[commit.raw_id], diff --git a/rhodecode/templates/codeblocks/diffs.mako b/rhodecode/templates/codeblocks/diffs.mako --- a/rhodecode/templates/codeblocks/diffs.mako +++ b/rhodecode/templates/codeblocks/diffs.mako @@ -61,6 +61,8 @@ return '%s_%s_%i' % (h.md5_safe(commit+f diffset_container_id = h.md5(diffset.target_ref) collapse_all = len(diffset.files) > collapse_when_files_over active_pattern_entries = h.get_active_pattern_entries(getattr(c, 'repo_name', None)) + from rhodecode.lib.diffs import NEW_FILENODE, DEL_FILENODE, \ + MOD_FILENODE, RENAMED_FILENODE, CHMOD_FILENODE, BIN_FILENODE, COPIED_FILENODE %> %if use_comments: @@ -159,45 +161,45 @@ return '%s_%s_%i' % (h.md5_safe(commit+f
% endif - ## comments -
-
- % if hasattr(c, 'comments') and hasattr(c, 'inline_cnt'): - COMMENTS: - % if c.comments: - ${_ungettext("{} General", "{} General", len(c.comments)).format(len(c.comments))}, - % else: - ${_('0 General')} - % endif - - % if c.inline_cnt: - ${_ungettext("{} Inline", "{} Inline", c.inline_cnt).format(c.inline_cnt)} - - % else: - ${_('0 Inline')} - % endif - % endif - - % if pull_request_menu: - <% - outdated_comm_count_ver = pull_request_menu['outdated_comm_count_ver'] - %> - - % if outdated_comm_count_ver: - - (${_("{} Outdated").format(outdated_comm_count_ver)}) - - | ${_('show outdated')} - - % else: - (${_("{} Outdated").format(outdated_comm_count_ver)}) - % endif - - % endif - -
-
+## ## comments +##
+##
+## % if hasattr(c, 'comments') and hasattr(c, 'inline_cnt'): +## COMMENTS: +## % if c.comments: +## ${_ungettext("{} General", "{} General", len(c.comments)).format(len(c.comments))}, +## % else: +## ${_('0 General')} +## % endif +## +## % if c.inline_cnt: +## ${_ungettext("{} Inline", "{} Inline", c.inline_cnt).format(c.inline_cnt)} +## +## % else: +## ${_('0 Inline')} +## % endif +## % endif +## +## % if pull_request_menu: +## <% +## outdated_comm_count_ver = pull_request_menu['outdated_comm_count_ver'] +## %> +## +## % if outdated_comm_count_ver: +## +## (${_("{} Outdated").format(outdated_comm_count_ver)}) +## +## | ${_('show outdated')} +## +## % else: +## (${_("{} Outdated").format(outdated_comm_count_ver)}) +## % endif +## +## % endif +## +##
+##
@@ -208,13 +210,6 @@ return '%s_%s_%i' % (h.md5_safe(commit+f ${_('Show full diff')} - ## commit range header for each individual diff - % elif commit and hasattr(c, 'commit_ranges') and len(c.commit_ranges) > 1: - % endif
@@ -239,6 +234,43 @@ return '%s_%s_%i' % (h.md5_safe(commit+f <% over_lines_changed_limit = False %> %for i, filediff in enumerate(diffset.files): + %if filediff.source_file_path and filediff.target_file_path: + %if filediff.source_file_path != filediff.target_file_path: + ## file was renamed, or copied + %if RENAMED_FILENODE in filediff.patch['stats']['ops']: + <% + final_file_name = h.literal(u'{} {}'.format(filediff.target_file_path, filediff.source_file_path)) + final_path = filediff.target_file_path + %> + %elif COPIED_FILENODE in filediff.patch['stats']['ops']: + <% + final_file_name = h.literal(u'{} {}'.format(filediff.target_file_path, filediff.source_file_path)) + final_path = filediff.target_file_path + %> + %endif + %else: + ## file was modified + <% + final_file_name = filediff.source_file_path + final_path = final_file_name + %> + %endif + %else: + %if filediff.source_file_path: + ## file was deleted + <% + final_file_name = filediff.source_file_path + final_path = final_file_name + %> + %else: + ## file was added + <% + final_file_name = filediff.target_file_path + final_path = final_file_name + %> + %endif + %endif + <% lines_changed = filediff.patch['stats']['added'] + filediff.patch['stats']['deleted'] over_lines_changed_limit = lines_changed > lines_changed_limit @@ -258,13 +290,39 @@ return '%s_%s_%i' % (h.md5_safe(commit+f total_file_comments = [_c for _c in h.itertools.chain.from_iterable(file_comments) if not _c.outdated] %>
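For reference, the rename/copy detection in the hunk above is driven by plain membership tests against the per-file ops dict taken from filediff.patch['stats']['ops']. A minimal standalone sketch of that selection logic (the '(was ...)' label and the sample ops value are invented for illustration; the constants are the real ones this diff imports):

from rhodecode.lib.diffs import RENAMED_FILENODE, COPIED_FILENODE

def final_file_name(source_path, target_path, ops):
    # mirrors the template: renamed/copied files show both paths,
    # plain modifications show the unchanged path
    if source_path and target_path:
        if source_path != target_path and (
                RENAMED_FILENODE in ops or COPIED_FILENODE in ops):
            return '{} (was {})'.format(target_path, source_path)
        return source_path
    # deleted files carry only a source path, new files only a target path
    return source_path or target_path

print(final_file_name('old.py', 'new.py', {RENAMED_FILENODE: 'file renamed'}))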
- + + ## Comments/Options PILL + - ${len(total_file_comments)} + +
+ + + + + + + + + + + + + +
+
- ${diff_ops(filediff)} + + ${diff_ops(final_file_name, filediff)} @@ -463,43 +521,15 @@ return '%s_%s_%i' % (h.md5_safe(commit+f
-<%def name="diff_ops(filediff)"> -<% -from rhodecode.lib.diffs import NEW_FILENODE, DEL_FILENODE, \ - MOD_FILENODE, RENAMED_FILENODE, CHMOD_FILENODE, BIN_FILENODE, COPIED_FILENODE -%> +<%def name="diff_ops(file_name, filediff)"> + <% + from rhodecode.lib.diffs import NEW_FILENODE, DEL_FILENODE, \ + MOD_FILENODE, RENAMED_FILENODE, CHMOD_FILENODE, BIN_FILENODE, COPIED_FILENODE + %> - %if filediff.source_file_path and filediff.target_file_path: - %if filediff.source_file_path != filediff.target_file_path: - ## file was renamed, or copied - %if RENAMED_FILENODE in filediff.patch['stats']['ops']: - ${filediff.target_file_path} ⬅ ${filediff.source_file_path} - <% final_path = filediff.target_file_path %> - %elif COPIED_FILENODE in filediff.patch['stats']['ops']: - ${filediff.target_file_path} ⬅ ${filediff.source_file_path} - <% final_path = filediff.target_file_path %> - %endif - %else: - ## file was modified - ${filediff.source_file_path} - <% final_path = filediff.source_file_path %> - %endif - %else: - %if filediff.source_file_path: - ## file was deleted - ${filediff.source_file_path} - <% final_path = filediff.source_file_path %> - %else: - ## file was added - ${filediff.target_file_path} - <% final_path = filediff.target_file_path %> - %endif - %endif - + ${file_name} - ## anchor link - @@ -934,7 +964,7 @@ def get_comments_for(diff_type, comments %endif % if commit or pull_request_menu: - Loading diff...: + Loading diff...: diff --git a/rhodecode/templates/compare/compare_commits.mako b/rhodecode/templates/compare/compare_commits.mako --- a/rhodecode/templates/compare/compare_commits.mako +++ b/rhodecode/templates/compare/compare_commits.mako @@ -21,8 +21,9 @@ ## to speed up lookups cache some functions before the loop <% active_patterns = h.get_active_pattern_entries(c.repo_name) - urlify_commit_message = h.partial(h.urlify_commit_message, active_pattern_entries=active_patterns) + urlify_commit_message = h.partial(h.urlify_commit_message, active_pattern_entries=active_patterns, issues_container=getattr(c, 'referenced_commit_issues', None)) %> + %for commit in c.commit_ranges:
+ + - - - + + + +
+ +
@@ -79,7 +88,7 @@
- ${h.render(c.pull_request.description, renderer=c.renderer, repo_name=c.repo_name)} + ${h.render(c.pull_request.description, renderer=c.renderer, repo_name=c.repo_name, issues_container=c.referenced_desc_issues)}
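The issues_container plumbing used in the two hunks above relies on pre-binding a keyword argument with h.partial (an alias for functools.partial in RhodeCode's helpers), so every render call in the loop appends the issues it finds into one shared list the page can summarize afterwards. A standalone sketch of the pattern (the toy '#123' token extraction stands in for the real issue-pattern matching):

import functools

def urlify_commit_message(message, issues_container=None):
    # stand-in for h.urlify_commit_message: treat every '#123' token as
    # a referenced issue and record it in the shared container
    found = [tok for tok in message.split() if tok.startswith('#')]
    if issues_container is not None:
        issues_container.extend(found)
    return message

referenced_commit_issues = []
urlify = functools.partial(
    urlify_commit_message, issues_container=referenced_commit_issues)

for msg in ('fixes #12', 'follow-up to #12 and #34'):
    urlify(msg)

print(referenced_commit_issues)  # ['#12', '#12', '#34'] - one shared list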