db.py
6055 lines
| 223.1 KiB
| text/x-python
|
PythonLexer
r5088 | # Copyright (C) 2010-2023 RhodeCode GmbH | |||
r1 | # | |||
# This program is free software: you can redistribute it and/or modify | ||||
# it under the terms of the GNU Affero General Public License, version 3 | ||||
# (only), as published by the Free Software Foundation. | ||||
# | ||||
# This program is distributed in the hope that it will be useful, | ||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||||
# GNU General Public License for more details. | ||||
# | ||||
# You should have received a copy of the GNU Affero General Public License | ||||
# along with this program. If not, see <http://www.gnu.org/licenses/>. | ||||
# | ||||
# This program is dual-licensed. If you wish to learn more about the | ||||
# RhodeCode Enterprise Edition, including its added features, Support services, | ||||
# and proprietary license terms, please see https://rhodecode.com/licenses/ | ||||
""" | ||||
Database Models for RhodeCode Enterprise | ||||
""" | ||||
r821 | import re | |||
r1 | import os | |||
import time | ||||
r3623 | import string | |||
r1 | import logging | |||
import datetime | ||||
r3848 | import uuid | |||
r1 | import warnings | |||
import ipaddress | ||||
import functools | ||||
import traceback | ||||
import collections | ||||
r5360 | import pyotp | |||
r2398 | from sqlalchemy import ( | |||
r5071 | or_, and_, not_, func, cast, TypeDecorator, event, select, | |||
r5358 | true, false, null, union_all, | |||
r2406 | Index, Sequence, UniqueConstraint, ForeignKey, CheckConstraint, Column, | |||
r2398 | Boolean, String, Unicode, UnicodeText, DateTime, Integer, LargeBinary, | |||
r4004 | Text, Float, PickleType, BigInteger) | |||
r5071 | from sqlalchemy.sql.expression import case | |||
r3282 | from sqlalchemy.sql.functions import coalesce, count # pragma: no cover | |||
r2398 | from sqlalchemy.orm import ( | |||
r5071 | relationship, lazyload, joinedload, class_mapper, validates, aliased, load_only) | |||
r1 | from sqlalchemy.ext.declarative import declared_attr | |||
from sqlalchemy.ext.hybrid import hybrid_property | ||||
r3282 | from sqlalchemy.exc import IntegrityError # pragma: no cover | |||
r2087 | from sqlalchemy.dialects.mysql import LONGTEXT | |||
r1 | from zope.cachedescriptors.property import Lazy as LazyProperty | |||
r1774 | from pyramid.threadlocal import get_current_request | |||
r4090 | from webhelpers2.text import remove_formatting | |||
r1 | ||||
r5360 | from rhodecode import ConfigGet | |||
r5010 | from rhodecode.lib.str_utils import safe_bytes | |||
r1917 | from rhodecode.translation import _ | |||
r4162 | from rhodecode.lib.vcs import get_vcs_instance, VCSError | |||
r4519 | from rhodecode.lib.vcs.backends.base import ( | |||
EmptyCommit, Reference, unicode_to_reference, reference_to_unicode) | ||||
r1 | from rhodecode.lib.utils2 import ( | |||
r5010 | str2bool, safe_str, get_commit_safe, sha1_safe, | |||
r821 | time_to_datetime, aslist, Optional, safe_int, get_clone_url, AttributeDict, | |||
r4913 | glob2re, StrictAttributeDict, cleaned_uri, datetime_to_time) | |||
r5071 | from rhodecode.lib.jsonalchemy import ( | |||
MutationObj, MutationList, JsonType, JsonRaw) | ||||
from rhodecode.lib.hash_utils import sha1 | ||||
r4995 | from rhodecode.lib import ext_json | |||
from rhodecode.lib import enc_utils | ||||
r5211 | from rhodecode.lib.ext_json import json, str_json | |||
r1 | from rhodecode.lib.caching_query import FromCache | |||
r3997 | from rhodecode.lib.exceptions import ( | |||
ArtifactMetadataDuplicate, ArtifactMetadataBadValueType) | ||||
r1 | from rhodecode.model.meta import Base, Session | |||
URL_SEP = '/' | ||||
log = logging.getLogger(__name__) | ||||
# ============================================================================= | ||||
# BASE CLASSES | ||||
# ============================================================================= | ||||
r281 | # this is propagated from .ini file rhodecode.encrypted_values.secret or | |||
# beaker.session.secret if first is not set. | ||||
r1 | # and initialized at environment.py | |||
r5071 | ENCRYPTION_KEY: bytes = b'' | |||
r1 | ||||
# used to sort permissions by types, '#' used here is not allowed to be in | ||||
# usernames, and it's very early in sorted string.printable table. | ||||
PERMISSION_TYPE_SORT = { | ||||
'admin': '####', | ||||
'write': '###', | ||||
'read': '##', | ||||
'none': '#', | ||||
} | ||||
def display_user_sort(obj):
    """
    Key function used to order permission entries returned by the
    .permissions() methods of Repository, RepoGroup and UserGroup.
    The default user is always sorted in front of all other resources.
    """
    if obj.username == User.DEFAULT_USER:
        return '#####'
    perm_level = obj.permission.split('.')[-1]
    sort_prefix = PERMISSION_TYPE_SORT.get(perm_level, '')
    # NOTE(dan): inactive duplicates goes last
    duplicate_marker = '9' if getattr(obj, 'duplicate_perm', None) else '1'
    return sort_prefix + duplicate_marker + obj.username
r1 | ||||
def display_user_group_sort(obj):
    """
    Key function used to order user-group permission entries returned by
    the .permissions() methods of Repository, RepoGroup and UserGroup.
    """
    perm_level = obj.permission.split('.')[-1]
    return PERMISSION_TYPE_SORT.get(perm_level, '') + obj.users_group_name
def _hash_key(k):
    # reduce an arbitrary cache key to a stable sha1 digest
    return sha1_safe(k)
r1 | ||||
def description_escaper(desc):
    """HTML-escape a user-supplied description before rendering."""
    # imported locally to avoid a circular import with the helpers module
    from rhodecode.lib import helpers
    return helpers.escape(desc)
r5463 | ||||
def in_filter_generator(qry, items, limit=500):
    """
    Splits IN() into multiple with OR
    e.g.::
        cnt = Repository.query().filter(
            or_(
                *in_filter_generator(Repository.repo_id, range(100000))
            )).count()
    """
    if not items:
        # empty list will cause empty query which might cause security issues
        # this can lead to hidden unpleasant results
        items = [-1]

    return [
        qry.in_(items[offset: offset + limit])
        for offset in range(0, len(items), limit)
    ]
# shared SQLAlchemy __table_args__ mixed into every model's table definition;
# configures the MySQL engine/charset and SQLite autoincrement behavior
base_table_args = {
    'extend_existing': True,
    'mysql_engine': 'InnoDB',
    'mysql_charset': 'utf8',
    'sqlite_autoincrement': True
}
class EncryptedTextValue(TypeDecorator):
    """
    Special column for encrypted long text data, use like::

        value = Column("encrypted_value", EncryptedValue(), nullable=False)

    This column is intelligent so if value is in unencrypted form it return
    unencrypted form, but on save it always encrypts
    """
    cache_ok = True
    impl = Text

    def process_bind_param(self, value, dialect):
        """
        Setter for storing value
        """
        import rhodecode
        if not value:
            return value

        # protect against double encrypting if values is already encrypted
        known_encrypted_prefixes = ('enc$aes$', 'enc$aes_hmac$', 'enc2$')
        if value.startswith(known_encrypted_prefixes):
            raise ValueError('value needs to be in unencrypted format, '
                             'ie. not starting with enc$ or enc2$')

        algo = rhodecode.CONFIG.get('rhodecode.encrypted_values.algorithm') or 'aes'
        encrypted = enc_utils.encrypt_value(value, enc_key=ENCRYPTION_KEY, algo=algo)
        return safe_str(encrypted)

    def process_result_value(self, value, dialect):
        """
        Getter for retrieving value
        """
        import rhodecode
        if not value:
            return value

        decrypted = enc_utils.decrypt_value(value, enc_key=ENCRYPTION_KEY)
        return safe_str(decrypted)
r1 | ||||
class BaseModel(object):
    """
    Base Model for all classes
    """

    @classmethod
    def _get_keys(cls):
        """return column names for this model """
        return class_mapper(cls).c.keys()

    def get_dict(self):
        """
        return dict with keys and values corresponding
        to this model data """
        d = {}
        for k in self._get_keys():
            d[k] = getattr(self, k)

        # also use __json__() if present to get additional fields
        _json_attr = getattr(self, '__json__', None)
        if _json_attr:
            # update with attributes from __json__
            if callable(_json_attr):
                _json_attr = _json_attr()
            for k, val in _json_attr.items():
                d[k] = val
        return d

    def get_appstruct(self):
        """return list with keys and values tuples corresponding
        to this model data """
        lst = []
        for k in self._get_keys():
            lst.append((k, getattr(self, k),))
        return lst

    def populate_obj(self, populate_dict):
        """populate model with data from given populate_dict"""
        for k in self._get_keys():
            if k in populate_dict:
                setattr(self, k, populate_dict[k])

    @classmethod
    def query(cls):
        # shortcut for a session-bound query on this model
        return Session().query(cls)

    @classmethod
    def select(cls, custom_cls=None):
        """
        Build a SQLAlchemy 2.0-style select statement for this model.

        stmt = cls.select().where(cls.user_id==1)
        # optionally
        stmt = cls.select(User.user_id).where(cls.user_id==1)
        result = cls.execute(stmt) | cls.scalars(stmt)
        """
        if custom_cls:
            stmt = select(custom_cls)
        else:
            stmt = select(cls)
        return stmt

    @classmethod
    def execute(cls, stmt):
        # run a statement built via cls.select() and return the raw result
        return Session().execute(stmt)

    @classmethod
    def scalars(cls, stmt):
        # run a statement built via cls.select() and return scalar results
        return Session().scalars(stmt)

    @classmethod
    def get(cls, id_):
        # NOTE: returns None for any falsy id_ (including 0) without querying
        if id_:
            return cls.query().get(id_)

    @classmethod
    def get_or_404(cls, id_):
        """Fetch by primary key or raise pyramid's HTTPNotFound."""
        from pyramid.httpexceptions import HTTPNotFound

        try:
            id_ = int(id_)
        except (TypeError, ValueError):
            raise HTTPNotFound()

        res = cls.query().get(id_)
        if not res:
            raise HTTPNotFound()
        return res

    @classmethod
    def getAll(cls):
        # deprecated and left for backward compatibility
        return cls.get_all()

    @classmethod
    def get_all(cls):
        return cls.query().all()

    @classmethod
    def delete(cls, id_):
        # NOTE(review): no existence check; a missing id_ would pass None to
        # Session().delete() — presumably callers guarantee the row exists
        obj = cls.query().get(id_)
        Session().delete(obj)

    @classmethod
    def identity_cache(cls, session, attr_name, value):
        """
        Look up an instance of this class in the session's identity map by
        attribute value, avoiding a database round-trip for already-loaded
        objects. Returns None when not found or when the match is ambiguous.
        """
        exist_in_session = []
        for (item_cls, pkey), instance in session.identity_map.items():
            if cls == item_cls and getattr(instance, attr_name) == value:
                exist_in_session.append(instance)
        if exist_in_session:
            if len(exist_in_session) == 1:
                return exist_in_session[0]
            # NOTE(review): log.exception outside an except block logs a
            # "NoneType: None" traceback; log.error may be intended — confirm
            log.exception(
                'multiple objects with attr %s and '
                'value %s found with same name: %r',
                attr_name, value, exist_in_session)

    @property
    def cls_name(self):
        # short class name used in __repr__ implementations across models
        return self.__class__.__name__

    def __repr__(self):
        return f'<DB:{self.cls_name}>'
r1 | ||||
class RhodeCodeSetting(Base, BaseModel):
    # global application settings stored as name/value/type rows
    __tablename__ = 'rhodecode_settings'
    __table_args__ = (
        UniqueConstraint('app_settings_name'),
        base_table_args
    )

    # converters used to coerce the raw stored string into a python type,
    # keyed by the first segment of app_settings_type
    SETTINGS_TYPES = {
        'str': safe_str,
        'int': safe_int,
        'unicode': safe_str,
        'bool': str2bool,
        'list': functools.partial(aslist, sep=',')
    }
    DEFAULT_UPDATE_URL = 'https://rhodecode.com/api/v1/info/versions'
    GLOBAL_CONF_KEY = 'app_settings'

    app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None)
    _app_settings_value = Column("app_settings_value", String(4096), nullable=True, unique=None, default=None)
    _app_settings_type = Column("app_settings_type", String(255), nullable=True, unique=None, default=None)

    def __init__(self, key='', val='', type='unicode'):
        self.app_settings_name = key
        self.app_settings_type = type
        self.app_settings_value = val

    @validates('_app_settings_value')
    def validate_settings_value(self, key, val):
        # values must always be persisted as str
        assert type(val) == str
        return val

    @hybrid_property
    def app_settings_value(self):
        v = self._app_settings_value
        _type = self.app_settings_type
        if _type:
            # the type may carry a modifier suffix, e.g. 'unicode.encrypted'
            _type = self.app_settings_type.split('.')[0]
        # decode the encrypted value
        if 'encrypted' in self.app_settings_type:
            cipher = EncryptedTextValue()
            v = safe_str(cipher.process_result_value(v, None))

        converter = self.SETTINGS_TYPES.get(_type) or \
            self.SETTINGS_TYPES['unicode']
        return converter(v)

    @app_settings_value.setter
    def app_settings_value(self, val):
        """
        Setter that will always make sure we use unicode in app_settings_value

        :param val:
        """
        val = safe_str(val)
        # encode the encrypted value
        if 'encrypted' in self.app_settings_type:
            cipher = EncryptedTextValue()
            val = safe_str(cipher.process_bind_param(val, None))
        self._app_settings_value = val

    @hybrid_property
    def app_settings_type(self):
        return self._app_settings_type

    @app_settings_type.setter
    def app_settings_type(self, val):
        # only the base type (before any '.modifier' suffix) must be known
        if val.split('.')[0] not in self.SETTINGS_TYPES:
            raise Exception('type must be one of %s got %s'
                            % (self.SETTINGS_TYPES.keys(), val))
        self._app_settings_type = val

    @classmethod
    def get_by_prefix(cls, prefix):
        # all settings whose name starts with the given prefix
        return RhodeCodeSetting.query()\
            .filter(RhodeCodeSetting.app_settings_name.startswith(prefix))\
            .all()

    def __repr__(self):
        return "<%s('%s:%s[%s]')>" % (
            self.cls_name,
            self.app_settings_name, self.app_settings_value,
            self.app_settings_type
        )
class RhodeCodeUi(Base, BaseModel):
    # global vcs "ui" configuration entries (hook names, svn patterns, ...)
    __tablename__ = 'rhodecode_ui'
    __table_args__ = (
        UniqueConstraint('ui_key'),
        base_table_args
    )
    # Sync those values with vcsserver.config.hooks

    HOOK_REPO_SIZE = 'changegroup.repo_size'
    # HG
    HOOK_PRE_PULL = 'preoutgoing.pre_pull'
    HOOK_PULL = 'outgoing.pull_logger'
    HOOK_PRE_PUSH = 'prechangegroup.pre_push'
    HOOK_PRETX_PUSH = 'pretxnchangegroup.pre_push'
    HOOK_PUSH = 'changegroup.push_logger'
    HOOK_PUSH_KEY = 'pushkey.key_push'

    # hooks installed and managed by RhodeCode itself
    HOOKS_BUILTIN = [
        HOOK_PRE_PULL,
        HOOK_PULL,
        HOOK_PRE_PUSH,
        HOOK_PRETX_PUSH,
        HOOK_PUSH,
        HOOK_PUSH_KEY,
    ]

    # TODO: johbo: Unify way how hooks are configured for git and hg,
    # git part is currently hardcoded.

    # SVN PATTERNS
    SVN_BRANCH_ID = 'vcs_svn_branch'
    SVN_TAG_ID = 'vcs_svn_tag'

    ui_id = Column(
        "ui_id", Integer(), nullable=False, unique=True, default=None,
        primary_key=True)
    ui_section = Column(
        "ui_section", String(255), nullable=True, unique=None, default=None)
    ui_key = Column(
        "ui_key", String(255), nullable=True, unique=None, default=None)
    ui_value = Column(
        "ui_value", String(255), nullable=True, unique=None, default=None)
    ui_active = Column(
        "ui_active", Boolean(), nullable=True, unique=None, default=True)

    def __repr__(self):
        return '<%s[%s]%s=>%s]>' % (self.cls_name, self.ui_section,
                                    self.ui_key, self.ui_value)
class RepoRhodeCodeSetting(Base, BaseModel):
    # per-repository override of a global RhodeCodeSetting entry
    __tablename__ = 'repo_rhodecode_settings'
    __table_args__ = (
        UniqueConstraint(
            'app_settings_name', 'repository_id',
            name='uq_repo_rhodecode_setting_name_repo_id'),
        base_table_args
    )

    repository_id = Column(
        "repository_id", Integer(), ForeignKey('repositories.repo_id'),
        nullable=False)
    app_settings_id = Column(
        "app_settings_id", Integer(), nullable=False, unique=True,
        default=None, primary_key=True)
    app_settings_name = Column(
        "app_settings_name", String(255), nullable=True, unique=None,
        default=None)
    _app_settings_value = Column(
        "app_settings_value", String(4096), nullable=True, unique=None,
        default=None)
    _app_settings_type = Column(
        "app_settings_type", String(255), nullable=True, unique=None,
        default=None)

    repository = relationship('Repository', viewonly=True)

    def __init__(self, repository_id, key='', val='', type='unicode'):
        self.repository_id = repository_id
        self.app_settings_name = key
        self.app_settings_type = type
        self.app_settings_value = val

    @validates('_app_settings_value')
    def validate_settings_value(self, key, val):
        # values must always be persisted as str
        assert type(val) == str
        return val

    @hybrid_property
    def app_settings_value(self):
        # coerce the raw stored string into the declared python type,
        # reusing the converter table from the global settings model
        v = self._app_settings_value
        type_ = self.app_settings_type
        SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
        converter = SETTINGS_TYPES.get(type_) or SETTINGS_TYPES['unicode']
        return converter(v)

    @app_settings_value.setter
    def app_settings_value(self, val):
        """
        Setter that will always make sure we use unicode in app_settings_value

        :param val:
        """
        self._app_settings_value = safe_str(val)

    @hybrid_property
    def app_settings_type(self):
        return self._app_settings_type

    @app_settings_type.setter
    def app_settings_type(self, val):
        # unlike the global setting, no '.modifier' suffix is accepted here
        SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
        if val not in SETTINGS_TYPES:
            raise Exception('type must be one of %s got %s'
                            % (SETTINGS_TYPES.keys(), val))
        self._app_settings_type = val

    def __repr__(self):
        return "<%s('%s:%s:%s[%s]')>" % (
            self.cls_name, self.repository.repo_name,
            self.app_settings_name, self.app_settings_value,
            self.app_settings_type
        )
class RepoRhodeCodeUi(Base, BaseModel):
    # per-repository override of a global RhodeCodeUi entry
    __tablename__ = 'repo_rhodecode_ui'
    __table_args__ = (
        UniqueConstraint(
            'repository_id', 'ui_section', 'ui_key',
            name='uq_repo_rhodecode_ui_repository_id_section_key'),
        base_table_args
    )

    repository_id = Column(
        "repository_id", Integer(), ForeignKey('repositories.repo_id'),
        nullable=False)
    ui_id = Column(
        "ui_id", Integer(), nullable=False, unique=True, default=None,
        primary_key=True)
    ui_section = Column(
        "ui_section", String(255), nullable=True, unique=None, default=None)
    ui_key = Column(
        "ui_key", String(255), nullable=True, unique=None, default=None)
    ui_value = Column(
        "ui_value", String(255), nullable=True, unique=None, default=None)
    ui_active = Column(
        "ui_active", Boolean(), nullable=True, unique=None, default=True)

    repository = relationship('Repository', viewonly=True)

    def __repr__(self):
        return '<%s[%s:%s]%s=>%s]>' % (
            self.cls_name, self.repository.repo_name,
            self.ui_section, self.ui_key, self.ui_value)
class User(Base, BaseModel): | ||||
__tablename__ = 'users' | ||||
__table_args__ = ( | ||||
UniqueConstraint('username'), UniqueConstraint('email'), | ||||
Index('u_username_idx', 'username'), | ||||
Index('u_email_idx', 'email'), | ||||
r2830 | base_table_args | |||
r1 | ) | |||
r2830 | ||||
r1 | DEFAULT_USER = 'default' | |||
DEFAULT_USER_EMAIL = 'anonymous@rhodecode.org' | ||||
DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}' | ||||
r5360 | RECOVERY_CODES_COUNT = 10 | |||
r1 | ||||
user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | ||||
username = Column("username", String(255), nullable=True, unique=None, default=None) | ||||
password = Column("password", String(255), nullable=True, unique=None, default=None) | ||||
active = Column("active", Boolean(), nullable=True, unique=None, default=True) | ||||
admin = Column("admin", Boolean(), nullable=True, unique=None, default=False) | ||||
name = Column("firstname", String(255), nullable=True, unique=None, default=None) | ||||
lastname = Column("lastname", String(255), nullable=True, unique=None, default=None) | ||||
_email = Column("email", String(255), nullable=True, unique=None, default=None) | ||||
last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None) | ||||
r1635 | last_activity = Column('last_activity', DateTime(timezone=False), nullable=True, unique=None, default=None) | |||
r4021 | description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql')) | |||
r1545 | ||||
r1 | extern_type = Column("extern_type", String(255), nullable=True, unique=None, default=None) | |||
extern_name = Column("extern_name", String(255), nullable=True, unique=None, default=None) | ||||
r1481 | _api_key = Column("api_key", String(255), nullable=True, unique=None, default=None) | |||
r1 | inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True) | |||
created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) | ||||
_user_data = Column("user_data", LargeBinary(), nullable=True) # JSON data | ||||
r5071 | user_log = relationship('UserLog', back_populates='user') | |||
r3981 | user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all, delete-orphan') | |||
r1 | ||||
r5071 | repositories = relationship('Repository', back_populates='user') | |||
repository_groups = relationship('RepoGroup', back_populates='user') | ||||
user_groups = relationship('UserGroup', back_populates='user') | ||||
user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all', back_populates='follows_user') | ||||
followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all', back_populates='user') | ||||
r1 | ||||
r3981 | repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all, delete-orphan') | |||
r5071 | repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all, delete-orphan', back_populates='user') | |||
user_group_to_perm = relationship('UserUserGroupToPerm', primaryjoin='UserUserGroupToPerm.user_id==User.user_id', cascade='all, delete-orphan', back_populates='user') | ||||
group_member = relationship('UserGroupMember', cascade='all', back_populates='user') | ||||
notifications = relationship('UserNotification', cascade='all', back_populates='user') | ||||
r1 | # notifications assigned to this user | |||
r5071 | user_created_notifications = relationship('Notification', cascade='all', back_populates='created_by_user') | |||
r1 | # comments created by this user | |||
r5071 | user_comments = relationship('ChangesetComment', cascade='all', back_populates='author') | |||
r1 | # user profile extra info | |||
r5071 | user_emails = relationship('UserEmailMap', cascade='all', back_populates='user') | |||
user_ip_map = relationship('UserIpMap', cascade='all', back_populates='user') | ||||
user_auth_tokens = relationship('UserApiKeys', cascade='all', back_populates='user') | ||||
user_ssh_keys = relationship('UserSshKeys', cascade='all', back_populates='user') | ||||
r1993 | ||||
r1 | # gists | |||
r5071 | user_gists = relationship('Gist', cascade='all', back_populates='owner') | |||
r1 | # user pull requests | |||
r5071 | user_pull_requests = relationship('PullRequest', cascade='all', back_populates='author') | |||
r4351 | ||||
r1 | # external identities | |||
r5071 | external_identities = relationship('ExternalIdentity', primaryjoin="User.user_id==ExternalIdentity.local_user_id", cascade='all') | |||
r2054 | # review rules | |||
r5071 | user_review_rules = relationship('RepoReviewRuleUser', cascade='all', back_populates='user') | |||
r1 | ||||
r4011 | # artifacts owned | |||
r5071 | artifacts = relationship('FileStore', primaryjoin='FileStore.user_id==User.user_id', back_populates='upload_user') | |||
r4011 | ||||
# no cascade, set NULL | ||||
r5071 | scope_artifacts = relationship('FileStore', primaryjoin='FileStore.scope_user_id==User.user_id', cascade='', back_populates='user') | |||
def __repr__(self): | ||||
return f"<{self.cls_name}('id={self.user_id}, username={self.username}')>" | ||||
r1 | ||||
@hybrid_property | ||||
def email(self): | ||||
return self._email | ||||
@email.setter | ||||
def email(self, val): | ||||
self._email = val.lower() if val else None | ||||
r1481 | @hybrid_property | |||
r1814 | def first_name(self): | |||
r1815 | if self.name: | |||
r5463 | return description_escaper(self.name) | |||
r1815 | return self.name | |||
r1814 | ||||
@hybrid_property | ||||
def last_name(self): | ||||
r1815 | if self.lastname: | |||
r5463 | return description_escaper(self.lastname) | |||
r1815 | return self.lastname | |||
r1814 | ||||
@hybrid_property | ||||
r1481 | def api_key(self): | |||
""" | ||||
Fetch if exist an auth-token with role ALL connected to this user | ||||
""" | ||||
user_auth_token = UserApiKeys.query()\ | ||||
.filter(UserApiKeys.user_id == self.user_id)\ | ||||
.filter(or_(UserApiKeys.expires == -1, | ||||
UserApiKeys.expires >= time.time()))\ | ||||
.filter(UserApiKeys.role == UserApiKeys.ROLE_ALL).first() | ||||
r1482 | if user_auth_token: | |||
user_auth_token = user_auth_token.api_key | ||||
r1481 | return user_auth_token | |||
@api_key.setter | ||||
def api_key(self, val): | ||||
# don't allow to set API key this is deprecated for now | ||||
self._api_key = None | ||||
r1 | @property | |||
r1923 | def reviewer_pull_requests(self): | |||
return PullRequestReviewers.query() \ | ||||
.options(joinedload(PullRequestReviewers.pull_request)) \ | ||||
.filter(PullRequestReviewers.user_id == self.user_id) \ | ||||
.all() | ||||
@property | ||||
r1 | def firstname(self): | |||
# alias for future | ||||
return self.name | ||||
@property | ||||
def emails(self): | ||||
r1981 | other = UserEmailMap.query()\ | |||
.filter(UserEmailMap.user == self) \ | ||||
.order_by(UserEmailMap.email_id.asc()) \ | ||||
.all() | ||||
r1 | return [self.email] + [x.email for x in other] | |||
r4018 | def emails_cached(self): | |||
r5009 | emails = [] | |||
if self.user_id != self.get_default_user_id(): | ||||
emails = UserEmailMap.query()\ | ||||
.filter(UserEmailMap.user == self) \ | ||||
.order_by(UserEmailMap.email_id.asc()) | ||||
emails = emails.options( | ||||
FromCache("sql_cache_short", f"get_user_{self.user_id}_emails") | ||||
) | ||||
r4018 | ||||
return [self.email] + [x.email for x in emails] | ||||
r1 | @property | |||
def auth_tokens(self): | ||||
r1953 | auth_tokens = self.get_auth_tokens() | |||
return [x.api_key for x in auth_tokens] | ||||
def get_auth_tokens(self): | ||||
r1981 | return UserApiKeys.query()\ | |||
.filter(UserApiKeys.user == self)\ | ||||
.order_by(UserApiKeys.user_api_key_id.asc())\ | ||||
.all() | ||||
r1 | ||||
r2424 | @LazyProperty | |||
r1 | def feed_token(self): | |||
r1421 | return self.get_feed_token() | |||
r2424 | def get_feed_token(self, cache=True): | |||
r1 | feed_tokens = UserApiKeys.query()\ | |||
.filter(UserApiKeys.user == self)\ | ||||
r2424 | .filter(UserApiKeys.role == UserApiKeys.ROLE_FEED) | |||
if cache: | ||||
feed_tokens = feed_tokens.options( | ||||
r5009 | FromCache("sql_cache_short", f"get_user_feed_token_{self.user_id}")) | |||
r2424 | ||||
feed_tokens = feed_tokens.all() | ||||
r1 | if feed_tokens: | |||
return feed_tokens[0].api_key | ||||
r1421 | return 'NO_FEED_TOKEN_AVAILABLE' | |||
r1 | ||||
r4003 | @LazyProperty | |||
def artifact_token(self): | ||||
return self.get_artifact_token() | ||||
def get_artifact_token(self, cache=True): | ||||
artifacts_tokens = UserApiKeys.query()\ | ||||
r4611 | .filter(UserApiKeys.user == self) \ | |||
.filter(or_(UserApiKeys.expires == -1, | ||||
UserApiKeys.expires >= time.time())) \ | ||||
r4003 | .filter(UserApiKeys.role == UserApiKeys.ROLE_ARTIFACT_DOWNLOAD) | |||
r4611 | ||||
r4003 | if cache: | |||
artifacts_tokens = artifacts_tokens.options( | ||||
r5009 | FromCache("sql_cache_short", f"get_user_artifact_token_{self.user_id}")) | |||
r4003 | ||||
artifacts_tokens = artifacts_tokens.all() | ||||
if artifacts_tokens: | ||||
return artifacts_tokens[0].api_key | ||||
return 'NO_ARTIFACT_TOKEN_AVAILABLE' | ||||
r4611 | def get_or_create_artifact_token(self): | |||
artifacts_tokens = UserApiKeys.query()\ | ||||
.filter(UserApiKeys.user == self) \ | ||||
.filter(or_(UserApiKeys.expires == -1, | ||||
UserApiKeys.expires >= time.time())) \ | ||||
.filter(UserApiKeys.role == UserApiKeys.ROLE_ARTIFACT_DOWNLOAD) | ||||
artifacts_tokens = artifacts_tokens.all() | ||||
if artifacts_tokens: | ||||
return artifacts_tokens[0].api_key | ||||
else: | ||||
from rhodecode.model.auth_token import AuthTokenModel | ||||
artifact_token = AuthTokenModel().create( | ||||
self, 'auto-generated-artifact-token', | ||||
lifetime=-1, role=UserApiKeys.ROLE_ARTIFACT_DOWNLOAD) | ||||
Session.commit() | ||||
return artifact_token.api_key | ||||
r5367 | def is_totp_valid(self, received_code, secret): | |||
totp = pyotp.TOTP(secret) | ||||
r5360 | return totp.verify(received_code) | |||
    def is_2fa_recovery_code_valid(self, received_code, secret):
        # validate a one-shot 2fa recovery code; a matching code is consumed
        # NOTE(review): ``secret`` is unused here, presumably kept for
        # signature parity with is_totp_valid — confirm before removing
        encrypted_recovery_codes = self.user_data.get('recovery_codes_2fa', [])
        recovery_codes = self.get_2fa_recovery_codes()
        if received_code in recovery_codes:
            # both lists share the same ordering, so the index of the plain
            # code locates the encrypted entry to drop before persisting
            encrypted_recovery_codes.pop(recovery_codes.index(received_code))
            self.update_userdata(recovery_codes_2fa=encrypted_recovery_codes)
            return True
        return False
    @hybrid_property
    def has_forced_2fa(self):
        """
        Checks if 2fa was forced for current user
        """
        from rhodecode.model.settings import SettingsModel
        # the auth plugin matching this user's extern_type can globally force 2fa
        if value := SettingsModel().get_setting_by_name(f'auth_{self.extern_type}_global_2fa'):
            return value.app_settings_value
        return False

    @hybrid_property
    def has_enabled_2fa(self):
        """
        Checks if user enabled 2fa
        """
        # a globally forced 2fa overrides the per-user preference
        if value := self.has_forced_2fa:
            return value
        return self.user_data.get('enabled_2fa', False)

    @has_enabled_2fa.setter
    def has_enabled_2fa(self, val):
        # flag toggle; disabling also wipes all stored 2fa state
        val = str2bool(val)
        self.update_userdata(enabled_2fa=val)
        if not val:
            # NOTE: setting to false we clear the user_data to not store any 2fa artifacts
            self.update_userdata(secret_2fa=None, recovery_codes_2fa=[], check_2fa=False)
        Session().commit()

    @hybrid_property
    def check_2fa_required(self):
        """
        Check if check 2fa flag is set for this user
        """
        value = self.user_data.get('check_2fa', False)
        return value

    @check_2fa_required.setter
    def check_2fa_required(self, val):
        # persists the flag immediately (commits the session)
        val = str2bool(val)
        self.update_userdata(check_2fa=val)
        Session().commit()

    @hybrid_property
    def has_seen_2fa_codes(self):
        """
        get the flag about if user has seen 2fa recovery codes
        """
        value = self.user_data.get('recovery_codes_2fa_seen', False)
        return value

    @has_seen_2fa_codes.setter
    def has_seen_2fa_codes(self, val):
        # persists the flag immediately (commits the session)
        val = str2bool(val)
        self.update_userdata(recovery_codes_2fa_seen=val)
        Session().commit()

    @hybrid_property
    def needs_2fa_configure(self):
        """
        Determines if setup2fa has completed for this user. Means he has all needed data for 2fa to work.

        Currently this is 2fa enabled and secret exists
        """
        if self.has_enabled_2fa:
            # enabled but no secret yet -> configuration still required
            return not self.user_data.get('secret_2fa')
        return False
def init_2fa_recovery_codes(self, persist=True, force=False): | ||||
r5360 | """ | |||
Creates 2fa recovery codes | ||||
""" | ||||
recovery_codes = self.user_data.get('recovery_codes_2fa', []) | ||||
encrypted_codes = [] | ||||
r5367 | if not recovery_codes or force: | |||
r5360 | for _ in range(self.RECOVERY_CODES_COUNT): | |||
recovery_code = pyotp.random_base32() | ||||
recovery_codes.append(recovery_code) | ||||
r5367 | encrypted_code = enc_utils.encrypt_value(safe_bytes(recovery_code), enc_key=ENCRYPTION_KEY) | |||
encrypted_codes.append(safe_str(encrypted_code)) | ||||
if persist: | ||||
self.update_userdata(recovery_codes_2fa=encrypted_codes, recovery_codes_2fa_seen=False) | ||||
r5360 | return recovery_codes | |||
# User should not check the same recovery codes more than once | ||||
return [] | ||||
r5367 | def get_2fa_recovery_codes(self): | |||
encrypted_recovery_codes = self.user_data.get('recovery_codes_2fa', []) | ||||
recovery_codes = list(map( | ||||
lambda val: safe_str( | ||||
enc_utils.decrypt_value( | ||||
val, | ||||
r5376 | enc_key=ENCRYPTION_KEY | |||
r5367 | )), | |||
encrypted_recovery_codes)) | ||||
return recovery_codes | ||||
def init_secret_2fa(self, persist=True, force=False): | ||||
secret_2fa = self.user_data.get('secret_2fa') | ||||
if not secret_2fa or force: | ||||
secret = pyotp.random_base32() | ||||
if persist: | ||||
self.update_userdata(secret_2fa=safe_str(enc_utils.encrypt_value(safe_bytes(secret), enc_key=ENCRYPTION_KEY))) | ||||
return secret | ||||
return '' | ||||
r5374 | @hybrid_property | |||
def secret_2fa(self) -> str: | ||||
""" | ||||
get stored secret for 2fa | ||||
""" | ||||
r5373 | secret_2fa = self.user_data.get('secret_2fa') | |||
r5367 | if secret_2fa: | |||
return safe_str( | ||||
r5376 | enc_utils.decrypt_value(secret_2fa, enc_key=ENCRYPTION_KEY)) | |||
r5367 | return '' | |||
r5374 | @secret_2fa.setter | |||
def secret_2fa(self, value: str) -> None: | ||||
r5367 | encrypted_value = enc_utils.encrypt_value(safe_bytes(value), enc_key=ENCRYPTION_KEY) | |||
self.update_userdata(secret_2fa=safe_str(encrypted_value)) | ||||
r5360 | def regenerate_2fa_recovery_codes(self): | |||
""" | ||||
Regenerates 2fa recovery codes upon request | ||||
""" | ||||
r5367 | new_recovery_codes = self.init_2fa_recovery_codes(force=True) | |||
r5360 | Session().commit() | |||
return new_recovery_codes | ||||
r1 | @classmethod | |||
def extra_valid_auth_tokens(cls, user, role=None): | ||||
tokens = UserApiKeys.query().filter(UserApiKeys.user == user)\ | ||||
.filter(or_(UserApiKeys.expires == -1, | ||||
UserApiKeys.expires >= time.time())) | ||||
if role: | ||||
tokens = tokens.filter(or_(UserApiKeys.role == role, | ||||
UserApiKeys.role == UserApiKeys.ROLE_ALL)) | ||||
return tokens.all() | ||||
    def authenticate_by_token(self, auth_token, roles=None, scope_repo_id=None):
        """
        Authenticate this user via one of his auth tokens.

        :param auth_token: plain token value received from the caller
        :param roles: optional list of token roles to accept;
            ROLE_ALL is always accepted in addition
        :param scope_repo_id: repo id of the calling scope, used when the
            matched token is repo-scoped
        :return: True when a valid, non-expired token with a matching role
            (and matching repo scope, if the token has one) is found
        """
        from rhodecode.lib import auth

        log.debug('Trying to authenticate user: %s via auth-token, '
                  'and roles: %s', self, roles)
        if not auth_token:
            return False

        roles = (roles or []) + [UserApiKeys.ROLE_ALL]
        # only this user's non-expired tokens with an accepted role
        tokens_q = UserApiKeys.query()\
            .filter(UserApiKeys.user_id == self.user_id)\
            .filter(or_(UserApiKeys.expires == -1,
                        UserApiKeys.expires >= time.time()))
        tokens_q = tokens_q.filter(UserApiKeys.role.in_(roles))

        # split stored tokens into plain and encrypted (ENC_PREF-prefixed)
        crypto_backend = auth.crypto_backend()
        enc_token_map = {}
        plain_token_map = {}
        for token in tokens_q:
            if token.api_key.startswith(crypto_backend.ENC_PREF):
                enc_token_map[token.api_key] = token
            else:
                plain_token_map[token.api_key] = token
        log.debug(
            'Found %s plain and %s encrypted tokens to check for authentication for this user',
            len(plain_token_map), len(enc_token_map))

        # plain token match comes first
        match = plain_token_map.get(auth_token)

        # check encrypted tokens now
        if not match:
            for token_hash, token in enc_token_map.items():
                # NOTE(marcink): this is expensive to calculate, but most secure
                if crypto_backend.hash_check(auth_token, token_hash):
                    match = token
                    break

        if match:
            log.debug('Found matching token %s', match)
            if match.repo_id:
                # repo-scoped token: only valid for its exact repo scope
                log.debug('Found scope, checking for scope match of token %s', match)
                if match.repo_id == scope_repo_id:
                    return True
                else:
                    log.debug(
                        'AUTH_TOKEN: scope mismatch, token has a set repo scope: %s, '
                        'and calling scope is:%s, skipping further checks',
                        match.repo, scope_repo_id)
                    return False
            else:
                return True

        return False
r1 | @property | |||
def ip_addresses(self): | ||||
ret = UserIpMap.query().filter(UserIpMap.user == self).all() | ||||
return [x.ip_addr for x in ret] | ||||
@property | ||||
def username_and_name(self): | ||||
r5071 | return f'{self.username} ({self.first_name} {self.last_name})' | |||
r1 | ||||
@property | ||||
def username_or_name_or_email(self): | ||||
r5009 | full_name = self.full_name if self.full_name != ' ' else None | |||
r1 | return self.username or full_name or self.email | |||
@property | ||||
def full_name(self): | ||||
r5071 | return f'{self.first_name} {self.last_name}' | |||
r1 | ||||
@property | ||||
def full_name_or_username(self): | ||||
r5071 | return (f'{self.first_name} {self.last_name}' | |||
r1815 | if (self.first_name and self.last_name) else self.username) | |||
r1 | ||||
@property | ||||
def full_contact(self): | ||||
r5071 | return f'{self.first_name} {self.last_name} <{self.email}>' | |||
r1 | ||||
@property | ||||
def short_contact(self): | ||||
r5071 | return f'{self.first_name} {self.last_name}' | |||
r1 | ||||
@property | ||||
def is_admin(self): | ||||
return self.admin | ||||
r4018 | @property | |||
def language(self): | ||||
return self.user_data.get('language') | ||||
r1997 | def AuthUser(self, **kwargs): | |||
r1 | """ | |||
Returns instance of AuthUser for this user | ||||
""" | ||||
from rhodecode.lib.auth import AuthUser | ||||
r1997 | return AuthUser(user_id=self.user_id, username=self.username, **kwargs) | |||
r1 | ||||
@hybrid_property | ||||
def user_data(self): | ||||
if not self._user_data: | ||||
return {} | ||||
try: | ||||
r4686 | return json.loads(self._user_data) or {} | |||
r1 | except TypeError: | |||
return {} | ||||
@user_data.setter | ||||
def user_data(self, val): | ||||
if not isinstance(val, dict): | ||||
r5365 | raise Exception(f'user_data must be dict, got {type(val)}') | |||
r1 | try: | |||
r5071 | self._user_data = safe_bytes(json.dumps(val)) | |||
r1 | except Exception: | |||
log.error(traceback.format_exc()) | ||||
@classmethod | ||||
r5365 | def get(cls, user_id, cache=False): | |||
if not user_id: | ||||
return | ||||
user = cls.query() | ||||
if cache: | ||||
user = user.options( | ||||
FromCache("sql_cache_short", f"get_users_{user_id}")) | ||||
return user.get(user_id) | ||||
    @classmethod
    def get_by_username(cls, username, case_insensitive=False,
                        cache=False):
        """
        Fetch a single user by username.

        :param username: username to look up
        :param case_insensitive: compare usernames lowercased on both sides
        :param cache: use the short SQL cache region keyed by the username hash
        :return: User instance or None
        """
        if case_insensitive:
            q = cls.select().where(
                func.lower(cls.username) == func.lower(username))
        else:
            q = cls.select().where(cls.username == username)

        if cache:
            hash_key = _hash_key(username)
            q = q.options(
                FromCache("sql_cache_short", f"get_user_by_name_{hash_key}"))

        return cls.execute(q).scalar_one_or_none()
    @classmethod
    def get_by_username_or_primary_email(cls, user_identifier):
        """
        Fetch a user matching *user_identifier* against either the username
        or the primary email, case-insensitively, in a single UNION query.
        """
        qs = union_all(cls.select().where(func.lower(cls.username) == func.lower(user_identifier)),
                       cls.select().where(func.lower(cls.email) == func.lower(user_identifier)))
        return cls.execute(cls.select(User).from_statement(qs)).scalar_one_or_none()
@classmethod | ||||
r1481 | def get_by_auth_token(cls, auth_token, cache=False): | |||
r5071 | ||||
q = cls.select(User)\ | ||||
.join(UserApiKeys)\ | ||||
.where(UserApiKeys.api_key == auth_token)\ | ||||
.where(or_(UserApiKeys.expires == -1, | ||||
UserApiKeys.expires >= time.time())) | ||||
r1 | if cache: | |||
r1749 | q = q.options( | |||
r5009 | FromCache("sql_cache_short", f"get_auth_token_{auth_token}")) | |||
r1481 | ||||
r5071 | matched_user = cls.execute(q).scalar_one_or_none() | |||
return matched_user | ||||
r1 | ||||
    @classmethod
    def get_by_email(cls, email, case_insensitive=False, cache=False):
        """
        Fetch a user by email; falls back to the alternate email map
        (UserEmailMap) when the primary email does not match.

        :param email: email address to look up
        :param case_insensitive: compare emails lowercased on both sides
        :param cache: use the short SQL cache region for both lookups
        :return: User instance or None
        """
        if case_insensitive:
            q = cls.select().where(func.lower(cls.email) == func.lower(email))
        else:
            q = cls.select().where(cls.email == email)

        if cache:
            email_key = _hash_key(email)
            q = q.options(
                FromCache("sql_cache_short", f"get_email_key_{email_key}"))

        ret = cls.execute(q).scalar_one_or_none()
        if ret is None:
            q = cls.select(UserEmailMap)
            # try fetching in alternate email map
            if case_insensitive:
                q = q.where(func.lower(UserEmailMap.email) == func.lower(email))
            else:
                q = q.where(UserEmailMap.email == email)
            q = q.options(joinedload(UserEmailMap.user))
            if cache:
                q = q.options(
                    FromCache("sql_cache_short", f"get_email_map_key_{email_key}"))

            result = cls.execute(q).scalar_one_or_none()
            # the map row carries the owning user; None when no row matched
            ret = getattr(result, 'user', None)

        return ret
@classmethod | ||||
def get_from_cs_author(cls, author): | ||||
""" | ||||
Tries to get User objects out of commit author string | ||||
:param author: | ||||
""" | ||||
from rhodecode.lib.helpers import email, author_name | ||||
# Valid email in the attribute passed, see if they're in the system | ||||
_email = email(author) | ||||
if _email: | ||||
user = cls.get_by_email(_email, case_insensitive=True) | ||||
if user: | ||||
return user | ||||
# Maybe we can match by username? | ||||
_author = author_name(author) | ||||
user = cls.get_by_username(_author, case_insensitive=True) | ||||
if user: | ||||
return user | ||||
def update_userdata(self, **kwargs): | ||||
usr = self | ||||
old = usr.user_data | ||||
old.update(**kwargs) | ||||
usr.user_data = old | ||||
Session().add(usr) | ||||
r3944 | log.debug('updated userdata with %s', kwargs) | |||
r1 | ||||
def update_lastlogin(self): | ||||
"""Update user lastlogin""" | ||||
self.last_login = datetime.datetime.now() | ||||
Session().add(self) | ||||
log.debug('updated user %s lastlogin', self.username) | ||||
r1478 | def update_password(self, new_password): | |||
from rhodecode.lib.auth import get_crypt_password | ||||
r1 | ||||
self.password = get_crypt_password(new_password) | ||||
Session().add(self) | ||||
    @classmethod
    def get_first_super_admin(cls):
        """
        Return the super-admin with the lowest user_id; raises when the
        installation has no administrative account at all.
        """
        stmt = cls.select().where(User.admin == true()).order_by(User.user_id.asc())
        user = cls.scalars(stmt).first()

        if user is None:
            raise Exception('FATAL: Missing administrative account!')
        return user

    @classmethod
    def get_all_super_admins(cls, only_active=False):
        """
        Returns all admin accounts sorted by username
        """
        qry = User.query().filter(User.admin == true()).order_by(User.username.asc())
        if only_active:
            qry = qry.filter(User.active == true())
        return qry.all()
r1 | ||||
    @classmethod
    def get_all_user_ids(cls, only_active=True):
        """
        Returns all users IDs
        """
        qry = Session().query(User.user_id)
        if only_active:
            qry = qry.filter(User.active == true())
        return [x.user_id for x in qry]

    @classmethod
    def get_default_user(cls, cache=False, refresh=False):
        """
        Return the special default (anonymous) user account; raises when the
        account is missing, since the application cannot work without it.
        """
        user = User.get_by_username(User.DEFAULT_USER, cache=cache)
        if user is None:
            raise Exception('FATAL: Missing default account!')
        if refresh:
            # The default user might be based on outdated state which
            # has been loaded from the cache.
            # A call to refresh() ensures that the
            # latest state from the database is used.
            Session().refresh(user)

        return user

    @classmethod
    def get_default_user_id(cls):
        """Return the default user's id cached in the application CONFIG."""
        import rhodecode
        return rhodecode.CONFIG['default_user_id']
    def _get_default_perms(self, user, suffix=''):
        # delegate to PermissionModel over this user's global permissions
        from rhodecode.model.permission import PermissionModel
        return PermissionModel().get_default_perms(user.user_perms, suffix)

    def get_default_perms(self, suffix=''):
        """Return the default (global) permissions of this user."""
        return self._get_default_perms(self, suffix)
def get_api_data(self, include_secrets=False, details='full'): | ||||
""" | ||||
Common function for generating user related data for API | ||||
:param include_secrets: By default secrets in the API data will be replaced | ||||
by a placeholder value to prevent exposing this data by accident. In case | ||||
this data shall be exposed, set this flag to ``True``. | ||||
:param details: details can be 'basic|full' basic gives only a subset of | ||||
the available user information that includes user_id, name and emails. | ||||
""" | ||||
user = self | ||||
user_data = self.user_data | ||||
data = { | ||||
'user_id': user.user_id, | ||||
'username': user.username, | ||||
'firstname': user.name, | ||||
'lastname': user.lastname, | ||||
r4024 | 'description': user.description, | |||
r1 | 'email': user.email, | |||
'emails': user.emails, | ||||
} | ||||
if details == 'basic': | ||||
return data | ||||
r1953 | auth_token_length = 40 | |||
auth_token_replacement = '*' * auth_token_length | ||||
r1 | ||||
extras = { | ||||
r1953 | 'auth_tokens': [auth_token_replacement], | |||
r1 | 'active': user.active, | |||
'admin': user.admin, | ||||
'extern_type': user.extern_type, | ||||
'extern_name': user.extern_name, | ||||
'last_login': user.last_login, | ||||
r1558 | 'last_activity': user.last_activity, | |||
r1 | 'ip_addresses': user.ip_addresses, | |||
'language': user_data.get('language') | ||||
} | ||||
data.update(extras) | ||||
if include_secrets: | ||||
r1953 | data['auth_tokens'] = user.auth_tokens | |||
r1 | return data | |||
def __json__(self): | ||||
data = { | ||||
'full_name': self.full_name, | ||||
'full_name_or_username': self.full_name_or_username, | ||||
'short_contact': self.short_contact, | ||||
'full_contact': self.full_contact, | ||||
} | ||||
data.update(self.get_api_data()) | ||||
return data | ||||
class UserApiKeys(Base, BaseModel):
    """
    Authentication tokens (API keys) owned by a user, optionally scoped to a
    repository or repository group.
    """
    __tablename__ = 'user_api_keys'
    __table_args__ = (
        Index('uak_api_key_idx', 'api_key'),
        Index('uak_api_key_expires_idx', 'api_key', 'expires'),
        base_table_args
    )

    # ApiKey role
    ROLE_ALL = 'token_role_all'
    ROLE_VCS = 'token_role_vcs'
    ROLE_API = 'token_role_api'
    ROLE_HTTP = 'token_role_http'
    ROLE_FEED = 'token_role_feed'
    ROLE_ARTIFACT_DOWNLOAD = 'role_artifact_download'
    # The last one is ignored in the list as we only
    # use it for one action, and cannot be created by users
    ROLE_PASSWORD_RESET = 'token_password_reset'

    ROLES = [ROLE_ALL, ROLE_VCS, ROLE_API, ROLE_HTTP, ROLE_FEED, ROLE_ARTIFACT_DOWNLOAD]

    user_api_key_id = Column("user_api_key_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    api_key = Column("api_key", String(255), nullable=False, unique=True)
    description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
    # expires is a unix timestamp; -1 means the token never expires
    expires = Column('expires', Float(53), nullable=False)
    role = Column('role', String(255), nullable=True)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)

    # scope columns
    repo_id = Column(
        'repo_id', Integer(), ForeignKey('repositories.repo_id'),
        nullable=True, unique=None, default=None)
    repo = relationship('Repository', lazy='joined', back_populates='scoped_tokens')

    repo_group_id = Column(
        'repo_group_id', Integer(), ForeignKey('groups.group_id'),
        nullable=True, unique=None, default=None)
    repo_group = relationship('RepoGroup', lazy='joined')

    user = relationship('User', lazy='joined', back_populates='user_auth_tokens')

    def __repr__(self):
        return f"<{self.cls_name}('{self.role}')>"

    def __json__(self):
        data = {
            'auth_token': self.api_key,
            'role': self.role,
            'scope': self.scope_humanized,
            'expired': self.expired
        }
        return data

    def get_api_data(self, include_secrets=False):
        """API representation; token value is obfuscated unless requested."""
        data = self.__json__()
        if include_secrets:
            return data
        else:
            data['auth_token'] = self.token_obfuscated
            return data

    @hybrid_property
    def description_safe(self):
        # HTML-escaped description for safe template rendering
        return description_escaper(self.description)

    @property
    def expired(self):
        """True when this token's expiry timestamp has passed (-1 = never)."""
        if self.expires == -1:
            return False
        return time.time() > self.expires

    @classmethod
    def _get_role_name(cls, role):
        # short human-readable label; unknown roles fall through unchanged
        return {
            cls.ROLE_ALL: _('all'),
            cls.ROLE_HTTP: _('http/web interface'),
            cls.ROLE_VCS: _('vcs (git/hg/svn protocol)'),
            cls.ROLE_API: _('api calls'),
            cls.ROLE_FEED: _('feed access'),
            cls.ROLE_ARTIFACT_DOWNLOAD: _('artifacts downloads'),
        }.get(role, role)

    @classmethod
    def _get_role_description(cls, role):
        # longer help text shown in the UI; unknown roles fall through unchanged
        return {
            cls.ROLE_ALL: _('Token for all actions.'),
            cls.ROLE_HTTP: _('Token to access RhodeCode pages via web interface without '
                             'login using `api_access_controllers_whitelist` functionality.'),
            cls.ROLE_VCS: _('Token to interact over git/hg/svn protocols. '
                            'Requires auth_token authentication plugin to be active. <br/>'
                            'Such Token should be used then instead of a password to '
                            'interact with a repository, and additionally can be '
                            'limited to single repository using repo scope.'),
            cls.ROLE_API: _('Token limited to api calls.'),
            cls.ROLE_FEED: _('Token to read RSS/ATOM feed.'),
            cls.ROLE_ARTIFACT_DOWNLOAD: _('Token for artifacts downloads.'),
        }.get(role, role)

    @property
    def role_humanized(self):
        return self._get_role_name(self.role)

    def _get_scope(self):
        # repo scope wins over repo-group scope; no scope means global
        if self.repo:
            return 'Repository: {}'.format(self.repo.repo_name)
        if self.repo_group:
            return 'RepositoryGroup: {} (recursive)'.format(self.repo_group.group_name)
        return 'Global'

    @property
    def scope_humanized(self):
        return self._get_scope()

    @property
    def token_obfuscated(self):
        # first 4 characters followed by a mask; None when no token is set
        if self.api_key:
            return self.api_key[:4] + "****"
r1 | ||||
class UserEmailMap(Base, BaseModel):
    """Additional (alternative) email addresses mapped to a user."""
    __tablename__ = 'user_email_map'
    __table_args__ = (
        Index('uem_email_idx', 'email'),
        Index('uem_user_id_idx', 'user_id'),
        UniqueConstraint('email'),
        base_table_args
    )

    email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    _email = Column("email", String(255), nullable=True, unique=False, default=None)
    user = relationship('User', lazy='joined', back_populates='user_emails')

    @validates('_email')
    def validate_email(self, key, email):
        # check if this email is not main one
        main_email = Session().query(User).filter(User.email == email).scalar()
        if main_email is not None:
            raise AttributeError('email %s is present is user table' % email)
        return email

    @hybrid_property
    def email(self):
        return self._email

    @email.setter
    def email(self, val):
        # emails are stored lowercased for case-insensitive matching
        self._email = val.lower() if val else None
class UserIpMap(Base, BaseModel):
    """IP address (or network) restrictions attached to a user."""
    __tablename__ = 'user_ip_map'
    __table_args__ = (
        UniqueConstraint('user_id', 'ip_addr'),
        base_table_args
    )

    ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    ip_addr = Column("ip_addr", String(255), nullable=True, unique=False, default=None)
    active = Column("active", Boolean(), nullable=True, unique=None, default=True)
    description = Column("description", String(10000), nullable=True, unique=None, default=None)
    user = relationship('User', lazy='joined', back_populates='user_ip_map')

    @hybrid_property
    def description_safe(self):
        # HTML-escaped description for safe template rendering
        return description_escaper(self.description)

    @classmethod
    def _get_ip_range(cls, ip_addr):
        """Return [network_address, broadcast_address] of the given IP/CIDR."""
        net = ipaddress.ip_network(safe_str(ip_addr), strict=False)
        return [str(net.network_address), str(net.broadcast_address)]

    def __json__(self):
        return {
            'ip_addr': self.ip_addr,
            'ip_range': self._get_ip_range(self.ip_addr),
        }

    def __repr__(self):
        return f"<{self.cls_name}('user_id={self.user_id} => ip={self.ip_addr}')>"
r1 | ||||
r1480 | ||||
class UserSshKeys(Base, BaseModel):
    """SSH public keys registered for a user."""
    __tablename__ = 'user_ssh_keys'
    __table_args__ = (
        Index('usk_ssh_key_fingerprint_idx', 'ssh_key_fingerprint'),

        UniqueConstraint('ssh_key_fingerprint'),

        base_table_args
    )

    ssh_key_id = Column('ssh_key_id', Integer(), nullable=False, unique=True, default=None, primary_key=True)
    ssh_key_data = Column('ssh_key_data', String(10240), nullable=False, unique=None, default=None)
    ssh_key_fingerprint = Column('ssh_key_fingerprint', String(255), nullable=False, unique=None, default=None)

    description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))

    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True, default=None)
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)

    user = relationship('User', lazy='joined', back_populates='user_ssh_keys')

    def __json__(self):
        data = {
            'ssh_fingerprint': self.ssh_key_fingerprint,
            'description': self.description,
            'created_on': self.created_on
        }
        return data

    def get_api_data(self):
        data = self.__json__()
        return data
class UserLog(Base, BaseModel):
    """Audit-log entry of a user action, optionally tied to a repository."""
    __tablename__ = 'user_logs'
    __table_args__ = (
        base_table_args,
    )

    # audit-log entry format versions
    VERSION_1 = 'v1'
    VERSION_2 = 'v2'
    VERSIONS = [VERSION_1, VERSION_2]

    user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    # user/repo FKs use SET NULL so log entries survive deletions
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id',ondelete='SET NULL'), nullable=True, unique=None, default=None)
    username = Column("username", String(255), nullable=True, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id', ondelete='SET NULL'), nullable=True, unique=None, default=None)
    repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None)
    user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None)
    action = Column("action", Text().with_variant(Text(1200000), 'mysql'), nullable=True, unique=None, default=None)
    action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)
    version = Column("version", String(255), nullable=True, default=VERSION_1)
    user_data = Column('user_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
    action_data = Column('action_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))

    user = relationship('User', cascade='', back_populates='user_log')
    repository = relationship('Repository', cascade='', back_populates='logs')

    def __repr__(self):
        return f"<{self.cls_name}('id:{self.repository_name}:{self.action}')>"

    def __json__(self):
        return {
            'user_id': self.user_id,
            'username': self.username,
            'repository_id': self.repository_id,
            'repository_name': self.repository_name,
            'user_ip': self.user_ip,
            'action_date': self.action_date,
            'action': self.action,
        }

    @hybrid_property
    def entry_id(self):
        # alias of the primary key, used for generic entry handling
        return self.user_log_id

    @property
    def action_as_day(self):
        """Date (day resolution) on which the action happened."""
        return datetime.date(*self.action_date.timetuple()[:3])
class UserGroup(Base, BaseModel):
    """Named group of users, carrying its own permission relationships."""
    __tablename__ = 'users_groups'
    __table_args__ = (
        base_table_args,
    )

    users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None)
    user_group_description = Column("user_group_description", String(10000), nullable=True, unique=None, default=None)
    users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
    inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
    # owner of the group
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    _group_data = Column("group_data", LargeBinary(), nullable=True)  # JSON data

    members = relationship('UserGroupMember', cascade="all, delete-orphan", lazy="joined", back_populates='users_group')
    users_group_to_perm = relationship('UserGroupToPerm', cascade='all', back_populates='users_group')
    users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all', back_populates='users_group')
    users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all', back_populates='users_group')
    user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all', back_populates='user_group')
    user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all', back_populates='target_user_group')
    user_group_review_rules = relationship('RepoReviewRuleUserGroup', cascade='all', back_populates='users_group')
    user = relationship('User', primaryjoin="User.user_id==UserGroup.user_id", back_populates='user_groups')
r1980 | ||||
@classmethod | ||||
def _load_group_data(cls, column): | ||||
if not column: | ||||
return {} | ||||
try: | ||||
return json.loads(column) or {} | ||||
except TypeError: | ||||
return {} | ||||
r1 | ||||
    @hybrid_property
    def description_safe(self):
        # HTML-escaped description for safe template rendering
        return description_escaper(self.user_group_description)

    @hybrid_property
    def group_data(self):
        """Deserialized JSON blob of extra group data (always a dict)."""
        return self._load_group_data(self._group_data)

    @group_data.expression
    def group_data(self, **kwargs):
        # SQL-side expression: query against the raw column
        return self._group_data

    @group_data.setter
    def group_data(self, val):
        try:
            self._group_data = json.dumps(val)
        except Exception:
            # serialization problems are logged, not raised
            log.error(traceback.format_exc())
r2660 | @classmethod | |||
def _load_sync(cls, group_data): | ||||
if group_data: | ||||
return group_data.get('extern_type') | ||||
@property | ||||
def sync(self): | ||||
return self._load_sync(self.group_data) | ||||
r5071 | def __repr__(self): | |||
return f"<{self.cls_name}('id:{self.users_group_id}:{self.users_group_name}')>" | ||||
r1 | ||||
    @classmethod
    def get_by_group_name(cls, group_name, cache=False,
                          case_insensitive=False):
        """
        Fetch a user group by name.

        :param group_name: name to look up
        :param cache: use the short SQL cache region keyed by the name hash
        :param case_insensitive: compare names lowercased on both sides
        :return: UserGroup instance or None
        """
        if case_insensitive:
            q = cls.query().filter(func.lower(cls.users_group_name) ==
                                   func.lower(group_name))
        else:
            q = cls.query().filter(cls.users_group_name == group_name)
        if cache:
            name_key = _hash_key(group_name)
            q = q.options(
                FromCache("sql_cache_short", f"get_group_{name_key}"))
        return q.scalar()

    @classmethod
    def get(cls, user_group_id, cache=False):
        """Fetch a user group by primary key, optionally via the SQL cache."""
        if not user_group_id:
            return

        user_group = cls.query()
        if cache:
            user_group = user_group.options(
                FromCache("sql_cache_short", f"get_users_group_{user_group_id}"))
        return user_group.get(user_group_id)
r3411 | def permissions(self, with_admins=True, with_owner=True, | |||
expand_from_user_groups=False): | ||||
r2976 | """ | |||
Permissions for user groups | ||||
""" | ||||
_admin_perm = 'usergroup.admin' | ||||
owner_row = [] | ||||
if with_owner: | ||||
usr = AttributeDict(self.user.get_dict()) | ||||
usr.owner_row = True | ||||
usr.permission = _admin_perm | ||||
owner_row.append(usr) | ||||
super_admin_ids = [] | ||||
super_admin_rows = [] | ||||
if with_admins: | ||||
for usr in User.get_all_super_admins(): | ||||
super_admin_ids.append(usr.user_id) | ||||
# if this admin is also owner, don't double the record | ||||
if usr.user_id == owner_row[0].user_id: | ||||
owner_row[0].admin_row = True | ||||
else: | ||||
usr = AttributeDict(usr.get_dict()) | ||||
usr.admin_row = True | ||||
usr.permission = _admin_perm | ||||
super_admin_rows.append(usr) | ||||
r1 | q = UserUserGroupToPerm.query().filter(UserUserGroupToPerm.user_group == self) | |||
q = q.options(joinedload(UserUserGroupToPerm.user_group), | ||||
joinedload(UserUserGroupToPerm.user), | ||||
joinedload(UserUserGroupToPerm.permission),) | ||||
# get owners and admins and permissions. We do a trick of re-writing | ||||
# objects from sqlalchemy to named-tuples due to sqlalchemy session | ||||
# has a global reference and changing one object propagates to all | ||||
# others. This means if admin is also an owner admin_row that change | ||||
# would propagate to both objects | ||||
perm_rows = [] | ||||
for _usr in q.all(): | ||||
usr = AttributeDict(_usr.user.get_dict()) | ||||
r2976 | # if this user is also owner/admin, mark as duplicate record | |||
if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids: | ||||
usr.duplicate_perm = True | ||||
r1 | usr.permission = _usr.permission.permission_name | |||
perm_rows.append(usr) | ||||
# filter the perm rows by 'default' first and then sort them by | ||||
# admin,write,read,none permissions sorted again alphabetically in | ||||
# each group | ||||
r2060 | perm_rows = sorted(perm_rows, key=display_user_sort) | |||
r1 | ||||
r3411 | user_groups_rows = [] | |||
if expand_from_user_groups: | ||||
for ug in self.permission_user_groups(with_members=True): | ||||
for user_data in ug.members: | ||||
user_groups_rows.append(user_data) | ||||
return super_admin_rows + owner_row + perm_rows + user_groups_rows | ||||
def permission_user_groups(self, with_members=False): | ||||
q = UserGroupUserGroupToPerm.query()\ | ||||
.filter(UserGroupUserGroupToPerm.target_user_group == self) | ||||
r1 | q = q.options(joinedload(UserGroupUserGroupToPerm.user_group), | |||
joinedload(UserGroupUserGroupToPerm.target_user_group), | ||||
joinedload(UserGroupUserGroupToPerm.permission),) | ||||
perm_rows = [] | ||||
for _user_group in q.all(): | ||||
r3411 | entry = AttributeDict(_user_group.user_group.get_dict()) | |||
entry.permission = _user_group.permission.permission_name | ||||
if with_members: | ||||
entry.members = [x.user.get_dict() | ||||
r3592 | for x in _user_group.user_group.members] | |||
r3411 | perm_rows.append(entry) | |||
r1 | ||||
r2060 | perm_rows = sorted(perm_rows, key=display_user_group_sort) | |||
r1 | return perm_rows | |||
def _get_default_perms(self, user_group, suffix=''): | ||||
from rhodecode.model.permission import PermissionModel | ||||
return PermissionModel().get_default_perms(user_group.users_group_to_perm, suffix) | ||||
def get_default_perms(self, suffix=''): | ||||
return self._get_default_perms(self, suffix) | ||||
def get_api_data(self, with_group_members=True, include_secrets=False): | ||||
""" | ||||
:param include_secrets: See :meth:`User.get_api_data`, this parameter is | ||||
basically forwarded. | ||||
""" | ||||
user_group = self | ||||
data = { | ||||
'users_group_id': user_group.users_group_id, | ||||
'group_name': user_group.users_group_name, | ||||
'group_description': user_group.user_group_description, | ||||
'active': user_group.users_group_active, | ||||
'owner': user_group.user.username, | ||||
r2660 | 'sync': user_group.sync, | |||
Bartłomiej Wołyńczyk
|
r1556 | 'owner_email': user_group.user.email, | ||
r1 | } | |||
Bartłomiej Wołyńczyk
|
r1556 | |||
r1 | if with_group_members: | |||
users = [] | ||||
for user in user_group.members: | ||||
user = user.user | ||||
users.append(user.get_api_data(include_secrets=include_secrets)) | ||||
data['users'] = users | ||||
return data | ||||
class UserGroupMember(Base, BaseModel):
    """Join-table row binding one user to one user group (m2m association)."""
    __tablename__ = 'users_groups_members'
    __table_args__ = (
        base_table_args,
    )

    users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)

    user = relationship('User', lazy='joined', back_populates='group_member')
    users_group = relationship('UserGroup', back_populates='members')

    def __init__(self, gr_id='', u_id=''):
        # membership rows just bind the two foreign keys together
        self.users_group_id = gr_id
        self.user_id = u_id
class RepositoryField(Base, BaseModel):
    """
    Free-form extra metadata field attached to a repository.

    Fields are exposed in forms using the ``PREFIX``-prefixed key so they
    do not clash with built-in form fields.
    """
    __tablename__ = 'repositories_fields'
    __table_args__ = (
        UniqueConstraint('repository_id', 'field_key'),  # no-multi field
        base_table_args,
    )

    PREFIX = 'ex_'  # prefix used in form to not conflict with already existing fields

    repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
    field_key = Column("field_key", String(250))
    field_label = Column("field_label", String(1024), nullable=False)
    field_value = Column("field_value", String(10000), nullable=False)
    field_desc = Column("field_desc", String(1024), nullable=False)
    field_type = Column("field_type", String(255), nullable=False, unique=None)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)

    repository = relationship('Repository', back_populates='extra_fields')

    @property
    def field_key_prefixed(self):
        # use PREFIX instead of a hard-coded 'ex_' literal so this stays
        # consistent with un_prefix_key() if the prefix ever changes
        return f'{self.PREFIX}{self.field_key}'

    @classmethod
    def un_prefix_key(cls, key):
        """Strip the form PREFIX from `key`; return it unchanged if absent."""
        if key.startswith(cls.PREFIX):
            return key[len(cls.PREFIX):]
        return key

    @classmethod
    def get_by_key_name(cls, key, repo):
        """Return the field row for (`repo`, `key`), or None if not defined."""
        row = cls.query()\
            .filter(cls.repository == repo)\
            .filter(cls.field_key == key).scalar()
        return row
class Repository(Base, BaseModel):
    # NOTE: class continues below with methods; this section declares the
    # table, constants, columns and ORM relationships only.
    __tablename__ = 'repositories'
    __table_args__ = (
        Index('r_repo_name_idx', 'repo_name', mysql_length=255),
        base_table_args,
    )

    # clone URL templates rendered by clone_url()
    DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}'
    DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}'
    DEFAULT_CLONE_URI_SSH = 'ssh://{sys_user}@{hostname}/{repo}'

    # lifecycle states stored in `repo_state`
    STATE_CREATED = 'repo_state_created'
    STATE_PENDING = 'repo_state_pending'
    STATE_ERROR = 'repo_state_error'

    # lock origin markers, stored as the lock reason
    LOCK_AUTOMATIC = 'lock_auto'
    LOCK_API = 'lock_api'
    LOCK_WEB = 'lock_web'
    LOCK_PULL = 'lock_pull'

    # separator used when storing nested (grouped) repo names
    NAME_SEP = URL_SEP

    repo_id = Column(
        "repo_id", Integer(), nullable=False, unique=True, default=None,
        primary_key=True)
    # raw name storage; exposed through the `repo_name` hybrid property below
    _repo_name = Column(
        "repo_name", Text(), nullable=False, default=None)
    repo_name_hash = Column(
        "repo_name_hash", String(255), nullable=False, unique=True)
    repo_state = Column("repo_state", String(255), nullable=True)

    clone_uri = Column(
        "clone_uri", EncryptedTextValue(), nullable=True, unique=False,
        default=None)
    push_uri = Column(
        "push_uri", EncryptedTextValue(), nullable=True, unique=False,
        default=None)
    repo_type = Column(
        "repo_type", String(255), nullable=False, unique=False, default=None)
    user_id = Column(
        "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
        unique=False, default=None)
    private = Column(
        "private", Boolean(), nullable=True, unique=None, default=None)
    archived = Column(
        "archived", Boolean(), nullable=True, unique=None, default=None)
    enable_statistics = Column(
        "statistics", Boolean(), nullable=True, unique=None, default=True)
    enable_downloads = Column(
        "downloads", Boolean(), nullable=True, unique=None, default=True)
    description = Column(
        "description", String(10000), nullable=True, unique=None, default=None)
    created_on = Column(
        'created_on', DateTime(timezone=False), nullable=True, unique=None,
        default=datetime.datetime.now)
    updated_on = Column(
        'updated_on', DateTime(timezone=False), nullable=True, unique=None,
        default=datetime.datetime.now)
    # stored as '<rev_type>:<rev>'; exposed through the `landing_rev` property
    _landing_revision = Column(
        "landing_revision", String(255), nullable=False, unique=False,
        default=None)
    enable_locking = Column(
        "enable_locking", Boolean(), nullable=False, unique=None,
        default=False)
    # stored as '<user_id>:<timestamp>:<reason>'; exposed through `locked`
    _locked = Column(
        "locked", String(255), nullable=True, unique=False, default=None)
    _changeset_cache = Column(
        "changeset_cache", LargeBinary(), nullable=True)  # JSON data

    fork_id = Column(
        "fork_id", Integer(), ForeignKey('repositories.repo_id'),
        nullable=True, unique=False, default=None)
    group_id = Column(
        "group_id", Integer(), ForeignKey('groups.group_id'), nullable=True,
        unique=False, default=None)

    user = relationship('User', lazy='joined', back_populates='repositories')
    fork = relationship('Repository', remote_side=repo_id, lazy='joined')
    group = relationship('RepoGroup', lazy='joined')
    repo_to_perm = relationship('UserRepoToPerm', cascade='all', order_by='UserRepoToPerm.repo_to_perm_id')
    users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all', back_populates='repository')
    stats = relationship('Statistics', cascade='all', uselist=False)

    followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id', cascade='all', back_populates='follows_repository')
    extra_fields = relationship('RepositoryField', cascade="all, delete-orphan", back_populates='repository')
    logs = relationship('UserLog', back_populates='repository')
    comments = relationship('ChangesetComment', cascade="all, delete-orphan", back_populates='repo')

    pull_requests_source = relationship(
        'PullRequest',
        primaryjoin='PullRequest.source_repo_id==Repository.repo_id',
        cascade="all, delete-orphan",
        overlaps="source_repo"
    )
    pull_requests_target = relationship(
        'PullRequest',
        primaryjoin='PullRequest.target_repo_id==Repository.repo_id',
        cascade="all, delete-orphan",
        overlaps="target_repo"
    )

    ui = relationship('RepoRhodeCodeUi', cascade="all")
    settings = relationship('RepoRhodeCodeSetting', cascade="all")
    integrations = relationship('Integration', cascade="all, delete-orphan", back_populates='repo')

    scoped_tokens = relationship('UserApiKeys', cascade="all", back_populates='repo')

    # no cascade, set NULL
    artifacts = relationship('FileStore', primaryjoin='FileStore.scope_repo_id==Repository.repo_id', viewonly=True)

    review_rules = relationship('RepoReviewRule')
    user_branch_perms = relationship('UserToRepoBranchPermission')
    user_group_branch_perms = relationship('UserGroupToRepoBranchPermission')
def __repr__(self): | ||||
return "<%s('%s:%s')>" % (self.cls_name, self.repo_id, self.repo_name) | ||||
r1 | ||||
@hybrid_property | ||||
r1830 | def description_safe(self): | |||
r5463 | return description_escaper(self.description) | |||
r1830 | ||||
@hybrid_property | ||||
r1 | def landing_rev(self): | |||
r4370 | # always should return [rev_type, rev], e.g ['branch', 'master'] | |||
r1 | if self._landing_revision: | |||
_rev_info = self._landing_revision.split(':') | ||||
if len(_rev_info) < 2: | ||||
_rev_info.insert(0, 'rev') | ||||
return [_rev_info[0], _rev_info[1]] | ||||
return [None, None] | ||||
r4370 | @property | |||
def landing_ref_type(self): | ||||
return self.landing_rev[0] | ||||
@property | ||||
def landing_ref_name(self): | ||||
return self.landing_rev[1] | ||||
r1 | @landing_rev.setter | |||
def landing_rev(self, val): | ||||
if ':' not in val: | ||||
raise ValueError('value must be delimited with `:` and consist ' | ||||
'of <rev_type>:<rev>, got %s instead' % val) | ||||
self._landing_revision = val | ||||
@hybrid_property | ||||
def locked(self): | ||||
if self._locked: | ||||
user_id, timelocked, reason = self._locked.split(':') | ||||
lock_values = int(user_id), timelocked, reason | ||||
else: | ||||
lock_values = [None, None, None] | ||||
return lock_values | ||||
@locked.setter | ||||
def locked(self, val): | ||||
if val and isinstance(val, (list, tuple)): | ||||
self._locked = ':'.join(map(str, val)) | ||||
else: | ||||
self._locked = None | ||||
r4146 | @classmethod | |||
def _load_changeset_cache(cls, repo_id, changeset_cache_raw): | ||||
r1 | from rhodecode.lib.vcs.backends.base import EmptyCommit | |||
dummy = EmptyCommit().__json__() | ||||
r4146 | if not changeset_cache_raw: | |||
dummy['source_repo_id'] = repo_id | ||||
r3689 | return json.loads(json.dumps(dummy)) | |||
r1 | try: | |||
r4146 | return json.loads(changeset_cache_raw) | |||
r1 | except TypeError: | |||
return dummy | ||||
except Exception: | ||||
log.error(traceback.format_exc()) | ||||
return dummy | ||||
r4146 | @hybrid_property | |||
def changeset_cache(self): | ||||
return self._load_changeset_cache(self.repo_id, self._changeset_cache) | ||||
r1 | @changeset_cache.setter | |||
def changeset_cache(self, val): | ||||
try: | ||||
self._changeset_cache = json.dumps(val) | ||||
except Exception: | ||||
log.error(traceback.format_exc()) | ||||
@hybrid_property | ||||
def repo_name(self): | ||||
return self._repo_name | ||||
@repo_name.setter | ||||
def repo_name(self, value): | ||||
self._repo_name = value | ||||
r5071 | self.repo_name_hash = sha1(safe_bytes(value)) | |||
r1 | ||||
@classmethod | ||||
def normalize_repo_name(cls, repo_name): | ||||
""" | ||||
Normalizes os specific repo_name to the format internally stored inside | ||||
r255 | database using URL_SEP | |||
r1 | ||||
:param cls: | ||||
:param repo_name: | ||||
""" | ||||
return cls.NAME_SEP.join(repo_name.split(os.sep)) | ||||
@classmethod | ||||
r255 | def get_by_repo_name(cls, repo_name, cache=False, identity_cache=False): | |||
r247 | session = Session() | |||
q = session.query(cls).filter(cls.repo_name == repo_name) | ||||
r255 | ||||
if cache: | ||||
if identity_cache: | ||||
val = cls.identity_cache(session, 'repo_name', repo_name) | ||||
if val: | ||||
return val | ||||
else: | ||||
r5365 | cache_key = f"get_repo_by_name_{_hash_key(repo_name)}" | |||
r255 | q = q.options( | |||
r1749 | FromCache("sql_cache_short", cache_key)) | |||
r255 | ||||
r1 | return q.scalar() | |||
@classmethod | ||||
r2432 | def get_by_id_or_repo_name(cls, repoid): | |||
r4936 | if isinstance(repoid, int): | |||
r2432 | try: | |||
repo = cls.get(repoid) | ||||
except ValueError: | ||||
repo = None | ||||
else: | ||||
repo = cls.get_by_repo_name(repoid) | ||||
return repo | ||||
@classmethod | ||||
r1 | def get_by_full_path(cls, repo_full_path): | |||
repo_name = repo_full_path.split(cls.base_path(), 1)[-1] | ||||
repo_name = cls.normalize_repo_name(repo_name) | ||||
return cls.get_by_repo_name(repo_name.strip(URL_SEP)) | ||||
@classmethod | ||||
def get_repo_forks(cls, repo_id): | ||||
return cls.query().filter(Repository.fork_id == repo_id) | ||||
@classmethod | ||||
def base_path(cls): | ||||
""" | ||||
Returns base path when all repos are stored | ||||
:param cls: | ||||
""" | ||||
r5356 | from rhodecode.lib.utils import get_rhodecode_repo_store_path | |||
return get_rhodecode_repo_store_path() | ||||
r1 | ||||
@classmethod | ||||
def get_all_repos(cls, user_id=Optional(None), group_id=Optional(None), | ||||
r3090 | case_insensitive=True, archived=False): | |||
r1 | q = Repository.query() | |||
r3090 | if not archived: | |||
q = q.filter(Repository.archived.isnot(true())) | ||||
r1 | if not isinstance(user_id, Optional): | |||
q = q.filter(Repository.user_id == user_id) | ||||
if not isinstance(group_id, Optional): | ||||
q = q.filter(Repository.group_id == group_id) | ||||
if case_insensitive: | ||||
q = q.order_by(func.lower(Repository.repo_name)) | ||||
else: | ||||
q = q.order_by(Repository.repo_name) | ||||
r3090 | ||||
r1 | return q.all() | |||
@property | ||||
r3810 | def repo_uid(self): | |||
return '_{}'.format(self.repo_id) | ||||
@property | ||||
r1 | def forks(self): | |||
""" | ||||
Return forks of this repo | ||||
""" | ||||
return Repository.get_repo_forks(self.repo_id) | ||||
@property | ||||
def parent(self): | ||||
""" | ||||
Returns fork parent | ||||
""" | ||||
return self.fork | ||||
@property | ||||
def just_name(self): | ||||
return self.repo_name.split(self.NAME_SEP)[-1] | ||||
@property | ||||
def groups_with_parents(self): | ||||
groups = [] | ||||
if self.group is None: | ||||
return groups | ||||
cur_gr = self.group | ||||
groups.insert(0, cur_gr) | ||||
while 1: | ||||
gr = getattr(cur_gr, 'parent_group', None) | ||||
cur_gr = cur_gr.parent_group | ||||
if gr is None: | ||||
break | ||||
groups.insert(0, gr) | ||||
return groups | ||||
@property | ||||
def groups_and_repo(self): | ||||
return self.groups_with_parents, self | ||||
r5356 | @property | |||
r1 | def repo_path(self): | |||
""" | ||||
Returns base full path for that repository means where it actually | ||||
exists on a filesystem | ||||
""" | ||||
r5356 | return self.base_path() | |||
r1 | ||||
@property | ||||
def repo_full_path(self): | ||||
p = [self.repo_path] | ||||
# we need to split the name by / since this is how we store the | ||||
# names in the database, but that eventually needs to be converted | ||||
# into a valid system path | ||||
p += self.repo_name.split(self.NAME_SEP) | ||||
r5010 | return os.path.join(*map(safe_str, p)) | |||
r1 | ||||
@property | ||||
def cache_keys(self): | ||||
""" | ||||
Returns associated cache keys for that repo | ||||
""" | ||||
r5288 | repo_namespace_key = CacheKey.REPO_INVALIDATION_NAMESPACE.format(repo_id=self.repo_id) | |||
r1 | return CacheKey.query()\ | |||
r5288 | .filter(CacheKey.cache_key == repo_namespace_key)\ | |||
r1 | .order_by(CacheKey.cache_key)\ | |||
.all() | ||||
r2687 | @property | |||
r2688 | def cached_diffs_relative_dir(self): | |||
""" | ||||
Return a relative to the repository store path of cached diffs | ||||
used for safe display for users, who shouldn't know the absolute store | ||||
path | ||||
""" | ||||
return os.path.join( | ||||
os.path.dirname(self.repo_name), | ||||
self.cached_diffs_dir.split(os.path.sep)[-1]) | ||||
@property | ||||
r2687 | def cached_diffs_dir(self): | |||
path = self.repo_full_path | ||||
return os.path.join( | ||||
os.path.dirname(path), | ||||
r5071 | f'.__shadow_diff_cache_repo_{self.repo_id}') | |||
r2687 | ||||
def cached_diffs(self): | ||||
diff_cache_dir = self.cached_diffs_dir | ||||
if os.path.isdir(diff_cache_dir): | ||||
return os.listdir(diff_cache_dir) | ||||
return [] | ||||
r2810 | def shadow_repos(self): | |||
r5071 | shadow_repos_pattern = f'.__shadow_repo_{self.repo_id}' | |||
r2810 | return [ | |||
x for x in os.listdir(os.path.dirname(self.repo_full_path)) | ||||
r5071 | if x.startswith(shadow_repos_pattern) | |||
] | ||||
r2810 | ||||
r1 | def get_new_name(self, repo_name): | |||
""" | ||||
returns new full repository name based on assigned group and new new | ||||
r5071 | :param repo_name: | |||
r1 | """ | |||
path_prefix = self.group.full_path_splitted if self.group else [] | ||||
return self.NAME_SEP.join(path_prefix + [repo_name]) | ||||
@property | ||||
def _config(self): | ||||
""" | ||||
Returns db based config object. | ||||
""" | ||||
from rhodecode.lib.utils import make_db_config | ||||
return make_db_config(clear_session=False, repo=self) | ||||
r3411 | def permissions(self, with_admins=True, with_owner=True, | |||
expand_from_user_groups=False): | ||||
r2976 | """ | |||
Permissions for repositories | ||||
""" | ||||
_admin_perm = 'repository.admin' | ||||
owner_row = [] | ||||
if with_owner: | ||||
usr = AttributeDict(self.user.get_dict()) | ||||
usr.owner_row = True | ||||
usr.permission = _admin_perm | ||||
usr.permission_id = None | ||||
owner_row.append(usr) | ||||
super_admin_ids = [] | ||||
super_admin_rows = [] | ||||
if with_admins: | ||||
for usr in User.get_all_super_admins(): | ||||
super_admin_ids.append(usr.user_id) | ||||
# if this admin is also owner, don't double the record | ||||
if usr.user_id == owner_row[0].user_id: | ||||
owner_row[0].admin_row = True | ||||
else: | ||||
usr = AttributeDict(usr.get_dict()) | ||||
usr.admin_row = True | ||||
usr.permission = _admin_perm | ||||
usr.permission_id = None | ||||
super_admin_rows.append(usr) | ||||
r1 | q = UserRepoToPerm.query().filter(UserRepoToPerm.repository == self) | |||
q = q.options(joinedload(UserRepoToPerm.repository), | ||||
joinedload(UserRepoToPerm.user), | ||||
joinedload(UserRepoToPerm.permission),) | ||||
# get owners and admins and permissions. We do a trick of re-writing | ||||
# objects from sqlalchemy to named-tuples due to sqlalchemy session | ||||
# has a global reference and changing one object propagates to all | ||||
# others. This means if admin is also an owner admin_row that change | ||||
# would propagate to both objects | ||||
perm_rows = [] | ||||
for _usr in q.all(): | ||||
usr = AttributeDict(_usr.user.get_dict()) | ||||
r2976 | # if this user is also owner/admin, mark as duplicate record | |||
if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids: | ||||
usr.duplicate_perm = True | ||||
r2977 | # also check if this permission is maybe used by branch_permissions | |||
if _usr.branch_perm_entry: | ||||
usr.branch_rules = [x.branch_rule_id for x in _usr.branch_perm_entry] | ||||
r1 | usr.permission = _usr.permission.permission_name | |||
r2975 | usr.permission_id = _usr.repo_to_perm_id | |||
r1 | perm_rows.append(usr) | |||
# filter the perm rows by 'default' first and then sort them by | ||||
# admin,write,read,none permissions sorted again alphabetically in | ||||
# each group | ||||
r2060 | perm_rows = sorted(perm_rows, key=display_user_sort) | |||
r1 | ||||
r3411 | user_groups_rows = [] | |||
if expand_from_user_groups: | ||||
for ug in self.permission_user_groups(with_members=True): | ||||
for user_data in ug.members: | ||||
user_groups_rows.append(user_data) | ||||
return super_admin_rows + owner_row + perm_rows + user_groups_rows | ||||
def permission_user_groups(self, with_members=True): | ||||
q = UserGroupRepoToPerm.query()\ | ||||
.filter(UserGroupRepoToPerm.repository == self) | ||||
r1 | q = q.options(joinedload(UserGroupRepoToPerm.repository), | |||
joinedload(UserGroupRepoToPerm.users_group), | ||||
joinedload(UserGroupRepoToPerm.permission),) | ||||
perm_rows = [] | ||||
for _user_group in q.all(): | ||||
r3411 | entry = AttributeDict(_user_group.users_group.get_dict()) | |||
entry.permission = _user_group.permission.permission_name | ||||
if with_members: | ||||
entry.members = [x.user.get_dict() | ||||
for x in _user_group.users_group.members] | ||||
perm_rows.append(entry) | ||||
r1 | ||||
r2060 | perm_rows = sorted(perm_rows, key=display_user_group_sort) | |||
r1 | return perm_rows | |||
def get_api_data(self, include_secrets=False): | ||||
""" | ||||
Common function for generating repo api data | ||||
:param include_secrets: See :meth:`User.get_api_data`. | ||||
""" | ||||
# TODO: mikhail: Here there is an anti-pattern, we probably need to | ||||
# move this methods on models level. | ||||
from rhodecode.model.settings import SettingsModel | ||||
r1785 | from rhodecode.model.repo import RepoModel | |||
r1 | ||||
repo = self | ||||
_user_id, _time, _reason = self.locked | ||||
data = { | ||||
'repo_id': repo.repo_id, | ||||
'repo_name': repo.repo_name, | ||||
'repo_type': repo.repo_type, | ||||
r68 | 'clone_uri': repo.clone_uri or '', | |||
r2560 | 'push_uri': repo.push_uri or '', | |||
r1785 | 'url': RepoModel().get_url(self), | |||
r1 | 'private': repo.private, | |||
'created_on': repo.created_on, | ||||
r1830 | 'description': repo.description_safe, | |||
r1 | 'landing_rev': repo.landing_rev, | |||
'owner': repo.user.username, | ||||
'fork_of': repo.fork.repo_name if repo.fork else None, | ||||
r1803 | 'fork_of_id': repo.fork.repo_id if repo.fork else None, | |||
r1 | 'enable_statistics': repo.enable_statistics, | |||
'enable_locking': repo.enable_locking, | ||||
'enable_downloads': repo.enable_downloads, | ||||
'last_changeset': repo.changeset_cache, | ||||
'locked_by': User.get(_user_id).get_api_data( | ||||
include_secrets=include_secrets) if _user_id else None, | ||||
'locked_date': time_to_datetime(_time) if _time else None, | ||||
'lock_reason': _reason if _reason else None, | ||||
} | ||||
# TODO: mikhail: should be per-repo settings here | ||||
rc_config = SettingsModel().get_all_settings() | ||||
repository_fields = str2bool( | ||||
rc_config.get('rhodecode_repository_fields')) | ||||
if repository_fields: | ||||
for f in self.extra_fields: | ||||
data[f.field_key_prefixed] = f.field_value | ||||
return data | ||||
@classmethod | ||||
def lock(cls, repo, user_id, lock_time=None, lock_reason=None): | ||||
if not lock_time: | ||||
lock_time = time.time() | ||||
if not lock_reason: | ||||
lock_reason = cls.LOCK_AUTOMATIC | ||||
repo.locked = [user_id, lock_time, lock_reason] | ||||
Session().add(repo) | ||||
Session().commit() | ||||
@classmethod | ||||
def unlock(cls, repo): | ||||
repo.locked = None | ||||
Session().add(repo) | ||||
Session().commit() | ||||
@classmethod | ||||
def getlock(cls, repo): | ||||
return repo.locked | ||||
def get_locking_state(self, action, user_id, only_when_enabled=True): | ||||
""" | ||||
Checks locking on this repository, if locking is enabled and lock is | ||||
present returns a tuple of make_lock, locked, locked_by. | ||||
make_lock can have 3 states None (do nothing) True, make lock | ||||
False release lock, This value is later propagated to hooks, which | ||||
do the locking. Think about this as signals passed to hooks what to do. | ||||
""" | ||||
# TODO: johbo: This is part of the business logic and should be moved | ||||
# into the RepositoryModel. | ||||
if action not in ('push', 'pull'): | ||||
raise ValueError("Invalid action value: %s" % repr(action)) | ||||
# defines if locked error should be thrown to user | ||||
currently_locked = False | ||||
# defines if new lock should be made, tri-state | ||||
make_lock = None | ||||
repo = self | ||||
user = User.get(user_id) | ||||
lock_info = repo.locked | ||||
if repo and (repo.enable_locking or not only_when_enabled): | ||||
if action == 'push': | ||||
# check if it's already locked !, if it is compare users | ||||
locked_by_user_id = lock_info[0] | ||||
if user.user_id == locked_by_user_id: | ||||
log.debug( | ||||
'Got `push` action from user %s, now unlocking', user) | ||||
# unlock if we have push from user who locked | ||||
make_lock = False | ||||
else: | ||||
# we're not the same user who locked, ban with | ||||
# code defined in settings (default is 423 HTTP Locked) ! | ||||
log.debug('Repo %s is currently locked by %s', repo, user) | ||||
currently_locked = True | ||||
elif action == 'pull': | ||||
# [0] user [1] date | ||||
if lock_info[0] and lock_info[1]: | ||||
log.debug('Repo %s is currently locked by %s', repo, user) | ||||
currently_locked = True | ||||
else: | ||||
log.debug('Setting lock on repo %s by %s', repo, user) | ||||
make_lock = True | ||||
else: | ||||
log.debug('Repository %s do not have locking enabled', repo) | ||||
log.debug('FINAL locking values make_lock:%s,locked:%s,locked_by:%s', | ||||
make_lock, currently_locked, lock_info) | ||||
from rhodecode.lib.auth import HasRepoPermissionAny | ||||
perm_check = HasRepoPermissionAny('repository.write', 'repository.admin') | ||||
if make_lock and not perm_check(repo_name=repo.repo_name, user=user): | ||||
# if we don't have at least write permission we cannot make a lock | ||||
log.debug('lock state reset back to FALSE due to lack ' | ||||
'of at least read permission') | ||||
make_lock = False | ||||
return make_lock, currently_locked, lock_info | ||||
@property | ||||
r3705 | def last_commit_cache_update_diff(self): | |||
return time.time() - (safe_int(self.changeset_cache.get('updated_on')) or 0) | ||||
r4146 | @classmethod | |||
def _load_commit_change(cls, last_commit_cache): | ||||
r3689 | from rhodecode.lib.vcs.utils.helpers import parse_datetime | |||
empty_date = datetime.datetime.fromtimestamp(0) | ||||
r4146 | date_latest = last_commit_cache.get('date', empty_date) | |||
r3689 | try: | |||
return parse_datetime(date_latest) | ||||
except Exception: | ||||
return empty_date | ||||
@property | ||||
r4146 | def last_commit_change(self): | |||
return self._load_commit_change(self.changeset_cache) | ||||
@property | ||||
r1 | def last_db_change(self): | |||
return self.updated_on | ||||
@property | ||||
def clone_uri_hidden(self): | ||||
clone_uri = self.clone_uri | ||||
if clone_uri: | ||||
import urlobject | ||||
Bartłomiej Wołyńczyk
|
r1452 | url_obj = urlobject.URLObject(cleaned_uri(clone_uri)) | ||
r1 | if url_obj.password: | |||
clone_uri = url_obj.with_password('*****') | ||||
return clone_uri | ||||
r2560 | @property | |||
def push_uri_hidden(self): | ||||
push_uri = self.push_uri | ||||
if push_uri: | ||||
import urlobject | ||||
url_obj = urlobject.URLObject(cleaned_uri(push_uri)) | ||||
if url_obj.password: | ||||
push_uri = url_obj.with_password('*****') | ||||
return push_uri | ||||
r1 | def clone_url(self, **override): | |||
r1890 | from rhodecode.model.settings import SettingsModel | |||
r1 | ||||
uri_tmpl = None | ||||
if 'with_id' in override: | ||||
uri_tmpl = self.DEFAULT_CLONE_URI_ID | ||||
del override['with_id'] | ||||
if 'uri_tmpl' in override: | ||||
uri_tmpl = override['uri_tmpl'] | ||||
del override['uri_tmpl'] | ||||
r2497 | ssh = False | |||
if 'ssh' in override: | ||||
ssh = True | ||||
del override['ssh'] | ||||
r1 | # we didn't override our tmpl from **overrides | |||
r3855 | request = get_current_request() | |||
r1 | if not uri_tmpl: | |||
r3855 | if hasattr(request, 'call_context') and hasattr(request.call_context, 'rc_config'): | |||
rc_config = request.call_context.rc_config | ||||
else: | ||||
rc_config = SettingsModel().get_all_settings(cache=True) | ||||
r4133 | ||||
r2497 | if ssh: | |||
uri_tmpl = rc_config.get( | ||||
'rhodecode_clone_uri_ssh_tmpl') or self.DEFAULT_CLONE_URI_SSH | ||||
r4133 | ||||
r2497 | else: | |||
uri_tmpl = rc_config.get( | ||||
'rhodecode_clone_uri_tmpl') or self.DEFAULT_CLONE_URI | ||||
r1 | ||||
r1774 | return get_clone_url(request=request, | |||
uri_tmpl=uri_tmpl, | ||||
r1 | repo_name=self.repo_name, | |||
r4133 | repo_id=self.repo_id, | |||
repo_type=self.repo_type, | ||||
**override) | ||||
r1 | ||||
def set_state(self, state): | ||||
self.repo_state = state | ||||
Session().add(self) | ||||
#========================================================================== | ||||
# SCM PROPERTIES | ||||
#========================================================================== | ||||
r4653 | def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, maybe_unreachable=False, reference_obj=None): | |||
r1 | return get_commit_safe( | |||
r4299 | self.scm_instance(), commit_id, commit_idx, pre_load=pre_load, | |||
r4653 | maybe_unreachable=maybe_unreachable, reference_obj=reference_obj) | |||
r1 | ||||
def get_changeset(self, rev=None, pre_load=None): | ||||
warnings.warn("Use get_commit", DeprecationWarning) | ||||
commit_id = None | ||||
commit_idx = None | ||||
r4908 | if isinstance(rev, str): | |||
r1 | commit_id = rev | |||
else: | ||||
commit_idx = rev | ||||
return self.get_commit(commit_id=commit_id, commit_idx=commit_idx, | ||||
pre_load=pre_load) | ||||
def get_landing_commit(self): | ||||
""" | ||||
Returns landing commit, or if that doesn't exist returns the tip | ||||
""" | ||||
_rev_type, _rev = self.landing_rev | ||||
commit = self.get_commit(_rev) | ||||
if isinstance(commit, EmptyCommit): | ||||
return self.get_commit() | ||||
return commit | ||||
r3998 | def flush_commit_cache(self): | |||
r5486 | self.update_commit_cache(cs_cache={'raw_id': '0'}) | |||
r3998 | self.update_commit_cache() | |||
r5486 | def update_commit_cache(self, cs_cache=None, config=None, recursive=True): | |||
r1 | """ | |||
r4162 | Update cache of last commit for repository | |||
cache_keys should be:: | ||||
r1 | ||||
r3689 | source_repo_id | |||
r1 | short_id | |||
raw_id | ||||
revision | ||||
parents | ||||
message | ||||
date | ||||
author | ||||
r3705 | updated_on | |||
r1 | ||||
""" | ||||
r5071 | from rhodecode.lib.vcs.backends.base import BaseCommit | |||
r4162 | from rhodecode.lib.vcs.utils.helpers import parse_datetime | |||
empty_date = datetime.datetime.fromtimestamp(0) | ||||
r5071 | repo_commit_count = 0 | |||
r4162 | ||||
r1 | if cs_cache is None: | |||
# use no-cache version here | ||||
r4162 | try: | |||
scm_repo = self.scm_instance(cache=False, config=config) | ||||
except VCSError: | ||||
scm_repo = None | ||||
r3723 | empty = scm_repo is None or scm_repo.is_empty() | |||
r4162 | ||||
r2955 | if not empty: | |||
r1 | cs_cache = scm_repo.get_commit( | |||
r3886 | pre_load=["author", "date", "message", "parents", "branch"]) | |||
r5071 | repo_commit_count = scm_repo.count() | |||
r1 | else: | |||
cs_cache = EmptyCommit() | ||||
r5071 | if isinstance(cs_cache, BaseCommit): | |||
r1 | cs_cache = cs_cache.__json__() | |||
r5486 | def maybe_update_recursive(instance, _config, _recursive, _cs_cache, _last_change): | |||
if _recursive: | ||||
repo_id = instance.repo_id | ||||
_cs_cache['source_repo_id'] = repo_id | ||||
for gr in instance.groups_with_parents: | ||||
gr.changeset_cache = _cs_cache | ||||
gr.updated_on = _last_change | ||||
r1 | def is_outdated(new_cs_cache): | |||
r339 | if (new_cs_cache['raw_id'] != self.changeset_cache['raw_id'] or | |||
new_cs_cache['revision'] != self.changeset_cache['revision']): | ||||
r1 | return True | |||
return False | ||||
# check if we have maybe already latest cached revision | ||||
if is_outdated(cs_cache) or not self.changeset_cache: | ||||
r4162 | _current_datetime = datetime.datetime.utcnow() | |||
last_change = cs_cache.get('date') or _current_datetime | ||||
r3689 | # we check if last update is newer than the new value | |||
# if yes, we use the current timestamp instead. Imagine you get | ||||
# old commit pushed 1y ago, we'd set last update 1y to ago. | ||||
last_change_timestamp = datetime_to_time(last_change) | ||||
current_timestamp = datetime_to_time(last_change) | ||||
r4162 | if last_change_timestamp > current_timestamp and not empty: | |||
cs_cache['date'] = _current_datetime | ||||
r5071 | # also store size of repo | |||
cs_cache['repo_commit_count'] = repo_commit_count | ||||
r4162 | _date_latest = parse_datetime(cs_cache.get('date') or empty_date) | |||
r3705 | cs_cache['updated_on'] = time.time() | |||
r1 | self.changeset_cache = cs_cache | |||
r4000 | self.updated_on = last_change | |||
r1 | Session().add(self) | |||
r5486 | maybe_update_recursive(self, config, recursive, cs_cache, last_change) | |||
r1 | Session().commit() | |||
r3689 | ||||
r1 | else: | |||
r4162 | if empty: | |||
cs_cache = EmptyCommit().__json__() | ||||
else: | ||||
cs_cache = self.changeset_cache | ||||
_date_latest = parse_datetime(cs_cache.get('date') or empty_date) | ||||
r3724 | cs_cache['updated_on'] = time.time() | |||
self.changeset_cache = cs_cache | ||||
r4162 | self.updated_on = _date_latest | |||
r3724 | Session().add(self) | |||
r5486 | maybe_update_recursive(self, config, recursive, cs_cache, _date_latest) | |||
r3724 | Session().commit() | |||
r4162 | log.debug('updated repo `%s` with new commit cache %s, and last update_date: %s', | |||
self.repo_name, cs_cache, _date_latest) | ||||
r1 | ||||
@property | ||||
def tip(self): | ||||
return self.get_commit('tip') | ||||
@property | ||||
def author(self): | ||||
return self.tip.author | ||||
@property | ||||
def last_change(self): | ||||
return self.scm_instance().last_change | ||||
def get_comments(self, revisions=None): | ||||
""" | ||||
Returns comments for this repository grouped by revisions | ||||
:param revisions: filter query by revisions only | ||||
""" | ||||
cmts = ChangesetComment.query()\ | ||||
.filter(ChangesetComment.repo == self) | ||||
if revisions: | ||||
cmts = cmts.filter(ChangesetComment.revision.in_(revisions)) | ||||
grouped = collections.defaultdict(list) | ||||
for cmt in cmts.all(): | ||||
grouped[cmt.revision].append(cmt) | ||||
return grouped | ||||
def statuses(self, revisions=None): | ||||
""" | ||||
Returns statuses for this repository | ||||
:param revisions: list of revisions to get statuses for | ||||
""" | ||||
statuses = ChangesetStatus.query()\ | ||||
.filter(ChangesetStatus.repo == self)\ | ||||
.filter(ChangesetStatus.version == 0) | ||||
if revisions: | ||||
# Try doing the filtering in chunks to avoid hitting limits | ||||
size = 500 | ||||
status_results = [] | ||||
r4906 | for chunk in range(0, len(revisions), size): | |||
r1 | status_results += statuses.filter( | |||
ChangesetStatus.revision.in_( | ||||
revisions[chunk: chunk+size]) | ||||
).all() | ||||
else: | ||||
status_results = statuses.all() | ||||
grouped = {} | ||||
# maybe we have open new pullrequest without a status? | ||||
stat = ChangesetStatus.STATUS_UNDER_REVIEW | ||||
status_lbl = ChangesetStatus.get_status_lbl(stat) | ||||
for pr in PullRequest.query().filter(PullRequest.source_repo == self).all(): | ||||
for rev in pr.revisions: | ||||
pr_id = pr.pull_request_id | ||||
pr_repo = pr.target_repo.repo_name | ||||
grouped[rev] = [stat, status_lbl, pr_id, pr_repo] | ||||
for stat in status_results: | ||||
pr_id = pr_repo = None | ||||
if stat.pull_request: | ||||
pr_id = stat.pull_request.pull_request_id | ||||
pr_repo = stat.pull_request.target_repo.repo_name | ||||
grouped[stat.revision] = [str(stat.status), stat.status_lbl, | ||||
pr_id, pr_repo] | ||||
return grouped | ||||
# ========================================================================== | ||||
# SCM CACHE INSTANCE | ||||
# ========================================================================== | ||||
def scm_instance(self, **kwargs): | ||||
import rhodecode | ||||
# Passing a config will not hit the cache currently only used | ||||
# for repo2dbmapper | ||||
config = kwargs.pop('config', None) | ||||
cache = kwargs.pop('cache', None) | ||||
r3848 | vcs_full_cache = kwargs.pop('vcs_full_cache', None) | |||
if vcs_full_cache is not None: | ||||
# allows override global config | ||||
full_cache = vcs_full_cache | ||||
else: | ||||
r5071 | full_cache = rhodecode.ConfigGet().get_bool('vcs_full_cache') | |||
r1 | # if cache is NOT defined use default global, else we have a full | |||
# control over cache behaviour | ||||
if cache is None and full_cache and not config: | ||||
r3848 | log.debug('Initializing pure cached instance for %s', self.repo_path) | |||
r1 | return self._get_instance_cached() | |||
r3848 | ||||
r3740 | # cache here is sent to the "vcs server" | |||
r1 | return self._get_instance(cache=bool(cache), config=config) | |||
def _get_instance_cached(self): | ||||
r2932 | from rhodecode.lib import rc_cache | |||
r5071 | cache_namespace_uid = f'repo_instance.{self.repo_id}' | |||
r2932 | region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid) | |||
r2946 | # we must use thread scoped cache here, | |||
r2948 | # because each thread of gevent needs it's own not shared connection and cache | |||
# we also alter `args` so the cache key is individual for every green thread. | ||||
r5288 | repo_namespace_key = CacheKey.REPO_INVALIDATION_NAMESPACE.format(repo_id=self.repo_id) | |||
inv_context_manager = rc_cache.InvalidationContext(key=repo_namespace_key, thread_scoped=True) | ||||
# our wrapped caching function that takes state_uid to save the previous state in | ||||
def cache_generator(_state_uid): | ||||
@region.conditional_cache_on_arguments(namespace=cache_namespace_uid) | ||||
def get_instance_cached(_repo_id, _process_context_id): | ||||
# we save in cached func the generation state so we can detect a change and invalidate caches | ||||
return _state_uid, self._get_instance(repo_state_uid=_state_uid) | ||||
return get_instance_cached | ||||
r2932 | with inv_context_manager as invalidation_context: | |||
r5288 | cache_state_uid = invalidation_context.state_uid | |||
cache_func = cache_generator(cache_state_uid) | ||||
args = self.repo_id, inv_context_manager.proc_key | ||||
previous_state_uid, instance = cache_func(*args) | ||||
r5300 | # now compare keys, the "cache" state vs expected state. | |||
if previous_state_uid != cache_state_uid: | ||||
log.warning('Cached state uid %s is different than current state uid %s', | ||||
previous_state_uid, cache_state_uid) | ||||
_, instance = cache_func.refresh(*args) | ||||
r2939 | ||||
r3853 | log.debug('Repo instance fetched in %.4fs', inv_context_manager.compute_time) | |||
r2932 | return instance | |||
r1 | ||||
r3848 | def _get_instance(self, cache=True, config=None, repo_state_uid=None): | |||
log.debug('Initializing %s instance `%s` with cache flag set to: %s', | ||||
self.repo_type, self.repo_path, cache) | ||||
r1 | config = config or self._config | |||
custom_wire = { | ||||
r3848 | 'cache': cache, # controls the vcs.remote cache | |||
'repo_state_uid': repo_state_uid | ||||
r1 | } | |||
r5300 | ||||
Martin Bornhold
|
r485 | repo = get_vcs_instance( | ||
repo_path=safe_str(self.repo_full_path), | ||||
config=config, | ||||
with_wire=custom_wire, | ||||
r1127 | create=False, | |||
_vcs_alias=self.repo_type) | ||||
r3740 | if repo is not None: | |||
repo.count() # cache rebuild | ||||
r5288 | ||||
r1 | return repo | |||
r3931 | def get_shadow_repository_path(self, workspace_id): | |||
from rhodecode.lib.vcs.backends.base import BaseRepository | ||||
shadow_repo_path = BaseRepository._get_shadow_repository_path( | ||||
self.repo_full_path, self.repo_id, workspace_id) | ||||
return shadow_repo_path | ||||
r1 | def __json__(self): | |||
return {'landing_rev': self.landing_rev} | ||||
def get_dict(self): | ||||
# Since we transformed `repo_name` to a hybrid property, we need to | ||||
# keep compatibility with the code which uses `repo_name` field. | ||||
result = super(Repository, self).get_dict() | ||||
result['repo_name'] = result.pop('_repo_name', None) | ||||
r5071 | result.pop('_changeset_cache', '') | |||
r1 | return result | |||
class RepoGroup(Base, BaseModel):
    """Nested grouping of repositories (a directory-like hierarchy)."""

    __tablename__ = 'groups'
    __table_args__ = (
        UniqueConstraint('group_name', 'group_parent_id'),
        base_table_args,
    )
    CHOICES_SEPARATOR = '/'  # used to generate select2 choices for nested groups

    group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    # stored under `_group_name`; exposed via the `group_name` hybrid property
    _group_name = Column("group_name", String(255), nullable=False, unique=True, default=None)
    group_name_hash = Column("repo_group_name_hash", String(1024), nullable=False, unique=False)
    group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
    group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None)
    enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
    personal = Column('personal', Boolean(), nullable=True, unique=None, default=None)
    _changeset_cache = Column("changeset_cache", LargeBinary(), nullable=True)  # JSON data

    repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id', back_populates='group')
    users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all', back_populates='group')
    parent_group = relationship('RepoGroup', remote_side=group_id)
    user = relationship('User', back_populates='repository_groups')
    integrations = relationship('Integration', cascade="all, delete-orphan", back_populates='repo_group')

    # no cascade, set NULL
    scope_artifacts = relationship('FileStore', primaryjoin='FileStore.scope_repo_group_id==RepoGroup.group_id', viewonly=True)

    def __init__(self, group_name='', parent_group=None):
        self.group_name = group_name
        self.parent_group = parent_group

    def __repr__(self):
        return f"<{self.cls_name}('id:{self.group_id}:{self.group_name}')>"
r1830 | ||||
r3623 | @hybrid_property | |||
def group_name(self): | ||||
return self._group_name | ||||
@group_name.setter | ||||
def group_name(self, value): | ||||
self._group_name = value | ||||
self.group_name_hash = self.hash_repo_group_name(value) | ||||
r4146 | @classmethod | |||
def _load_changeset_cache(cls, repo_id, changeset_cache_raw): | ||||
r3689 | from rhodecode.lib.vcs.backends.base import EmptyCommit | |||
dummy = EmptyCommit().__json__() | ||||
r4146 | if not changeset_cache_raw: | |||
dummy['source_repo_id'] = repo_id | ||||
r3689 | return json.loads(json.dumps(dummy)) | |||
try: | ||||
r4146 | return json.loads(changeset_cache_raw) | |||
r3689 | except TypeError: | |||
return dummy | ||||
except Exception: | ||||
log.error(traceback.format_exc()) | ||||
return dummy | ||||
r4146 | @hybrid_property | |||
def changeset_cache(self): | ||||
return self._load_changeset_cache('', self._changeset_cache) | ||||
r3689 | @changeset_cache.setter | |||
def changeset_cache(self, val): | ||||
try: | ||||
self._changeset_cache = json.dumps(val) | ||||
except Exception: | ||||
log.error(traceback.format_exc()) | ||||
r3486 | @validates('group_parent_id') | |||
def validate_group_parent_id(self, key, val): | ||||
""" | ||||
Check cycle references for a parent group to self | ||||
""" | ||||
if self.group_id and val: | ||||
assert val != self.group_id | ||||
return val | ||||
r1830 | @hybrid_property | |||
def description_safe(self): | ||||
r5463 | return description_escaper(self.group_description) | |||
r1 | ||||
@classmethod | ||||
r3623 | def hash_repo_group_name(cls, repo_group_name): | |||
val = remove_formatting(repo_group_name) | ||||
val = safe_str(val).lower() | ||||
chars = [] | ||||
for c in val: | ||||
if c not in string.ascii_letters: | ||||
c = str(ord(c)) | ||||
chars.append(c) | ||||
return ''.join(chars) | ||||
@classmethod | ||||
r1 | def _generate_choice(cls, repo_group): | |||
r4090 | from webhelpers2.html import literal as _literal | |||
r5071 | ||||
def _name(k): | ||||
return _literal(cls.CHOICES_SEPARATOR.join(k)) | ||||
r1 | return repo_group.group_id, _name(repo_group.full_path_splitted) | |||
@classmethod | ||||
def groups_choices(cls, groups=None, show_empty_group=True): | ||||
if not groups: | ||||
groups = cls.query().all() | ||||
repo_groups = [] | ||||
if show_empty_group: | ||||
r5071 | repo_groups = [(-1, '-- %s --' % _('No parent'))] | |||
r1 | ||||
repo_groups.extend([cls._generate_choice(x) for x in groups]) | ||||
repo_groups = sorted( | ||||
repo_groups, key=lambda t: t[1].split(cls.CHOICES_SEPARATOR)[0]) | ||||
return repo_groups | ||||
@classmethod | ||||
def url_sep(cls): | ||||
return URL_SEP | ||||
@classmethod | ||||
def get_by_group_name(cls, group_name, cache=False, case_insensitive=False): | ||||
if case_insensitive: | ||||
gr = cls.query().filter(func.lower(cls.group_name) | ||||
== func.lower(group_name)) | ||||
else: | ||||
gr = cls.query().filter(cls.group_name == group_name) | ||||
if cache: | ||||
r1749 | name_key = _hash_key(group_name) | |||
gr = gr.options( | ||||
r5071 | FromCache("sql_cache_short", f"get_group_{name_key}")) | |||
r1 | return gr.scalar() | |||
@classmethod | ||||
r1094 | def get_user_personal_repo_group(cls, user_id): | |||
user = User.get(user_id) | ||||
r1690 | if user.username == User.DEFAULT_USER: | |||
return None | ||||
r1094 | return cls.query()\ | |||
r1690 | .filter(cls.personal == true()) \ | |||
r3039 | .filter(cls.user == user) \ | |||
.order_by(cls.group_id.asc()) \ | ||||
.first() | ||||
r1094 | ||||
@classmethod | ||||
r1 | def get_all_repo_groups(cls, user_id=Optional(None), group_id=Optional(None), | |||
case_insensitive=True): | ||||
q = RepoGroup.query() | ||||
if not isinstance(user_id, Optional): | ||||
q = q.filter(RepoGroup.user_id == user_id) | ||||
if not isinstance(group_id, Optional): | ||||
q = q.filter(RepoGroup.group_parent_id == group_id) | ||||
if case_insensitive: | ||||
q = q.order_by(func.lower(RepoGroup.group_name)) | ||||
else: | ||||
q = q.order_by(RepoGroup.group_name) | ||||
return q.all() | ||||
@property | ||||
r4146 | def parents(self, parents_recursion_limit=10): | |||
r1 | groups = [] | |||
if self.parent_group is None: | ||||
return groups | ||||
cur_gr = self.parent_group | ||||
groups.insert(0, cur_gr) | ||||
cnt = 0 | ||||
while 1: | ||||
cnt += 1 | ||||
gr = getattr(cur_gr, 'parent_group', None) | ||||
cur_gr = cur_gr.parent_group | ||||
if gr is None: | ||||
break | ||||
if cnt == parents_recursion_limit: | ||||
# this will prevent accidental infinit loops | ||||
r3061 | log.error('more than %s parents found for group %s, stopping ' | |||
'recursive parent fetching', parents_recursion_limit, self) | ||||
r1 | break | |||
groups.insert(0, gr) | ||||
return groups | ||||
@property | ||||
r3705 | def last_commit_cache_update_diff(self): | |||
return time.time() - (safe_int(self.changeset_cache.get('updated_on')) or 0) | ||||
r4146 | @classmethod | |||
def _load_commit_change(cls, last_commit_cache): | ||||
r3689 | from rhodecode.lib.vcs.utils.helpers import parse_datetime | |||
empty_date = datetime.datetime.fromtimestamp(0) | ||||
r4146 | date_latest = last_commit_cache.get('date', empty_date) | |||
r3689 | try: | |||
return parse_datetime(date_latest) | ||||
except Exception: | ||||
return empty_date | ||||
@property | ||||
r4146 | def last_commit_change(self): | |||
return self._load_commit_change(self.changeset_cache) | ||||
@property | ||||
r1940 | def last_db_change(self): | |||
return self.updated_on | ||||
@property | ||||
r1 | def children(self): | |||
return RepoGroup.query().filter(RepoGroup.parent_group == self) | ||||
@property | ||||
def name(self): | ||||
return self.group_name.split(RepoGroup.url_sep())[-1] | ||||
@property | ||||
def full_path(self): | ||||
return self.group_name | ||||
@property | ||||
def full_path_splitted(self): | ||||
return self.group_name.split(RepoGroup.url_sep()) | ||||
@property | ||||
def repositories(self): | ||||
return Repository.query()\ | ||||
.filter(Repository.group == self)\ | ||||
.order_by(Repository.repo_name) | ||||
@property | ||||
def repositories_recursive_count(self): | ||||
cnt = self.repositories.count() | ||||
def children_count(group): | ||||
cnt = 0 | ||||
for child in group.children: | ||||
cnt += child.repositories.count() | ||||
cnt += children_count(child) | ||||
return cnt | ||||
return cnt + children_count(self) | ||||
r3689 | def _recursive_objects(self, include_repos=True, include_groups=True): | |||
r1 | all_ = [] | |||
def _get_members(root_gr): | ||||
if include_repos: | ||||
for r in root_gr.repositories: | ||||
all_.append(r) | ||||
childs = root_gr.children.all() | ||||
if childs: | ||||
for gr in childs: | ||||
r3689 | if include_groups: | |||
all_.append(gr) | ||||
r1 | _get_members(gr) | |||
r3689 | root_group = [] | |||
if include_groups: | ||||
root_group = [self] | ||||
r1 | _get_members(self) | |||
r3689 | return root_group + all_ | |||
r1 | ||||
def recursive_groups_and_repos(self): | ||||
""" | ||||
Recursive return all groups, with repositories in those groups | ||||
""" | ||||
return self._recursive_objects() | ||||
def recursive_groups(self): | ||||
""" | ||||
Returns all children groups for this group including children of children | ||||
""" | ||||
return self._recursive_objects(include_repos=False) | ||||
r3689 | def recursive_repos(self): | |||
""" | ||||
Returns all children repositories for this group | ||||
""" | ||||
return self._recursive_objects(include_groups=False) | ||||
r1 | def get_new_name(self, group_name): | |||
""" | ||||
returns new full group name based on parent and new name | ||||
:param group_name: | ||||
""" | ||||
path_prefix = (self.parent_group.full_path_splitted if | ||||
self.parent_group else []) | ||||
return RepoGroup.url_sep().join(path_prefix + [group_name]) | ||||
r3689 | def update_commit_cache(self, config=None): | |||
""" | ||||
r4162 | Update cache of last commit for newest repository inside this repository group. | |||
cache_keys should be:: | ||||
r3689 | ||||
source_repo_id | ||||
short_id | ||||
raw_id | ||||
revision | ||||
parents | ||||
message | ||||
date | ||||
author | ||||
""" | ||||
from rhodecode.lib.vcs.utils.helpers import parse_datetime | ||||
empty_date = datetime.datetime.fromtimestamp(0) | ||||
r4162 | ||||
def repo_groups_and_repos(root_gr): | ||||
for _repo in root_gr.repositories: | ||||
yield _repo | ||||
for child_group in root_gr.children.all(): | ||||
yield child_group | ||||
latest_repo_cs_cache = {} | ||||
for obj in repo_groups_and_repos(self): | ||||
repo_cs_cache = obj.changeset_cache | ||||
date_latest = latest_repo_cs_cache.get('date', empty_date) | ||||
date_current = repo_cs_cache.get('date', empty_date) | ||||
current_timestamp = datetime_to_time(parse_datetime(date_latest)) | ||||
if current_timestamp < datetime_to_time(parse_datetime(date_current)): | ||||
latest_repo_cs_cache = repo_cs_cache | ||||
if hasattr(obj, 'repo_id'): | ||||
latest_repo_cs_cache['source_repo_id'] = obj.repo_id | ||||
else: | ||||
latest_repo_cs_cache['source_repo_id'] = repo_cs_cache.get('source_repo_id') | ||||
_date_latest = parse_datetime(latest_repo_cs_cache.get('date') or empty_date) | ||||
latest_repo_cs_cache['updated_on'] = time.time() | ||||
self.changeset_cache = latest_repo_cs_cache | ||||
self.updated_on = _date_latest | ||||
Session().add(self) | ||||
Session().commit() | ||||
log.debug('updated repo group `%s` with new commit cache %s, and last update_date: %s', | ||||
self.group_name, latest_repo_cs_cache, _date_latest) | ||||
r3689 | ||||
r3411 | def permissions(self, with_admins=True, with_owner=True, | |||
expand_from_user_groups=False): | ||||
r2976 | """ | |||
Permissions for repository groups | ||||
""" | ||||
_admin_perm = 'group.admin' | ||||
owner_row = [] | ||||
if with_owner: | ||||
usr = AttributeDict(self.user.get_dict()) | ||||
usr.owner_row = True | ||||
usr.permission = _admin_perm | ||||
owner_row.append(usr) | ||||
super_admin_ids = [] | ||||
super_admin_rows = [] | ||||
if with_admins: | ||||
for usr in User.get_all_super_admins(): | ||||
super_admin_ids.append(usr.user_id) | ||||
# if this admin is also owner, don't double the record | ||||
if usr.user_id == owner_row[0].user_id: | ||||
owner_row[0].admin_row = True | ||||
else: | ||||
usr = AttributeDict(usr.get_dict()) | ||||
usr.admin_row = True | ||||
usr.permission = _admin_perm | ||||
super_admin_rows.append(usr) | ||||
r1 | q = UserRepoGroupToPerm.query().filter(UserRepoGroupToPerm.group == self) | |||
q = q.options(joinedload(UserRepoGroupToPerm.group), | ||||
joinedload(UserRepoGroupToPerm.user), | ||||
joinedload(UserRepoGroupToPerm.permission),) | ||||
# get owners and admins and permissions. We do a trick of re-writing | ||||
# objects from sqlalchemy to named-tuples due to sqlalchemy session | ||||
# has a global reference and changing one object propagates to all | ||||
# others. This means if admin is also an owner admin_row that change | ||||
# would propagate to both objects | ||||
perm_rows = [] | ||||
for _usr in q.all(): | ||||
usr = AttributeDict(_usr.user.get_dict()) | ||||
r2976 | # if this user is also owner/admin, mark as duplicate record | |||
if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids: | ||||
usr.duplicate_perm = True | ||||
r1 | usr.permission = _usr.permission.permission_name | |||
perm_rows.append(usr) | ||||
# filter the perm rows by 'default' first and then sort them by | ||||
# admin,write,read,none permissions sorted again alphabetically in | ||||
# each group | ||||
r2060 | perm_rows = sorted(perm_rows, key=display_user_sort) | |||
r1 | ||||
r3411 | user_groups_rows = [] | |||
if expand_from_user_groups: | ||||
for ug in self.permission_user_groups(with_members=True): | ||||
for user_data in ug.members: | ||||
user_groups_rows.append(user_data) | ||||
return super_admin_rows + owner_row + perm_rows + user_groups_rows | ||||
def permission_user_groups(self, with_members=False): | ||||
q = UserGroupRepoGroupToPerm.query()\ | ||||
.filter(UserGroupRepoGroupToPerm.group == self) | ||||
r1 | q = q.options(joinedload(UserGroupRepoGroupToPerm.group), | |||
joinedload(UserGroupRepoGroupToPerm.users_group), | ||||
joinedload(UserGroupRepoGroupToPerm.permission),) | ||||
perm_rows = [] | ||||
for _user_group in q.all(): | ||||
r3411 | entry = AttributeDict(_user_group.users_group.get_dict()) | |||
entry.permission = _user_group.permission.permission_name | ||||
if with_members: | ||||
entry.members = [x.user.get_dict() | ||||
for x in _user_group.users_group.members] | ||||
perm_rows.append(entry) | ||||
r1 | ||||
r2060 | perm_rows = sorted(perm_rows, key=display_user_group_sort) | |||
r1 | return perm_rows | |||
def get_api_data(self): | ||||
""" | ||||
Common function for generating api data | ||||
""" | ||||
group = self | ||||
data = { | ||||
'group_id': group.group_id, | ||||
'group_name': group.group_name, | ||||
r1830 | 'group_description': group.description_safe, | |||
r1 | 'parent_group': group.parent_group.group_name if group.parent_group else None, | |||
'repositories': [x.repo_name for x in group.repositories], | ||||
'owner': group.user.username, | ||||
} | ||||
return data | ||||
r3623 | def get_dict(self): | |||
# Since we transformed `group_name` to a hybrid property, we need to | ||||
# keep compatibility with the code which uses `group_name` field. | ||||
result = super(RepoGroup, self).get_dict() | ||||
result['group_name'] = result.pop('_group_name', None) | ||||
r5071 | result.pop('_changeset_cache', '') | |||
r3623 | return result | |||
r1 | ||||
class Permission(Base, BaseModel):
    """Catalogue of all permission names known to the system."""

    __tablename__ = 'permissions'
    __table_args__ = (
        Index('p_perm_name_idx', 'permission_name'),
        base_table_args,
    )

    # (permission_name, translated label) pairs seeded into the database
    PERMS = [
        ('hg.admin', _('RhodeCode Super Administrator')),

        ('repository.none', _('Repository no access')),
        ('repository.read', _('Repository read access')),
        ('repository.write', _('Repository write access')),
        ('repository.admin', _('Repository admin access')),

        ('group.none', _('Repository group no access')),
        ('group.read', _('Repository group read access')),
        ('group.write', _('Repository group write access')),
        ('group.admin', _('Repository group admin access')),

        ('usergroup.none', _('User group no access')),
        ('usergroup.read', _('User group read access')),
        ('usergroup.write', _('User group write access')),
        ('usergroup.admin', _('User group admin access')),

        ('branch.none', _('Branch no permissions')),
        ('branch.merge', _('Branch access by web merge')),
        ('branch.push', _('Branch access by push')),
        ('branch.push_force', _('Branch access by push with force')),

        ('hg.repogroup.create.false', _('Repository Group creation disabled')),
        ('hg.repogroup.create.true', _('Repository Group creation enabled')),

        ('hg.usergroup.create.false', _('User Group creation disabled')),
        ('hg.usergroup.create.true', _('User Group creation enabled')),

        ('hg.create.none', _('Repository creation disabled')),
        ('hg.create.repository', _('Repository creation enabled')),
        ('hg.create.write_on_repogroup.true', _('Repository creation enabled with write permission to a repository group')),
        ('hg.create.write_on_repogroup.false', _('Repository creation disabled with write permission to a repository group')),

        ('hg.fork.none', _('Repository forking disabled')),
        ('hg.fork.repository', _('Repository forking enabled')),

        ('hg.register.none', _('Registration disabled')),
        ('hg.register.manual_activate', _('User Registration with manual account activation')),
        ('hg.register.auto_activate', _('User Registration with automatic account activation')),

        ('hg.password_reset.enabled', _('Password reset enabled')),
        ('hg.password_reset.hidden', _('Password reset hidden')),
        ('hg.password_reset.disabled', _('Password reset disabled')),

        ('hg.extern_activate.manual', _('Manual activation of external account')),
        ('hg.extern_activate.auto', _('Automatic activation of external account')),

        ('hg.inherit_default_perms.false', _('Inherit object permissions from default user disabled')),
        ('hg.inherit_default_perms.true', _('Inherit object permissions from default user enabled')),
    ]

    # definition of system default permissions for DEFAULT user, created on
    # system setup
    DEFAULT_USER_PERMISSIONS = [
        # object perms
        'repository.read',
        'group.read',
        'usergroup.read',
        # branch, for backward compat we need same value as before so forced pushed
        'branch.push_force',
        # global
        'hg.create.repository',
        'hg.repogroup.create.false',
        'hg.usergroup.create.false',
        'hg.create.write_on_repogroup.true',
        'hg.fork.repository',
        'hg.register.manual_activate',
        'hg.password_reset.enabled',
        'hg.extern_activate.auto',
        'hg.inherit_default_perms.true',
    ]

    # defines which permissions are more important higher the more important
    # Weight defines which permissions are more important.
    # The higher number the more important.
    PERM_WEIGHTS = {
        'repository.none': 0,
        'repository.read': 1,
        'repository.write': 3,
        'repository.admin': 4,

        'group.none': 0,
        'group.read': 1,
        'group.write': 3,
        'group.admin': 4,

        'usergroup.none': 0,
        'usergroup.read': 1,
        'usergroup.write': 3,
        'usergroup.admin': 4,

        'branch.none': 0,
        'branch.merge': 1,
        'branch.push': 3,
        'branch.push_force': 4,

        'hg.repogroup.create.false': 0,
        'hg.repogroup.create.true': 1,
        'hg.usergroup.create.false': 0,
        'hg.usergroup.create.true': 1,
        'hg.fork.none': 0,
        'hg.fork.repository': 1,
        'hg.create.none': 0,
        'hg.create.repository': 1
    }

    permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None)
    permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None)
r5071 | def __repr__(self): | |||
r5010 | return "<%s('%s:%s')>" % ( | |||
r5071 | self.cls_name, self.permission_id, self.permission_name | |||
r1 | ) | |||
@classmethod | ||||
def get_by_key(cls, key): | ||||
return cls.query().filter(cls.permission_name == key).scalar() | ||||
@classmethod | ||||
def get_default_repo_perms(cls, user_id, repo_id=None): | ||||
q = Session().query(UserRepoToPerm, Repository, Permission)\ | ||||
.join((Permission, UserRepoToPerm.permission_id == Permission.permission_id))\ | ||||
.join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\ | ||||
.filter(UserRepoToPerm.user_id == user_id) | ||||
if repo_id: | ||||
q = q.filter(UserRepoToPerm.repository_id == repo_id) | ||||
return q.all() | ||||
@classmethod | ||||
r2975 | def get_default_repo_branch_perms(cls, user_id, repo_id=None): | |||
q = Session().query(UserToRepoBranchPermission, UserRepoToPerm, Permission) \ | ||||
.join( | ||||
Permission, | ||||
UserToRepoBranchPermission.permission_id == Permission.permission_id) \ | ||||
.join( | ||||
UserRepoToPerm, | ||||
UserToRepoBranchPermission.rule_to_perm_id == UserRepoToPerm.repo_to_perm_id) \ | ||||
.filter(UserRepoToPerm.user_id == user_id) | ||||
if repo_id: | ||||
q = q.filter(UserToRepoBranchPermission.repository_id == repo_id) | ||||
return q.order_by(UserToRepoBranchPermission.rule_order).all() | ||||
@classmethod | ||||
r1 | def get_default_repo_perms_from_user_group(cls, user_id, repo_id=None): | |||
q = Session().query(UserGroupRepoToPerm, Repository, Permission)\ | ||||
.join( | ||||
Permission, | ||||
UserGroupRepoToPerm.permission_id == Permission.permission_id)\ | ||||
.join( | ||||
Repository, | ||||
UserGroupRepoToPerm.repository_id == Repository.repo_id)\ | ||||
.join( | ||||
UserGroup, | ||||
UserGroupRepoToPerm.users_group_id == | ||||
UserGroup.users_group_id)\ | ||||
.join( | ||||
UserGroupMember, | ||||
UserGroupRepoToPerm.users_group_id == | ||||
UserGroupMember.users_group_id)\ | ||||
.filter( | ||||
UserGroupMember.user_id == user_id, | ||||
UserGroup.users_group_active == true()) | ||||
if repo_id: | ||||
q = q.filter(UserGroupRepoToPerm.repository_id == repo_id) | ||||
return q.all() | ||||
@classmethod | ||||
r2975 | def get_default_repo_branch_perms_from_user_group(cls, user_id, repo_id=None): | |||
q = Session().query(UserGroupToRepoBranchPermission, UserGroupRepoToPerm, Permission) \ | ||||
.join( | ||||
Permission, | ||||
UserGroupToRepoBranchPermission.permission_id == Permission.permission_id) \ | ||||
.join( | ||||
UserGroupRepoToPerm, | ||||
UserGroupToRepoBranchPermission.rule_to_perm_id == UserGroupRepoToPerm.users_group_to_perm_id) \ | ||||
.join( | ||||
UserGroup, | ||||
UserGroupRepoToPerm.users_group_id == UserGroup.users_group_id) \ | ||||
.join( | ||||
UserGroupMember, | ||||
UserGroupRepoToPerm.users_group_id == UserGroupMember.users_group_id) \ | ||||
.filter( | ||||
UserGroupMember.user_id == user_id, | ||||
UserGroup.users_group_active == true()) | ||||
if repo_id: | ||||
q = q.filter(UserGroupToRepoBranchPermission.repository_id == repo_id) | ||||
return q.order_by(UserGroupToRepoBranchPermission.rule_order).all() | ||||
@classmethod | ||||
r1 | def get_default_group_perms(cls, user_id, repo_group_id=None): | |||
q = Session().query(UserRepoGroupToPerm, RepoGroup, Permission)\ | ||||
r2975 | .join( | |||
Permission, | ||||
UserRepoGroupToPerm.permission_id == Permission.permission_id)\ | ||||
.join( | ||||
RepoGroup, | ||||
UserRepoGroupToPerm.group_id == RepoGroup.group_id)\ | ||||
r1 | .filter(UserRepoGroupToPerm.user_id == user_id) | |||
if repo_group_id: | ||||
q = q.filter(UserRepoGroupToPerm.group_id == repo_group_id) | ||||
return q.all() | ||||
@classmethod | ||||
def get_default_group_perms_from_user_group( | ||||
cls, user_id, repo_group_id=None): | ||||
q = Session().query(UserGroupRepoGroupToPerm, RepoGroup, Permission)\ | ||||
.join( | ||||
Permission, | ||||
UserGroupRepoGroupToPerm.permission_id == | ||||
Permission.permission_id)\ | ||||
.join( | ||||
RepoGroup, | ||||
UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id)\ | ||||
.join( | ||||
UserGroup, | ||||
UserGroupRepoGroupToPerm.users_group_id == | ||||
UserGroup.users_group_id)\ | ||||
.join( | ||||
UserGroupMember, | ||||
UserGroupRepoGroupToPerm.users_group_id == | ||||
UserGroupMember.users_group_id)\ | ||||
.filter( | ||||
UserGroupMember.user_id == user_id, | ||||
UserGroup.users_group_active == true()) | ||||
if repo_group_id: | ||||
q = q.filter(UserGroupRepoGroupToPerm.group_id == repo_group_id) | ||||
return q.all() | ||||
@classmethod | ||||
def get_default_user_group_perms(cls, user_id, user_group_id=None): | ||||
q = Session().query(UserUserGroupToPerm, UserGroup, Permission)\ | ||||
.join((Permission, UserUserGroupToPerm.permission_id == Permission.permission_id))\ | ||||
.join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id))\ | ||||
.filter(UserUserGroupToPerm.user_id == user_id) | ||||
if user_group_id: | ||||
q = q.filter(UserUserGroupToPerm.user_group_id == user_group_id) | ||||
return q.all() | ||||
@classmethod | ||||
def get_default_user_group_perms_from_user_group( | ||||
cls, user_id, user_group_id=None): | ||||
TargetUserGroup = aliased(UserGroup, name='target_user_group') | ||||
q = Session().query(UserGroupUserGroupToPerm, UserGroup, Permission)\ | ||||
.join( | ||||
Permission, | ||||
UserGroupUserGroupToPerm.permission_id == | ||||
Permission.permission_id)\ | ||||
.join( | ||||
TargetUserGroup, | ||||
UserGroupUserGroupToPerm.target_user_group_id == | ||||
TargetUserGroup.users_group_id)\ | ||||
.join( | ||||
UserGroup, | ||||
UserGroupUserGroupToPerm.user_group_id == | ||||
UserGroup.users_group_id)\ | ||||
.join( | ||||
UserGroupMember, | ||||
UserGroupUserGroupToPerm.user_group_id == | ||||
UserGroupMember.users_group_id)\ | ||||
.filter( | ||||
UserGroupMember.user_id == user_id, | ||||
UserGroup.users_group_active == true()) | ||||
if user_group_id: | ||||
q = q.filter( | ||||
UserGroupUserGroupToPerm.user_group_id == user_group_id) | ||||
return q.all() | ||||
class UserRepoToPerm(Base, BaseModel):
    """Direct permission binding of a single user to a single repository."""
    __tablename__ = 'repo_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'repository_id', 'permission_id'),
        base_table_args
    )

    repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)

    user = relationship('User', back_populates="repo_to_perm")
    repository = relationship('Repository', back_populates="repo_to_perm")
    permission = relationship('Permission')
    # branch rules hanging off this binding; delete-orphan so they go away
    # together with the binding itself
    branch_perm_entry = relationship('UserToRepoBranchPermission', cascade="all, delete-orphan", lazy='joined', back_populates='user_repo_to_perm')

    @classmethod
    def create(cls, user, repository, permission):
        """Create a new binding, add it to the session, and return it."""
        n = cls()
        n.user = user
        n.repository = repository
        n.permission = permission
        Session().add(n)
        return n

    def __repr__(self):
        return f'<{self.user} => {self.repository} >'
r1 | ||||
class UserUserGroupToPerm(Base, BaseModel):
    """Permission binding of a single user to a user group."""
    __tablename__ = 'user_user_group_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'user_group_id', 'permission_id'),
        base_table_args
    )

    user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)

    user = relationship('User', back_populates='user_group_to_perm')
    user_group = relationship('UserGroup', back_populates='user_user_group_to_perm')
    permission = relationship('Permission')

    @classmethod
    def create(cls, user, user_group, permission):
        """Create a new binding, add it to the session, and return it."""
        n = cls()
        n.user = user
        n.user_group = user_group
        n.permission = permission
        Session().add(n)
        return n

    def __repr__(self):
        return f'<{self.user} => {self.user_group} >'
r1 | ||||
class UserToPerm(Base, BaseModel):
    """Global (non-object-scoped) permission assigned directly to a user."""
    __tablename__ = 'user_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'permission_id'),
        base_table_args
    )

    user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    user = relationship('User', back_populates='user_perms')
    # eagerly loaded, this binding is read mainly for its permission
    permission = relationship('Permission', lazy='joined')

    def __repr__(self):
        return f'<{self.user} => {self.permission} >'
r1 | ||||
class UserGroupRepoToPerm(Base, BaseModel):
    """Permission binding of a user group to a repository."""
    __tablename__ = 'users_group_repo_to_perm'
    __table_args__ = (
        UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
        base_table_args
    )

    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup', back_populates='users_group_repo_to_perm')
    permission = relationship('Permission')
    repository = relationship('Repository', back_populates='users_group_to_perm')
    # branch rules hanging off this binding
    user_group_branch_perms = relationship('UserGroupToRepoBranchPermission', cascade='all', back_populates='user_group_repo_to_perm')

    @classmethod
    def create(cls, users_group, repository, permission):
        """Create a new binding, add it to the session, and return it."""
        n = cls()
        n.users_group = users_group
        n.repository = repository
        n.permission = permission
        Session().add(n)
        return n

    def __repr__(self):
        return f'<UserGroupRepoToPerm:{self.users_group} => {self.repository} >'
r1 | ||||
class UserGroupUserGroupToPerm(Base, BaseModel):
    """Permission one user group holds over another (target) user group."""
    __tablename__ = 'user_group_user_group_to_perm'
    __table_args__ = (
        UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'),
        # a group cannot be granted a permission on itself
        CheckConstraint('target_user_group_id != user_group_id'),
        base_table_args
    )

    user_group_user_group_to_perm_id = Column("user_group_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    target_user_group_id = Column("target_user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)

    # two FKs into the same table: explicit primaryjoin disambiguates them
    target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id', back_populates='user_group_user_group_to_perm')
    user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id')
    permission = relationship('Permission')

    @classmethod
    def create(cls, target_user_group, user_group, permission):
        """Create a new binding, add it to the session, and return it."""
        n = cls()
        n.target_user_group = target_user_group
        n.user_group = user_group
        n.permission = permission
        Session().add(n)
        return n

    def __repr__(self):
        return f'<UserGroupUserGroup:{self.target_user_group} => {self.user_group} >'
r1 | ||||
class UserGroupToPerm(Base, BaseModel):
    """Global (non-object-scoped) permission assigned to a user group."""
    __tablename__ = 'users_group_to_perm'
    __table_args__ = (
        UniqueConstraint('users_group_id', 'permission_id',),
        base_table_args
    )

    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup', back_populates='users_group_to_perm')
    permission = relationship('Permission')
class UserRepoGroupToPerm(Base, BaseModel):
    """Permission binding of a single user to a repository group."""
    __tablename__ = 'user_repo_group_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'group_id', 'permission_id'),
        base_table_args
    )

    group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    user = relationship('User', back_populates='repo_group_to_perm')
    group = relationship('RepoGroup', back_populates='repo_group_to_perm')
    permission = relationship('Permission')

    @classmethod
    def create(cls, user, repository_group, permission):
        """Create a new binding, add it to the session, and return it."""
        n = cls()
        n.user = user
        n.group = repository_group
        n.permission = permission
        Session().add(n)
        return n
class UserGroupRepoGroupToPerm(Base, BaseModel):
    """Permission binding of a user group to a repository group."""
    __tablename__ = 'users_group_repo_group_to_perm'
    __table_args__ = (
        UniqueConstraint('users_group_id', 'group_id'),
        base_table_args
    )

    users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup', back_populates='users_group_repo_group_to_perm')
    permission = relationship('Permission')
    group = relationship('RepoGroup', back_populates='users_group_to_perm')

    @classmethod
    def create(cls, user_group, repository_group, permission):
        """Create a new binding, add it to the session, and return it."""
        n = cls()
        n.users_group = user_group
        n.group = repository_group
        n.permission = permission
        Session().add(n)
        return n

    def __repr__(self):
        # f-string for consistency with the sibling *ToPerm __repr__ methods;
        # output is identical to the previous %-formatting
        return f'<UserGroupRepoGroupToPerm:{self.users_group} => {self.group} >'
r1 | ||||
class Statistics(Base, BaseModel):
    """Pre-computed per-repository statistics blobs (one row per repo)."""
    __tablename__ = 'statistics'
    __table_args__ = (
        base_table_args
    )

    stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    # unique=True: at most one statistics row per repository
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
    # revision these statistics were computed up to
    stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
    commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)  # JSON data
    commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)  # JSON data
    languages = Column("languages", LargeBinary(1000000), nullable=False)  # JSON data

    repository = relationship('Repository', single_parent=True, viewonly=True)
r1 | ||||
class UserFollowing(Base, BaseModel):
    """Records a user following either a repository or another user."""
    __tablename__ = 'user_followings'
    __table_args__ = (
        UniqueConstraint('user_id', 'follows_repository_id'),
        UniqueConstraint('user_id', 'follows_user_id'),
        base_table_args
    )

    user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    # exactly one of the two targets below is expected to be set per row —
    # both columns are nullable; TODO confirm enforcement happens in app code
    follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
    follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)

    user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id', back_populates='followings')

    follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
    follows_repository = relationship('Repository', order_by='Repository.repo_name', back_populates='followers')

    @classmethod
    def get_repo_followers(cls, repo_id):
        """Return a query of all followings that target repository *repo_id*."""
        return cls.query().filter(cls.follows_repo_id == repo_id)
class CacheKey(Base, BaseModel):
    """
    Cache-invalidation bookkeeping: each row pairs a cache key with a
    state UID; bumping the UID invalidates every worker's cached value.
    """
    __tablename__ = 'cache_invalidation'
    __table_args__ = (
        UniqueConstraint('cache_key'),
        Index('key_idx', 'cache_key'),
        Index('cache_args_idx', 'cache_args'),
        base_table_args,
    )

    CACHE_TYPE_FEED = 'FEED'

    # namespaces used to register process/thread aware caches
    REPO_INVALIDATION_NAMESPACE = 'repo_cache.v1:{repo_id}'

    cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None)
    cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None)
    # UID shared by all workers; a new UID means cached entries are stale
    cache_state_uid = Column("cache_state_uid", String(255), nullable=True, unique=None, default=None)
    cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)

    def __init__(self, cache_key, cache_args='', cache_state_uid=None, cache_active=False):
        self.cache_key = cache_key
        self.cache_args = cache_args
        self.cache_active = cache_active
        # first key should be same for all entries, since all workers should share it
        self.cache_state_uid = cache_state_uid or self.generate_new_state_uid()

    def __repr__(self):
        return "<%s('%s:%s[%s]')>" % (
            self.cls_name,
            self.cache_id, self.cache_key, self.cache_active)

    def _cache_key_partition(self):
        # split cache_key around cache_args: (prefix, repo_name, suffix)
        prefix, repo_name, suffix = self.cache_key.partition(self.cache_args)
        return prefix, repo_name, suffix

    def get_prefix(self):
        """
        Try to extract prefix from existing cache key. The key could consist
        of prefix, repo_name, suffix
        """
        # this returns prefix, repo_name, suffix
        return self._cache_key_partition()[0]

    def get_suffix(self):
        """
        get suffix that might have been used in _get_cache_key to
        generate self.cache_key. Only used for informational purposes
        in repo_edit.mako.
        """
        # prefix, repo_name, suffix
        return self._cache_key_partition()[2]

    @classmethod
    def generate_new_state_uid(cls, based_on=None):
        """Return a new state UID: deterministic (uuid5) if *based_on* is given, random otherwise."""
        if based_on:
            return str(uuid.uuid5(uuid.NAMESPACE_URL, safe_str(based_on)))
        else:
            return str(uuid.uuid4())

    @classmethod
    def delete_all_cache(cls):
        """
        Delete all cache keys from database.
        Should only be run when all instances are down and all entries
        thus stale.
        """
        cls.query().delete()
        Session().commit()

    @classmethod
    def set_invalidate(cls, cache_uid, delete=False):
        """
        Mark all caches of a repo as invalid in the database.

        With ``delete=True`` the rows are removed; otherwise each matching
        row gets a fresh state UID, which invalidates cached entries.
        Failures are logged and rolled back rather than raised.
        """
        try:
            qry = Session().query(cls).filter(cls.cache_key == cache_uid)
            if delete:
                qry.delete()
                log.debug('cache objects deleted for cache args %s',
                          safe_str(cache_uid))
            else:
                new_uid = cls.generate_new_state_uid()
                qry.update({"cache_state_uid": new_uid,
                            "cache_args": f"repo_state:{time.time()}"})
                log.debug('cache object %s set new UID %s',
                          safe_str(cache_uid), new_uid)

            Session().commit()
        except Exception:
            log.exception(
                'Cache key invalidation failed for cache args %s',
                safe_str(cache_uid))
            Session().rollback()

    @classmethod
    def get_active_cache(cls, cache_key):
        """Return the row matching *cache_key*, or None when absent."""
        inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
        if inv_obj:
            return inv_obj
        return None

    @classmethod
    def get_namespace_map(cls, namespace):
        """Return {cache_key: row} for every row whose cache_args equals *namespace*."""
        return {
            x.cache_key: x
            for x in cls.query().filter(cls.cache_args == namespace)}
r1 | ||||
class ChangesetComment(Base, BaseModel):
    """
    A comment on a commit or pull request, either general or inline
    (inline == attached to a line number and file path).
    """
    __tablename__ = 'changeset_comments'
    __table_args__ = (
        Index('cc_revision_idx', 'revision'),
        base_table_args,
    )

    # display_state value marking a comment as outdated
    COMMENT_OUTDATED = 'comment_outdated'
    COMMENT_TYPE_NOTE = 'note'
    COMMENT_TYPE_TODO = 'todo'
    COMMENT_TYPES = [COMMENT_TYPE_NOTE, COMMENT_TYPE_TODO]

    # immutable_state values: immutable comments may not be edited/deleted
    OP_IMMUTABLE = 'immutable'
    OP_CHANGEABLE = 'changeable'

    comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
    revision = Column('revision', String(40), nullable=True)
    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
    pull_request_version_id = Column("pull_request_version_id", Integer(), ForeignKey('pull_request_versions.pull_request_version_id'), nullable=True)
    line_no = Column('line_no', Unicode(10), nullable=True)
    hl_lines = Column('hl_lines', Unicode(512), nullable=True)
    f_path = Column('f_path', Unicode(1000), nullable=True)
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
    text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    renderer = Column('renderer', Unicode(64), nullable=True)
    display_state = Column('display_state', Unicode(128), nullable=True)
    immutable_state = Column('immutable_state', Unicode(128), nullable=True, default=OP_CHANGEABLE)
    draft = Column('draft', Boolean(), nullable=True, default=False)

    comment_type = Column('comment_type', Unicode(128), nullable=True, default=COMMENT_TYPE_NOTE)
    resolved_comment_id = Column('resolved_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=True)

    # self-referential pair: a TODO comment and the comment(s) resolving it
    resolved_comment = relationship('ChangesetComment', remote_side=comment_id, back_populates='resolved_by')
    resolved_by = relationship('ChangesetComment', back_populates='resolved_comment')
    author = relationship('User', lazy='select', back_populates='user_comments')
    repo = relationship('Repository', back_populates='comments')
    status_change = relationship('ChangesetStatus', cascade="all, delete-orphan", lazy='select', back_populates='comment')
    pull_request = relationship('PullRequest', lazy='select', back_populates='comments')
    pull_request_version = relationship('PullRequestVersion', lazy='select')
    history = relationship('ChangesetCommentHistory', cascade='all, delete-orphan', lazy='select', order_by='ChangesetCommentHistory.version', back_populates="comment")

    @classmethod
    def get_users(cls, revision=None, pull_request_id=None):
        """
        Return the User rows who authored comments, filtered to one
        commit revision or to one pull request if given.

        :param revision: commit hash to narrow by
        :param pull_request_id: pull request to narrow by (only used when
            *revision* is not given)
        """
        q = Session().query(User).join(ChangesetComment.author)
        if revision:
            q = q.filter(cls.revision == revision)
        elif pull_request_id:
            q = q.filter(cls.pull_request_id == pull_request_id)
        return q.all()

    @classmethod
    def get_index_from_version(cls, pr_version, versions=None, num_versions=None) -> int:
        """
        Return the 1-based position of *pr_version* in the list of version
        ids; 0 when pr_version is None or not found. *versions* (objects with
        ``pull_request_version_id``) takes precedence over *num_versions*.
        """
        if pr_version is None:
            return 0

        if versions is not None:
            num_versions = [x.pull_request_version_id for x in versions]

        num_versions = num_versions or []
        try:
            return num_versions.index(pr_version) + 1
        except (IndexError, ValueError):
            return 0

    @property
    def outdated(self):
        # True when the comment has been marked outdated
        return self.display_state == self.COMMENT_OUTDATED

    @property
    def outdated_js(self):
        # same as .outdated but as a JSON string for templates
        return str_json(self.display_state == self.COMMENT_OUTDATED)

    @property
    def immutable(self):
        return self.immutable_state == self.OP_IMMUTABLE

    def outdated_at_version(self, version: int) -> bool:
        """
        Checks if comment is outdated for given pull request version
        """

        def version_check():
            return self.pull_request_version_id and self.pull_request_version_id != version

        if self.is_inline:
            return self.outdated and version_check()
        else:
            # general comments don't have .outdated set, also latest don't have a version
            return version_check()

    def outdated_at_version_js(self, version):
        """
        Checks if comment is outdated for given pull request version
        """
        return str_json(self.outdated_at_version(version))

    def older_than_version(self, version: int) -> bool:
        """
        Checks if comment is made from a previous version than given.
        Assumes self.pull_request_version.pull_request_version_id is an integer if not None.
        """

        # If version is None, return False as the current version cannot be less than None
        if version is None:
            return False

        # Ensure that the version is an integer to prevent TypeError on comparison
        if not isinstance(version, int):
            raise ValueError("The provided version must be an integer.")

        # Initialize current version to 0 or pull_request_version_id if it's available
        cur_ver = 0
        if self.pull_request_version and self.pull_request_version.pull_request_version_id is not None:
            cur_ver = self.pull_request_version.pull_request_version_id

        # Return True if the current version is less than the given version
        return cur_ver < version

    def older_than_version_js(self, version):
        """
        Checks if comment is made from previous version than given
        """
        return str_json(self.older_than_version(version))

    @property
    def commit_id(self):
        """New style naming to stop using .revision"""
        return self.revision

    @property
    def resolved(self):
        # first resolving comment, or None when unresolved
        return self.resolved_by[0] if self.resolved_by else None

    @property
    def is_todo(self):
        return self.comment_type == self.COMMENT_TYPE_TODO

    @property
    def is_inline(self):
        # inline comments carry both a line number and a file path
        if self.line_no and self.f_path:
            return True
        return False

    @property
    def last_version(self):
        # history is ordered by version; 0 when the comment was never edited
        version = 0
        if self.history:
            version = self.history[-1].version
        return version

    def get_index_version(self, versions):
        """Index of this comment's own version within *versions* (see get_index_from_version)."""
        return self.get_index_from_version(
            self.pull_request_version_id, versions)

    @property
    def review_status(self):
        if self.status_change:
            return self.status_change[0].status

    @property
    def review_status_lbl(self):
        if self.status_change:
            return self.status_change[0].status_lbl

    def __repr__(self):
        if self.comment_id:
            return f'<DB:Comment #{self.comment_id}>'
        else:
            return f'<DB:Comment at {id(self)!r}>'

    def get_api_data(self):
        """Return this comment as a plain dict for the JSON API."""
        comment = self

        data = {
            'comment_id': comment.comment_id,
            'comment_type': comment.comment_type,
            'comment_text': comment.text,
            'comment_status': comment.status_change,
            'comment_f_path': comment.f_path,
            'comment_lineno': comment.line_no,
            'comment_author': comment.author,
            'comment_created_on': comment.created_on,
            'comment_resolved_by': self.resolved,
            'comment_commit_id': comment.revision,
            'comment_pull_request_id': comment.pull_request_id,
            'comment_last_version': self.last_version
        }
        return data

    def __json__(self):
        data = dict()
        data.update(self.get_api_data())
        return data
r1 | ||||
r4401 | class ChangesetCommentHistory(Base, BaseModel): | |||
__tablename__ = 'changeset_comments_history' | ||||
__table_args__ = ( | ||||
Index('cch_comment_id_idx', 'comment_id'), | ||||
base_table_args, | ||||
) | ||||
comment_history_id = Column('comment_history_id', Integer(), nullable=False, primary_key=True) | ||||
comment_id = Column('comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=False) | ||||
version = Column("version", Integer(), nullable=False, default=0) | ||||
created_by_user_id = Column('created_by_user_id', Integer(), ForeignKey('users.user_id'), nullable=False) | ||||
text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False) | ||||
created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) | ||||
deleted = Column('deleted', Boolean(), default=False) | ||||
author = relationship('User', lazy='joined') | ||||
r5071 | comment = relationship('ChangesetComment', cascade="all, delete", back_populates="history") | |||
r4401 | ||||
@classmethod | ||||
def get_version(cls, comment_id): | ||||
q = Session().query(ChangesetCommentHistory).filter( | ||||
ChangesetCommentHistory.comment_id == comment_id).order_by(ChangesetCommentHistory.version.desc()) | ||||
if q.count() == 0: | ||||
return 1 | ||||
elif q.count() >= q[0].version: | ||||
return q.count() + 1 | ||||
else: | ||||
return q[0].version + 1 | ||||
class ChangesetStatus(Base, BaseModel):
    """
    Review status attached to a single commit revision, optionally linked to
    the comment that set it and to a pull request. A (repo, revision) pair
    may carry several rows distinguished by ``version``.
    """
    __tablename__ = 'changeset_statuses'
    __table_args__ = (
        Index('cs_revision_idx', 'revision'),
        Index('cs_version_idx', 'version'),
        # one status row per repo/revision/version triple
        UniqueConstraint('repo_id', 'revision', 'version'),
        base_table_args
    )

    # well-known status values; STATUS_NOT_REVIEWED doubles as the default
    STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
    STATUS_APPROVED = 'approved'
    STATUS_REJECTED = 'rejected'
    STATUS_UNDER_REVIEW = 'under_review'

    # (value, translated label) pairs used for display
    STATUSES = [
        (STATUS_NOT_REVIEWED, _("Not Reviewed")),  # (no icon) and default
        (STATUS_APPROVED, _("Approved")),
        (STATUS_REJECTED, _("Rejected")),
        (STATUS_UNDER_REVIEW, _("Under Review")),
    ]

    changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
    revision = Column('revision', String(40), nullable=False)
    status = Column('status', String(128), nullable=False, default=DEFAULT)
    changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
    modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
    version = Column('version', Integer(), nullable=False, default=0)
    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)

    author = relationship('User', lazy='select')
    repo = relationship('Repository', lazy='select')
    comment = relationship('ChangesetComment', lazy='select', back_populates='status_change')
    pull_request = relationship('PullRequest', lazy='select', back_populates='statuses')

    def __repr__(self):
        return f"<{self.cls_name}('{self.status}[v{self.version}]:{self.author}')>"

    @classmethod
    def get_status_lbl(cls, value):
        """Return the translated label for a status value, or None."""
        return dict(cls.STATUSES).get(value)

    @property
    def status_lbl(self):
        """Translated label of this row's status."""
        return ChangesetStatus.get_status_lbl(self.status)

    def get_api_data(self):
        """Minimal API payload: id and status value."""
        status = self
        data = {
            'status_id': status.changeset_status_id,
            'status': status.status,
        }
        return data

    def __json__(self):
        data = dict()
        data.update(self.get_api_data())
        return data
r1 | ||||
class _SetState(object):
    """
    Context manager that switches a pull request into a given state for a
    sensitive operation (update, merge) and restores the original state on a
    clean exit. On an exception, the state is left as-is and the exception
    propagates.
    """

    def __init__(self, pull_request, pr_state, back_state=None):
        self._pr = pull_request
        # state to restore on exit; defaults to the PR's state at entry
        self._org_state = back_state or pull_request.pull_request_state
        self._pr_state = pr_state
        self._current_state = None

    def __enter__(self):
        log.debug('StateLock: entering set state context of pr %s, setting state to: `%s`',
                  self._pr, self._pr_state)
        self.set_pr_state(self._pr_state)
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # guard clause: on error, log the traceback and do NOT restore the
        # state, so the failed transition remains visible
        if exc_type is not None or exc_val is not None:
            log.error(traceback.format_tb(exc_tb))
            return None
        self.set_pr_state(self._org_state)
        log.debug('StateLock: exiting set state context of pr %s, setting state to: `%s`',
                  self._pr, self._org_state)
        return None

    @property
    def state(self):
        """Last state successfully persisted by this context."""
        return self._current_state

    def set_pr_state(self, pr_state):
        """Persist *pr_state* on the pull request; re-raise on failure."""
        try:
            self._pr.pull_request_state = pr_state
            Session().add(self._pr)
            Session().commit()
        except Exception:
            log.exception('Failed to set PullRequest %s state to %s', self._pr, pr_state)
            raise
        else:
            # only record the state once the commit succeeded
            self._current_state = pr_state
r3371 | ||||
r4011 | ||||
class _PullRequestBase(BaseModel):
    """
    Common attributes of pull request and version entries.

    Mixed into both :class:`PullRequest` and :class:`PullRequestVersion`;
    ``@declared_attr`` is used for columns/relationships that must be
    re-created per concrete subclass.
    """

    # .status values (open/closed lifecycle — distinct from review status)
    STATUS_NEW = 'new'
    STATUS_OPEN = 'open'
    STATUS_CLOSED = 'closed'

    # available states (internal processing state, see set_state())
    STATE_CREATING = 'creating'
    STATE_UPDATING = 'updating'
    STATE_MERGING = 'merging'
    STATE_CREATED = 'created'

    title = Column('title', Unicode(255), nullable=True)
    description = Column(
        'description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'),
        nullable=True)
    # name of the renderer used for the description (e.g. markdown/rst) —
    # presumably; exact values set by callers, not visible here
    description_renderer = Column('description_renderer', Unicode(64), nullable=True)

    # new/open/closed status of pull request (not approve/reject/etc)
    status = Column('status', Unicode(255), nullable=False, default=STATUS_NEW)
    created_on = Column(
        'created_on', DateTime(timezone=False), nullable=False,
        default=datetime.datetime.now)
    updated_on = Column(
        'updated_on', DateTime(timezone=False), nullable=False,
        default=datetime.datetime.now)

    pull_request_state = Column("pull_request_state", String(255), nullable=True)

    @declared_attr
    def user_id(cls):
        # author of the pull request
        return Column(
            "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
            unique=None)

    # 500 revisions max
    _revisions = Column(
        'revisions', UnicodeText().with_variant(UnicodeText(20500), 'mysql'))
    common_ancestor_id = Column('common_ancestor_id', Unicode(255), nullable=True)

    @declared_attr
    def source_repo_id(cls):
        # TODO: dan: rename column to source_repo_id
        return Column(
            'org_repo_id', Integer(), ForeignKey('repositories.repo_id'),
            nullable=False)

    @declared_attr
    def pr_source(cls):
        return relationship(
            'Repository',
            primaryjoin=f'{cls.__name__}.source_repo_id==Repository.repo_id',
            overlaps="pull_requests_source"
        )

    # stored as 'type:name:commit_id' (X:Y:Z), validated by the setter below
    _source_ref = Column('org_ref', Unicode(255), nullable=False)

    @hybrid_property
    def source_ref(self):
        return self._source_ref

    @source_ref.setter
    def source_ref(self, val):
        parts = (val or '').split(':')
        if len(parts) != 3:
            raise ValueError(
                'Invalid reference format given: {}, expected X:Y:Z'.format(val))
        self._source_ref = safe_str(val)

    _target_ref = Column('other_ref', Unicode(255), nullable=False)

    @hybrid_property
    def target_ref(self):
        return self._target_ref

    @target_ref.setter
    def target_ref(self, val):
        parts = (val or '').split(':')
        if len(parts) != 3:
            raise ValueError(
                'Invalid reference format given: {}, expected X:Y:Z'.format(val))
        self._target_ref = safe_str(val)

    @declared_attr
    def target_repo_id(cls):
        # TODO: dan: rename column to target_repo_id
        return Column(
            'other_repo_id', Integer(), ForeignKey('repositories.repo_id'),
            nullable=False)

    @declared_attr
    def pr_target(cls):
        return relationship(
            'Repository',
            primaryjoin=f'{cls.__name__}.target_repo_id==Repository.repo_id',
            overlaps="pull_requests_target"
        )

    # serialized reference of the shadow-merge result, see shadow_merge_ref
    _shadow_merge_ref = Column('shadow_merge_ref', Unicode(255), nullable=True)

    # TODO: dan: rename column to last_merge_source_rev
    _last_merge_source_rev = Column(
        'last_merge_org_rev', String(40), nullable=True)
    # TODO: dan: rename column to last_merge_target_rev
    _last_merge_target_rev = Column(
        'last_merge_other_rev', String(40), nullable=True)
    _last_merge_status = Column('merge_status', Integer(), nullable=True)
    last_merge_metadata = Column(
        'last_merge_metadata', MutationObj.as_mutable(
            JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
    merge_rev = Column('merge_rev', String(40), nullable=True)

    reviewer_data = Column(
        'reviewer_data_json', MutationObj.as_mutable(
            JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))

    @property
    def reviewer_data_json(self):
        """Reviewer data re-serialized via str_json."""
        return str_json(self.reviewer_data)

    @property
    def last_merge_metadata_parsed(self):
        """
        De-coerce the mutable JSON merge metadata into plain values;
        'target_ref'/'source_ref' entries become Reference tuples.
        """
        metadata = {}
        if not self.last_merge_metadata:
            return metadata

        if hasattr(self.last_merge_metadata, 'de_coerce'):
            for k, v in self.last_merge_metadata.de_coerce().items():
                if k in ['target_ref', 'source_ref']:
                    metadata[k] = Reference(v['type'], v['name'], v['commit_id'])
                else:
                    if hasattr(v, 'de_coerce'):
                        metadata[k] = v.de_coerce()
                    else:
                        metadata[k] = v
        return metadata

    @property
    def work_in_progress(self):
        """checks if pull request is work in progress by checking the title"""
        title = self.title.upper()
        # matches '[WIP]', 'WIP:' or a leading 'WIP ' word
        if re.match(r'^(\[WIP\]\s*|WIP:\s*|WIP\s+)', title):
            return True
        return False

    @property
    def title_safe(self):
        """Title with braces doubled, safe for str.format-style templates."""
        return self.title\
            .replace('{', '{{')\
            .replace('}', '}}')

    @hybrid_property
    def description_safe(self):
        return description_escaper(self.description)

    @hybrid_property
    def revisions(self):
        # stored as a single colon-joined string; empty column -> []
        return self._revisions.split(':') if self._revisions else []

    @revisions.setter
    def revisions(self, val):
        self._revisions = ':'.join(val)

    @hybrid_property
    def last_merge_status(self):
        return safe_int(self._last_merge_status)

    @last_merge_status.setter
    def last_merge_status(self, val):
        self._last_merge_status = val

    @declared_attr
    def author(cls):
        return relationship(
            'User', lazy='joined',
            #TODO, problem that is somehow :?
            #back_populates='user_pull_requests'
        )

    @declared_attr
    def source_repo(cls):
        return relationship(
            'Repository',
            primaryjoin=f'{cls.__name__}.source_repo_id==Repository.repo_id',
            overlaps="pr_source"
        )

    @property
    def source_ref_parts(self):
        """Source ref parsed into a Reference (type, name, commit_id)."""
        return self.unicode_to_reference(self.source_ref)

    @declared_attr
    def target_repo(cls):
        return relationship(
            'Repository',
            primaryjoin=f'{cls.__name__}.target_repo_id==Repository.repo_id',
            overlaps="pr_target"
        )

    @property
    def target_ref_parts(self):
        """Target ref parsed into a Reference (type, name, commit_id)."""
        return self.unicode_to_reference(self.target_ref)

    @property
    def shadow_merge_ref(self):
        return self.unicode_to_reference(self._shadow_merge_ref)

    @shadow_merge_ref.setter
    def shadow_merge_ref(self, ref):
        self._shadow_merge_ref = self.reference_to_unicode(ref)

    @staticmethod
    def unicode_to_reference(raw):
        # thin wrappers kept for backward compatibility with callers using
        # the class-level helpers
        return unicode_to_reference(raw)

    @staticmethod
    def reference_to_unicode(ref):
        return reference_to_unicode(ref)

    def get_api_data(self, with_merge_state=True):
        """
        Build the full API payload for this pull request.

        :param with_merge_state: when False, skip the (expensive) merge
            status computation and report 'not_available'
        """
        from rhodecode.model.pull_request import PullRequestModel

        pull_request = self
        if with_merge_state:
            merge_response, merge_status, msg = \
                PullRequestModel().merge_status(pull_request)
            merge_state = {
                'status': merge_status,
                'message': safe_str(msg),
            }
        else:
            merge_state = {'status': 'not_available',
                           'message': 'not_available'}

        merge_data = {
            'clone_url': PullRequestModel().get_shadow_clone_url(pull_request),
            'reference': (
                pull_request.shadow_merge_ref.asdict()
                if pull_request.shadow_merge_ref else None),
        }

        data = {
            'pull_request_id': pull_request.pull_request_id,
            'url': PullRequestModel().get_url(pull_request),
            'title': pull_request.title,
            'description': pull_request.description,
            'status': pull_request.status,
            'state': pull_request.pull_request_state,
            'created_on': pull_request.created_on,
            'updated_on': pull_request.updated_on,
            'commit_ids': pull_request.revisions,
            'review_status': pull_request.calculated_review_status(),
            'mergeable': merge_state,
            'source': {
                'clone_url': pull_request.source_repo.clone_url(),
                'repository': pull_request.source_repo.repo_name,
                'reference': {
                    'name': pull_request.source_ref_parts.name,
                    'type': pull_request.source_ref_parts.type,
                    'commit_id': pull_request.source_ref_parts.commit_id,
                },
            },
            'target': {
                'clone_url': pull_request.target_repo.clone_url(),
                'repository': pull_request.target_repo.repo_name,
                'reference': {
                    'name': pull_request.target_ref_parts.name,
                    'type': pull_request.target_ref_parts.type,
                    'commit_id': pull_request.target_ref_parts.commit_id,
                },
            },
            'merge': merge_data,
            'author': pull_request.author.get_api_data(include_secrets=False,
                                                       details='basic'),
            'reviewers': [
                {
                    'user': reviewer.get_api_data(include_secrets=False,
                                                  details='basic'),
                    'reasons': reasons,
                    # first status entry, if any reviewer status was recorded
                    'review_status': st[0][1].status if st else 'not_reviewed',
                }
                for obj, reviewer, reasons, mandatory, st in
                pull_request.reviewers_statuses()
            ]
        }

        return data

    def set_state(self, pull_request_state, final_state=None):
        """
        # goes from initial state to updating to initial state.
        # initial state can be changed by specifying back_state=
        with pull_request_obj.set_state(PullRequest.STATE_UPDATING):
            pull_request.merge()

        :param pull_request_state:
        :param final_state:
        """
        return _SetState(self, pull_request_state, back_state=final_state)
r1192 | ||||
class PullRequest(Base, _PullRequestBase):
    """
    The live pull request record. Historical snapshots live in
    :class:`PullRequestVersion`; shared columns come from _PullRequestBase.
    """
    __tablename__ = 'pull_requests'
    __table_args__ = (
        base_table_args,
    )
    # sentinel used where a "version" argument may mean the current PR
    LATEST_VER = 'latest'

    pull_request_id = Column(
        'pull_request_id', Integer(), nullable=False, primary_key=True)

    def __repr__(self):
        if self.pull_request_id:
            return f'<DB:PullRequest #{self.pull_request_id}>'
        else:
            return f'<DB:PullRequest at {id(self)!r}>'

    def __str__(self):
        if self.pull_request_id:
            return f'#{self.pull_request_id}'
        else:
            return f'#{id(self)!r}'

    reviewers = relationship('PullRequestReviewers', cascade="all, delete-orphan", back_populates='pull_request')
    statuses = relationship('ChangesetStatus', cascade="all, delete-orphan", back_populates='pull_request')
    comments = relationship('ChangesetComment', cascade="all, delete-orphan", back_populates='pull_request')
    # lazy='dynamic' -> `versions` is a query object (see versions_count)
    versions = relationship('PullRequestVersion', cascade="all, delete-orphan", lazy='dynamic', back_populates='pull_request')

    @classmethod
    def get_pr_display_object(cls, pull_request_obj, org_pull_request_obj,
                              internal_methods=None):
        """
        Wrap a pull request (possibly a version snapshot) in a read-only
        display object backed by its API data dict.

        :param pull_request_obj: object whose data is displayed
        :param org_pull_request_obj: the live PR, used for shadow-merge and
            reviewer data
        :param internal_methods: attribute names resolved on the wrapper
            itself instead of the data dict (defaults to ['versions'])
        """

        class PullRequestDisplay(object):
            """
            Special object wrapper for showing PullRequest data via Versions
            It mimics PR object as close as possible. This is read only object
            just for display
            """

            def __init__(self, attrs, internal=None):
                self.attrs = attrs
                # internal have priority over the given ones via attrs
                self.internal = internal or ['versions']

            def __getattr__(self, item):
                # only called when normal attribute lookup fails, so class
                # methods/properties below are found first
                if item in self.internal:
                    return getattr(self, item)
                try:
                    return self.attrs[item]
                except KeyError:
                    raise AttributeError(
                        '%s object has no attribute %s' % (self, item))

            def __repr__(self):
                pr_id = self.attrs.get('pull_request_id')
                return f'<DB:PullRequestDisplay #{pr_id}>'

            def versions(self):
                return pull_request_obj.versions.order_by(
                    PullRequestVersion.pull_request_version_id).all()

            def is_closed(self):
                return pull_request_obj.is_closed()

            def is_state_changing(self):
                return pull_request_obj.is_state_changing()

            @property
            def pull_request_version_id(self):
                return getattr(pull_request_obj, 'pull_request_version_id', None)

            @property
            def pull_request_last_version(self):
                return pull_request_obj.pull_request_last_version

        attrs = StrictAttributeDict(pull_request_obj.get_api_data(with_merge_state=False))

        attrs.author = StrictAttributeDict(
            pull_request_obj.author.get_api_data())
        if pull_request_obj.target_repo:
            attrs.target_repo = StrictAttributeDict(
                pull_request_obj.target_repo.get_api_data())
            attrs.target_repo.clone_url = pull_request_obj.target_repo.clone_url

        if pull_request_obj.source_repo:
            attrs.source_repo = StrictAttributeDict(
                pull_request_obj.source_repo.get_api_data())
            attrs.source_repo.clone_url = pull_request_obj.source_repo.clone_url

        attrs.source_ref_parts = pull_request_obj.source_ref_parts
        attrs.target_ref_parts = pull_request_obj.target_ref_parts
        attrs.revisions = pull_request_obj.revisions
        attrs.common_ancestor_id = pull_request_obj.common_ancestor_id
        # shadow-merge and reviewer data always come from the live PR
        attrs.shadow_merge_ref = org_pull_request_obj.shadow_merge_ref
        attrs.reviewer_data = org_pull_request_obj.reviewer_data
        attrs.reviewer_data_json = org_pull_request_obj.reviewer_data_json

        return PullRequestDisplay(attrs, internal=internal_methods)

    def is_closed(self):
        return self.status == self.STATUS_CLOSED

    def is_state_changing(self):
        # True while the PR is in any transitional state
        return self.pull_request_state != PullRequest.STATE_CREATED

    def __json__(self):
        return {
            'revisions': self.revisions,
            'versions': self.versions_count
        }

    def calculated_review_status(self):
        from rhodecode.model.changeset_status import ChangesetStatusModel
        return ChangesetStatusModel().calculated_review_status(self)

    def reviewers_statuses(self, user=None):
        from rhodecode.model.changeset_status import ChangesetStatusModel
        return ChangesetStatusModel().reviewers_statuses(self, user=user)

    def get_pull_request_reviewers(self, role=None):
        """Reviewer rows for this PR, optionally filtered by role."""
        qry = PullRequestReviewers.query()\
            .filter(PullRequestReviewers.pull_request_id == self.pull_request_id)
        if role:
            qry = qry.filter(PullRequestReviewers.role == role)
        return qry.all()

    @property
    def reviewers_count(self):
        qry = PullRequestReviewers.query()\
            .filter(PullRequestReviewers.pull_request_id == self.pull_request_id)\
            .filter(PullRequestReviewers.role == PullRequestReviewers.ROLE_REVIEWER)
        return qry.count()

    @property
    def observers_count(self):
        qry = PullRequestReviewers.query()\
            .filter(PullRequestReviewers.pull_request_id == self.pull_request_id)\
            .filter(PullRequestReviewers.role == PullRequestReviewers.ROLE_OBSERVER)
        return qry.count()

    def observers(self):
        """Yield (reviewer_row, user) pairs for observer-role entries."""
        qry = PullRequestReviewers.query()\
            .filter(PullRequestReviewers.pull_request_id == self.pull_request_id)\
            .filter(PullRequestReviewers.role == PullRequestReviewers.ROLE_OBSERVER)\
            .all()

        for entry in qry:
            yield entry, entry.user

    @property
    def workspace_id(self):
        from rhodecode.model.pull_request import PullRequestModel
        return PullRequestModel()._workspace_id(self)

    def get_shadow_repo(self):
        """Return the shadow repo instance, or None if its dir is missing."""
        workspace_id = self.workspace_id
        shadow_repository_path = self.target_repo.get_shadow_repository_path(workspace_id)
        if os.path.isdir(shadow_repository_path):
            vcs_obj = self.target_repo.scm_instance()
            return vcs_obj.get_shadow_instance(shadow_repository_path)

    @property
    def versions_count(self):
        """
        return number of versions this PR have, e.g a PR that once been
        updated will have 2 versions
        """
        return self.versions.count() + 1

    @property
    def pull_request_last_version(self):
        return self.versions_count
r1 | ||||
class PullRequestVersion(Base, _PullRequestBase):
    """
    Immutable snapshot of a pull request taken at each update; most
    behaviour delegates to the parent :class:`PullRequest`.
    """
    __tablename__ = 'pull_request_versions'
    __table_args__ = (
        base_table_args,
    )

    pull_request_version_id = Column('pull_request_version_id', Integer(), nullable=False, primary_key=True)
    pull_request_id = Column('pull_request_id', Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=False)
    pull_request = relationship('PullRequest', back_populates='versions')

    def __repr__(self):
        if self.pull_request_version_id:
            return f'<DB:PullRequestVersion #{self.pull_request_version_id}>'
        else:
            return f'<DB:PullRequestVersion at {id(self)!r}>'

    @property
    def reviewers(self):
        # delegated: reviewers always belong to the live PR
        return self.pull_request.reviewers

    @property
    def versions(self):
        return self.pull_request.versions

    def is_closed(self):
        # calculate from original
        return self.pull_request.status == self.STATUS_CLOSED

    def is_state_changing(self):
        return self.pull_request.pull_request_state != PullRequest.STATE_CREATED

    def calculated_review_status(self):
        return self.pull_request.calculated_review_status()

    def reviewers_statuses(self):
        return self.pull_request.reviewers_statuses()

    def observers(self):
        return self.pull_request.observers()
r1 | ||||
class PullRequestReviewers(Base, BaseModel):
    """
    Association of a user with a pull request, carrying the review role
    (reviewer/observer), the reasons the user was added, and rule data.
    """
    __tablename__ = 'pull_request_reviewers'
    __table_args__ = (
        base_table_args,
    )
    ROLE_REVIEWER = 'reviewer'
    ROLE_OBSERVER = 'observer'
    ROLES = [ROLE_REVIEWER, ROLE_OBSERVER]

    # NOTE: hybrid property declared before its backing `_reasons` column;
    # kept in this order to preserve original declaration semantics
    @hybrid_property
    def reasons(self):
        if not self._reasons:
            return []
        return self._reasons

    @reasons.setter
    def reasons(self, val):
        val = val or []
        if any(not isinstance(x, str) for x in val):
            raise Exception('invalid reasons type, must be list of strings')
        self._reasons = val

    pull_requests_reviewers_id = Column(
        'pull_requests_reviewers_id', Integer(), nullable=False,
        primary_key=True)
    pull_request_id = Column(
        "pull_request_id", Integer(),
        ForeignKey('pull_requests.pull_request_id'), nullable=False)
    user_id = Column(
        "user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
    # JSON list of reasons; accessed via the `reasons` hybrid property
    _reasons = Column(
        'reason', MutationList.as_mutable(
            JsonType('list', dialect_map=dict(mysql=UnicodeText(16384)))))

    mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
    role = Column('role', Unicode(255), nullable=True, default=ROLE_REVIEWER)

    user = relationship('User')
    pull_request = relationship('PullRequest', back_populates='reviewers')

    rule_data = Column(
        'rule_data_json',
        JsonType(dialect_map=dict(mysql=UnicodeText(16384))))

    def rule_user_group_data(self):
        """
        Returns the voting user group rule data for this reviewer
        """

        if self.rule_data and 'vote_rule' in self.rule_data:
            user_group_data = {}
            if 'rule_user_group_entry_id' in self.rule_data:
                # means a group with voting rules !
                user_group_data['id'] = self.rule_data['rule_user_group_entry_id']
                user_group_data['name'] = self.rule_data['rule_name']
                user_group_data['vote_rule'] = self.rule_data['vote_rule']

            return user_group_data

    @classmethod
    def get_pull_request_reviewers(cls, pull_request_id, role=None):
        """Reviewer rows for the given PR id, optionally filtered by role."""
        qry = PullRequestReviewers.query()\
            .filter(PullRequestReviewers.pull_request_id == pull_request_id)
        if role:
            qry = qry.filter(PullRequestReviewers.role == role)
        return qry.all()

    def __repr__(self):
        return f"<{self.cls_name}('id:{self.pull_requests_reviewers_id}')>"
r2484 | ||||
r1 | ||||
class Notification(Base, BaseModel):
    """
    A notification fanned out to one or more users through the
    :class:`UserNotification` association table.
    """
    __tablename__ = 'notifications'
    __table_args__ = (
        Index('notification_type_idx', 'type'),
        base_table_args,
    )

    TYPE_CHANGESET_COMMENT = 'cs_comment'
    TYPE_MESSAGE = 'message'
    TYPE_MENTION = 'mention'
    TYPE_REGISTRATION = 'registration'
    TYPE_PULL_REQUEST = 'pull_request'
    TYPE_PULL_REQUEST_COMMENT = 'pull_request_comment'
    TYPE_PULL_REQUEST_UPDATE = 'pull_request_update'

    notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
    subject = Column('subject', Unicode(512), nullable=True)
    body = Column('body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
    created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    type_ = Column('type', Unicode(255))

    created_by_user = relationship('User', back_populates='user_created_notifications')
    notifications_to_users = relationship('UserNotification', lazy='joined', cascade="all, delete-orphan", back_populates='notification')

    @property
    def recipients(self):
        """Users this notification was delivered to, ordered by user_id."""
        return [x.user for x in UserNotification.query()\
                .filter(UserNotification.notification == self)\
                .order_by(UserNotification.user_id.asc()).all()]

    @classmethod
    def create(cls, created_by, subject, body, recipients, type_=None):
        """
        Create a notification and link it to each recipient; the creator's
        own copy is pre-marked as read. Caller is responsible for commit.
        """
        if type_ is None:
            type_ = Notification.TYPE_MESSAGE

        notification = cls()
        notification.created_by_user = created_by
        notification.subject = subject
        notification.body = body
        notification.type_ = type_
        notification.created_on = datetime.datetime.now()

        # For each recipient link the created notification to his account
        for u in recipients:
            assoc = UserNotification()
            assoc.user_id = u.user_id
            assoc.notification = notification

            # if created_by is inside recipients mark his notification
            # as read
            if u.user_id == created_by.user_id:
                assoc.read = True
            Session().add(assoc)

        Session().add(notification)
        return notification
class UserNotification(Base, BaseModel):
    """
    Association row linking a user to a notification, with per-user
    read/sent tracking; PK is the (user_id, notification_id) pair.
    """
    __tablename__ = 'user_to_notification'
    __table_args__ = (
        UniqueConstraint('user_id', 'notification_id'),
        base_table_args
    )

    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
    notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
    read = Column('read', Boolean, default=False)
    sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)

    user = relationship('User', lazy="joined", back_populates='notifications')
    notification = relationship('Notification', lazy="joined", order_by=lambda: Notification.created_on.desc(), back_populates='notifications_to_users')

    def mark_as_read(self):
        """Flag this user's copy as read; caller commits the session."""
        self.read = True
        Session().add(self)
class UserNotice(Base, BaseModel):
    """
    Per-user system notice (info/warning/error banner) shown until read.
    """
    __tablename__ = 'user_notices'
    __table_args__ = (
        base_table_args
    )

    NOTIFICATION_TYPE_MESSAGE = 'message'
    NOTIFICATION_TYPE_NOTICE = 'notice'

    NOTIFICATION_LEVEL_INFO = 'info'
    NOTIFICATION_LEVEL_WARNING = 'warning'
    NOTIFICATION_LEVEL_ERROR = 'error'

    # NOTE: physical column is named 'gist_id' — historical artifact kept
    # for schema compatibility; attribute name is the meaningful one
    user_notice_id = Column('gist_id', Integer(), primary_key=True)

    notice_subject = Column('notice_subject', Unicode(512), nullable=True)
    notice_body = Column('notice_body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)

    notice_read = Column('notice_read', Boolean, default=False)

    notification_level = Column('notification_level', String(1024), default=NOTIFICATION_LEVEL_INFO)
    notification_type = Column('notification_type', String(1024), default=NOTIFICATION_TYPE_NOTICE)

    notice_created_by = Column('notice_created_by', Integer(), ForeignKey('users.user_id'), nullable=True)
    notice_created_on = Column('notice_created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)

    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'))
    user = relationship('User', lazy="joined", primaryjoin='User.user_id==UserNotice.user_id')

    @classmethod
    def create_for_user(cls, user, subject, body, notice_level=NOTIFICATION_LEVEL_INFO, allow_duplicate=False):
        """
        Create and commit a notice for *user*.

        Silently returns (creates nothing) when the level is unknown, or
        when an identical unread notice already exists and
        ``allow_duplicate`` is False.
        """
        if notice_level not in [cls.NOTIFICATION_LEVEL_ERROR,
                                cls.NOTIFICATION_LEVEL_WARNING,
                                cls.NOTIFICATION_LEVEL_INFO]:
            return

        from rhodecode.model.user import UserModel
        user = UserModel().get_user(user)
        new_notice = UserNotice()
        if not allow_duplicate:
            # duplicate = same user + same body, still unread
            existing_msg = UserNotice().query() \
                .filter(UserNotice.user == user) \
                .filter(UserNotice.notice_body == body) \
                .filter(UserNotice.notice_read == false()) \
                .scalar()
            if existing_msg:
                log.warning('Ignoring duplicate notice for user %s', user)
                return

        new_notice.user = user
        new_notice.notice_subject = subject
        new_notice.notice_body = body
        new_notice.notification_level = notice_level
        Session().add(new_notice)
        Session().commit()
class Gist(Base, BaseModel):
    """
    A code snippet (gist), public or private, addressed by its
    ``gist_access_id``; file content lives in a backing repo under
    base_path(), not in the database.
    """
    __tablename__ = 'gists'
    __table_args__ = (
        Index('g_gist_access_id_idx', 'gist_access_id'),
        Index('g_created_on_idx', 'created_on'),
        base_table_args
    )

    GIST_PUBLIC = 'public'
    GIST_PRIVATE = 'private'
    DEFAULT_FILENAME = 'gistfile1.txt'

    ACL_LEVEL_PUBLIC = 'acl_public'
    ACL_LEVEL_PRIVATE = 'acl_private'

    gist_id = Column('gist_id', Integer(), primary_key=True)
    gist_access_id = Column('gist_access_id', Unicode(250))
    gist_description = Column('gist_description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
    gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
    # expiry timestamp stored as a float — presumably epoch seconds, with a
    # sentinel for "never"; confirm against GistModel usage
    gist_expires = Column('gist_expires', Float(53), nullable=False)
    gist_type = Column('gist_type', Unicode(128), nullable=False)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    acl_level = Column('acl_level', Unicode(128), nullable=True)

    owner = relationship('User', back_populates='user_gists')

    def __repr__(self):
        return f'<Gist:[{self.gist_type}]{self.gist_access_id}>'

    @hybrid_property
    def description_safe(self):
        return description_escaper(self.gist_description)

    @classmethod
    def get_or_404(cls, id_):
        """Fetch by access id or raise HTTPNotFound (404)."""
        from pyramid.httpexceptions import HTTPNotFound

        res = cls.query().filter(cls.gist_access_id == id_).scalar()
        if not res:
            log.debug('WARN: No DB entry with id %s', id_)
            raise HTTPNotFound()
        return res

    @classmethod
    def get_by_access_id(cls, gist_access_id):
        return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()

    def gist_url(self):
        from rhodecode.model.gist import GistModel
        return GistModel().get_url(self)

    @classmethod
    def base_path(cls):
        """
        Returns base path when all gists are stored

        :param cls:
        """
        from rhodecode.model.gist import GIST_STORE_LOC
        from rhodecode.lib.utils import get_rhodecode_repo_store_path
        repo_store_path = get_rhodecode_repo_store_path()
        return os.path.join(repo_store_path, GIST_STORE_LOC)

    def get_api_data(self):
        """
        Common function for generating gist related data for API
        """
        gist = self
        data = {
            'gist_id': gist.gist_id,
            'type': gist.gist_type,
            'access_id': gist.gist_access_id,
            'description': gist.gist_description,
            'url': gist.gist_url(),
            'expires': gist.gist_expires,
            'created_on': gist.created_on,
            'modified_at': gist.modified_at,
            # file content is never inlined in the API payload
            'content': None,
            'acl_level': gist.acl_level,
        }
        return data

    def __json__(self):
        data = dict()
        data.update(self.get_api_data())
        return data
# SCM functions | ||||
def scm_instance(self, **kwargs): | ||||
r3536 | """ | |||
r3740 | Get an instance of VCS Repository | |||
r3536 | :param kwargs: | |||
""" | ||||
from rhodecode.model.gist import GistModel | ||||
Martin Bornhold
|
r485 | full_repo_path = os.path.join(self.base_path(), self.gist_access_id) | ||
return get_vcs_instance( | ||||
r3536 | repo_path=safe_str(full_repo_path), create=False, | |||
_vcs_alias=GistModel.vcs_backend) | ||||
r1 | ||||
class ExternalIdentity(Base, BaseModel):
    """
    Link between a local RhodeCode user and an identity at an external
    auth provider (composite PK: external_id + provider_name + local_user_id).
    """
    __tablename__ = 'external_identities'
    __table_args__ = (
        Index('local_user_id_idx', 'local_user_id'),
        Index('external_id_idx', 'external_id'),
        base_table_args
    )

    external_id = Column('external_id', Unicode(255), default='', primary_key=True)
    external_username = Column('external_username', Unicode(1024), default='')
    local_user_id = Column('local_user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
    provider_name = Column('provider_name', Unicode(255), default='', primary_key=True)
    access_token = Column('access_token', String(1024), default='')
    alt_token = Column('alt_token', String(1024), default='')
    token_secret = Column('token_secret', String(1024), default='')

    @classmethod
    def by_external_id_and_provider(cls, external_id, provider_name, local_user_id=None):
        """
        Returns ExternalIdentity instance based on search params

        :param external_id:
        :param provider_name:
        :return: ExternalIdentity
        """
        query = cls.query()
        query = query.filter(cls.external_id == external_id)
        query = query.filter(cls.provider_name == provider_name)
        # NOTE(review): truthiness check — a local_user_id of 0 would be
        # skipped; presumably user ids start at 1, verify against User model
        if local_user_id:
            query = query.filter(cls.local_user_id == local_user_id)
        return query.first()

    @classmethod
    def user_by_external_id_and_provider(cls, external_id, provider_name):
        """
        Returns User instance based on search params

        :param external_id:
        :param provider_name:
        :return: User
        """
        # implicit join: User row matched via User.user_id == cls.local_user_id
        query = User.query()
        query = query.filter(cls.external_id == external_id)
        query = query.filter(cls.provider_name == provider_name)
        query = query.filter(User.user_id == cls.local_user_id)
        return query.first()

    @classmethod
    def by_local_user_id(cls, local_user_id):
        """
        Returns all tokens for user

        :param local_user_id:
        :return: ExternalIdentity
        """
        query = cls.query()
        query = query.filter(cls.local_user_id == local_user_id)
        return query

    @classmethod
    def load_provider_plugin(cls, plugin_id):
        # resolves the EE auth plugin registered under the given id
        from rhodecode.authentication.base import loadplugin
        _plugin_id = 'egg:rhodecode-enterprise-ee#{}'.format(plugin_id)
        auth_plugin = loadplugin(_plugin_id)
        return auth_plugin
r411 | ||||
class Integration(Base, BaseModel):
    """
    A configured integration (webhook, slack, etc.). Scope is either a
    single repo, a repo group (optionally child repos only), root repos,
    or global — see the `scope` property.
    """
    __tablename__ = 'integrations'
    __table_args__ = (
        base_table_args
    )

    integration_id = Column('integration_id', Integer(), primary_key=True)
    integration_type = Column('integration_type', String(255))
    enabled = Column('enabled', Boolean(), nullable=False)
    name = Column('name', String(255), nullable=False)
    child_repos_only = Column('child_repos_only', Boolean(), nullable=False, default=False)

    # mutable JSON blob holding per-integration-type settings
    settings = Column(
        'settings_json', MutationObj.as_mutable(
            JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
    repo = relationship('Repository', lazy='joined', back_populates='integrations')

    repo_group_id = Column('repo_group_id', Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
    repo_group = relationship('RepoGroup', lazy='joined', back_populates='integrations')

    @property
    def scope(self):
        # human-readable description of what this integration applies to;
        # repo takes precedence over repo_group, then the global flags
        if self.repo:
            return repr(self.repo)
        if self.repo_group:
            if self.child_repos_only:
                return repr(self.repo_group) + ' (child repos only)'
            else:
                return repr(self.repo_group) + ' (recursive)'
        if self.child_repos_only:
            return 'root_repos'
        return 'global'

    def __repr__(self):
        return '<Integration(%r, %r)>' % (self.integration_type, self.scope)
r821 | ||||
class RepoReviewRuleUser(Base, BaseModel):
    """
    A single user attached to a repo review rule, with a role
    (reviewer/observer) and a mandatory flag.
    """
    __tablename__ = 'repo_review_rules_users'
    __table_args__ = (
        base_table_args
    )
    # roles a rule member can have
    ROLE_REVIEWER = 'reviewer'
    ROLE_OBSERVER = 'observer'
    ROLES = [ROLE_REVIEWER, ROLE_OBSERVER]

    repo_review_rule_user_id = Column('repo_review_rule_user_id', Integer(), primary_key=True)
    repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False)
    mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
    role = Column('role', Unicode(255), nullable=True, default=ROLE_REVIEWER)
    user = relationship('User', back_populates='user_review_rules')

    def rule_data(self):
        # per-user rule attributes consumed by RepoReviewRule.review_users
        return {
            'mandatory': self.mandatory,
            'role': self.role,
        }
r821 | ||||
class RepoReviewRuleUserGroup(Base, BaseModel):
    """
    A user group attached to a repo review rule, with a role, mandatory
    flag, and a vote rule (how many group members must vote).
    """
    __tablename__ = 'repo_review_rules_users_groups'
    __table_args__ = (
        base_table_args
    )

    # sentinel meaning "all group members must vote"
    VOTE_RULE_ALL = -1
    ROLE_REVIEWER = 'reviewer'
    ROLE_OBSERVER = 'observer'
    ROLES = [ROLE_REVIEWER, ROLE_OBSERVER]

    repo_review_rule_users_group_id = Column('repo_review_rule_users_group_id', Integer(), primary_key=True)
    repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False)
    mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
    role = Column('role', Unicode(255), nullable=True, default=ROLE_REVIEWER)
    vote_rule = Column("vote_rule", Integer(), nullable=True, default=VOTE_RULE_ALL)
    users_group = relationship('UserGroup')

    def rule_data(self):
        # per-group rule attributes consumed by RepoReviewRule.review_users
        return {
            'mandatory': self.mandatory,
            'role': self.role,
            'vote_rule': self.vote_rule
        }

    @property
    def vote_rule_label(self):
        # NULL/0/VOTE_RULE_ALL all display as "all must vote"
        if not self.vote_rule or self.vote_rule == self.VOTE_RULE_ALL:
            return 'all must vote'
        else:
            return 'min. vote {}'.format(self.vote_rule)
r821 | ||||
class RepoReviewRule(Base, BaseModel):
    """
    A per-repository review rule: glob or `re:`-prefixed regex patterns for
    source branch, target branch and changed files, plus the users/groups
    that should review matching pull requests.
    """
    __tablename__ = 'repo_review_rules'
    __table_args__ = (
        base_table_args
    )

    repo_review_rule_id = Column(
        'repo_review_rule_id', Integer(), primary_key=True)
    repo_id = Column(
        "repo_id", Integer(), ForeignKey('repositories.repo_id'))
    repo = relationship('Repository', back_populates='review_rules')

    review_rule_name = Column('review_rule_name', String(255))
    # backing columns for the pattern hybrid properties below; '*' = match all
    _branch_pattern = Column("branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default='*')  # glob
    _target_branch_pattern = Column("target_branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default='*')  # glob
    _file_pattern = Column("file_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default='*')  # glob

    use_authors_for_review = Column("use_authors_for_review", Boolean(), nullable=False, default=False)

    # Legacy fields, just for backward compat
    _forbid_author_to_review = Column("forbid_author_to_review", Boolean(), nullable=False, default=False)
    _forbid_commit_author_to_review = Column("forbid_commit_author_to_review", Boolean(), nullable=False, default=False)

    # string-flag columns interpreted by the *_author_to_review properties
    pr_author = Column("pr_author", UnicodeText().with_variant(UnicodeText(255), 'mysql'), nullable=True)
    commit_author = Column("commit_author", UnicodeText().with_variant(UnicodeText(255), 'mysql'), nullable=True)

    forbid_adding_reviewers = Column("forbid_adding_reviewers", Boolean(), nullable=False, default=False)

    rule_users = relationship('RepoReviewRuleUser')
    rule_user_groups = relationship('RepoReviewRuleUserGroup')

    def _validate_pattern(self, value):
        # raises re.error for an invalid glob; result is discarded on purpose
        re.compile('^' + glob2re(value) + '$')

    @hybrid_property
    def source_branch_pattern(self):
        return self._branch_pattern or '*'

    @source_branch_pattern.setter
    def source_branch_pattern(self, value):
        self._validate_pattern(value)
        self._branch_pattern = value or '*'

    @hybrid_property
    def target_branch_pattern(self):
        return self._target_branch_pattern or '*'

    @target_branch_pattern.setter
    def target_branch_pattern(self, value):
        self._validate_pattern(value)
        self._target_branch_pattern = value or '*'

    @hybrid_property
    def file_pattern(self):
        return self._file_pattern or '*'

    @file_pattern.setter
    def file_pattern(self, value):
        self._validate_pattern(value)
        self._file_pattern = value or '*'

    @hybrid_property
    def forbid_pr_author_to_review(self):
        return self.pr_author == 'forbid_pr_author'

    @hybrid_property
    def include_pr_author_to_review(self):
        return self.pr_author == 'include_pr_author'

    @hybrid_property
    def forbid_commit_author_to_review(self):
        return self.commit_author == 'forbid_commit_author'

    @hybrid_property
    def include_commit_author_to_review(self):
        return self.commit_author == 'include_commit_author'

    def matches(self, source_branch, target_branch, files_changed):
        """
        Check if this review rule matches a branch/files in a pull request

        :param source_branch: source branch name for the commit
        :param target_branch: target branch name for the commit
        :param files_changed: list of file paths changed in the pull request
        """

        source_branch = source_branch or ''
        target_branch = target_branch or ''
        files_changed = files_changed or []

        branch_matches = True
        if source_branch or target_branch:
            # patterns prefixed with 're:' are raw regexes; otherwise the
            # glob is translated and anchored at both ends
            if self.source_branch_pattern == '*':
                source_branch_match = True
            else:
                if self.source_branch_pattern.startswith('re:'):
                    source_pattern = self.source_branch_pattern[3:]
                else:
                    source_pattern = '^' + glob2re(self.source_branch_pattern) + '$'
                source_branch_regex = re.compile(source_pattern)
                source_branch_match = bool(source_branch_regex.search(source_branch))
            if self.target_branch_pattern == '*':
                target_branch_match = True
            else:
                if self.target_branch_pattern.startswith('re:'):
                    target_pattern = self.target_branch_pattern[3:]
                else:
                    target_pattern = '^' + glob2re(self.target_branch_pattern) + '$'
                target_branch_regex = re.compile(target_pattern)
                target_branch_match = bool(target_branch_regex.search(target_branch))

            branch_matches = source_branch_match and target_branch_match

        files_matches = True
        if self.file_pattern != '*':
            files_matches = False
            if self.file_pattern.startswith('re:'):
                file_pattern = self.file_pattern[3:]
            else:
                # NOTE: file pattern is intentionally NOT anchored — any
                # substring match of a changed filename counts
                file_pattern = glob2re(self.file_pattern)
            file_regex = re.compile(file_pattern)
            for file_data in files_changed:
                filename = file_data.get('filename')

                if file_regex.search(filename):
                    files_matches = True
                    break

        return branch_matches and files_matches

    @property
    def review_users(self):
        """ Returns the users which this rule applies to """
        users = collections.OrderedDict()

        for rule_user in self.rule_users:
            if rule_user.user.active:
                # NOTE(review): this membership test compares a User object
                # against username keys, so it is presumably always True;
                # the later `key in users` check below does the real dedup
                if rule_user.user not in users:
                    users[rule_user.user.username] = {
                        'user': rule_user.user,
                        'source': 'user',
                        'source_data': {},
                        'data': rule_user.rule_data()
                    }

        for rule_user_group in self.rule_user_groups:
            source_data = {
                'user_group_id': rule_user_group.users_group.users_group_id,
                'name': rule_user_group.users_group.users_group_name,
                'members': len(rule_user_group.users_group.members)
            }
            for member in rule_user_group.users_group.members:
                if member.user.active:
                    key = member.user.username
                    if key in users:
                        # skip this member as we have him already
                        # this prevents from override the "first" matched
                        # users with duplicates in multiple groups
                        continue

                    users[key] = {
                        'user': member.user,
                        'source': 'user_group',
                        'source_data': source_data,
                        'data': rule_user_group.rule_data()
                    }

        return users

    def user_group_vote_rule(self, user_id):
        """
        Return the rule_user_groups entries whose group contains `user_id`.
        """
        rules = []
        if not self.rule_user_groups:
            return rules

        for user_group in self.rule_user_groups:
            user_group_members = [x.user_id for x in user_group.users_group.members]
            if user_id in user_group_members:
                rules.append(user_group)

        return rules

    def __repr__(self):
        return f'<RepoReviewerRule(id={self.repo_review_rule_id}, repo={self.repo!r})>'
r1294 | ||||
class ScheduleEntry(Base, BaseModel):
    """
    A scheduler entry (celery-beat style): which task to run, with which
    args/kwargs, and on what schedule (crontab or integer interval).

    `task_uid` is a hash of task + arguments kept unique per entry; the
    before_insert/before_update listeners below refresh it automatically.
    """
    __tablename__ = 'schedule_entries'
    __table_args__ = (
        UniqueConstraint('schedule_name', name='s_schedule_name_idx'),
        UniqueConstraint('task_uid', name='s_task_uid_idx'),
        base_table_args,
    )

    # the two supported schedule kinds
    SCHEDULE_TYPE_INTEGER = "integer"
    SCHEDULE_TYPE_CRONTAB = "crontab"

    schedule_types = [SCHEDULE_TYPE_CRONTAB, SCHEDULE_TYPE_INTEGER]
    schedule_entry_id = Column('schedule_entry_id', Integer(), primary_key=True)
    schedule_name = Column("schedule_name", String(255), nullable=False, unique=None, default=None)
    schedule_description = Column("schedule_description", String(10000), nullable=True, unique=None, default=None)
    schedule_enabled = Column("schedule_enabled", Boolean(), nullable=False, unique=None, default=True)
    # backing column for the validated `schedule_type` hybrid property
    _schedule_type = Column("schedule_type", String(255), nullable=False, unique=None, default=None)
    schedule_definition = Column('schedule_definition_json', MutationObj.as_mutable(JsonType(default=lambda: "", dialect_map=dict(mysql=LONGTEXT()))))
    schedule_last_run = Column('schedule_last_run', DateTime(timezone=False), nullable=True, unique=None, default=None)
    schedule_total_run_count = Column('schedule_total_run_count', Integer(), nullable=True, unique=None, default=0)

    # task
    task_uid = Column("task_uid", String(255), nullable=False, unique=None, default=None)
    task_dot_notation = Column("task_dot_notation", String(4096), nullable=False, unique=None, default=None)
    task_args = Column('task_args_json', MutationObj.as_mutable(JsonType(default=list, dialect_map=dict(mysql=LONGTEXT()))))
    task_kwargs = Column('task_kwargs_json', MutationObj.as_mutable(JsonType(default=dict, dialect_map=dict(mysql=LONGTEXT()))))

    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=None)

    @hybrid_property
    def schedule_type(self):
        return self._schedule_type

    @schedule_type.setter
    def schedule_type(self, val):
        if val not in self.schedule_types:
            # FIX: the previous message swapped the rejected value with the
            # allowed choices ("must be on of `{val}` and got `{current}`")
            raise ValueError(f'Value must be one of `{self.schedule_types}` and got `{val}`')

        self._schedule_type = val

    @classmethod
    def get_uid(cls, obj):
        """
        Compute a stable uid (sha1) from task dot-notation + args + sorted
        kwargs; identical task+arguments always produce the same uid.
        """
        args = obj.task_args
        kwargs = obj.task_kwargs

        # JsonRaw values arrive as raw JSON text and must be parsed first;
        # on bad JSON fall back to empty args/kwargs (logged, not raised)
        if isinstance(args, JsonRaw):
            try:
                args = json.loads(str(args))
            except ValueError:
                log.exception('json.loads of args failed...')
                args = tuple()

        if isinstance(kwargs, JsonRaw):
            try:
                kwargs = json.loads(str(kwargs))
            except ValueError:
                log.exception('json.loads of kwargs failed...')
                kwargs = dict()

        dot_notation = obj.task_dot_notation
        # sorting kwargs makes the uid independent of insertion order
        val = '.'.join(map(safe_str, [dot_notation, args, sorted(kwargs.items())]))
        log.debug('calculating task uid using id:`%s`', val)
        return sha1(safe_bytes(val))

    @classmethod
    def get_by_schedule_name(cls, schedule_name):
        return cls.query().filter(cls.schedule_name == schedule_name).scalar()

    @classmethod
    def get_by_schedule_id(cls, schedule_id):
        return cls.query().filter(cls.schedule_entry_id == schedule_id).scalar()

    @classmethod
    def get_by_task_uid(cls, task_uid):
        return cls.query().filter(cls.task_uid == task_uid).scalar()

    @property
    def task(self):
        # dotted path of the task callable
        return self.task_dot_notation

    @property
    def schedule(self):
        # materialize the stored JSON definition into a schedule object
        from rhodecode.lib.celerylib.utils import raw_2_schedule
        schedule = raw_2_schedule(self.schedule_definition, self.schedule_type)
        return schedule

    @property
    def args(self):
        try:
            return list(self.task_args or [])
        except ValueError:
            return list()

    @property
    def kwargs(self):
        try:
            return dict(self.task_kwargs or {})
        except ValueError:
            return dict()

    def _as_raw(self, val, indent=False):
        """Serialize a (possibly JSON-coerced) value back to a JSON string."""
        if hasattr(val, 'de_coerce'):
            val = val.de_coerce()
        if val:
            if indent:
                val = ext_json.formatted_str_json(val)
            else:
                val = ext_json.str_json(val)

        return val

    @property
    def schedule_definition_raw(self):
        return self._as_raw(self.schedule_definition)

    def args_raw(self, indent=False):
        return self._as_raw(self.task_args, indent)

    def kwargs_raw(self, indent=False):
        return self._as_raw(self.task_kwargs, indent)

    def __repr__(self):
        return f'<DB:ScheduleEntry({self.schedule_entry_id}:{self.schedule_name})>'
r2406 | ||||
@event.listens_for(ScheduleEntry, 'before_update')
def update_task_uid(mapper, connection, target):
    # keep task_uid consistent with the (possibly changed) task + arguments
    target.task_uid = ScheduleEntry.get_uid(target)


@event.listens_for(ScheduleEntry, 'before_insert')
def set_task_uid(mapper, connection, target):
    # compute the initial task_uid before the row is first persisted
    target.task_uid = ScheduleEntry.get_uid(target)
class _BaseBranchPerms(BaseModel):
    """
    Mixin shared by branch-permission models: a glob branch pattern plus a
    derived hash column, and glob matching against a branch name.
    """
    @classmethod
    def compute_hash(cls, value):
        return sha1_safe(value)

    @hybrid_property
    def branch_pattern(self):
        # empty/NULL pattern behaves as match-all
        return self._branch_pattern or '*'

    @hybrid_property
    def branch_hash(self):
        return self._branch_hash

    def _validate_glob(self, value):
        # raises re.error for an invalid glob; result is discarded on purpose
        re.compile('^' + glob2re(value) + '$')

    @branch_pattern.setter
    def branch_pattern(self, value):
        self._validate_glob(value)
        self._branch_pattern = value or '*'
        # set the Hash when setting the branch pattern
        self._branch_hash = self.compute_hash(self._branch_pattern)

    def matches(self, branch):
        """
        Check if this the branch matches entry

        :param branch: branch name for the commit
        """

        branch = branch or ''

        # empty branch name matches vacuously (True)
        branch_matches = True
        if branch:
            branch_regex = re.compile('^' + glob2re(self.branch_pattern) + '$')
            branch_matches = bool(branch_regex.search(branch))

        return branch_matches
class UserToRepoBranchPermission(Base, _BaseBranchPerms):
    """
    Branch-level permission rule attached to a user -> repository permission
    entry; pattern handling/matching comes from _BaseBranchPerms.
    """
    __tablename__ = 'user_to_repo_branch_permissions'
    __table_args__ = (
        base_table_args
    )

    branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True)

    repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
    repo = relationship('Repository', back_populates='user_branch_perms')

    permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    permission = relationship('Permission')

    rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('repo_to_perm.repo_to_perm_id'), nullable=False, unique=None, default=None)
    user_repo_to_perm = relationship('UserRepoToPerm', back_populates='branch_perm_entry')

    # rules are evaluated in rule_order; pattern columns from _BaseBranchPerms
    rule_order = Column('rule_order', Integer(), nullable=False)
    _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default='*')  # glob
    _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql'))

    def __repr__(self):
        return f'<UserBranchPermission({self.user_repo_to_perm} => {self.branch_pattern!r})>'
r2975 | ||||
class UserGroupToRepoBranchPermission(Base, _BaseBranchPerms):
    """
    Branch-level permission rule attached to a user-group -> repository
    permission entry; pattern handling/matching comes from _BaseBranchPerms.
    """
    __tablename__ = 'user_group_to_repo_branch_permissions'
    __table_args__ = (
        base_table_args
    )

    branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True)

    repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
    repo = relationship('Repository', back_populates='user_group_branch_perms')

    permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    permission = relationship('Permission')

    rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('users_group_repo_to_perm.users_group_to_perm_id'), nullable=False, unique=None, default=None)
    user_group_repo_to_perm = relationship('UserGroupRepoToPerm', back_populates='user_group_branch_perms')

    # rules are evaluated in rule_order; pattern columns from _BaseBranchPerms
    rule_order = Column('rule_order', Integer(), nullable=False)
    _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default='*')  # glob
    _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql'))

    def __repr__(self):
        # FIX: previously said "UserBranchPermission" (copy-paste from the
        # user-level class), making the two rule kinds indistinguishable in
        # debug output
        return f'<UserGroupBranchPermission({self.user_group_repo_to_perm} => {self.branch_pattern!r})>'
r2975 | ||||
class UserBookmark(Base, BaseModel):
    """
    A per-user bookmark shown in the UI: points at a repository, a repo
    group, or a plain redirect URL, at a unique position per user.
    """
    __tablename__ = 'user_bookmarks'
    __table_args__ = (
        UniqueConstraint('user_id', 'bookmark_repo_id'),
        UniqueConstraint('user_id', 'bookmark_repo_group_id'),
        UniqueConstraint('user_id', 'bookmark_position'),
        base_table_args
    )

    user_bookmark_id = Column("user_bookmark_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    position = Column("bookmark_position", Integer(), nullable=False)
    title = Column("bookmark_title", String(255), nullable=True, unique=None, default=None)
    redirect_url = Column("bookmark_redirect_url", String(10240), nullable=True, unique=None, default=None)
    created_on = Column("created_on", DateTime(timezone=False), nullable=False, default=datetime.datetime.now)

    # exactly one of these targets is expected to be set per bookmark —
    # TODO confirm: schema allows both to be NULL (pure redirect_url bookmark)
    bookmark_repo_id = Column("bookmark_repo_id", Integer(), ForeignKey("repositories.repo_id"), nullable=True, unique=None, default=None)
    bookmark_repo_group_id = Column("bookmark_repo_group_id", Integer(), ForeignKey("groups.group_id"), nullable=True, unique=None, default=None)

    user = relationship("User")

    repository = relationship("Repository")
    repository_group = relationship("RepoGroup")

    @classmethod
    def get_by_position_for_user(cls, position, user_id):
        return cls.query() \
            .filter(UserBookmark.user_id == user_id) \
            .filter(UserBookmark.position == position).scalar()

    @classmethod
    def get_bookmarks_for_user(cls, user_id, cache=True):
        """
        Return (title, position, repo columns, repo-group columns) rows for
        all bookmarks of a user, ordered by position; optionally cached.
        """
        # outer joins keep bookmarks whose repo/repo-group target is NULL
        bookmarks = select(
            UserBookmark.title,
            UserBookmark.position,
        ) \
            .add_columns(Repository.repo_id, Repository.repo_type, Repository.repo_name) \
            .add_columns(RepoGroup.group_id, RepoGroup.group_name) \
            .where(UserBookmark.user_id == user_id) \
            .outerjoin(Repository, Repository.repo_id == UserBookmark.bookmark_repo_id) \
            .outerjoin(RepoGroup, RepoGroup.group_id == UserBookmark.bookmark_repo_group_id) \
            .order_by(UserBookmark.position.asc())

        if cache:
            bookmarks = bookmarks.options(
                FromCache("sql_cache_short", f"get_user_{user_id}_bookmarks")
            )

        return Session().execute(bookmarks).all()

    def __repr__(self):
        return f'<UserBookmark({self.position} @ {self.redirect_url!r})>'
r3424 | ||||
class FileStore(Base, BaseModel):
    """
    Metadata row for an uploaded file/artifact kept in the file store.
    Optional scope columns (user / user group / repo / repo group) limit who
    may access the file when `check_acl` is set.
    """
    __tablename__ = 'file_store'
    __table_args__ = (
        base_table_args
    )

    file_store_id = Column('file_store_id', Integer(), primary_key=True)
    file_uid = Column('file_uid', String(1024), nullable=False)
    file_display_name = Column('file_display_name', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), nullable=True)
    file_description = Column('file_description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=True)
    file_org_name = Column('file_org_name', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=False)

    # sha256 hash
    file_hash = Column('file_hash', String(512), nullable=False)
    file_size = Column('file_size', BigInteger(), nullable=False)

    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True)
    accessed_count = Column('accessed_count', Integer(), default=0)

    enabled = Column('enabled', Boolean(), nullable=False, default=True)

    # if repo/repo_group reference is set, check for permissions
    check_acl = Column('check_acl', Boolean(), nullable=False, default=True)

    # hidden defines an attachment that should be hidden from showing in artifact listing
    hidden = Column('hidden', Boolean(), nullable=False, default=False)

    # uploading user (distinct from the scope_user_id ACL column below)
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
    upload_user = relationship('User', lazy='joined', primaryjoin='User.user_id==FileStore.user_id', back_populates='artifacts')

    file_metadata = relationship('FileStoreMetadata', lazy='joined')

    # scope limited to user, which requester have access to
    scope_user_id = Column(
        'scope_user_id', Integer(), ForeignKey('users.user_id'),
        nullable=True, unique=None, default=None)
    user = relationship('User', lazy='joined', primaryjoin='User.user_id==FileStore.scope_user_id', back_populates='scope_artifacts')

    # scope limited to user group, which requester have access to
    scope_user_group_id = Column(
        'scope_user_group_id', Integer(), ForeignKey('users_groups.users_group_id'),
        nullable=True, unique=None, default=None)
    user_group = relationship('UserGroup', lazy='joined')

    # scope limited to repo, which requester have access to
    scope_repo_id = Column(
        'scope_repo_id', Integer(), ForeignKey('repositories.repo_id'),
        nullable=True, unique=None, default=None)
    repo = relationship('Repository', lazy='joined')

    # scope limited to repo group, which requester have access to
    scope_repo_group_id = Column(
        'scope_repo_group_id', Integer(), ForeignKey('groups.group_id'),
        nullable=True, unique=None, default=None)
    repo_group = relationship('RepoGroup', lazy='joined')
r3457 | @classmethod | |||
r5137 | def get_scope(cls, scope_type, scope_id): | |||
if scope_type == 'repo': | ||||
return f'repo:{scope_id}' | ||||
elif scope_type == 'repo-group': | ||||
return f'repo-group:{scope_id}' | ||||
elif scope_type == 'user': | ||||
return f'user:{scope_id}' | ||||
elif scope_type == 'user-group': | ||||
return f'user-group:{scope_id}' | ||||
else: | ||||
return scope_type | ||||
@classmethod | ||||
r4476 | def get_by_store_uid(cls, file_store_uid, safe=False): | |||
if safe: | ||||
return FileStore.query().filter(FileStore.file_uid == file_store_uid).first() | ||||
else: | ||||
return FileStore.query().filter(FileStore.file_uid == file_store_uid).scalar() | ||||
r3997 | ||||
@classmethod | ||||
r3457 | def create(cls, file_uid, filename, file_hash, file_size, file_display_name='', | |||
r3973 | file_description='', enabled=True, hidden=False, check_acl=True, | |||
user_id=None, scope_user_id=None, scope_repo_id=None, scope_repo_group_id=None): | ||||
r3457 | ||||
store_entry = FileStore() | ||||
store_entry.file_uid = file_uid | ||||
store_entry.file_display_name = file_display_name | ||||
store_entry.file_org_name = filename | ||||
store_entry.file_size = file_size | ||||
store_entry.file_hash = file_hash | ||||
store_entry.file_description = file_description | ||||
store_entry.check_acl = check_acl | ||||
store_entry.enabled = enabled | ||||
r3973 | store_entry.hidden = hidden | |||
r3457 | ||||
store_entry.user_id = user_id | ||||
r3674 | store_entry.scope_user_id = scope_user_id | |||
r3457 | store_entry.scope_repo_id = scope_repo_id | |||
store_entry.scope_repo_group_id = scope_repo_group_id | ||||
r3973 | ||||
r3457 | return store_entry | |||
    @classmethod
    def store_metadata(cls, file_store_id, args, commit=True):
        """
        Attach metadata entries to an existing artifact.

        :param file_store_id: primary key of the FileStore row; silently
            returns when no such row exists.
        :param args: iterable of ``(section, key, value, value_type)`` tuples.
        :param commit: when True, commit the session after adding all entries.
        :raises ArtifactMetadataDuplicate: if a (section, key) pair already
            exists for this artifact — either detected by the pre-check query
            or by the DB unique constraint on commit.
        :raises ArtifactMetadataBadValueType: for an unknown value_type.
        """
        file_store = FileStore.get(file_store_id)
        if file_store is None:
            return

        for section, key, value, value_type in args:
            # pre-check for an existing (section, key) pair on this artifact;
            # scalar() returns None when no row matches
            has_key = FileStoreMetadata().query() \
                .filter(FileStoreMetadata.file_store_id == file_store.file_store_id) \
                .filter(FileStoreMetadata.file_store_meta_section == section) \
                .filter(FileStoreMetadata.file_store_meta_key == key) \
                .scalar()
            if has_key:
                msg = f'key `{key}` already defined under section `{section}` for this file.'
                raise ArtifactMetadataDuplicate(msg, err_section=section, err_key=key)

            # NOTE(marcink): raises ArtifactMetadataBadValueType
            FileStoreMetadata.valid_value_type(value_type)

            # the hybrid-property setters below also maintain the hashed
            # section/key columns and handle encrypted value types
            meta_entry = FileStoreMetadata()
            meta_entry.file_store = file_store
            meta_entry.file_store_meta_section = section
            meta_entry.file_store_meta_key = key
            meta_entry.file_store_meta_value_type = value_type
            meta_entry.file_store_meta_value = value

            Session().add(meta_entry)

        try:
            if commit:
                Session().commit()
        except IntegrityError:
            # unique constraint raced past the pre-check above; roll back and
            # surface as the same domain exception
            Session().rollback()
            raise ArtifactMetadataDuplicate('Duplicate section/key found for this file.')
r3991 | ||||
@classmethod | ||||
r3457 | def bump_access_counter(cls, file_uid, commit=True): | |||
FileStore().query()\ | ||||
.filter(FileStore.file_uid == file_uid)\ | ||||
.update({FileStore.accessed_count: (FileStore.accessed_count + 1), | ||||
FileStore.accessed_on: datetime.datetime.now()}) | ||||
if commit: | ||||
Session().commit() | ||||
r3999 | def __json__(self): | |||
data = { | ||||
'filename': self.file_display_name, | ||||
'filename_org': self.file_org_name, | ||||
'file_uid': self.file_uid, | ||||
'description': self.file_description, | ||||
'hidden': self.hidden, | ||||
'size': self.file_size, | ||||
'created_on': self.created_on, | ||||
'uploaded_by': self.upload_user.get_api_data(details='basic'), | ||||
'downloaded_times': self.accessed_count, | ||||
'sha256': self.file_hash, | ||||
'metadata': self.file_metadata, | ||||
} | ||||
return data | ||||
r5071 | def __repr__(self): | |||
r5010 | return f'<FileStore({self.file_store_id})>' | |||
r3456 | ||||
class FileStoreMetadata(Base, BaseModel):
    """
    Typed key/value metadata attached to a FileStore artifact.

    Section and key are stored twice: as full (possibly long) text columns
    and as hashed columns used for the unique constraint, since the text
    columns may exceed index length limits on some backends.  Values are
    stored as text and converted on access according to
    ``file_store_meta_value_type`` (e.g. 'int', 'bool', or
    'unicode.encrypted' for encrypted-at-rest values).
    """
    __tablename__ = 'file_store_metadata'
    __table_args__ = (
        # uniqueness is enforced on the hashed section/key pair per artifact
        UniqueConstraint('file_store_id', 'file_store_meta_section_hash', 'file_store_meta_key_hash'),
        Index('file_store_meta_section_idx', 'file_store_meta_section', mysql_length=255),
        Index('file_store_meta_key_idx', 'file_store_meta_key', mysql_length=255),
        base_table_args
    )
    # converters applied when reading a value back; the base type is the
    # part of value_type before the first '.' (e.g. 'unicode.encrypted')
    SETTINGS_TYPES = {
        'str': safe_str,
        'int': safe_int,
        'unicode': safe_str,
        'bool': str2bool,
        'list': functools.partial(aslist, sep=',')
    }

    file_store_meta_id = Column(
        "file_store_meta_id", Integer(), nullable=False, unique=True, default=None,
        primary_key=True)
    # full-text section name; public access goes through the hybrid property
    _file_store_meta_section = Column(
        "file_store_meta_section", UnicodeText().with_variant(UnicodeText(1024), 'mysql'),
        nullable=True, unique=None, default=None)
    # hash of the section, maintained by the setter, used in the unique constraint
    _file_store_meta_section_hash = Column(
        "file_store_meta_section_hash", String(255),
        nullable=True, unique=None, default=None)
    # full-text key name; public access goes through the hybrid property
    _file_store_meta_key = Column(
        "file_store_meta_key", UnicodeText().with_variant(UnicodeText(1024), 'mysql'),
        nullable=True, unique=None, default=None)
    # hash of the key, maintained by the setter, used in the unique constraint
    _file_store_meta_key_hash = Column(
        "file_store_meta_key_hash", String(255), nullable=True, unique=None, default=None)
    # raw stored value (possibly encrypted); decoded by the hybrid property
    _file_store_meta_value = Column(
        "file_store_meta_value", UnicodeText().with_variant(UnicodeText(20480), 'mysql'),
        nullable=True, unique=None, default=None)
    _file_store_meta_value_type = Column(
        "file_store_meta_value_type", String(255), nullable=True, unique=None,
        default='unicode')

    file_store_id = Column(
        'file_store_id', Integer(), ForeignKey('file_store.file_store_id'),
        nullable=True, unique=None, default=None)

    file_store = relationship('FileStore', lazy='joined', viewonly=True)

    @classmethod
    def valid_value_type(cls, value):
        """Raise ArtifactMetadataBadValueType unless the base type of
        ``value`` (part before the first '.') is a known SETTINGS_TYPES key."""
        if value.split('.')[0] not in cls.SETTINGS_TYPES:
            raise ArtifactMetadataBadValueType(
                f'value_type must be one of {cls.SETTINGS_TYPES.keys()} got {value}')

    @hybrid_property
    def file_store_meta_section(self):
        return self._file_store_meta_section

    @file_store_meta_section.setter
    def file_store_meta_section(self, value):
        # keep the hashed column in sync for the unique constraint
        self._file_store_meta_section = value
        self._file_store_meta_section_hash = _hash_key(value)

    @hybrid_property
    def file_store_meta_key(self):
        return self._file_store_meta_key

    @file_store_meta_key.setter
    def file_store_meta_key(self, value):
        # keep the hashed column in sync for the unique constraint
        self._file_store_meta_key = value
        self._file_store_meta_key_hash = _hash_key(value)

    @hybrid_property
    def file_store_meta_value(self):
        """Return the stored value decrypted (if needed) and converted to
        its declared type."""
        val = self._file_store_meta_value

        if self._file_store_meta_value_type:
            # e.g unicode.encrypted == unicode
            _type = self._file_store_meta_value_type.split('.')[0]
            # decode the encrypted value if it's encrypted field type
            if '.encrypted' in self._file_store_meta_value_type:
                cipher = EncryptedTextValue()
                val = safe_str(cipher.process_result_value(val, None))
            # do final type conversion
            converter = self.SETTINGS_TYPES.get(_type) or self.SETTINGS_TYPES['unicode']
            val = converter(val)

        return val

    @file_store_meta_value.setter
    def file_store_meta_value(self, val):
        # NOTE: reads self.file_store_meta_value_type, so the value type must
        # be assigned before the value when populating a new instance
        val = safe_str(val)
        # encode the encrypted value
        if '.encrypted' in self.file_store_meta_value_type:
            cipher = EncryptedTextValue()
            val = safe_str(cipher.process_bind_param(val, None))
        self._file_store_meta_value = val

    @hybrid_property
    def file_store_meta_value_type(self):
        return self._file_store_meta_value_type

    @file_store_meta_value_type.setter
    def file_store_meta_value_type(self, val):
        # e.g unicode.encrypted
        self.valid_value_type(val)
        self._file_store_meta_value_type = val

    def __json__(self):
        # serialize one metadata row for API output
        data = {
            'artifact': self.file_store.file_uid,
            'section': self.file_store_meta_section,
            'key': self.file_store_meta_key,
            'value': self.file_store_meta_value,
        }
        return data

    def __repr__(self):
        return '<%s[%s]%s=>%s]>' % (self.cls_name, self.file_store_meta_section,
                                    self.file_store_meta_key, self.file_store_meta_value)
class DbMigrateVersion(Base, BaseModel):
    """Tracks the schema migration version applied to this database."""
    __tablename__ = 'db_migrate_version'
    __table_args__ = (
        base_table_args,
    )

    repository_id = Column('repository_id', String(250), primary_key=True)
    repository_path = Column('repository_path', Text)
    version = Column('version', Integer)

    @classmethod
    def set_version(cls, version):
        """
        Helper for forcing a different version, usually for debugging purposes via ishell.
        """
        # NOTE(review): assumes a version row already exists — `ver` would be
        # None on an empty table; acceptable for this debug-only helper.
        ver = DbMigrateVersion.query().first()
        ver.version = version
        Session().commit()
r1294 | ||||
class DbSession(Base, BaseModel):
    """Database-backed HTTP session storage (pickled session data per namespace)."""
    __tablename__ = 'db_session'
    __table_args__ = (
        base_table_args,
    )

    def __repr__(self):
        return f'<DB:DbSession({self.id})>'

    id = Column('id', Integer())
    namespace = Column('namespace', String(255), primary_key=True)
    accessed = Column('accessed', DateTime, nullable=False)
    created = Column('created', DateTime, nullable=False)
    # pickled session payload — only trusted, server-written data should land here
    data = Column('data', PickleType, nullable=False)