##// END OF EJS Templates
fix(tests): fixed few tests
fix(tests): fixed few tests

File last commit:

r5516:3496180b default
r5575:521d91fe default
Show More
views.py
197 lines | 8.0 KiB | text/x-python | PythonLexer
copyrights: updated for 2023
r5088 # Copyright (C) 2016-2023 RhodeCode GmbH
file-store: rename module from upload_store to file_store.
r3453 #
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/
import logging
feat(artifacts): new artifact storage engines allowing an s3 based uploads
r5516 from pyramid.response import Response
file-store: rename module from upload_store to file_store.
r3453 from pyramid.httpexceptions import HTTPFound, HTTPNotFound
from rhodecode.apps._base import BaseAppView
feat(artifacts): new artifact storage engines allowing an s3 based uploads
r5516 from rhodecode.apps.file_store import utils as store_utils
file-store: rename module from upload_store to file_store.
r3453 from rhodecode.apps.file_store.exceptions import (
file-store: save DB entry on upload, and track access times.
r3457 FileNotAllowedException, FileOverSizeException)
file-store: rename module from upload_store to file_store.
r3453
from rhodecode.lib import helpers as h
from rhodecode.lib import audit_logger
artifacts: expose a special auth-token based artifacts download urls....
r4003 from rhodecode.lib.auth import (
CSRFRequired, NotAnonymous, HasRepoPermissionAny, HasRepoGroupPermissionAny,
LoginRequired)
feat(artifacts): new artifact storage engines allowing an s3 based uploads
r5516 from rhodecode.lib.str_utils import header_safe_str
file-store: use our own logic for setting content-type. This solves a problem...
r4237 from rhodecode.lib.vcs.conf.mtypes import get_mimetypes_db
artifacts: expose a special auth-token based artifacts download urls....
r4003 from rhodecode.model.db import Session, FileStore, UserApiKeys
file-store: rename module from upload_store to file_store.
r3453
log = logging.getLogger(__name__)
class FileStoreView(BaseAppView):
    """
    Views for uploading and downloading file-store objects (artifacts).

    Downloads are ACL-checked in `_serve_file` against the scope recorded on
    the DB entry (user / repository / repository group); an entry without any
    scope is accessible to everyone.
    """

    # name of the multipart form field that carries the uploaded file
    upload_key = 'store_file'

    def load_default_context(self):
        c = self._get_local_tmpl_context()
        # resolve the storage backend (local FS / S3 / ...) from app settings
        self.f_store = store_utils.get_filestore_backend(self.request.registry.settings)
        return c

    def _guess_type(self, file_name):
        """
        Our own type guesser for mimetypes using the rich DB
        """
        if not hasattr(self, 'db'):
            # lazily build and cache the mimetypes DB on first use
            self.db = get_mimetypes_db()
        _content_type, _encoding = self.db.guess_type(file_name, strict=False)
        return _content_type, _encoding

    @staticmethod
    def _upload_error(message):
        """Uniform error payload returned by `upload_file` on failure."""
        return {'store_fid': None,
                'access_path': None,
                'error': message}

    def _serve_file(self, file_uid):
        """
        Stream the stored file identified by `file_uid` as an attachment.

        Enforces the artifact's ACL scopes (user / repo / repo group) and
        raises HTTPNotFound for any missing object or denied access, so a
        caller cannot distinguish "absent" from "forbidden".
        """
        if not self.f_store.filename_exists(file_uid):
            store_path = self.f_store.store_path(file_uid)
            log.warning('File with FID:%s not found in the store under `%s`',
                        file_uid, store_path)
            raise HTTPNotFound()

        db_obj = FileStore.get_by_store_uid(file_uid, safe=True)
        if not db_obj:
            raise HTTPNotFound()

        # private upload for user
        if db_obj.check_acl and db_obj.scope_user_id:
            log.debug('Artifact: checking scope access for bound artifact user: `%s`',
                      db_obj.scope_user_id)
            user = db_obj.user
            if self._rhodecode_db_user.user_id != user.user_id:
                log.warning('Access to file store object forbidden')
                raise HTTPNotFound()

        # scoped to repository permissions
        if db_obj.check_acl and db_obj.scope_repo_id:
            log.debug('Artifact: checking scope access for bound artifact repo: `%s`',
                      db_obj.scope_repo_id)
            repo = db_obj.repo
            perm_set = ['repository.read', 'repository.write', 'repository.admin']
            has_perm = HasRepoPermissionAny(*perm_set)(repo.repo_name, 'FileStore check')
            if not has_perm:
                log.warning('Access to file store object `%s` forbidden', file_uid)
                raise HTTPNotFound()

        # scoped to repository group permissions
        if db_obj.check_acl and db_obj.scope_repo_group_id:
            log.debug('Artifact: checking scope access for bound artifact repo group: `%s`',
                      db_obj.scope_repo_group_id)
            repo_group = db_obj.repo_group
            perm_set = ['group.read', 'group.write', 'group.admin']
            has_perm = HasRepoGroupPermissionAny(*perm_set)(repo_group.group_name, 'FileStore check')
            if not has_perm:
                log.warning('Access to file store object `%s` forbidden', file_uid)
                raise HTTPNotFound()

        FileStore.bump_access_counter(file_uid)

        file_name = db_obj.file_display_name

        # fall back to a generic binary type when guessing fails
        content_type = 'application/octet-stream'
        _content_type, _encoding = self._guess_type(file_name)
        if _content_type:
            content_type = _content_type

        # For file store we don't submit any session data, this logic tells the
        # Session lib to skip it
        setattr(self.request, '_file_response', True)

        reader, _meta = self.f_store.fetch(file_uid)

        response = Response(app_iter=store_utils.archive_iterator(reader))

        response.content_type = str(content_type)
        response.content_disposition = f'attachment; filename="{header_safe_str(file_name)}"'
        response.headers["X-RC-Artifact-Id"] = str(db_obj.file_store_id)
        response.headers["X-RC-Artifact-Desc"] = header_safe_str(db_obj.file_description)
        response.headers["X-RC-Artifact-Sha256"] = str(db_obj.file_hash)
        return response

    @LoginRequired()
    @NotAnonymous()
    @CSRFRequired()
    def upload_file(self):
        """
        Store the POSTed file in the backend and record a DB entry for it.

        Returns a dict with `store_fid` and `access_path` on success, or the
        same keys set to None plus an `error` message on failure.
        """
        self.load_default_context()
        file_obj = self.request.POST.get(self.upload_key)

        if file_obj is None:
            return self._upload_error(f'{self.upload_key} data field is missing')

        if not hasattr(file_obj, 'filename'):
            return self._upload_error('filename cannot be read from the data field')

        filename = file_obj.filename

        # keep an audit trail of who uploaded the file alongside the content
        metadata = {
            'user_uploaded': {'username': self._rhodecode_user.username,
                              'user_id': self._rhodecode_user.user_id,
                              'ip': self._rhodecode_user.ip_addr}}
        try:
            store_uid, metadata = self.f_store.store(
                filename, file_obj.file, extra_metadata=metadata)
        except FileNotAllowedException:
            return self._upload_error('File (unknown) is not allowed.')
        except FileOverSizeException:
            return self._upload_error('File (unknown) is exceeding allowed limit.')

        try:
            entry = FileStore.create(
                file_uid=store_uid, filename=metadata["filename"],
                file_hash=metadata["sha256"], file_size=metadata["size"],
                file_description='upload attachment',
                # plain attachments carry no ACL scope, anyone may fetch them
                check_acl=False, user_id=self._rhodecode_user.user_id
            )
            Session().add(entry)
            Session().commit()
            log.debug('Stored upload in DB as %s', entry)
        except Exception:
            log.exception('Failed to store file %s', filename)
            return self._upload_error('File (unknown) failed to store in DB.')

        return {'store_fid': store_uid,
                'access_path': h.route_path('download_file', fid=store_uid)}

    # ACL is checked by scopes, if no scope the file is accessible to all
    def download_file(self):
        self.load_default_context()
        file_uid = self.request.matchdict['fid']
        log.debug('Requesting FID:%s from store %s', file_uid, self.f_store)
        return self._serve_file(file_uid)

    # in addition to @LoginRequired ACL is checked by scopes
    @LoginRequired(auth_token_access=[UserApiKeys.ROLE_ARTIFACT_DOWNLOAD])
    @NotAnonymous()
    def download_file_by_token(self):
        """
        Special view that allows to access the download file by special URL that
        is stored inside the URL.

        http://example.com/_file_store/token-download/TOKEN/FILE_UID
        """
        self.load_default_context()
        file_uid = self.request.matchdict['fid']
        return self._serve_file(file_uid)