file-store: log missing store files better
marcink
r3916:3561f4ad default
@@ -1,223 +1,226 @@
 # -*- coding: utf-8 -*-

 # Copyright (C) 2016-2019 RhodeCode GmbH
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License, version 3
 # (only), as published by the Free Software Foundation.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
 # This program is dual-licensed. If you wish to learn more about the
 # RhodeCode Enterprise Edition, including its added features, Support services,
 # and proprietary license terms, please see https://rhodecode.com/licenses/

 import os
 import time
 import shutil
 import hashlib

 from rhodecode.lib.ext_json import json
 from rhodecode.apps.file_store import utils
 from rhodecode.apps.file_store.extensions import resolve_extensions
 from rhodecode.apps.file_store.exceptions import FileNotAllowedException

 METADATA_VER = 'v1'


 class LocalFileStorage(object):

     @classmethod
     def resolve_name(cls, name, directory):
         """
         Resolves a unique name and the correct path. If a filename
         for that path already exists then a numeric prefix with values > 0 will be
         added, for example test.jpg -> 1-test.jpg etc. initially file would have 0 prefix.

         :param name: base name of file
         :param directory: absolute directory path
         """

         counter = 0
         while True:
             name = '%d-%s' % (counter, name)

             # sub_store prefix to optimize disk usage, e.g some_path/ab/final_file
             sub_store = cls._sub_store_from_filename(name)
             sub_store_path = os.path.join(directory, sub_store)
             if not os.path.exists(sub_store_path):
                 os.makedirs(sub_store_path)

             path = os.path.join(sub_store_path, name)
             if not os.path.exists(path):
                 return name, path
             counter += 1

     @classmethod
     def _sub_store_from_filename(cls, filename):
         return filename[:2]

     @classmethod
     def calculate_path_hash(cls, file_path):
         """
         Efficient calculation of file_path sha256 sum

         :param file_path:
         :return: sha256sum
         """
         digest = hashlib.sha256()
         with open(file_path, 'rb') as f:
             for chunk in iter(lambda: f.read(1024 * 100), b""):
                 digest.update(chunk)

         return digest.hexdigest()

     def __init__(self, base_path, extension_groups=None):

         """
         Local file storage

         :param base_path: the absolute base path where uploads are stored
         :param extension_groups: extensions string
         """

         extension_groups = extension_groups or ['any']
         self.base_path = base_path
         self.extensions = resolve_extensions([], groups=extension_groups)

+    def __repr__(self):
+        return '{}@{}'.format(self.__class__, self.base_path)
+
     def store_path(self, filename):
         """
         Returns absolute file path of the filename, joined to the
         base_path.

         :param filename: base name of file
         """
         sub_store = self._sub_store_from_filename(filename)
         return os.path.join(self.base_path, sub_store, filename)

     def delete(self, filename):
         """
         Deletes the filename. Filename is resolved with the
         absolute path based on base_path. If file does not exist,
         returns **False**, otherwise **True**

         :param filename: base name of file
         """
         if self.exists(filename):
             os.remove(self.store_path(filename))
             return True
         return False

     def exists(self, filename):
         """
         Checks if file exists. Resolves filename's absolute
         path based on base_path.

         :param filename: base name of file
         """
         return os.path.exists(self.store_path(filename))

     def filename_allowed(self, filename, extensions=None):
         """Checks if a filename has an allowed extension

         :param filename: base name of file
         :param extensions: iterable of extensions (or self.extensions)
         """
         _, ext = os.path.splitext(filename)
         return self.extension_allowed(ext, extensions)

     def extension_allowed(self, ext, extensions=None):
         """
         Checks if an extension is permitted. Both e.g. ".jpg" and
         "jpg" can be passed in. Extension lookup is case-insensitive.

         :param ext: extension to check
         :param extensions: iterable of extensions to validate against (or self.extensions)
         """

         extensions = extensions or self.extensions
         if not extensions:
             return True
         if ext.startswith('.'):
             ext = ext[1:]
         return ext.lower() in extensions

     def save_file(self, file_obj, filename, directory=None, extensions=None,
                   extra_metadata=None, **kwargs):
         """
         Saves a file object to the uploads location.
         Returns the resolved filename, i.e. the directory +
         the (randomized/incremented) base name.

         :param file_obj: **cgi.FieldStorage** object (or similar)
         :param filename: original filename
         :param directory: relative path of sub-directory
         :param extensions: iterable of allowed extensions, if not default
         :param extra_metadata: extra JSON metadata to store next to the file with .meta suffix
         """

         extensions = extensions or self.extensions

         if not self.filename_allowed(filename, extensions):
             raise FileNotAllowedException()

         if directory:
             dest_directory = os.path.join(self.base_path, directory)
         else:
             dest_directory = self.base_path

         if not os.path.exists(dest_directory):
             os.makedirs(dest_directory)

         filename = utils.uid_filename(filename)

         # resolve also produces special sub-dir for file optimized store
         filename, path = self.resolve_name(filename, dest_directory)
         stored_file_dir = os.path.dirname(path)

         file_obj.seek(0)

         with open(path, "wb") as dest:
             shutil.copyfileobj(file_obj, dest)

         metadata = {}
         if extra_metadata:
             metadata = extra_metadata

         size = os.stat(path).st_size
         file_hash = self.calculate_path_hash(path)

         metadata.update(
             {"filename": filename,
              "size": size,
              "time": time.time(),
              "sha256": file_hash,
              "meta_ver": METADATA_VER})

         filename_meta = filename + '.meta'
         with open(os.path.join(stored_file_dir, filename_meta), "wb") as dest_meta:
             dest_meta.write(json.dumps(metadata))

         if directory:
             filename = os.path.join(directory, filename)

         return filename, metadata

     def get_metadata(self, filename):
         """
         Reads JSON stored metadata for a file

         :param filename:
         :return:
         """
         filename = self.store_path(filename)
         filename_meta = filename + '.meta'

         with open(filename_meta, "rb") as source_meta:
             return json.loads(source_meta.read())
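
The new __repr__ above makes a LocalFileStorage instance render as class@base_path when it is interpolated into a log message (for example the existing "Requesting FID:%s from store %s" debug line in the view below), instead of a bare object address. A minimal sketch of the effect, using a stand-in class with the same __repr__ body; the DemoStorage name, logger name, base path and FID are made up for illustration:

import logging

logging.basicConfig(level=logging.DEBUG)
log = logging.getLogger('file_store_demo')


class DemoStorage(object):
    # stand-in mirroring LocalFileStorage.__repr__ from the diff above
    def __init__(self, base_path):
        self.base_path = base_path

    def __repr__(self):
        return '{}@{}'.format(self.__class__, self.base_path)


storage = DemoStorage('/tmp/file_store')  # hypothetical base path
log.debug('Requesting FID:%s from store %s', '0-demo.jpg', storage)
# logs roughly: Requesting FID:0-demo.jpg from store <class '__main__.DemoStorage'>@/tmp/file_store
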
@@ -1,144 +1,146 @@
 # -*- coding: utf-8 -*-

 # Copyright (C) 2016-2019 RhodeCode GmbH
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License, version 3
 # (only), as published by the Free Software Foundation.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
 # This program is dual-licensed. If you wish to learn more about the
 # RhodeCode Enterprise Edition, including its added features, Support services,
 # and proprietary license terms, please see https://rhodecode.com/licenses/
 import logging

 from pyramid.view import view_config
 from pyramid.response import FileResponse
 from pyramid.httpexceptions import HTTPFound, HTTPNotFound

 from rhodecode.apps._base import BaseAppView
 from rhodecode.apps.file_store import utils
 from rhodecode.apps.file_store.exceptions import (
     FileNotAllowedException, FileOverSizeException)

 from rhodecode.lib import helpers as h
 from rhodecode.lib import audit_logger
 from rhodecode.lib.auth import (CSRFRequired, NotAnonymous, HasRepoPermissionAny, HasRepoGroupPermissionAny)
 from rhodecode.model.db import Session, FileStore

 log = logging.getLogger(__name__)


 class FileStoreView(BaseAppView):
     upload_key = 'store_file'

     def load_default_context(self):
         c = self._get_local_tmpl_context()
         self.storage = utils.get_file_storage(self.request.registry.settings)
         return c

     @NotAnonymous()
     @CSRFRequired()
     @view_config(route_name='upload_file', request_method='POST', renderer='json_ext')
     def upload_file(self):
         self.load_default_context()
         file_obj = self.request.POST.get(self.upload_key)

         if file_obj is None:
             return {'store_fid': None,
                     'access_path': None,
                     'error': '{} data field is missing'.format(self.upload_key)}

         if not hasattr(file_obj, 'filename'):
             return {'store_fid': None,
                     'access_path': None,
                     'error': 'filename cannot be read from the data field'}

         filename = file_obj.filename

         metadata = {
             'user_uploaded': {'username': self._rhodecode_user.username,
                               'user_id': self._rhodecode_user.user_id,
                               'ip': self._rhodecode_user.ip_addr}}
         try:
             store_uid, metadata = self.storage.save_file(
                 file_obj.file, filename, extra_metadata=metadata)
         except FileNotAllowedException:
             return {'store_fid': None,
                     'access_path': None,
                     'error': 'File {} is not allowed.'.format(filename)}

         except FileOverSizeException:
             return {'store_fid': None,
                     'access_path': None,
                     'error': 'File {} is exceeding allowed limit.'.format(filename)}

         try:
             entry = FileStore.create(
                 file_uid=store_uid, filename=metadata["filename"],
                 file_hash=metadata["sha256"], file_size=metadata["size"],
                 file_description='upload attachment',
                 check_acl=False, user_id=self._rhodecode_user.user_id
             )
             Session().add(entry)
             Session().commit()
             log.debug('Stored upload in DB as %s', entry)
         except Exception:
             log.exception('Failed to store file %s', filename)
             return {'store_fid': None,
                     'access_path': None,
                     'error': 'File {} failed to store in DB.'.format(filename)}

         return {'store_fid': store_uid,
                 'access_path': h.route_path('download_file', fid=store_uid)}

     @view_config(route_name='download_file')
     def download_file(self):
         self.load_default_context()
         file_uid = self.request.matchdict['fid']
         log.debug('Requesting FID:%s from store %s', file_uid, self.storage)

         if not self.storage.exists(file_uid):
-            log.debug('File with FID:%s not found in the store', file_uid)
+            store_path = self.storage.store_path(file_uid)
+            log.debug('File with FID:%s not found in the store under `%s`',
+                      file_uid, store_path)
             raise HTTPNotFound()

         db_obj = FileStore().query().filter(FileStore.file_uid == file_uid).scalar()
         if not db_obj:
             raise HTTPNotFound()

         # private upload for user
         if db_obj.check_acl and db_obj.scope_user_id:
             user = db_obj.user
             if self._rhodecode_db_user.user_id != user.user_id:
                 log.warning('Access to file store object forbidden')
                 raise HTTPNotFound()

         # scoped to repository permissions
         if db_obj.check_acl and db_obj.scope_repo_id:
             repo = db_obj.repo
             perm_set = ['repository.read', 'repository.write', 'repository.admin']
             has_perm = HasRepoPermissionAny(*perm_set)(repo.repo_name, 'FileStore check')
             if not has_perm:
                 log.warning('Access to file store object forbidden')
                 raise HTTPNotFound()

         # scoped to repository group permissions
         if db_obj.check_acl and db_obj.scope_repo_group_id:
             repo_group = db_obj.repo_group
             perm_set = ['group.read', 'group.write', 'group.admin']
             has_perm = HasRepoGroupPermissionAny(*perm_set)(repo_group.group_name, 'FileStore check')
             if not has_perm:
                 log.warning('Access to file store object forbidden')
                 raise HTTPNotFound()

         FileStore.bump_access_counter(file_uid)

         file_path = self.storage.store_path(file_uid)
         return FileResponse(file_path)
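
With the store_path lookup added above, a miss in download_file now logs the exact on-disk path that was checked rather than just the FID. A rough, self-contained sketch of the resulting debug message; the base path and FID are hypothetical, and the path layout simply mirrors LocalFileStorage.store_path() (base_path, two-character sub-store, filename):

import os
import logging

logging.basicConfig(level=logging.DEBUG)
log = logging.getLogger('file_store_demo')

base_path = '/tmp/file_store'    # hypothetical base path
file_uid = '0-deadbeef.jpg'      # hypothetical FID
# same layout as LocalFileStorage.store_path(): base_path / 2-char sub-store / filename
store_path = os.path.join(base_path, file_uid[:2], file_uid)

# mirrors the debug call added in the diff above
log.debug('File with FID:%s not found in the store under `%s`', file_uid, store_path)
# logs roughly: File with FID:0-deadbeef.jpg not found in the store under `/tmp/file_store/0-/0-deadbeef.jpg`
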