@@ -1,1716 +1,1716 @@
# Copyright (C) 2011-2023 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

import itertools
import logging
import os
import collections
import urllib.request
import urllib.parse
import urllib.error
import pathlib
import time
import random

from pyramid.httpexceptions import HTTPNotFound, HTTPBadRequest, HTTPFound

from pyramid.renderers import render
from pyramid.response import Response

import rhodecode
from rhodecode.apps._base import RepoAppView


from rhodecode.lib import diffs, helpers as h, rc_cache
from rhodecode.lib import audit_logger
from rhodecode.lib.hash_utils import sha1_safe
from rhodecode.lib.rc_cache.archive_cache import (
    get_archival_cache_store, get_archival_config, ArchiveCacheGenerationLock, archive_iterator)
from rhodecode.lib.str_utils import safe_bytes, convert_special_chars
from rhodecode.lib.view_utils import parse_path_ref
from rhodecode.lib.exceptions import NonRelativePathError
from rhodecode.lib.codeblocks import (
    filenode_as_lines_tokens, filenode_as_annotated_lines_tokens)
from rhodecode.lib.utils2 import convert_line_endings, detect_mode
from rhodecode.lib.type_utils import str2bool
from rhodecode.lib.str_utils import safe_str, safe_int
from rhodecode.lib.auth import (
    LoginRequired, HasRepoPermissionAnyDecorator, CSRFRequired)
from rhodecode.lib.vcs import path as vcspath
from rhodecode.lib.vcs.backends.base import EmptyCommit
from rhodecode.lib.vcs.conf import settings
from rhodecode.lib.vcs.nodes import FileNode
from rhodecode.lib.vcs.exceptions import (
    RepositoryError, CommitDoesNotExistError, EmptyRepositoryError,
    ImproperArchiveTypeError, VCSError, NodeAlreadyExistsError,
    NodeDoesNotExistError, CommitError, NodeError)

from rhodecode.model.scm import ScmModel
from rhodecode.model.db import Repository

log = logging.getLogger(__name__)


def get_archive_name(db_repo_id, db_repo_name, commit_sha, ext, subrepos=False, path_sha='', with_hash=True):
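    """
    Builds a unique, filesystem-safe archive file name for the given
    repository, commit and (optional) path selection.
    """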
    # original backward compat name of archive
    clean_name = safe_str(convert_special_chars(db_repo_name).replace('/', '_'))

    # e.g. vcsserver-id-abcd-sub-1-abcfdef-archive-all.zip
    #      vcsserver-id-abcd-sub-0-abcfdef-COMMIT_SHA-PATH_SHA.zip
    id_sha = sha1_safe(str(db_repo_id))[:4]
    sub_repo = 'sub-1' if subrepos else 'sub-0'
    commit = commit_sha if with_hash else 'archive'
    path_marker = (path_sha if with_hash else '') or 'all'
    archive_name = f'{clean_name}-id-{id_sha}-{sub_repo}-{commit}-{path_marker}{ext}'

    return archive_name


def get_path_sha(at_path):
    return safe_str(sha1_safe(at_path)[:8])


def _get_archive_spec(fname):
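    """
    Parses an archive file name such as `<commit_id>.tar.gz` into a
    `(commit_id, ext, fileformat, content_type)` tuple based on the known
    ARCHIVE_SPECS. Raises ValueError if no known extension matches.
    """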
    log.debug('Detecting archive spec for: `%s`', fname)

    fileformat = None
    ext = None
    content_type = None
    for a_type, content_type, extension in settings.ARCHIVE_SPECS:

        if fname.endswith(extension):
            fileformat = a_type
            log.debug('archive is of type: %s', fileformat)
            ext = extension
            break

    if not fileformat:
        raise ValueError(f'No archive spec matches file name `{fname}`')

    # the leftover part of the whole fname is the commit id
    commit_id = fname[:-len(ext)]

    return commit_id, ext, fileformat, content_type


class RepoFilesView(RepoAppView):

    @staticmethod
    def adjust_file_path_for_svn(f_path, repo):
        """
        Computes the relative path of `f_path`.

        This is mainly based on prefix matching of the recognized tags and
        branches in the underlying repository.
        """
        tags_and_branches = itertools.chain(
            repo.branches.keys(),
            repo.tags.keys())
        tags_and_branches = sorted(tags_and_branches, key=len, reverse=True)

        for name in tags_and_branches:
            if f_path.startswith(f'{name}/'):
                f_path = vcspath.relpath(f_path, name)
                break
        return f_path

    def load_default_context(self):
        c = self._get_local_tmpl_context(include_app_defaults=True)
        c.rhodecode_repo = self.rhodecode_vcs_repo
        c.enable_downloads = self.db_repo.enable_downloads
        return c

    def _ensure_not_locked(self, commit_id='tip'):
        _ = self.request.translate

        repo = self.db_repo
        if repo.enable_locking and repo.locked[0]:
            h.flash(_('This repository has been locked by %s on %s')
                    % (h.person_by_id(repo.locked[0]),
                       h.format_date(h.time_to_datetime(repo.locked[1]))),
                    'warning')
            files_url = h.route_path(
                'repo_files:default_path',
                repo_name=self.db_repo_name, commit_id=commit_id)
            raise HTTPFound(files_url)

    def forbid_non_head(self, is_head, f_path, commit_id='tip', json_mode=False):
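        """
        Flashes a warning and redirects back to the files view when the
        given commit is not the head of a branch; in json_mode the warning
        message is returned instead.
        """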
        _ = self.request.translate

        if not is_head:
            message = _('Cannot modify file. '
                        'Given commit `{}` is not head of a branch.').format(commit_id)
            h.flash(message, category='warning')

            if json_mode:
                return message

            files_url = h.route_path(
                'repo_files', repo_name=self.db_repo_name, commit_id=commit_id,
                f_path=f_path)
            raise HTTPFound(files_url)

    def check_branch_permission(self, branch_name, commit_id='tip', json_mode=False):
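        """
        Checks the current user's branch permission rules; when pushing to
        the branch is forbidden, flashes a warning and redirects (or, in
        json_mode, returns the warning message).
        """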
        _ = self.request.translate

        rule, branch_perm = self._rhodecode_user.get_rule_and_branch_permission(
            self.db_repo_name, branch_name)
        if branch_perm and branch_perm not in ['branch.push', 'branch.push_force']:
            message = _('Branch `{}` changes forbidden by rule {}.').format(
                h.escape(branch_name), h.escape(rule))
            h.flash(message, 'warning')

            if json_mode:
                return message

            files_url = h.route_path(
                'repo_files:default_path', repo_name=self.db_repo_name, commit_id=commit_id)

            raise HTTPFound(files_url)

    def _get_commit_and_path(self):
        default_commit_id = self.db_repo.landing_ref_name
        default_f_path = '/'

        commit_id = self.request.matchdict.get(
            'commit_id', default_commit_id)
        f_path = self._get_f_path(self.request.matchdict, default_f_path)
        return commit_id, f_path

    def _get_default_encoding(self, c):
        enc_list = getattr(c, 'default_encodings', [])
        return enc_list[0] if enc_list else 'UTF-8'

    def _get_commit_or_redirect(self, commit_id, redirect_after=True):
        """
        This is a safe way to get a commit. If an error occurs, it redirects
        to the tip with a proper message.

        :param commit_id: id of commit to fetch
        :param redirect_after: toggle redirection
        """
        _ = self.request.translate

        try:
            return self.rhodecode_vcs_repo.get_commit(commit_id)
        except EmptyRepositoryError:
            if not redirect_after:
                return None

            add_new = upload_new = ""
            if h.HasRepoPermissionAny(
                    'repository.write', 'repository.admin')(self.db_repo_name):
                _url = h.route_path(
                    'repo_files_add_file',
                    repo_name=self.db_repo_name, commit_id=0, f_path='')
                add_new = h.link_to(
                    _('add a new file'), _url, class_="alert-link")

                _url_upld = h.route_path(
                    'repo_files_upload_file',
                    repo_name=self.db_repo_name, commit_id=0, f_path='')
                upload_new = h.link_to(
                    _('upload a new file'), _url_upld, class_="alert-link")

            h.flash(h.literal(
                _('There are no files yet. Click here to %s or %s.') % (add_new, upload_new)), category='warning')
            raise HTTPFound(
                h.route_path('repo_summary', repo_name=self.db_repo_name))

        except (CommitDoesNotExistError, LookupError) as e:
            msg = _('No such commit exists for this repository. Commit: {}').format(commit_id)
            h.flash(msg, category='error')
            raise HTTPNotFound()
        except RepositoryError as e:
            h.flash(h.escape(safe_str(e)), category='error')
            raise HTTPNotFound()

    def _get_filenode_or_redirect(self, commit_obj, path, pre_load=None):
        """
        Returns a file_node. If an error occurs, or the given path is a
        directory, it redirects to the top level path.
        """
        _ = self.request.translate

        try:
            file_node = commit_obj.get_node(path, pre_load=pre_load)
            if file_node.is_dir():
                raise RepositoryError('The given path is a directory')
        except CommitDoesNotExistError:
            log.exception('No such commit exists for this repository')
            h.flash(_('No such commit exists for this repository'), category='error')
            raise HTTPNotFound()
        except RepositoryError as e:
            log.warning('Repository error while fetching filenode `%s`. Err:%s', path, e)
            h.flash(h.escape(safe_str(e)), category='error')
            raise HTTPNotFound()

        return file_node

    def _is_valid_head(self, commit_id, repo, landing_ref):
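        """
        Checks whether `commit_id` points at the head of a branch and
        returns a `(branch_name, sha_commit_id, is_head)` triple.
        """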
        branch_name = sha_commit_id = ''
        is_head = False
        log.debug('Checking if commit_id `%s` is a head for %s.', commit_id, repo)

        for _branch_name, branch_commit_id in repo.branches.items():
            # simple case we pass in branch name, it's a HEAD
            if commit_id == _branch_name:
                is_head = True
                branch_name = _branch_name
                sha_commit_id = branch_commit_id
                break
            # case when we pass in full sha commit_id, which is a head
            elif commit_id == branch_commit_id:
                is_head = True
                branch_name = _branch_name
                sha_commit_id = branch_commit_id
                break

        if h.is_svn(repo) and not repo.is_empty():
            # Note: Subversion only has one head.
            if commit_id == repo.get_commit(commit_idx=-1).raw_id:
                is_head = True
            return branch_name, sha_commit_id, is_head

        # branches were checked above; now we only need to resolve the branch/commit_sha
        if repo.is_empty():
            is_head = True
            branch_name = landing_ref
            sha_commit_id = EmptyCommit().raw_id
        else:
            commit = repo.get_commit(commit_id=commit_id)
            if commit:
                branch_name = commit.branch
                sha_commit_id = commit.raw_id

        return branch_name, sha_commit_id, is_head

    def _get_tree_at_commit(self, c, commit_id, f_path, full_load=False, at_rev=None):
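        """
        Renders the file-tree template for the given commit/path, caching
        the generated HTML in the `cache_repo` region unless recaching is
        forced.
        """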

        repo_id = self.db_repo.repo_id
        force_recache = self.get_recache_flag()

        cache_seconds = safe_int(
            rhodecode.CONFIG.get('rc_cache.cache_repo.expiration_time'))
        cache_on = not force_recache and cache_seconds > 0
        log.debug(
            'Computing FILE TREE for repo_id %s commit_id `%s` and path `%s` '
            'with caching: %s[TTL: %ss]' % (
                repo_id, commit_id, f_path, cache_on, cache_seconds or 0))

        cache_namespace_uid = f'repo.{rc_cache.FILE_TREE_CACHE_VER}.{repo_id}'
        region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid)

        @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=cache_on)
        def compute_file_tree(_name_hash, _repo_id, _commit_id, _f_path, _full_load, _at_rev):
            log.debug('Generating cached file tree for repo_id: %s, %s, %s',
                      _repo_id, _commit_id, _f_path)

            c.full_load = _full_load
            return render(
                'rhodecode:templates/files/files_browser_tree.mako',
                self._get_template_context(c), self.request, _at_rev)

        return compute_file_tree(
            self.db_repo.repo_name_hash, self.db_repo.repo_id, commit_id, f_path, full_load, at_rev)

    def create_pure_path(self, *parts):
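        """
        Splits and sanitizes the given path parts, dropping any `.`/`..`
        segments to guard against path traversal, e.g.
        `create_pure_path('a', '../b')` yields `PurePath('a/b')`.
        """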
        # Split paths and sanitize them, removing any ../ etc
        sanitized_path = [
            x for x in pathlib.PurePath(*parts).parts
            if x not in ['.', '..']]

        pure_path = pathlib.PurePath(*sanitized_path)
        return pure_path

    def _is_lf_enabled(self, target_repo):
        lf_enabled = False

        lf_key_for_vcs_map = {
            'hg': 'extensions_largefiles',
            'git': 'vcs_git_lfs_enabled'
        }

        lf_key_for_vcs = lf_key_for_vcs_map.get(target_repo.repo_type)

        if lf_key_for_vcs:
            lf_enabled = self._get_repo_setting(target_repo, lf_key_for_vcs)

        return lf_enabled

    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    def repo_archivefile(self):
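        """
        Serves a repository archive: parses the archive spec from the file
        name, redirects short refs to the explicit commit hash, then streams
        the archive from the archival cache, generating it under a lock on a
        cache miss.
        """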
        # archive cache config
        from rhodecode import CONFIG
        _ = self.request.translate
        self.load_default_context()
        default_at_path = '/'
        fname = self.request.matchdict['fname']
        subrepos = self.request.GET.get('subrepos') == 'true'
        with_hash = str2bool(self.request.GET.get('with_hash', '1'))
        at_path = self.request.GET.get('at_path') or default_at_path

        if not self.db_repo.enable_downloads:
            return Response(_('Downloads disabled'))

        try:
            commit_id, ext, fileformat, content_type = \
                _get_archive_spec(fname)
        except ValueError:
            return Response(_('Unknown archive type for: `{}`').format(
                h.escape(fname)))

        try:
            commit = self.rhodecode_vcs_repo.get_commit(commit_id)
        except CommitDoesNotExistError:
            return Response(_('Unknown commit_id {}').format(
                h.escape(commit_id)))
        except EmptyRepositoryError:
            return Response(_('Empty repository'))

        # we used a ref, or a shorter version; let's redirect the client to use the explicit hash
        if commit_id != commit.raw_id:
            fname = f'{commit.raw_id}{ext}'
            raise HTTPFound(self.request.current_route_path(fname=fname))

        try:
            at_path = commit.get_node(at_path).path or default_at_path
        except Exception:
            return Response(_('No node at path {} for this repository').format(h.escape(at_path)))

        path_sha = get_path_sha(at_path)

        # used for cache etc., a consistent unique archive name
        archive_name_key = get_archive_name(
            self.db_repo.repo_id, self.db_repo_name, commit_sha=commit.short_id, ext=ext, subrepos=subrepos,
            path_sha=path_sha, with_hash=True)

        if not with_hash:
            path_sha = ''

        # what the end client gets served
        response_archive_name = get_archive_name(
            self.db_repo.repo_id, self.db_repo_name, commit_sha=commit.short_id, ext=ext, subrepos=subrepos,
            path_sha=path_sha, with_hash=with_hash)

        # remove extension from our archive directory name
        archive_dir_name = response_archive_name[:-len(ext)]

        archive_cache_disable = self.request.GET.get('no_cache')

        d_cache = get_archival_cache_store(config=CONFIG)

        # NOTE: we get the config to pass to a call to lazy-init the SAME type of cache on vcsserver
        d_cache_conf = get_archival_config(config=CONFIG)

        # This is also a cache key, and lock key
        reentrant_lock_key = archive_name_key + '.lock'

        use_cached_archive = False
        if not archive_cache_disable and archive_name_key in d_cache:
            reader, metadata = d_cache.fetch(archive_name_key)

            use_cached_archive = True
            log.debug('Found cached archive as key=%s tag=%s, serving archive from cache reader=%s',
                      archive_name_key, metadata, reader.name)
        else:
            reader = None
            log.debug('Archive with key=%s is not yet cached, creating one now...', archive_name_key)

        if not reader:
            # generate a new archive, as the previous one was not found in the cache
            try:
                with d_cache.get_lock(reentrant_lock_key):
                    try:
                        commit.archive_repo(archive_name_key, archive_dir_name=archive_dir_name,
                                            kind=fileformat, subrepos=subrepos,
                                            archive_at_path=at_path, cache_config=d_cache_conf)
                    except ImproperArchiveTypeError:
                        return _('Unknown archive type')

            except ArchiveCacheGenerationLock:
                retry_after = round(random.uniform(0.3, 3.0), 1)
                time.sleep(retry_after)

                location = self.request.url
                response = Response(
                    f"archive {archive_name_key} generation in progress, Retry-After={retry_after}, Location={location}"
                )
                response.headers["Retry-After"] = str(retry_after)
                response.status_code = 307  # temporary redirect

                response.location = location
                return response

-            reader, metadata = d_cache.fetch(archive_name_key)
+            reader, metadata = d_cache.fetch(archive_name_key, retry=True, retry_attempts=30)

        response = Response(app_iter=archive_iterator(reader))
        response.content_disposition = f'attachment; filename={response_archive_name}'
        response.content_type = str(content_type)

        try:
            return response
        finally:
            # store download action
            audit_logger.store_web(
                'repo.archive.download', action_data={
                    'user_agent': self.request.user_agent,
                    'archive_name': archive_name_key,
                    'archive_spec': fname,
                    'archive_cached': use_cached_archive},
                user=self._rhodecode_user,
                repo=self.db_repo,
                commit=True
            )

    def _get_file_node(self, commit_id, f_path):
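        """
        Returns the FileNode at `f_path` for the given commit id, falling
        back to an empty FileNode (bound to an EmptyCommit) when the commit
        or the path does not exist, so diffs can be computed against a
        missing side.
        """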
        if commit_id not in ['', None, 'None', '0' * 12, '0' * 40]:
            commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
            try:
                node = commit.get_node(f_path)
                if node.is_dir():
                    raise NodeError(f'{node} path is a {type(node)} not a file')
            except NodeDoesNotExistError:
                commit = EmptyCommit(
                    commit_id=commit_id,
                    idx=commit.idx,
                    repo=commit.repository,
                    alias=commit.repository.alias,
                    message=commit.message,
                    author=commit.author,
                    date=commit.date)
                node = FileNode(safe_bytes(f_path), b'', commit=commit)
        else:
            commit = EmptyCommit(
                repo=self.rhodecode_vcs_repo,
                alias=self.rhodecode_vcs_repo.alias)
            node = FileNode(safe_bytes(f_path), b'', commit=commit)
        return node

    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    def repo_files_diff(self):
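        """
        Generates a diff of a single file between the two commits given as
        the `diff1`/`diff2` GET parameters; serves it as a raw patch or a
        download, and otherwise redirects to the compare view.
        """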
        c = self.load_default_context()
        f_path = self._get_f_path(self.request.matchdict)
        diff1 = self.request.GET.get('diff1', '')
        diff2 = self.request.GET.get('diff2', '')

        path1, diff1 = parse_path_ref(diff1, default_path=f_path)

        ignore_whitespace = str2bool(self.request.GET.get('ignorews'))
        line_context = self.request.GET.get('context', 3)

        if not any((diff1, diff2)):
            h.flash(
                'Need query parameter "diff1" or "diff2" to generate a diff.',
                category='error')
            raise HTTPBadRequest()

        c.action = self.request.GET.get('diff')
        if c.action not in ['download', 'raw']:
            compare_url = h.route_path(
                'repo_compare',
                repo_name=self.db_repo_name,
                source_ref_type='rev',
                source_ref=diff1,
                target_repo=self.db_repo_name,
                target_ref_type='rev',
                target_ref=diff2,
                _query=dict(f_path=f_path))
            # redirect to the new view if we render a diff
            raise HTTPFound(compare_url)

        try:
            node1 = self._get_file_node(diff1, path1)
            node2 = self._get_file_node(diff2, f_path)
        except (RepositoryError, NodeError):
            log.exception("Exception while trying to get node from repository")
            raise HTTPFound(
                h.route_path('repo_files', repo_name=self.db_repo_name,
                             commit_id='tip', f_path=f_path))

        if all(isinstance(node.commit, EmptyCommit)
               for node in (node1, node2)):
            raise HTTPNotFound()

        c.commit_1 = node1.commit
        c.commit_2 = node2.commit

        if c.action == 'download':
            _diff = diffs.get_gitdiff(node1, node2,
                                      ignore_whitespace=ignore_whitespace,
                                      context=line_context)
            # NOTE: this was using diff_format='gitdiff'
            diff = diffs.DiffProcessor(_diff, diff_format='newdiff')

            response = Response(self.path_filter.get_raw_patch(diff))
            response.content_type = 'text/plain'
            response.content_disposition = (
                f'attachment; filename={f_path}_{diff1}_vs_{diff2}.diff'
            )
            charset = self._get_default_encoding(c)
            if charset:
                response.charset = charset
            return response

        elif c.action == 'raw':
            _diff = diffs.get_gitdiff(node1, node2,
                                      ignore_whitespace=ignore_whitespace,
                                      context=line_context)
            # NOTE: this was using diff_format='gitdiff'
            diff = diffs.DiffProcessor(_diff, diff_format='newdiff')

            response = Response(self.path_filter.get_raw_patch(diff))
            response.content_type = 'text/plain'
            charset = self._get_default_encoding(c)
            if charset:
                response.charset = charset
            return response

        # in case we ever end up here
        raise HTTPNotFound()

    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    def repo_files_diff_2way_redirect(self):
        """
        Kept only to make OLD links work
        """
        f_path = self._get_f_path_unchecked(self.request.matchdict)
        diff1 = self.request.GET.get('diff1', '')
        diff2 = self.request.GET.get('diff2', '')

        if not any((diff1, diff2)):
            h.flash(
                'Need query parameter "diff1" or "diff2" to generate a diff.',
                category='error')
            raise HTTPBadRequest()

        compare_url = h.route_path(
            'repo_compare',
            repo_name=self.db_repo_name,
            source_ref_type='rev',
            source_ref=diff1,
            target_ref_type='rev',
            target_ref=diff2,
            _query=dict(f_path=f_path, diffmode='sideside',
                        target_repo=self.db_repo_name,))
        raise HTTPFound(compare_url)

    @LoginRequired()
    def repo_files_default_commit_redirect(self):
        """
        Special page that redirects to the landing page of files, based on
        the default commit of the repository.
        """
        c = self.load_default_context()
        ref_name = c.rhodecode_db_repo.landing_ref_name
        landing_url = h.repo_files_by_ref_url(
            c.rhodecode_db_repo.repo_name,
            c.rhodecode_db_repo.repo_type,
            f_path='',
            ref_name=ref_name,
            commit_id='tip',
            query=dict(at=ref_name)
        )

        raise HTTPFound(landing_url)

    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    def repo_files(self):
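        """
        The main files view: for a file path it loads the file content
        (annotated, rendered or tokenized into lines); for a directory path
        it loads the file tree and the README data.
        """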
642 | c = self.load_default_context() |
|
642 | c = self.load_default_context() | |
643 |
|
643 | |||
644 | view_name = getattr(self.request.matched_route, 'name', None) |
|
644 | view_name = getattr(self.request.matched_route, 'name', None) | |
645 |
|
645 | |||
646 | c.annotate = view_name == 'repo_files:annotated' |
|
646 | c.annotate = view_name == 'repo_files:annotated' | |
647 | # default is false, but .rst/.md files later are auto rendered, we can |
|
647 | # default is false, but .rst/.md files later are auto rendered, we can | |
648 | # overwrite auto rendering by setting this GET flag |
|
648 | # overwrite auto rendering by setting this GET flag | |
649 | c.renderer = view_name == 'repo_files:rendered' or not self.request.GET.get('no-render', False) |
|
649 | c.renderer = view_name == 'repo_files:rendered' or not self.request.GET.get('no-render', False) | |
650 |
|
650 | |||
651 | commit_id, f_path = self._get_commit_and_path() |
|
651 | commit_id, f_path = self._get_commit_and_path() | |
652 |
|
652 | |||
653 | c.commit = self._get_commit_or_redirect(commit_id) |
|
653 | c.commit = self._get_commit_or_redirect(commit_id) | |
654 | c.branch = self.request.GET.get('branch', None) |
|
654 | c.branch = self.request.GET.get('branch', None) | |
655 | c.f_path = f_path |
|
655 | c.f_path = f_path | |
656 | at_rev = self.request.GET.get('at') |
|
656 | at_rev = self.request.GET.get('at') | |
657 |
|
657 | |||
658 | # files or dirs |
|
658 | # files or dirs | |
659 | try: |
|
659 | try: | |
660 | c.file = c.commit.get_node(f_path, pre_load=['is_binary', 'size', 'data']) |
|
660 | c.file = c.commit.get_node(f_path, pre_load=['is_binary', 'size', 'data']) | |
661 |
|
661 | |||
662 | c.file_author = True |
|
662 | c.file_author = True | |
663 | c.file_tree = '' |
|
663 | c.file_tree = '' | |
664 |
|
664 | |||
665 | # prev link |
|
665 | # prev link | |
666 | try: |
|
666 | try: | |
667 | prev_commit = c.commit.prev(c.branch) |
|
667 | prev_commit = c.commit.prev(c.branch) | |
668 | c.prev_commit = prev_commit |
|
668 | c.prev_commit = prev_commit | |
669 | c.url_prev = h.route_path( |
|
669 | c.url_prev = h.route_path( | |
670 | 'repo_files', repo_name=self.db_repo_name, |
|
670 | 'repo_files', repo_name=self.db_repo_name, | |
671 | commit_id=prev_commit.raw_id, f_path=f_path) |
|
671 | commit_id=prev_commit.raw_id, f_path=f_path) | |
672 | if c.branch: |
|
672 | if c.branch: | |
673 | c.url_prev += '?branch=%s' % c.branch |
|
673 | c.url_prev += '?branch=%s' % c.branch | |
674 | except (CommitDoesNotExistError, VCSError): |
|
674 | except (CommitDoesNotExistError, VCSError): | |
675 | c.url_prev = '#' |
|
675 | c.url_prev = '#' | |
676 | c.prev_commit = EmptyCommit() |
|
676 | c.prev_commit = EmptyCommit() | |
677 |
|
677 | |||
678 | # next link |
|
678 | # next link | |
679 | try: |
|
679 | try: | |
680 | next_commit = c.commit.next(c.branch) |
|
680 | next_commit = c.commit.next(c.branch) | |
681 | c.next_commit = next_commit |
|
681 | c.next_commit = next_commit | |
682 | c.url_next = h.route_path( |
|
682 | c.url_next = h.route_path( | |
683 | 'repo_files', repo_name=self.db_repo_name, |
|
683 | 'repo_files', repo_name=self.db_repo_name, | |
684 | commit_id=next_commit.raw_id, f_path=f_path) |
|
684 | commit_id=next_commit.raw_id, f_path=f_path) | |
685 | if c.branch: |
|
685 | if c.branch: | |
686 | c.url_next += '?branch=%s' % c.branch |
|
686 | c.url_next += '?branch=%s' % c.branch | |
687 | except (CommitDoesNotExistError, VCSError): |
|
687 | except (CommitDoesNotExistError, VCSError): | |
688 | c.url_next = '#' |
|
688 | c.url_next = '#' | |
689 | c.next_commit = EmptyCommit() |
|
689 | c.next_commit = EmptyCommit() | |
690 |
|
690 | |||
691 | # load file content |
|
691 | # load file content | |
692 | if c.file.is_file(): |
|
692 | if c.file.is_file(): | |
693 |
|
693 | |||
694 | c.lf_node = {} |
|
694 | c.lf_node = {} | |
695 |
|
695 | |||
696 | has_lf_enabled = self._is_lf_enabled(self.db_repo) |
|
696 | has_lf_enabled = self._is_lf_enabled(self.db_repo) | |
697 | if has_lf_enabled: |
|
697 | if has_lf_enabled: | |
698 | c.lf_node = c.file.get_largefile_node() |
|
698 | c.lf_node = c.file.get_largefile_node() | |
699 |
|
699 | |||
700 | c.file_source_page = 'true' |
|
700 | c.file_source_page = 'true' | |
701 | c.file_last_commit = c.file.last_commit |
|
701 | c.file_last_commit = c.file.last_commit | |
702 |
|
702 | |||
703 | c.file_size_too_big = c.file.size > c.visual.cut_off_limit_file |
|
703 | c.file_size_too_big = c.file.size > c.visual.cut_off_limit_file | |
704 |
|
704 | |||
705 | if not (c.file_size_too_big or c.file.is_binary): |
|
705 | if not (c.file_size_too_big or c.file.is_binary): | |
706 | if c.annotate: # annotation has precedence over renderer |
|
706 | if c.annotate: # annotation has precedence over renderer | |
707 | c.annotated_lines = filenode_as_annotated_lines_tokens( |
|
707 | c.annotated_lines = filenode_as_annotated_lines_tokens( | |
708 | c.file |
|
708 | c.file | |
709 | ) |
|
709 | ) | |
710 | else: |
|
710 | else: | |
711 | c.renderer = ( |
|
711 | c.renderer = ( | |
712 | c.renderer and h.renderer_from_filename(c.file.path) |
|
712 | c.renderer and h.renderer_from_filename(c.file.path) | |
713 | ) |
|
713 | ) | |
714 | if not c.renderer: |
|
714 | if not c.renderer: | |
715 | c.lines = filenode_as_lines_tokens(c.file) |
|
715 | c.lines = filenode_as_lines_tokens(c.file) | |
716 |
|
716 | |||
717 | _branch_name, _sha_commit_id, is_head = \ |
|
717 | _branch_name, _sha_commit_id, is_head = \ | |
718 | self._is_valid_head(commit_id, self.rhodecode_vcs_repo, |
|
718 | self._is_valid_head(commit_id, self.rhodecode_vcs_repo, | |
719 | landing_ref=self.db_repo.landing_ref_name) |
|
719 | landing_ref=self.db_repo.landing_ref_name) | |
720 | c.on_branch_head = is_head |
|
720 | c.on_branch_head = is_head | |
721 |
|
721 | |||
722 | branch = c.commit.branch if ( |
|
722 | branch = c.commit.branch if ( | |
723 | c.commit.branch and '/' not in c.commit.branch) else None |
|
723 | c.commit.branch and '/' not in c.commit.branch) else None | |
724 | c.branch_or_raw_id = branch or c.commit.raw_id |
|
724 | c.branch_or_raw_id = branch or c.commit.raw_id | |
725 | c.branch_name = c.commit.branch or h.short_id(c.commit.raw_id) |
|
725 | c.branch_name = c.commit.branch or h.short_id(c.commit.raw_id) | |
726 |
|
726 | |||
727 | author = c.file_last_commit.author |
|
727 | author = c.file_last_commit.author | |
728 | c.authors = [[ |
|
728 | c.authors = [[ | |
729 | h.email(author), |
|
729 | h.email(author), | |
730 | h.person(author, 'username_or_name_or_email'), |
|
730 | h.person(author, 'username_or_name_or_email'), | |
731 | 1 |
|
731 | 1 | |
732 | ]] |
|
732 | ]] | |
733 |
|
733 | |||
734 | else: # load tree content at path |
|
734 | else: # load tree content at path | |
735 | c.file_source_page = 'false' |
|
735 | c.file_source_page = 'false' | |
736 | c.authors = [] |
|
736 | c.authors = [] | |
737 | # this loads a simple tree without metadata to speed things up |
|
737 | # this loads a simple tree without metadata to speed things up | |
738 | # later via ajax we call repo_nodetree_full and fetch the whole tree |
|
738 | # later via ajax we call repo_nodetree_full and fetch the whole tree | |
739 | c.file_tree = self._get_tree_at_commit(c, c.commit.raw_id, f_path, at_rev=at_rev) |
|
739 | c.file_tree = self._get_tree_at_commit(c, c.commit.raw_id, f_path, at_rev=at_rev) | |
740 |
|
740 | |||
741 | c.readme_data, c.readme_file = \ |
|
741 | c.readme_data, c.readme_file = \ | |
742 | self._get_readme_data(self.db_repo, c.visual.default_renderer, |
|
742 | self._get_readme_data(self.db_repo, c.visual.default_renderer, | |
743 | c.commit.raw_id, f_path) |
|
743 | c.commit.raw_id, f_path) | |
744 |
|
744 | |||
745 | except RepositoryError as e: |
|
745 | except RepositoryError as e: | |
746 | h.flash(h.escape(safe_str(e)), category='error') |
|
746 | h.flash(h.escape(safe_str(e)), category='error') | |
747 | raise HTTPNotFound() |
|
747 | raise HTTPNotFound() | |
748 |
|
748 | |||
749 | if self.request.environ.get('HTTP_X_PJAX'): |
|
749 | if self.request.environ.get('HTTP_X_PJAX'): | |
750 | html = render('rhodecode:templates/files/files_pjax.mako', |
|
750 | html = render('rhodecode:templates/files/files_pjax.mako', | |
751 | self._get_template_context(c), self.request) |
|
751 | self._get_template_context(c), self.request) | |
752 | else: |
|
752 | else: | |
753 | html = render('rhodecode:templates/files/files.mako', |
|
753 | html = render('rhodecode:templates/files/files.mako', | |
754 | self._get_template_context(c), self.request) |
|
754 | self._get_template_context(c), self.request) | |
755 | return Response(html) |
|
755 | return Response(html) | |
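The view above renders either the full files page or a PJAX partial, depending on whether the request carries the HTTP_X_PJAX header. A minimal standalone sketch of that selection, using a plain WSGI environ dict and made-up template names rather than the actual RhodeCode templates:

    # sketch only: pick a partial template when the request came in via PJAX
    def pick_template(environ):
        if environ.get('HTTP_X_PJAX'):
            return 'files_pjax.mako'   # body-only partial swapped in by the client
        return 'files.mako'            # full page with layout

    # pick_template({'HTTP_X_PJAX': 'true'}) -> 'files_pjax.mako'
    # pick_template({})                      -> 'files.mako'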
756 |
|
756 | |||
757 | @HasRepoPermissionAnyDecorator( |
|
757 | @HasRepoPermissionAnyDecorator( | |
758 | 'repository.read', 'repository.write', 'repository.admin') |
|
758 | 'repository.read', 'repository.write', 'repository.admin') | |
759 | def repo_files_annotated_previous(self): |
|
759 | def repo_files_annotated_previous(self): | |
760 | self.load_default_context() |
|
760 | self.load_default_context() | |
761 |
|
761 | |||
762 | commit_id, f_path = self._get_commit_and_path() |
|
762 | commit_id, f_path = self._get_commit_and_path() | |
763 | commit = self._get_commit_or_redirect(commit_id) |
|
763 | commit = self._get_commit_or_redirect(commit_id) | |
764 | prev_commit_id = commit.raw_id |
|
764 | prev_commit_id = commit.raw_id | |
765 | line_anchor = self.request.GET.get('line_anchor') |
|
765 | line_anchor = self.request.GET.get('line_anchor') | |
766 | is_file = False |
|
766 | is_file = False | |
767 | try: |
|
767 | try: | |
768 | _file = commit.get_node(f_path) |
|
768 | _file = commit.get_node(f_path) | |
769 | is_file = _file.is_file() |
|
769 | is_file = _file.is_file() | |
770 | except (NodeDoesNotExistError, CommitDoesNotExistError, VCSError): |
|
770 | except (NodeDoesNotExistError, CommitDoesNotExistError, VCSError): | |
771 | pass |
|
771 | pass | |
772 |
|
772 | |||
773 | if is_file: |
|
773 | if is_file: | |
774 | history = commit.get_path_history(f_path) |
|
774 | history = commit.get_path_history(f_path) | |
775 | prev_commit_id = history[1].raw_id \ |
|
775 | prev_commit_id = history[1].raw_id \ | |
776 | if len(history) > 1 else prev_commit_id |
|
776 | if len(history) > 1 else prev_commit_id | |
777 | prev_url = h.route_path( |
|
777 | prev_url = h.route_path( | |
778 | 'repo_files:annotated', repo_name=self.db_repo_name, |
|
778 | 'repo_files:annotated', repo_name=self.db_repo_name, | |
779 | commit_id=prev_commit_id, f_path=f_path, |
|
779 | commit_id=prev_commit_id, f_path=f_path, | |
780 | _anchor=f'L{line_anchor}') |
|
780 | _anchor=f'L{line_anchor}') | |
781 |
|
781 | |||
782 | raise HTTPFound(prev_url) |
|
782 | raise HTTPFound(prev_url) | |
783 |
|
783 | |||
784 | @LoginRequired() |
|
784 | @LoginRequired() | |
785 | @HasRepoPermissionAnyDecorator( |
|
785 | @HasRepoPermissionAnyDecorator( | |
786 | 'repository.read', 'repository.write', 'repository.admin') |
|
786 | 'repository.read', 'repository.write', 'repository.admin') | |
787 | def repo_nodetree_full(self): |
|
787 | def repo_nodetree_full(self): | |
788 | """ |
|
788 | """ | |
789 | Returns rendered html of file tree that contains commit date, |
|
789 | Returns rendered html of file tree that contains commit date, | |
790 | author, commit_id for the specified combination of |
|
790 | author, commit_id for the specified combination of | |
791 | repo, commit_id and file path |
|
791 | repo, commit_id and file path | |
792 | """ |
|
792 | """ | |
793 | c = self.load_default_context() |
|
793 | c = self.load_default_context() | |
794 |
|
794 | |||
795 | commit_id, f_path = self._get_commit_and_path() |
|
795 | commit_id, f_path = self._get_commit_and_path() | |
796 | commit = self._get_commit_or_redirect(commit_id) |
|
796 | commit = self._get_commit_or_redirect(commit_id) | |
797 | try: |
|
797 | try: | |
798 | dir_node = commit.get_node(f_path) |
|
798 | dir_node = commit.get_node(f_path) | |
799 | except RepositoryError as e: |
|
799 | except RepositoryError as e: | |
800 | return Response(f'error: {h.escape(safe_str(e))}') |
|
800 | return Response(f'error: {h.escape(safe_str(e))}') | |
801 |
|
801 | |||
802 | if dir_node.is_file(): |
|
802 | if dir_node.is_file(): | |
803 | return Response('') |
|
803 | return Response('') | |
804 |
|
804 | |||
805 | c.file = dir_node |
|
805 | c.file = dir_node | |
806 | c.commit = commit |
|
806 | c.commit = commit | |
807 | at_rev = self.request.GET.get('at') |
|
807 | at_rev = self.request.GET.get('at') | |
808 |
|
808 | |||
809 | html = self._get_tree_at_commit( |
|
809 | html = self._get_tree_at_commit( | |
810 | c, commit.raw_id, dir_node.path, full_load=True, at_rev=at_rev) |
|
810 | c, commit.raw_id, dir_node.path, full_load=True, at_rev=at_rev) | |
811 |
|
811 | |||
812 | return Response(html) |
|
812 | return Response(html) | |
813 |
|
813 | |||
814 | def _get_attachement_headers(self, f_path): |
|
814 | def _get_attachement_headers(self, f_path): | |
815 | f_name = safe_str(f_path.split(Repository.NAME_SEP)[-1]) |
|
815 | f_name = safe_str(f_path.split(Repository.NAME_SEP)[-1]) | |
816 | safe_path = f_name.replace('"', '\\"') |
|
816 | safe_path = f_name.replace('"', '\\"') | |
817 | encoded_path = urllib.parse.quote(f_name) |
|
817 | encoded_path = urllib.parse.quote(f_name) | |
818 |
|
818 | |||
819 | headers = "attachment; " \ |
|
819 | headers = "attachment; " \ | |
820 | "filename=\"{}\"; " \ |
|
820 | "filename=\"{}\"; " \ | |
821 | "filename*=UTF-8\'\'{}".format(safe_path, encoded_path) |
|
821 | "filename*=UTF-8\'\'{}".format(safe_path, encoded_path) | |
822 |
|
822 | |||
823 | return safe_bytes(headers).decode('latin-1', errors='replace') |
|
823 | return safe_bytes(headers).decode('latin-1', errors='replace') | |
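The helper above builds an RFC 6266 style Content-Disposition value carrying both a quoted `filename` and a percent-encoded UTF-8 `filename*` parameter, then coerces it to latin-1 as header values require. A self-contained sketch of the same construction using only the standard library (the function name is illustrative, not a RhodeCode helper):

    import urllib.parse

    def attachment_disposition(file_name):
        # escape double quotes for the plain `filename` parameter and
        # percent-encode the UTF-8 form for the `filename*` parameter
        quoted = file_name.replace('"', '\\"')
        encoded = urllib.parse.quote(file_name)
        header = f"attachment; filename=\"{quoted}\"; filename*=UTF-8''{encoded}"
        # header values travel as latin-1; replace anything that cannot be mapped
        return header.encode('utf-8').decode('latin-1', errors='replace')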
824 |
|
824 | |||
825 | @LoginRequired() |
|
825 | @LoginRequired() | |
826 | @HasRepoPermissionAnyDecorator( |
|
826 | @HasRepoPermissionAnyDecorator( | |
827 | 'repository.read', 'repository.write', 'repository.admin') |
|
827 | 'repository.read', 'repository.write', 'repository.admin') | |
828 | def repo_file_raw(self): |
|
828 | def repo_file_raw(self): | |
829 | """ |
|
829 | """ | |
830 | Action for "show as raw"; some mimetypes are rendered inline, |
|
830 | Action for "show as raw"; some mimetypes are rendered inline, | |
831 | for example images and icons. |
|
831 | for example images and icons. | |
832 | """ |
|
832 | """ | |
833 | c = self.load_default_context() |
|
833 | c = self.load_default_context() | |
834 |
|
834 | |||
835 | commit_id, f_path = self._get_commit_and_path() |
|
835 | commit_id, f_path = self._get_commit_and_path() | |
836 | commit = self._get_commit_or_redirect(commit_id) |
|
836 | commit = self._get_commit_or_redirect(commit_id) | |
837 | file_node = self._get_filenode_or_redirect(commit, f_path) |
|
837 | file_node = self._get_filenode_or_redirect(commit, f_path) | |
838 |
|
838 | |||
839 | raw_mimetype_mapping = { |
|
839 | raw_mimetype_mapping = { | |
840 | # map original mimetype to a mimetype used for "show as raw" |
|
840 | # map original mimetype to a mimetype used for "show as raw" | |
841 | # you can also provide a content-disposition to override the |
|
841 | # you can also provide a content-disposition to override the | |
842 | # default "attachment" disposition. |
|
842 | # default "attachment" disposition. | |
843 | # orig_type: (new_type, new_dispo) |
|
843 | # orig_type: (new_type, new_dispo) | |
844 |
|
844 | |||
845 | # show images inline: |
|
845 | # show images inline: | |
846 | # Do not re-add SVG: it is unsafe and permits XSS attacks. One can |
|
846 | # Do not re-add SVG: it is unsafe and permits XSS attacks. One can | |
847 | # for example render an SVG with javascript inside or even render |
|
847 | # for example render an SVG with javascript inside or even render | |
848 | # HTML. |
|
848 | # HTML. | |
849 | 'image/x-icon': ('image/x-icon', 'inline'), |
|
849 | 'image/x-icon': ('image/x-icon', 'inline'), | |
850 | 'image/png': ('image/png', 'inline'), |
|
850 | 'image/png': ('image/png', 'inline'), | |
851 | 'image/gif': ('image/gif', 'inline'), |
|
851 | 'image/gif': ('image/gif', 'inline'), | |
852 | 'image/jpeg': ('image/jpeg', 'inline'), |
|
852 | 'image/jpeg': ('image/jpeg', 'inline'), | |
853 | 'application/pdf': ('application/pdf', 'inline'), |
|
853 | 'application/pdf': ('application/pdf', 'inline'), | |
854 | } |
|
854 | } | |
855 |
|
855 | |||
856 | mimetype = file_node.mimetype |
|
856 | mimetype = file_node.mimetype | |
857 | try: |
|
857 | try: | |
858 | mimetype, disposition = raw_mimetype_mapping[mimetype] |
|
858 | mimetype, disposition = raw_mimetype_mapping[mimetype] | |
859 | except KeyError: |
|
859 | except KeyError: | |
860 | # we don't know anything special about this, handle it safely |
|
860 | # we don't know anything special about this, handle it safely | |
861 | if file_node.is_binary: |
|
861 | if file_node.is_binary: | |
862 | # do same as download raw for binary files |
|
862 | # do same as download raw for binary files | |
863 | mimetype, disposition = 'application/octet-stream', 'attachment' |
|
863 | mimetype, disposition = 'application/octet-stream', 'attachment' | |
864 | else: |
|
864 | else: | |
865 | # do not just use the original mimetype, but force text/plain, |
|
865 | # do not just use the original mimetype, but force text/plain, | |
866 | # otherwise it would serve text/html and that might be unsafe. |
|
866 | # otherwise it would serve text/html and that might be unsafe. | |
867 | # Note: underlying vcs library fakes text/plain mimetype if the |
|
867 | # Note: underlying vcs library fakes text/plain mimetype if the | |
868 | # mimetype can not be determined and it thinks it is not |
|
868 | # mimetype can not be determined and it thinks it is not | |
869 | # binary. This might lead to erroneous text display in some |
|
869 | # binary. This might lead to erroneous text display in some | |
870 | # cases, but helps in other cases, like with text files |
|
870 | # cases, but helps in other cases, like with text files | |
871 | # without extension. |
|
871 | # without extension. | |
872 | mimetype, disposition = 'text/plain', 'inline' |
|
872 | mimetype, disposition = 'text/plain', 'inline' | |
873 |
|
873 | |||
874 | if disposition == 'attachment': |
|
874 | if disposition == 'attachment': | |
875 | disposition = self._get_attachement_headers(f_path) |
|
875 | disposition = self._get_attachement_headers(f_path) | |
876 |
|
876 | |||
877 | stream_content = file_node.stream_bytes() |
|
877 | stream_content = file_node.stream_bytes() | |
878 |
|
878 | |||
879 | response = Response(app_iter=stream_content) |
|
879 | response = Response(app_iter=stream_content) | |
880 | response.content_disposition = disposition |
|
880 | response.content_disposition = disposition | |
881 | response.content_type = mimetype |
|
881 | response.content_type = mimetype | |
882 |
|
882 | |||
883 | charset = self._get_default_encoding(c) |
|
883 | charset = self._get_default_encoding(c) | |
884 | if charset: |
|
884 | if charset: | |
885 | response.charset = charset |
|
885 | response.charset = charset | |
886 |
|
886 | |||
887 | return response |
|
887 | return response | |
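repo_file_raw resolves the served mimetype and disposition from a small allow-list and otherwise falls back to a safe default: unknown binary content is forced to an `application/octet-stream` attachment, everything else is served inline as `text/plain`. A standalone sketch of that lookup (the mapping mirrors the one above, the function name is made up):

    RAW_MIMETYPE_MAPPING = {
        # orig_type: (served_type, disposition); SVG is deliberately absent (XSS risk)
        'image/x-icon': ('image/x-icon', 'inline'),
        'image/png': ('image/png', 'inline'),
        'image/gif': ('image/gif', 'inline'),
        'image/jpeg': ('image/jpeg', 'inline'),
        'application/pdf': ('application/pdf', 'inline'),
    }

    def resolve_raw_headers(mimetype, is_binary):
        try:
            return RAW_MIMETYPE_MAPPING[mimetype]
        except KeyError:
            if is_binary:
                # unknown binary data: force a download
                return 'application/octet-stream', 'attachment'
            # unknown text: never echo the original type (e.g. text/html)
            return 'text/plain', 'inline'

    # resolve_raw_headers('image/svg+xml', is_binary=False) -> ('text/plain', 'inline')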
888 |
|
888 | |||
889 | @LoginRequired() |
|
889 | @LoginRequired() | |
890 | @HasRepoPermissionAnyDecorator( |
|
890 | @HasRepoPermissionAnyDecorator( | |
891 | 'repository.read', 'repository.write', 'repository.admin') |
|
891 | 'repository.read', 'repository.write', 'repository.admin') | |
892 | def repo_file_download(self): |
|
892 | def repo_file_download(self): | |
893 | c = self.load_default_context() |
|
893 | c = self.load_default_context() | |
894 |
|
894 | |||
895 | commit_id, f_path = self._get_commit_and_path() |
|
895 | commit_id, f_path = self._get_commit_and_path() | |
896 | commit = self._get_commit_or_redirect(commit_id) |
|
896 | commit = self._get_commit_or_redirect(commit_id) | |
897 | file_node = self._get_filenode_or_redirect(commit, f_path) |
|
897 | file_node = self._get_filenode_or_redirect(commit, f_path) | |
898 |
|
898 | |||
899 | if self.request.GET.get('lf'): |
|
899 | if self.request.GET.get('lf'): | |
900 | # only if the 'lf' GET flag is passed, we download this file |
|
900 | # only if the 'lf' GET flag is passed, we download this file | |
901 | # as LFS/Largefile |
|
901 | # as LFS/Largefile | |
902 | lf_node = file_node.get_largefile_node() |
|
902 | lf_node = file_node.get_largefile_node() | |
903 | if lf_node: |
|
903 | if lf_node: | |
904 | # overwrite our pointer with the REAL large-file |
|
904 | # overwrite our pointer with the REAL large-file | |
905 | file_node = lf_node |
|
905 | file_node = lf_node | |
906 |
|
906 | |||
907 | disposition = self._get_attachement_headers(f_path) |
|
907 | disposition = self._get_attachement_headers(f_path) | |
908 |
|
908 | |||
909 | stream_content = file_node.stream_bytes() |
|
909 | stream_content = file_node.stream_bytes() | |
910 |
|
910 | |||
911 | response = Response(app_iter=stream_content) |
|
911 | response = Response(app_iter=stream_content) | |
912 | response.content_disposition = disposition |
|
912 | response.content_disposition = disposition | |
913 | response.content_type = file_node.mimetype |
|
913 | response.content_type = file_node.mimetype | |
914 |
|
914 | |||
915 | charset = self._get_default_encoding(c) |
|
915 | charset = self._get_default_encoding(c) | |
916 | if charset: |
|
916 | if charset: | |
917 | response.charset = charset |
|
917 | response.charset = charset | |
918 |
|
918 | |||
919 | return response |
|
919 | return response | |
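repo_file_download streams the regular file node unless the request carries the `lf` GET flag and a largefile/LFS object is available, in which case the pointer is swapped for the real large file. A small sketch of that choice; the node object and its get_largefile_node() method are assumed to behave as in the view above:

    def pick_download_node(file_node, want_largefile):
        if want_largefile:
            lf_node = file_node.get_largefile_node()
            if lf_node is not None:
                return lf_node   # stream the real large file
        return file_node         # stream the pointer / regular file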
920 |
|
920 | |||
921 | def _get_nodelist_at_commit(self, repo_name, repo_id, commit_id, f_path): |
|
921 | def _get_nodelist_at_commit(self, repo_name, repo_id, commit_id, f_path): | |
922 |
|
922 | |||
923 | cache_seconds = safe_int( |
|
923 | cache_seconds = safe_int( | |
924 | rhodecode.CONFIG.get('rc_cache.cache_repo.expiration_time')) |
|
924 | rhodecode.CONFIG.get('rc_cache.cache_repo.expiration_time')) | |
925 | cache_on = cache_seconds > 0 |
|
925 | cache_on = cache_seconds > 0 | |
926 | log.debug( |
|
926 | log.debug( | |
927 | 'Computing FILE SEARCH for repo_id %s commit_id `%s` and path `%s`' |
|
927 | 'Computing FILE SEARCH for repo_id %s commit_id `%s` and path `%s`' | |
928 | ' with caching: %s [TTL: %ss]' % ( |
|
928 | ' with caching: %s [TTL: %ss]' % ( | |
929 | repo_id, commit_id, f_path, cache_on, cache_seconds or 0)) |
|
929 | repo_id, commit_id, f_path, cache_on, cache_seconds or 0)) | |
930 |
|
930 | |||
931 | cache_namespace_uid = f'repo.{repo_id}' |
|
931 | cache_namespace_uid = f'repo.{repo_id}' | |
932 | region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid) |
|
932 | region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid) | |
933 |
|
933 | |||
934 | @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=cache_on) |
|
934 | @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=cache_on) | |
935 | def compute_file_search(_name_hash, _repo_id, _commit_id, _f_path): |
|
935 | def compute_file_search(_name_hash, _repo_id, _commit_id, _f_path): | |
936 | log.debug('Generating cached nodelist for repo_id:%s, %s, %s', |
|
936 | log.debug('Generating cached nodelist for repo_id:%s, %s, %s', | |
937 | _repo_id, commit_id, f_path) |
|
937 | _repo_id, commit_id, f_path) | |
938 | try: |
|
938 | try: | |
939 | _d, _f = ScmModel().get_quick_filter_nodes(repo_name, _commit_id, _f_path) |
|
939 | _d, _f = ScmModel().get_quick_filter_nodes(repo_name, _commit_id, _f_path) | |
940 | except (RepositoryError, CommitDoesNotExistError, Exception) as e: |
|
940 | except (RepositoryError, CommitDoesNotExistError, Exception) as e: | |
941 | log.exception(safe_str(e)) |
|
941 | log.exception(safe_str(e)) | |
942 | h.flash(h.escape(safe_str(e)), category='error') |
|
942 | h.flash(h.escape(safe_str(e)), category='error') | |
943 | raise HTTPFound(h.route_path( |
|
943 | raise HTTPFound(h.route_path( | |
944 | 'repo_files', repo_name=self.db_repo_name, |
|
944 | 'repo_files', repo_name=self.db_repo_name, | |
945 | commit_id='tip', f_path='/')) |
|
945 | commit_id='tip', f_path='/')) | |
946 |
|
946 | |||
947 | return _d + _f |
|
947 | return _d + _f | |
948 |
|
948 | |||
949 | result = compute_file_search(self.db_repo.repo_name_hash, self.db_repo.repo_id, |
|
949 | result = compute_file_search(self.db_repo.repo_name_hash, self.db_repo.repo_id, | |
950 | commit_id, f_path) |
|
950 | commit_id, f_path) | |
951 | return filter(lambda n: self.path_filter.path_access_allowed(n['name']), result) |
|
951 | return filter(lambda n: self.path_filter.path_access_allowed(n['name']), result) | |
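_get_nodelist_at_commit only caches when the configured expiration time is positive, scoping the cache to the repository and keying it by name hash, repo id, commit and path. The decorator used above is RhodeCode's rc_cache region helper; the snippet below sketches the same TTL-gated memoization idea with nothing but the standard library (all names here are illustrative, not the rc_cache API):

    import time

    _nodelist_cache = {}

    def cached_nodelist(repo_id, commit_id, f_path, compute, ttl_seconds=0):
        # caching disabled -> always recompute
        if ttl_seconds <= 0:
            return compute(repo_id, commit_id, f_path)
        key = (repo_id, commit_id, f_path)
        hit = _nodelist_cache.get(key)
        now = time.monotonic()
        if hit and now - hit[0] < ttl_seconds:
            return hit[1]                       # fresh enough, reuse
        value = compute(repo_id, commit_id, f_path)
        _nodelist_cache[key] = (now, value)     # remember when it was computed
        return value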
952 |
|
952 | |||
953 | @LoginRequired() |
|
953 | @LoginRequired() | |
954 | @HasRepoPermissionAnyDecorator( |
|
954 | @HasRepoPermissionAnyDecorator( | |
955 | 'repository.read', 'repository.write', 'repository.admin') |
|
955 | 'repository.read', 'repository.write', 'repository.admin') | |
956 | def repo_nodelist(self): |
|
956 | def repo_nodelist(self): | |
957 | self.load_default_context() |
|
957 | self.load_default_context() | |
958 |
|
958 | |||
959 | commit_id, f_path = self._get_commit_and_path() |
|
959 | commit_id, f_path = self._get_commit_and_path() | |
960 | commit = self._get_commit_or_redirect(commit_id) |
|
960 | commit = self._get_commit_or_redirect(commit_id) | |
961 |
|
961 | |||
962 | metadata = self._get_nodelist_at_commit( |
|
962 | metadata = self._get_nodelist_at_commit( | |
963 | self.db_repo_name, self.db_repo.repo_id, commit.raw_id, f_path) |
|
963 | self.db_repo_name, self.db_repo.repo_id, commit.raw_id, f_path) | |
964 | return {'nodes': [x for x in metadata]} |
|
964 | return {'nodes': [x for x in metadata]} | |
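repo_nodelist returns the filtered node metadata as a JSON-serializable dict. An illustrative payload is shown below; only the 'name' key is taken from the code above (it is what the path filter inspects), the example paths are made up:

    example_nodelist = {
        'nodes': [
            {'name': 'docs'},
            {'name': 'docs/index.rst'},
        ]
    }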
965 |
|
965 | |||
966 | def _create_references(self, branches_or_tags, symbolic_reference, f_path, ref_type): |
|
966 | def _create_references(self, branches_or_tags, symbolic_reference, f_path, ref_type): | |
967 | items = [] |
|
967 | items = [] | |
968 | for name, commit_id in branches_or_tags.items(): |
|
968 | for name, commit_id in branches_or_tags.items(): | |
969 | sym_ref = symbolic_reference(commit_id, name, f_path, ref_type) |
|
969 | sym_ref = symbolic_reference(commit_id, name, f_path, ref_type) | |
970 | items.append((sym_ref, name, ref_type)) |
|
970 | items.append((sym_ref, name, ref_type)) | |
971 | return items |
|
971 | return items | |
972 |
|
972 | |||
973 | def _symbolic_reference(self, commit_id, name, f_path, ref_type): |
|
973 | def _symbolic_reference(self, commit_id, name, f_path, ref_type): | |
974 | return commit_id |
|
974 | return commit_id | |
975 |
|
975 | |||
976 | def _symbolic_reference_svn(self, commit_id, name, f_path, ref_type): |
|
976 | def _symbolic_reference_svn(self, commit_id, name, f_path, ref_type): | |
977 | return commit_id |
|
977 | return commit_id | |
978 |
|
978 | |||
979 | # NOTE(dan): old code we used in "diff" mode compare |
|
979 | # NOTE(dan): old code we used in "diff" mode compare | |
980 | new_f_path = vcspath.join(name, f_path) |
|
980 | new_f_path = vcspath.join(name, f_path) | |
981 | return f'{new_f_path}@{commit_id}' |
|
981 | return f'{new_f_path}@{commit_id}' | |
982 |
|
982 | |||
983 | def _get_node_history(self, commit_obj, f_path, commits=None): |
|
983 | def _get_node_history(self, commit_obj, f_path, commits=None): | |
984 | """ |
|
984 | """ | |
985 | get commit history for given node |
|
985 | get commit history for given node | |
986 |
|
986 | |||
987 | :param commit_obj: commit to calculate history |
|
987 | :param commit_obj: commit to calculate history | |
988 | :param f_path: path for node to calculate history for |
|
988 | :param f_path: path for node to calculate history for | |
989 | :param commits: if passed, don't calculate history but use the |
|
989 | :param commits: if passed, don't calculate history but use the | |
990 | commits defined in this list |
|
990 | commits defined in this list | |
991 | """ |
|
991 | """ | |
992 | _ = self.request.translate |
|
992 | _ = self.request.translate | |
993 |
|
993 | |||
994 | # calculate history based on tip |
|
994 | # calculate history based on tip | |
995 | tip = self.rhodecode_vcs_repo.get_commit() |
|
995 | tip = self.rhodecode_vcs_repo.get_commit() | |
996 | if commits is None: |
|
996 | if commits is None: | |
997 | pre_load = ["author", "branch"] |
|
997 | pre_load = ["author", "branch"] | |
998 | try: |
|
998 | try: | |
999 | commits = tip.get_path_history(f_path, pre_load=pre_load) |
|
999 | commits = tip.get_path_history(f_path, pre_load=pre_load) | |
1000 | except (NodeDoesNotExistError, CommitError): |
|
1000 | except (NodeDoesNotExistError, CommitError): | |
1001 | # this node is not present at tip! |
|
1001 | # this node is not present at tip! | |
1002 | commits = commit_obj.get_path_history(f_path, pre_load=pre_load) |
|
1002 | commits = commit_obj.get_path_history(f_path, pre_load=pre_load) | |
1003 |
|
1003 | |||
1004 | history = [] |
|
1004 | history = [] | |
1005 | commits_group = ([], _("Changesets")) |
|
1005 | commits_group = ([], _("Changesets")) | |
1006 | for commit in commits: |
|
1006 | for commit in commits: | |
1007 | branch = ' (%s)' % commit.branch if commit.branch else '' |
|
1007 | branch = ' (%s)' % commit.branch if commit.branch else '' | |
1008 | n_desc = f'r{commit.idx}:{commit.short_id}{branch}' |
|
1008 | n_desc = f'r{commit.idx}:{commit.short_id}{branch}' | |
1009 | commits_group[0].append((commit.raw_id, n_desc, 'sha')) |
|
1009 | commits_group[0].append((commit.raw_id, n_desc, 'sha')) | |
1010 | history.append(commits_group) |
|
1010 | history.append(commits_group) | |
1011 |
|
1011 | |||
1012 | symbolic_reference = self._symbolic_reference |
|
1012 | symbolic_reference = self._symbolic_reference | |
1013 |
|
1013 | |||
1014 | if self.rhodecode_vcs_repo.alias == 'svn': |
|
1014 | if self.rhodecode_vcs_repo.alias == 'svn': | |
1015 | adjusted_f_path = RepoFilesView.adjust_file_path_for_svn( |
|
1015 | adjusted_f_path = RepoFilesView.adjust_file_path_for_svn( | |
1016 | f_path, self.rhodecode_vcs_repo) |
|
1016 | f_path, self.rhodecode_vcs_repo) | |
1017 | if adjusted_f_path != f_path: |
|
1017 | if adjusted_f_path != f_path: | |
1018 | log.debug( |
|
1018 | log.debug( | |
1019 | 'Recognized svn tag or branch in file "%s", using svn ' |
|
1019 | 'Recognized svn tag or branch in file "%s", using svn ' | |
1020 | 'specific symbolic references', f_path) |
|
1020 | 'specific symbolic references', f_path) | |
1021 | f_path = adjusted_f_path |
|
1021 | f_path = adjusted_f_path | |
1022 | symbolic_reference = self._symbolic_reference_svn |
|
1022 | symbolic_reference = self._symbolic_reference_svn | |
1023 |
|
1023 | |||
1024 | branches = self._create_references( |
|
1024 | branches = self._create_references( | |
1025 | self.rhodecode_vcs_repo.branches, symbolic_reference, f_path, 'branch') |
|
1025 | self.rhodecode_vcs_repo.branches, symbolic_reference, f_path, 'branch') | |
1026 | branches_group = (branches, _("Branches")) |
|
1026 | branches_group = (branches, _("Branches")) | |
1027 |
|
1027 | |||
1028 | tags = self._create_references( |
|
1028 | tags = self._create_references( | |
1029 | self.rhodecode_vcs_repo.tags, symbolic_reference, f_path, 'tag') |
|
1029 | self.rhodecode_vcs_repo.tags, symbolic_reference, f_path, 'tag') | |
1030 | tags_group = (tags, _("Tags")) |
|
1030 | tags_group = (tags, _("Tags")) | |
1031 |
|
1031 | |||
1032 | history.append(branches_group) |
|
1032 | history.append(branches_group) | |
1033 | history.append(tags_group) |
|
1033 | history.append(tags_group) | |
1034 |
|
1034 | |||
1035 | return history, commits |
|
1035 | return history, commits | |
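_get_node_history returns a list of (items, label) groups: the path's commits first, then branch and tag references, each item being an (id, text, type) tuple. A hedged example of the resulting structure, with made-up ids and names:

    example_history = [
        ([('f3a9c21...', 'r42:f3a9c21 (default)', 'sha')], 'Changesets'),
        ([('a1b2c3d...', 'stable', 'branch')], 'Branches'),
        ([('d4e5f6a...', 'v1.0.0', 'tag')], 'Tags'),
    ]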
1036 |
|
1036 | |||
1037 | @LoginRequired() |
|
1037 | @LoginRequired() | |
1038 | @HasRepoPermissionAnyDecorator( |
|
1038 | @HasRepoPermissionAnyDecorator( | |
1039 | 'repository.read', 'repository.write', 'repository.admin') |
|
1039 | 'repository.read', 'repository.write', 'repository.admin') | |
1040 | def repo_file_history(self): |
|
1040 | def repo_file_history(self): | |
1041 | self.load_default_context() |
|
1041 | self.load_default_context() | |
1042 |
|
1042 | |||
1043 | commit_id, f_path = self._get_commit_and_path() |
|
1043 | commit_id, f_path = self._get_commit_and_path() | |
1044 | commit = self._get_commit_or_redirect(commit_id) |
|
1044 | commit = self._get_commit_or_redirect(commit_id) | |
1045 | file_node = self._get_filenode_or_redirect(commit, f_path) |
|
1045 | file_node = self._get_filenode_or_redirect(commit, f_path) | |
1046 |
|
1046 | |||
1047 | if file_node.is_file(): |
|
1047 | if file_node.is_file(): | |
1048 | file_history, _hist = self._get_node_history(commit, f_path) |
|
1048 | file_history, _hist = self._get_node_history(commit, f_path) | |
1049 |
|
1049 | |||
1050 | res = [] |
|
1050 | res = [] | |
1051 | for section_items, section in file_history: |
|
1051 | for section_items, section in file_history: | |
1052 | items = [] |
|
1052 | items = [] | |
1053 | for obj_id, obj_text, obj_type in section_items: |
|
1053 | for obj_id, obj_text, obj_type in section_items: | |
1054 | at_rev = '' |
|
1054 | at_rev = '' | |
1055 | if obj_type in ['branch', 'bookmark', 'tag']: |
|
1055 | if obj_type in ['branch', 'bookmark', 'tag']: | |
1056 | at_rev = obj_text |
|
1056 | at_rev = obj_text | |
1057 | entry = { |
|
1057 | entry = { | |
1058 | 'id': obj_id, |
|
1058 | 'id': obj_id, | |
1059 | 'text': obj_text, |
|
1059 | 'text': obj_text, | |
1060 | 'type': obj_type, |
|
1060 | 'type': obj_type, | |
1061 | 'at_rev': at_rev |
|
1061 | 'at_rev': at_rev | |
1062 | } |
|
1062 | } | |
1063 |
|
1063 | |||
1064 | items.append(entry) |
|
1064 | items.append(entry) | |
1065 |
|
1065 | |||
1066 | res.append({ |
|
1066 | res.append({ | |
1067 | 'text': section, |
|
1067 | 'text': section, | |
1068 | 'children': items |
|
1068 | 'children': items | |
1069 | }) |
|
1069 | }) | |
1070 |
|
1070 | |||
1071 | data = { |
|
1071 | data = { | |
1072 | 'more': False, |
|
1072 | 'more': False, | |
1073 | 'results': res |
|
1073 | 'results': res | |
1074 | } |
|
1074 | } | |
1075 | return data |
|
1075 | return data | |
1076 |
|
1076 | |||
1077 | log.warning('Cannot fetch history for directory') |
|
1077 | log.warning('Cannot fetch history for directory') | |
1078 | raise HTTPBadRequest() |
|
1078 | raise HTTPBadRequest() | |
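repo_file_history reshapes that grouped history into a select2-friendly payload: one entry per group, its items as children, and an at_rev hint filled in only for branch, bookmark and tag entries. An illustrative example of the returned data (ids and names are made up):

    example_file_history = {
        'more': False,
        'results': [
            {'text': 'Changesets', 'children': [
                {'id': 'f3a9c21...', 'text': 'r42:f3a9c21 (default)', 'type': 'sha', 'at_rev': ''},
            ]},
            {'text': 'Branches', 'children': [
                {'id': 'a1b2c3d...', 'text': 'stable', 'type': 'branch', 'at_rev': 'stable'},
            ]},
        ],
    }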
1079 |
|
1079 | |||
1080 | @LoginRequired() |
|
1080 | @LoginRequired() | |
1081 | @HasRepoPermissionAnyDecorator( |
|
1081 | @HasRepoPermissionAnyDecorator( | |
1082 | 'repository.read', 'repository.write', 'repository.admin') |
|
1082 | 'repository.read', 'repository.write', 'repository.admin') | |
1083 | def repo_file_authors(self): |
|
1083 | def repo_file_authors(self): | |
1084 | c = self.load_default_context() |
|
1084 | c = self.load_default_context() | |
1085 |
|
1085 | |||
1086 | commit_id, f_path = self._get_commit_and_path() |
|
1086 | commit_id, f_path = self._get_commit_and_path() | |
1087 | commit = self._get_commit_or_redirect(commit_id) |
|
1087 | commit = self._get_commit_or_redirect(commit_id) | |
1088 | file_node = self._get_filenode_or_redirect(commit, f_path) |
|
1088 | file_node = self._get_filenode_or_redirect(commit, f_path) | |
1089 |
|
1089 | |||
1090 | if not file_node.is_file(): |
|
1090 | if not file_node.is_file(): | |
1091 | raise HTTPBadRequest() |
|
1091 | raise HTTPBadRequest() | |
1092 |
|
1092 | |||
1093 | c.file_last_commit = file_node.last_commit |
|
1093 | c.file_last_commit = file_node.last_commit | |
1094 | if self.request.GET.get('annotate') == '1': |
|
1094 | if self.request.GET.get('annotate') == '1': | |
1095 | # use _hist from annotation if annotation mode is on |
|
1095 | # use _hist from annotation if annotation mode is on | |
1096 | commit_ids = {x[1] for x in file_node.annotate} |
|
1096 | commit_ids = {x[1] for x in file_node.annotate} | |
1097 | _hist = ( |
|
1097 | _hist = ( | |
1098 | self.rhodecode_vcs_repo.get_commit(commit_id) |
|
1098 | self.rhodecode_vcs_repo.get_commit(commit_id) | |
1099 | for commit_id in commit_ids) |
|
1099 | for commit_id in commit_ids) | |
1100 | else: |
|
1100 | else: | |
1101 | _f_history, _hist = self._get_node_history(commit, f_path) |
|
1101 | _f_history, _hist = self._get_node_history(commit, f_path) | |
1102 | c.file_author = False |
|
1102 | c.file_author = False | |
1103 |
|
1103 | |||
1104 | unique = collections.OrderedDict() |
|
1104 | unique = collections.OrderedDict() | |
1105 | for commit in _hist: |
|
1105 | for commit in _hist: | |
1106 | author = commit.author |
|
1106 | author = commit.author | |
1107 | if author not in unique: |
|
1107 | if author not in unique: | |
1108 | unique[commit.author] = [ |
|
1108 | unique[commit.author] = [ | |
1109 | h.email(author), |
|
1109 | h.email(author), | |
1110 | h.person(author, 'username_or_name_or_email'), |
|
1110 | h.person(author, 'username_or_name_or_email'), | |
1111 | 1 # counter |
|
1111 | 1 # counter | |
1112 | ] |
|
1112 | ] | |
1113 |
|
1113 | |||
1114 | else: |
|
1114 | else: | |
1115 | # increase counter |
|
1115 | # increase counter | |
1116 | unique[commit.author][2] += 1 |
|
1116 | unique[commit.author][2] += 1 | |
1117 |
|
1117 | |||
1118 | c.authors = [val for val in unique.values()] |
|
1118 | c.authors = [val for val in unique.values()] | |
1119 |
|
1119 | |||
1120 | return self._get_template_context(c) |
|
1120 | return self._get_template_context(c) | |
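repo_file_authors collapses the relevant history into one row per author with an occurrence counter, preserving first-seen order. The same aggregation can be sketched with the standard library alone; the email/name extraction done by h.email / h.person is deliberately left trivial here:

    import collections

    def aggregate_authors(commit_authors):
        # map each raw author string to [email, display_name, count]
        unique = collections.OrderedDict()
        for author in commit_authors:
            if author not in unique:
                unique[author] = [author, author, 1]
            else:
                unique[author][2] += 1
        return list(unique.values())

    # aggregate_authors(['a@example.com', 'b@example.com', 'a@example.com'])
    # -> [['a@example.com', 'a@example.com', 2], ['b@example.com', 'b@example.com', 1]]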
1121 |
|
1121 | |||
1122 | @LoginRequired() |
|
1122 | @LoginRequired() | |
1123 | @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin') |
|
1123 | @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin') | |
1124 | def repo_files_check_head(self): |
|
1124 | def repo_files_check_head(self): | |
1125 | self.load_default_context() |
|
1125 | self.load_default_context() | |
1126 |
|
1126 | |||
1127 | commit_id, f_path = self._get_commit_and_path() |
|
1127 | commit_id, f_path = self._get_commit_and_path() | |
1128 | _branch_name, _sha_commit_id, is_head = \ |
|
1128 | _branch_name, _sha_commit_id, is_head = \ | |
1129 | self._is_valid_head(commit_id, self.rhodecode_vcs_repo, |
|
1129 | self._is_valid_head(commit_id, self.rhodecode_vcs_repo, | |
1130 | landing_ref=self.db_repo.landing_ref_name) |
|
1130 | landing_ref=self.db_repo.landing_ref_name) | |
1131 |
|
1131 | |||
1132 | new_path = self.request.POST.get('path') |
|
1132 | new_path = self.request.POST.get('path') | |
1133 | operation = self.request.POST.get('operation') |
|
1133 | operation = self.request.POST.get('operation') | |
1134 | path_exist = '' |
|
1134 | path_exist = '' | |
1135 |
|
1135 | |||
1136 | if new_path and operation in ['create', 'upload']: |
|
1136 | if new_path and operation in ['create', 'upload']: | |
1137 | new_f_path = os.path.join(f_path.lstrip('/'), new_path) |
|
1137 | new_f_path = os.path.join(f_path.lstrip('/'), new_path) | |
1138 | try: |
|
1138 | try: | |
1139 | commit_obj = self.rhodecode_vcs_repo.get_commit(commit_id) |
|
1139 | commit_obj = self.rhodecode_vcs_repo.get_commit(commit_id) | |
1140 | # NOTE(dan): construct whole path without leading / |
|
1140 | # NOTE(dan): construct whole path without leading / | |
1141 | file_node = commit_obj.get_node(new_f_path) |
|
1141 | file_node = commit_obj.get_node(new_f_path) | |
1142 | if file_node is not None: |
|
1142 | if file_node is not None: | |
1143 | path_exist = new_f_path |
|
1143 | path_exist = new_f_path | |
1144 | except EmptyRepositoryError: |
|
1144 | except EmptyRepositoryError: | |
1145 | pass |
|
1145 | pass | |
1146 | except Exception: |
|
1146 | except Exception: | |
1147 | pass |
|
1147 | pass | |
1148 |
|
1148 | |||
1149 | return { |
|
1149 | return { | |
1150 | 'branch': _branch_name, |
|
1150 | 'branch': _branch_name, | |
1151 | 'sha': _sha_commit_id, |
|
1151 | 'sha': _sha_commit_id, | |
1152 | 'is_head': is_head, |
|
1152 | 'is_head': is_head, | |
1153 | 'path_exists': path_exist |
|
1153 | 'path_exists': path_exist | |
1154 | } |
|
1154 | } | |
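repo_files_check_head answers the create/upload dialogs with a small JSON document saying whether the requested commit is a branch head and whether the proposed path already exists. An illustrative response; the values below are made up:

    example_check_head = {
        'branch': 'default',
        'sha': 'f3a9c21...',
        'is_head': True,
        'path_exists': 'docs/index.rst',  # empty string when the path is free
    }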
1155 |
|
1155 | |||
1156 | @LoginRequired() |
|
1156 | @LoginRequired() | |
1157 | @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin') |
|
1157 | @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin') | |
1158 | def repo_files_remove_file(self): |
|
1158 | def repo_files_remove_file(self): | |
1159 | _ = self.request.translate |
|
1159 | _ = self.request.translate | |
1160 | c = self.load_default_context() |
|
1160 | c = self.load_default_context() | |
1161 | commit_id, f_path = self._get_commit_and_path() |
|
1161 | commit_id, f_path = self._get_commit_and_path() | |
1162 |
|
1162 | |||
1163 | self._ensure_not_locked() |
|
1163 | self._ensure_not_locked() | |
1164 | _branch_name, _sha_commit_id, is_head = \ |
|
1164 | _branch_name, _sha_commit_id, is_head = \ | |
1165 | self._is_valid_head(commit_id, self.rhodecode_vcs_repo, |
|
1165 | self._is_valid_head(commit_id, self.rhodecode_vcs_repo, | |
1166 | landing_ref=self.db_repo.landing_ref_name) |
|
1166 | landing_ref=self.db_repo.landing_ref_name) | |
1167 |
|
1167 | |||
1168 | self.forbid_non_head(is_head, f_path) |
|
1168 | self.forbid_non_head(is_head, f_path) | |
1169 | self.check_branch_permission(_branch_name) |
|
1169 | self.check_branch_permission(_branch_name) | |
1170 |
|
1170 | |||
1171 | c.commit = self._get_commit_or_redirect(commit_id) |
|
1171 | c.commit = self._get_commit_or_redirect(commit_id) | |
1172 | c.file = self._get_filenode_or_redirect(c.commit, f_path) |
|
1172 | c.file = self._get_filenode_or_redirect(c.commit, f_path) | |
1173 |
|
1173 | |||
1174 | c.default_message = _( |
|
1174 | c.default_message = _( | |
1175 | 'Deleted file {} via RhodeCode Enterprise').format(f_path) |
|
1175 | 'Deleted file {} via RhodeCode Enterprise').format(f_path) | |
1176 | c.f_path = f_path |
|
1176 | c.f_path = f_path | |
1177 |
|
1177 | |||
1178 | return self._get_template_context(c) |
|
1178 | return self._get_template_context(c) | |
1179 |
|
1179 | |||
1180 | @LoginRequired() |
|
1180 | @LoginRequired() | |
1181 | @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin') |
|
1181 | @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin') | |
1182 | @CSRFRequired() |
|
1182 | @CSRFRequired() | |
1183 | def repo_files_delete_file(self): |
|
1183 | def repo_files_delete_file(self): | |
1184 | _ = self.request.translate |
|
1184 | _ = self.request.translate | |
1185 |
|
1185 | |||
1186 | c = self.load_default_context() |
|
1186 | c = self.load_default_context() | |
1187 | commit_id, f_path = self._get_commit_and_path() |
|
1187 | commit_id, f_path = self._get_commit_and_path() | |
1188 |
|
1188 | |||
1189 | self._ensure_not_locked() |
|
1189 | self._ensure_not_locked() | |
1190 | _branch_name, _sha_commit_id, is_head = \ |
|
1190 | _branch_name, _sha_commit_id, is_head = \ | |
1191 | self._is_valid_head(commit_id, self.rhodecode_vcs_repo, |
|
1191 | self._is_valid_head(commit_id, self.rhodecode_vcs_repo, | |
1192 | landing_ref=self.db_repo.landing_ref_name) |
|
1192 | landing_ref=self.db_repo.landing_ref_name) | |
1193 |
|
1193 | |||
1194 | self.forbid_non_head(is_head, f_path) |
|
1194 | self.forbid_non_head(is_head, f_path) | |
1195 | self.check_branch_permission(_branch_name) |
|
1195 | self.check_branch_permission(_branch_name) | |
1196 |
|
1196 | |||
1197 | c.commit = self._get_commit_or_redirect(commit_id) |
|
1197 | c.commit = self._get_commit_or_redirect(commit_id) | |
1198 | c.file = self._get_filenode_or_redirect(c.commit, f_path) |
|
1198 | c.file = self._get_filenode_or_redirect(c.commit, f_path) | |
1199 |
|
1199 | |||
1200 | c.default_message = _( |
|
1200 | c.default_message = _( | |
1201 | 'Deleted file {} via RhodeCode Enterprise').format(f_path) |
|
1201 | 'Deleted file {} via RhodeCode Enterprise').format(f_path) | |
1202 | c.f_path = f_path |
|
1202 | c.f_path = f_path | |
1203 | node_path = f_path |
|
1203 | node_path = f_path | |
1204 | author = self._rhodecode_db_user.full_contact |
|
1204 | author = self._rhodecode_db_user.full_contact | |
1205 | message = self.request.POST.get('message') or c.default_message |
|
1205 | message = self.request.POST.get('message') or c.default_message | |
1206 | try: |
|
1206 | try: | |
1207 | nodes = { |
|
1207 | nodes = { | |
1208 | safe_bytes(node_path): { |
|
1208 | safe_bytes(node_path): { | |
1209 | 'content': b'' |
|
1209 | 'content': b'' | |
1210 | } |
|
1210 | } | |
1211 | } |
|
1211 | } | |
1212 | ScmModel().delete_nodes( |
|
1212 | ScmModel().delete_nodes( | |
1213 | user=self._rhodecode_db_user.user_id, repo=self.db_repo, |
|
1213 | user=self._rhodecode_db_user.user_id, repo=self.db_repo, | |
1214 | message=message, |
|
1214 | message=message, | |
1215 | nodes=nodes, |
|
1215 | nodes=nodes, | |
1216 | parent_commit=c.commit, |
|
1216 | parent_commit=c.commit, | |
1217 | author=author, |
|
1217 | author=author, | |
1218 | ) |
|
1218 | ) | |
1219 |
|
1219 | |||
1220 | h.flash( |
|
1220 | h.flash( | |
1221 | _('Successfully deleted file `{}`').format( |
|
1221 | _('Successfully deleted file `{}`').format( | |
1222 | h.escape(f_path)), category='success') |
|
1222 | h.escape(f_path)), category='success') | |
1223 | except Exception: |
|
1223 | except Exception: | |
1224 | log.exception('Error during commit operation') |
|
1224 | log.exception('Error during commit operation') | |
1225 | h.flash(_('Error occurred during commit'), category='error') |
|
1225 | h.flash(_('Error occurred during commit'), category='error') | |
1226 | raise HTTPFound( |
|
1226 | raise HTTPFound( | |
1227 | h.route_path('repo_commit', repo_name=self.db_repo_name, |
|
1227 | h.route_path('repo_commit', repo_name=self.db_repo_name, | |
1228 | commit_id='tip')) |
|
1228 | commit_id='tip')) | |
1229 |
|
1229 | |||
1230 | @LoginRequired() |
|
1230 | @LoginRequired() | |
1231 | @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin') |
|
1231 | @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin') | |
1232 | def repo_files_edit_file(self): |
|
1232 | def repo_files_edit_file(self): | |
1233 | _ = self.request.translate |
|
1233 | _ = self.request.translate | |
1234 | c = self.load_default_context() |
|
1234 | c = self.load_default_context() | |
1235 | commit_id, f_path = self._get_commit_and_path() |
|
1235 | commit_id, f_path = self._get_commit_and_path() | |
1236 |
|
1236 | |||
1237 | self._ensure_not_locked() |
|
1237 | self._ensure_not_locked() | |
1238 | _branch_name, _sha_commit_id, is_head = \ |
|
1238 | _branch_name, _sha_commit_id, is_head = \ | |
1239 | self._is_valid_head(commit_id, self.rhodecode_vcs_repo, |
|
1239 | self._is_valid_head(commit_id, self.rhodecode_vcs_repo, | |
1240 | landing_ref=self.db_repo.landing_ref_name) |
|
1240 | landing_ref=self.db_repo.landing_ref_name) | |
1241 |
|
1241 | |||
1242 | self.forbid_non_head(is_head, f_path, commit_id=commit_id) |
|
1242 | self.forbid_non_head(is_head, f_path, commit_id=commit_id) | |
1243 | self.check_branch_permission(_branch_name, commit_id=commit_id) |
|
1243 | self.check_branch_permission(_branch_name, commit_id=commit_id) | |
1244 |
|
1244 | |||
1245 | c.commit = self._get_commit_or_redirect(commit_id) |
|
1245 | c.commit = self._get_commit_or_redirect(commit_id) | |
1246 | c.file = self._get_filenode_or_redirect(c.commit, f_path) |
|
1246 | c.file = self._get_filenode_or_redirect(c.commit, f_path) | |
1247 |
|
1247 | |||
1248 | if c.file.is_binary: |
|
1248 | if c.file.is_binary: | |
1249 | files_url = h.route_path( |
|
1249 | files_url = h.route_path( | |
1250 | 'repo_files', |
|
1250 | 'repo_files', | |
1251 | repo_name=self.db_repo_name, |
|
1251 | repo_name=self.db_repo_name, | |
1252 | commit_id=c.commit.raw_id, f_path=f_path) |
|
1252 | commit_id=c.commit.raw_id, f_path=f_path) | |
1253 | raise HTTPFound(files_url) |
|
1253 | raise HTTPFound(files_url) | |
1254 |
|
1254 | |||
1255 | c.default_message = _('Edited file {} via RhodeCode Enterprise').format(f_path) |
|
1255 | c.default_message = _('Edited file {} via RhodeCode Enterprise').format(f_path) | |
1256 | c.f_path = f_path |
|
1256 | c.f_path = f_path | |
1257 |
|
1257 | |||
1258 | return self._get_template_context(c) |
|
1258 | return self._get_template_context(c) | |
1259 |
|
1259 | |||
1260 | @LoginRequired() |
|
1260 | @LoginRequired() | |
1261 | @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin') |
|
1261 | @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin') | |
1262 | @CSRFRequired() |
|
1262 | @CSRFRequired() | |
1263 | def repo_files_update_file(self): |
|
1263 | def repo_files_update_file(self): | |
1264 | _ = self.request.translate |
|
1264 | _ = self.request.translate | |
1265 | c = self.load_default_context() |
|
1265 | c = self.load_default_context() | |
1266 | commit_id, f_path = self._get_commit_and_path() |
|
1266 | commit_id, f_path = self._get_commit_and_path() | |
1267 |
|
1267 | |||
1268 | self._ensure_not_locked() |
|
1268 | self._ensure_not_locked() | |
1269 |
|
1269 | |||
1270 | c.commit = self._get_commit_or_redirect(commit_id) |
|
1270 | c.commit = self._get_commit_or_redirect(commit_id) | |
1271 | c.file = self._get_filenode_or_redirect(c.commit, f_path) |
|
1271 | c.file = self._get_filenode_or_redirect(c.commit, f_path) | |
1272 |
|
1272 | |||
1273 | if c.file.is_binary: |
|
1273 | if c.file.is_binary: | |
1274 | raise HTTPFound(h.route_path('repo_files', repo_name=self.db_repo_name, |
|
1274 | raise HTTPFound(h.route_path('repo_files', repo_name=self.db_repo_name, | |
1275 | commit_id=c.commit.raw_id, f_path=f_path)) |
|
1275 | commit_id=c.commit.raw_id, f_path=f_path)) | |
1276 |
|
1276 | |||
1277 | _branch_name, _sha_commit_id, is_head = \ |
|
1277 | _branch_name, _sha_commit_id, is_head = \ | |
1278 | self._is_valid_head(commit_id, self.rhodecode_vcs_repo, |
|
1278 | self._is_valid_head(commit_id, self.rhodecode_vcs_repo, | |
1279 | landing_ref=self.db_repo.landing_ref_name) |
|
1279 | landing_ref=self.db_repo.landing_ref_name) | |
1280 |
|
1280 | |||
1281 | self.forbid_non_head(is_head, f_path, commit_id=commit_id) |
|
1281 | self.forbid_non_head(is_head, f_path, commit_id=commit_id) | |
1282 | self.check_branch_permission(_branch_name, commit_id=commit_id) |
|
1282 | self.check_branch_permission(_branch_name, commit_id=commit_id) | |
1283 |
|
1283 | |||
1284 | c.default_message = _('Edited file {} via RhodeCode Enterprise').format(f_path) |
|
1284 | c.default_message = _('Edited file {} via RhodeCode Enterprise').format(f_path) | |
1285 | c.f_path = f_path |
|
1285 | c.f_path = f_path | |
1286 |
|
1286 | |||
1287 | old_content = c.file.str_content |
|
1287 | old_content = c.file.str_content | |
1288 | sl = old_content.splitlines(1) |
|
1288 | sl = old_content.splitlines(1) | |
1289 | first_line = sl[0] if sl else '' |
|
1289 | first_line = sl[0] if sl else '' | |
1290 |
|
1290 | |||
1291 | r_post = self.request.POST |
|
1291 | r_post = self.request.POST | |
1292 | # line endings: 0 - Unix, 1 - Mac, 2 - DOS |
|
1292 | # line endings: 0 - Unix, 1 - Mac, 2 - DOS | |
1293 | line_ending_mode = detect_mode(first_line, 0) |
|
1293 | line_ending_mode = detect_mode(first_line, 0) | |
1294 | content = convert_line_endings(r_post.get('content', ''), line_ending_mode) |
|
1294 | content = convert_line_endings(r_post.get('content', ''), line_ending_mode) | |
1295 |
|
1295 | |||
1296 | message = r_post.get('message') or c.default_message |
|
1296 | message = r_post.get('message') or c.default_message | |
1297 |
|
1297 | |||
1298 | org_node_path = c.file.str_path |
|
1298 | org_node_path = c.file.str_path | |
1299 | filename = r_post['filename'] |
|
1299 | filename = r_post['filename'] | |
1300 |
|
1300 | |||
1301 | root_path = c.file.dir_path |
|
1301 | root_path = c.file.dir_path | |
1302 | pure_path = self.create_pure_path(root_path, filename) |
|
1302 | pure_path = self.create_pure_path(root_path, filename) | |
1303 | node_path = pure_path.as_posix() |
|
1303 | node_path = pure_path.as_posix() | |
1304 |
|
1304 | |||
1305 | default_redirect_url = h.route_path('repo_commit', repo_name=self.db_repo_name, |
|
1305 | default_redirect_url = h.route_path('repo_commit', repo_name=self.db_repo_name, | |
1306 | commit_id=commit_id) |
|
1306 | commit_id=commit_id) | |
1307 | if content == old_content and node_path == org_node_path: |
|
1307 | if content == old_content and node_path == org_node_path: | |
1308 | h.flash(_('No changes detected on {}').format(h.escape(org_node_path)), |
|
1308 | h.flash(_('No changes detected on {}').format(h.escape(org_node_path)), | |
1309 | category='warning') |
|
1309 | category='warning') | |
1310 | raise HTTPFound(default_redirect_url) |
|
1310 | raise HTTPFound(default_redirect_url) | |
1311 |
|
1311 | |||
1312 | try: |
|
1312 | try: | |
1313 | mapping = { |
|
1313 | mapping = { | |
1314 | c.file.bytes_path: { |
|
1314 | c.file.bytes_path: { | |
1315 | 'org_filename': org_node_path, |
|
1315 | 'org_filename': org_node_path, | |
1316 | 'filename': safe_bytes(node_path), |
|
1316 | 'filename': safe_bytes(node_path), | |
1317 | 'content': safe_bytes(content), |
|
1317 | 'content': safe_bytes(content), | |
1318 | 'lexer': '', |
|
1318 | 'lexer': '', | |
1319 | 'op': 'mod', |
|
1319 | 'op': 'mod', | |
1320 | 'mode': c.file.mode |
|
1320 | 'mode': c.file.mode | |
1321 | } |
|
1321 | } | |
1322 | } |
|
1322 | } | |
1323 |
|
1323 | |||
1324 | commit = ScmModel().update_nodes( |
|
1324 | commit = ScmModel().update_nodes( | |
1325 | user=self._rhodecode_db_user.user_id, |
|
1325 | user=self._rhodecode_db_user.user_id, | |
1326 | repo=self.db_repo, |
|
1326 | repo=self.db_repo, | |
1327 | message=message, |
|
1327 | message=message, | |
1328 | nodes=mapping, |
|
1328 | nodes=mapping, | |
1329 | parent_commit=c.commit, |
|
1329 | parent_commit=c.commit, | |
1330 | ) |
|
1330 | ) | |
1331 |
|
1331 | |||
1332 | h.flash(_('Successfully committed changes to file `{}`').format( |
|
1332 | h.flash(_('Successfully committed changes to file `{}`').format( | |
1333 | h.escape(f_path)), category='success') |
|
1333 | h.escape(f_path)), category='success') | |
1334 | default_redirect_url = h.route_path( |
|
1334 | default_redirect_url = h.route_path( | |
1335 | 'repo_commit', repo_name=self.db_repo_name, commit_id=commit.raw_id) |
|
1335 | 'repo_commit', repo_name=self.db_repo_name, commit_id=commit.raw_id) | |
1336 |
|
1336 | |||
1337 | except Exception: |
|
1337 | except Exception: | |
1338 | log.exception('Error occurred during commit') |
|
1338 | log.exception('Error occurred during commit') | |
1339 | h.flash(_('Error occurred during commit'), category='error') |
|
1339 | h.flash(_('Error occurred during commit'), category='error') | |
1340 |
|
1340 | |||
1341 | raise HTTPFound(default_redirect_url) |
|
1341 | raise HTTPFound(default_redirect_url) | |
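repo_files_update_file normalises the posted content to the line-ending style of the file's first line (0 - Unix, 1 - Mac, 2 - DOS, per the comment above) before comparing it with the stored content. A standalone sketch of such detection and conversion, assuming that mode numbering; these are not the actual detect_mode / convert_line_endings helpers:

    def detect_line_ending_mode(first_line, default=0):
        # 0 - Unix (\n), 1 - Mac (\r), 2 - DOS (\r\n)
        if first_line.endswith('\r\n'):
            return 2
        if first_line.endswith('\r'):
            return 1
        if first_line.endswith('\n'):
            return 0
        return default

    def convert_to_line_ending(text, mode):
        ending = {0: '\n', 1: '\r', 2: '\r\n'}[mode]
        # normalise everything to \n first, then expand to the requested style
        return text.replace('\r\n', '\n').replace('\r', '\n').replace('\n', ending)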
1342 |
|
1342 | |||
1343 | @LoginRequired() |
|
1343 | @LoginRequired() | |
1344 | @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin') |
|
1344 | @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin') | |
1345 | def repo_files_add_file(self): |
|
1345 | def repo_files_add_file(self): | |
1346 | _ = self.request.translate |
|
1346 | _ = self.request.translate | |
1347 | c = self.load_default_context() |
|
1347 | c = self.load_default_context() | |
1348 | commit_id, f_path = self._get_commit_and_path() |
|
1348 | commit_id, f_path = self._get_commit_and_path() | |
1349 |
|
1349 | |||
1350 | self._ensure_not_locked() |
|
1350 | self._ensure_not_locked() | |
1351 |
|
1351 | |||
1352 | # Check if we need to use this page to upload binary |
|
1352 | # Check if we need to use this page to upload binary | |
1353 | upload_binary = str2bool(self.request.params.get('upload_binary', False)) |
|
1353 | upload_binary = str2bool(self.request.params.get('upload_binary', False)) | |
1354 |
|
1354 | |||
1355 | c.commit = self._get_commit_or_redirect(commit_id, redirect_after=False) |
|
1355 | c.commit = self._get_commit_or_redirect(commit_id, redirect_after=False) | |
1356 | if c.commit is None: |
|
1356 | if c.commit is None: | |
1357 | c.commit = EmptyCommit(alias=self.rhodecode_vcs_repo.alias) |
|
1357 | c.commit = EmptyCommit(alias=self.rhodecode_vcs_repo.alias) | |
1358 |
|
1358 | |||
1359 | if self.rhodecode_vcs_repo.is_empty(): |
|
1359 | if self.rhodecode_vcs_repo.is_empty(): | |
1360 | # for empty repository we cannot check for current branch, we rely on |
|
1360 | # for empty repository we cannot check for current branch, we rely on | |
1361 | # c.commit.branch instead |
|
1361 | # c.commit.branch instead | |
1362 | _branch_name, _sha_commit_id, is_head = c.commit.branch, '', True |
|
1362 | _branch_name, _sha_commit_id, is_head = c.commit.branch, '', True | |
1363 | else: |
|
1363 | else: | |
1364 | _branch_name, _sha_commit_id, is_head = \ |
|
1364 | _branch_name, _sha_commit_id, is_head = \ | |
1365 | self._is_valid_head(commit_id, self.rhodecode_vcs_repo, |
|
1365 | self._is_valid_head(commit_id, self.rhodecode_vcs_repo, | |
1366 | landing_ref=self.db_repo.landing_ref_name) |
|
1366 | landing_ref=self.db_repo.landing_ref_name) | |
1367 |
|
1367 | |||
1368 | self.forbid_non_head(is_head, f_path, commit_id=commit_id) |
|
1368 | self.forbid_non_head(is_head, f_path, commit_id=commit_id) | |
1369 | self.check_branch_permission(_branch_name, commit_id=commit_id) |
|
1369 | self.check_branch_permission(_branch_name, commit_id=commit_id) | |
1370 |
|
1370 | |||
1371 | c.default_message = (_('Added file via RhodeCode Enterprise')) \ |
|
1371 | c.default_message = (_('Added file via RhodeCode Enterprise')) \ | |
1372 | if not upload_binary else (_('Edited file {} via RhodeCode Enterprise').format(f_path)) |
|
1372 | if not upload_binary else (_('Edited file {} via RhodeCode Enterprise').format(f_path)) | |
1373 | c.f_path = f_path.lstrip('/') # ensure the path is relative (no leading slash) |
|
1373 | c.f_path = f_path.lstrip('/') # ensure the path is relative (no leading slash) | |
1374 | c.replace_binary = upload_binary |
|
1374 | c.replace_binary = upload_binary | |
1375 |
|
1375 | |||
1376 | return self._get_template_context(c) |
|
1376 | return self._get_template_context(c) | |
1377 |
|
1377 | |||
1378 | @LoginRequired() |
|
1378 | @LoginRequired() | |
1379 | @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin') |
|
1379 | @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin') | |
1380 | @CSRFRequired() |
|
1380 | @CSRFRequired() | |
1381 | def repo_files_create_file(self): |
|
1381 | def repo_files_create_file(self): | |
1382 | _ = self.request.translate |
|
1382 | _ = self.request.translate | |
1383 | c = self.load_default_context() |
|
1383 | c = self.load_default_context() | |
1384 | commit_id, f_path = self._get_commit_and_path() |
|
1384 | commit_id, f_path = self._get_commit_and_path() | |
1385 |
|
1385 | |||
1386 | self._ensure_not_locked() |
|
1386 | self._ensure_not_locked() | |
1387 |
|
1387 | |||
1388 | c.commit = self._get_commit_or_redirect(commit_id, redirect_after=False) |
|
1388 | c.commit = self._get_commit_or_redirect(commit_id, redirect_after=False) | |
1389 | if c.commit is None: |
|
1389 | if c.commit is None: | |
1390 | c.commit = EmptyCommit(alias=self.rhodecode_vcs_repo.alias) |
|
1390 | c.commit = EmptyCommit(alias=self.rhodecode_vcs_repo.alias) | |
1391 |
|
1391 | |||
1392 | # calculate redirect URL |
|
1392 | # calculate redirect URL | |
1393 | if self.rhodecode_vcs_repo.is_empty(): |
|
1393 | if self.rhodecode_vcs_repo.is_empty(): | |
1394 | default_redirect_url = h.route_path( |
|
1394 | default_redirect_url = h.route_path( | |
1395 | 'repo_summary', repo_name=self.db_repo_name) |
|
1395 | 'repo_summary', repo_name=self.db_repo_name) | |
1396 | else: |
|
1396 | else: | |
1397 | default_redirect_url = h.route_path( |
|
1397 | default_redirect_url = h.route_path( | |
1398 | 'repo_commit', repo_name=self.db_repo_name, commit_id='tip') |
|
1398 | 'repo_commit', repo_name=self.db_repo_name, commit_id='tip') | |
1399 |
|
1399 | |||
1400 | if self.rhodecode_vcs_repo.is_empty(): |
|
1400 | if self.rhodecode_vcs_repo.is_empty(): | |
1401 | # for empty repository we cannot check for current branch, we rely on |
|
1401 | # for empty repository we cannot check for current branch, we rely on | |
1402 | # c.commit.branch instead |
|
1402 | # c.commit.branch instead | |
1403 | _branch_name, _sha_commit_id, is_head = c.commit.branch, '', True |
|
1403 | _branch_name, _sha_commit_id, is_head = c.commit.branch, '', True | |
1404 | else: |
|
1404 | else: | |
1405 | _branch_name, _sha_commit_id, is_head = \ |
|
1405 | _branch_name, _sha_commit_id, is_head = \ | |
1406 | self._is_valid_head(commit_id, self.rhodecode_vcs_repo, |
|
1406 | self._is_valid_head(commit_id, self.rhodecode_vcs_repo, | |
1407 | landing_ref=self.db_repo.landing_ref_name) |
|
1407 | landing_ref=self.db_repo.landing_ref_name) | |
1408 |
|
1408 | |||
1409 | self.forbid_non_head(is_head, f_path, commit_id=commit_id) |
|
1409 | self.forbid_non_head(is_head, f_path, commit_id=commit_id) | |
1410 | self.check_branch_permission(_branch_name, commit_id=commit_id) |
|
1410 | self.check_branch_permission(_branch_name, commit_id=commit_id) | |
1411 |
|
1411 | |||
1412 | c.default_message = (_('Added file via RhodeCode Enterprise')) |
|
1412 | c.default_message = (_('Added file via RhodeCode Enterprise')) | |
1413 | c.f_path = f_path |
|
1413 | c.f_path = f_path | |
1414 |
|
1414 | |||
1415 | r_post = self.request.POST |
|
1415 | r_post = self.request.POST | |
1416 | message = r_post.get('message') or c.default_message |
|
1416 | message = r_post.get('message') or c.default_message | |
1417 | filename = r_post.get('filename') |
|
1417 | filename = r_post.get('filename') | |
1418 | unix_mode = 0 |
|
1418 | unix_mode = 0 | |
1419 |
|
1419 | |||
1420 | if not filename: |
|
1420 | if not filename: | |
1421 | # If there's no commit, redirect to repo summary |
|
1421 | # If there's no commit, redirect to repo summary | |
1422 | if type(c.commit) is EmptyCommit: |
|
1422 | if type(c.commit) is EmptyCommit: | |
1423 | redirect_url = h.route_path( |
|
1423 | redirect_url = h.route_path( | |
1424 | 'repo_summary', repo_name=self.db_repo_name) |
|
1424 | 'repo_summary', repo_name=self.db_repo_name) | |
1425 | else: |
|
1425 | else: | |
1426 | redirect_url = default_redirect_url |
|
1426 | redirect_url = default_redirect_url | |
1427 | h.flash(_('No filename specified'), category='warning') |
|
1427 | h.flash(_('No filename specified'), category='warning') | |
1428 | raise HTTPFound(redirect_url) |
|
1428 | raise HTTPFound(redirect_url) | |
1429 |
|
1429 | |||
1430 | root_path = f_path |
|
1430 | root_path = f_path | |
1431 | pure_path = self.create_pure_path(root_path, filename) |
|
1431 | pure_path = self.create_pure_path(root_path, filename) | |
1432 | node_path = pure_path.as_posix().lstrip('/') |
|
1432 | node_path = pure_path.as_posix().lstrip('/') | |
1433 |
|
1433 | |||
1434 | author = self._rhodecode_db_user.full_contact |
|
1434 | author = self._rhodecode_db_user.full_contact | |
1435 | content = convert_line_endings(r_post.get('content', ''), unix_mode) |
|
1435 | content = convert_line_endings(r_post.get('content', ''), unix_mode) | |
1436 | nodes = { |
|
1436 | nodes = { | |
1437 | safe_bytes(node_path): { |
|
1437 | safe_bytes(node_path): { | |
1438 | 'content': safe_bytes(content) |
|
1438 | 'content': safe_bytes(content) | |
1439 | } |
|
1439 | } | |
1440 | } |
|
1440 | } | |
1441 |
|
1441 | |||
1442 | try: |
|
1442 | try: | |
1443 |
|
1443 | |||
1444 | commit = ScmModel().create_nodes( |
|
1444 | commit = ScmModel().create_nodes( | |
1445 | user=self._rhodecode_db_user.user_id, |
|
1445 | user=self._rhodecode_db_user.user_id, | |
1446 | repo=self.db_repo, |
|
1446 | repo=self.db_repo, | |
1447 | message=message, |
|
1447 | message=message, | |
1448 | nodes=nodes, |
|
1448 | nodes=nodes, | |
1449 | parent_commit=c.commit, |
|
1449 | parent_commit=c.commit, | |
1450 | author=author, |
|
1450 | author=author, | |
1451 | ) |
|
1451 | ) | |
1452 |
|
1452 | |||
1453 | h.flash(_('Successfully committed new file `{}`').format( |
|
1453 | h.flash(_('Successfully committed new file `{}`').format( | |
1454 | h.escape(node_path)), category='success') |
|
1454 | h.escape(node_path)), category='success') | |
1455 |
|
1455 | |||
1456 | default_redirect_url = h.route_path( |
|
1456 | default_redirect_url = h.route_path( | |
1457 | 'repo_commit', repo_name=self.db_repo_name, commit_id=commit.raw_id) |
|
1457 | 'repo_commit', repo_name=self.db_repo_name, commit_id=commit.raw_id) | |
1458 |
|
1458 | |||
1459 | except NonRelativePathError: |
|
1459 | except NonRelativePathError: | |
1460 | log.exception('Non Relative path found') |
|
1460 | log.exception('Non Relative path found') | |
1461 | h.flash(_('The location specified must be a relative path and must not ' |
|
1461 | h.flash(_('The location specified must be a relative path and must not ' | |
1462 | 'contain .. in the path'), category='warning') |
|
1462 | 'contain .. in the path'), category='warning') | |
1463 | raise HTTPFound(default_redirect_url) |
|
1463 | raise HTTPFound(default_redirect_url) | |
1464 | except (NodeError, NodeAlreadyExistsError) as e: |
|
1464 | except (NodeError, NodeAlreadyExistsError) as e: | |
1465 | h.flash(h.escape(safe_str(e)), category='error') |
|
1465 | h.flash(h.escape(safe_str(e)), category='error') | |
1466 | except Exception: |
|
1466 | except Exception: | |
1467 | log.exception('Error occurred during commit') |
|
1467 | log.exception('Error occurred during commit') | |
1468 | h.flash(_('Error occurred during commit'), category='error') |
|
1468 | h.flash(_('Error occurred during commit'), category='error') | |
1469 |
|
1469 | |||
1470 | raise HTTPFound(default_redirect_url) |
|
1470 | raise HTTPFound(default_redirect_url) | |
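For reference, the `nodes` mapping handed to ScmModel().create_nodes() in the view above keeps byte-string paths as keys and a small dict with the byte content as values. A minimal hedged sketch (the path and content are made-up examples, not values from this changeset):

    nodes = {
        b'docs/readme.rst': {
            'content': b'Example file content\n',
        }
    }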
1471 |
|
1471 | |||
1472 | @LoginRequired() |
|
1472 | @LoginRequired() | |
1473 | @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin') |
|
1473 | @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin') | |
1474 | @CSRFRequired() |
|
1474 | @CSRFRequired() | |
1475 | def repo_files_upload_file(self): |
|
1475 | def repo_files_upload_file(self): | |
1476 | _ = self.request.translate |
|
1476 | _ = self.request.translate | |
1477 | c = self.load_default_context() |
|
1477 | c = self.load_default_context() | |
1478 | commit_id, f_path = self._get_commit_and_path() |
|
1478 | commit_id, f_path = self._get_commit_and_path() | |
1479 |
|
1479 | |||
1480 | self._ensure_not_locked() |
|
1480 | self._ensure_not_locked() | |
1481 |
|
1481 | |||
1482 | c.commit = self._get_commit_or_redirect(commit_id, redirect_after=False) |
|
1482 | c.commit = self._get_commit_or_redirect(commit_id, redirect_after=False) | |
1483 | if c.commit is None: |
|
1483 | if c.commit is None: | |
1484 | c.commit = EmptyCommit(alias=self.rhodecode_vcs_repo.alias) |
|
1484 | c.commit = EmptyCommit(alias=self.rhodecode_vcs_repo.alias) | |
1485 |
|
1485 | |||
1486 | # calculate redirect URL |
|
1486 | # calculate redirect URL | |
1487 | if self.rhodecode_vcs_repo.is_empty(): |
|
1487 | if self.rhodecode_vcs_repo.is_empty(): | |
1488 | default_redirect_url = h.route_path( |
|
1488 | default_redirect_url = h.route_path( | |
1489 | 'repo_summary', repo_name=self.db_repo_name) |
|
1489 | 'repo_summary', repo_name=self.db_repo_name) | |
1490 | else: |
|
1490 | else: | |
1491 | default_redirect_url = h.route_path( |
|
1491 | default_redirect_url = h.route_path( | |
1492 | 'repo_commit', repo_name=self.db_repo_name, commit_id='tip') |
|
1492 | 'repo_commit', repo_name=self.db_repo_name, commit_id='tip') | |
1493 |
|
1493 | |||
1494 | if self.rhodecode_vcs_repo.is_empty(): |
|
1494 | if self.rhodecode_vcs_repo.is_empty(): | |
1495 | # for empty repository we cannot check for current branch, we rely on |
|
1495 | # for empty repository we cannot check for current branch, we rely on | |
1496 | # c.commit.branch instead |
|
1496 | # c.commit.branch instead | |
1497 | _branch_name, _sha_commit_id, is_head = c.commit.branch, '', True |
|
1497 | _branch_name, _sha_commit_id, is_head = c.commit.branch, '', True | |
1498 | else: |
|
1498 | else: | |
1499 | _branch_name, _sha_commit_id, is_head = \ |
|
1499 | _branch_name, _sha_commit_id, is_head = \ | |
1500 | self._is_valid_head(commit_id, self.rhodecode_vcs_repo, |
|
1500 | self._is_valid_head(commit_id, self.rhodecode_vcs_repo, | |
1501 | landing_ref=self.db_repo.landing_ref_name) |
|
1501 | landing_ref=self.db_repo.landing_ref_name) | |
1502 |
|
1502 | |||
1503 | error = self.forbid_non_head(is_head, f_path, json_mode=True) |
|
1503 | error = self.forbid_non_head(is_head, f_path, json_mode=True) | |
1504 | if error: |
|
1504 | if error: | |
1505 | return { |
|
1505 | return { | |
1506 | 'error': error, |
|
1506 | 'error': error, | |
1507 | 'redirect_url': default_redirect_url |
|
1507 | 'redirect_url': default_redirect_url | |
1508 | } |
|
1508 | } | |
1509 | error = self.check_branch_permission(_branch_name, json_mode=True) |
|
1509 | error = self.check_branch_permission(_branch_name, json_mode=True) | |
1510 | if error: |
|
1510 | if error: | |
1511 | return { |
|
1511 | return { | |
1512 | 'error': error, |
|
1512 | 'error': error, | |
1513 | 'redirect_url': default_redirect_url |
|
1513 | 'redirect_url': default_redirect_url | |
1514 | } |
|
1514 | } | |
1515 |
|
1515 | |||
1516 | c.default_message = (_('Added file via RhodeCode Enterprise')) |
|
1516 | c.default_message = (_('Added file via RhodeCode Enterprise')) | |
1517 | c.f_path = f_path |
|
1517 | c.f_path = f_path | |
1518 |
|
1518 | |||
1519 | r_post = self.request.POST |
|
1519 | r_post = self.request.POST | |
1520 |
|
1520 | |||
1521 | message = c.default_message |
|
1521 | message = c.default_message | |
1522 | user_message = r_post.getall('message') |
|
1522 | user_message = r_post.getall('message') | |
1523 | if isinstance(user_message, list) and user_message: |
|
1523 | if isinstance(user_message, list) and user_message: | |
1524 | # we take the first from duplicated results if it's not empty |
|
1524 | # we take the first from duplicated results if it's not empty | |
1525 | message = user_message[0] if user_message[0] else message |
|
1525 | message = user_message[0] if user_message[0] else message | |
1526 |
|
1526 | |||
1527 | nodes = {} |
|
1527 | nodes = {} | |
1528 |
|
1528 | |||
1529 | for file_obj in r_post.getall('files_upload') or []: |
|
1529 | for file_obj in r_post.getall('files_upload') or []: | |
1530 | content = file_obj.file |
|
1530 | content = file_obj.file | |
1531 | filename = file_obj.filename |
|
1531 | filename = file_obj.filename | |
1532 |
|
1532 | |||
1533 | root_path = f_path |
|
1533 | root_path = f_path | |
1534 | pure_path = self.create_pure_path(root_path, filename) |
|
1534 | pure_path = self.create_pure_path(root_path, filename) | |
1535 | node_path = pure_path.as_posix().lstrip('/') |
|
1535 | node_path = pure_path.as_posix().lstrip('/') | |
1536 |
|
1536 | |||
1537 | nodes[safe_bytes(node_path)] = { |
|
1537 | nodes[safe_bytes(node_path)] = { | |
1538 | 'content': content |
|
1538 | 'content': content | |
1539 | } |
|
1539 | } | |
1540 |
|
1540 | |||
1541 | if not nodes: |
|
1541 | if not nodes: | |
1542 | error = 'missing files' |
|
1542 | error = 'missing files' | |
1543 | return { |
|
1543 | return { | |
1544 | 'error': error, |
|
1544 | 'error': error, | |
1545 | 'redirect_url': default_redirect_url |
|
1545 | 'redirect_url': default_redirect_url | |
1546 | } |
|
1546 | } | |
1547 |
|
1547 | |||
1548 | author = self._rhodecode_db_user.full_contact |
|
1548 | author = self._rhodecode_db_user.full_contact | |
1549 |
|
1549 | |||
1550 | try: |
|
1550 | try: | |
1551 | commit = ScmModel().create_nodes( |
|
1551 | commit = ScmModel().create_nodes( | |
1552 | user=self._rhodecode_db_user.user_id, |
|
1552 | user=self._rhodecode_db_user.user_id, | |
1553 | repo=self.db_repo, |
|
1553 | repo=self.db_repo, | |
1554 | message=message, |
|
1554 | message=message, | |
1555 | nodes=nodes, |
|
1555 | nodes=nodes, | |
1556 | parent_commit=c.commit, |
|
1556 | parent_commit=c.commit, | |
1557 | author=author, |
|
1557 | author=author, | |
1558 | ) |
|
1558 | ) | |
1559 | if len(nodes) == 1: |
|
1559 | if len(nodes) == 1: | |
1560 | flash_message = _('Successfully committed 1 new file') |
|
1560 | flash_message = _('Successfully committed 1 new file') | |
1561 | else: |
|
1561 | else: | |
1562 | flash_message = _('Successfully committed {} new files').format(len(nodes)) |
|
1562 | flash_message = _('Successfully committed {} new files').format(len(nodes)) | |
1563 |
|
1563 | |||
1564 | h.flash(flash_message, category='success') |
|
1564 | h.flash(flash_message, category='success') | |
1565 |
|
1565 | |||
1566 | default_redirect_url = h.route_path( |
|
1566 | default_redirect_url = h.route_path( | |
1567 | 'repo_commit', repo_name=self.db_repo_name, commit_id=commit.raw_id) |
|
1567 | 'repo_commit', repo_name=self.db_repo_name, commit_id=commit.raw_id) | |
1568 |
|
1568 | |||
1569 | except NonRelativePathError: |
|
1569 | except NonRelativePathError: | |
1570 | log.exception('Non Relative path found') |
|
1570 | log.exception('Non Relative path found') | |
1571 | error = _('The location specified must be a relative path and must not ' |
|
1571 | error = _('The location specified must be a relative path and must not ' | |
1572 | 'contain .. in the path') |
|
1572 | 'contain .. in the path') | |
1573 | h.flash(error, category='warning') |
|
1573 | h.flash(error, category='warning') | |
1574 |
|
1574 | |||
1575 | return { |
|
1575 | return { | |
1576 | 'error': error, |
|
1576 | 'error': error, | |
1577 | 'redirect_url': default_redirect_url |
|
1577 | 'redirect_url': default_redirect_url | |
1578 | } |
|
1578 | } | |
1579 | except (NodeError, NodeAlreadyExistsError) as e: |
|
1579 | except (NodeError, NodeAlreadyExistsError) as e: | |
1580 | error = h.escape(e) |
|
1580 | error = h.escape(e) | |
1581 | h.flash(error, category='error') |
|
1581 | h.flash(error, category='error') | |
1582 |
|
1582 | |||
1583 | return { |
|
1583 | return { | |
1584 | 'error': error, |
|
1584 | 'error': error, | |
1585 | 'redirect_url': default_redirect_url |
|
1585 | 'redirect_url': default_redirect_url | |
1586 | } |
|
1586 | } | |
1587 | except Exception: |
|
1587 | except Exception: | |
1588 | log.exception('Error occurred during commit') |
|
1588 | log.exception('Error occurred during commit') | |
1589 | error = _('Error occurred during commit') |
|
1589 | error = _('Error occurred during commit') | |
1590 | h.flash(error, category='error') |
|
1590 | h.flash(error, category='error') | |
1591 | return { |
|
1591 | return { | |
1592 | 'error': error, |
|
1592 | 'error': error, | |
1593 | 'redirect_url': default_redirect_url |
|
1593 | 'redirect_url': default_redirect_url | |
1594 | } |
|
1594 | } | |
1595 |
|
1595 | |||
1596 | return { |
|
1596 | return { | |
1597 | 'error': None, |
|
1597 | 'error': None, | |
1598 | 'redirect_url': default_redirect_url |
|
1598 | 'redirect_url': default_redirect_url | |
1599 | } |
|
1599 | } | |
1600 |
|
1600 | |||
1601 | @LoginRequired() |
|
1601 | @LoginRequired() | |
1602 | @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin') |
|
1602 | @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin') | |
1603 | @CSRFRequired() |
|
1603 | @CSRFRequired() | |
1604 | def repo_files_replace_file(self): |
|
1604 | def repo_files_replace_file(self): | |
1605 | _ = self.request.translate |
|
1605 | _ = self.request.translate | |
1606 | c = self.load_default_context() |
|
1606 | c = self.load_default_context() | |
1607 | commit_id, f_path = self._get_commit_and_path() |
|
1607 | commit_id, f_path = self._get_commit_and_path() | |
1608 |
|
1608 | |||
1609 | self._ensure_not_locked() |
|
1609 | self._ensure_not_locked() | |
1610 |
|
1610 | |||
1611 | c.commit = self._get_commit_or_redirect(commit_id, redirect_after=False) |
|
1611 | c.commit = self._get_commit_or_redirect(commit_id, redirect_after=False) | |
1612 | if c.commit is None: |
|
1612 | if c.commit is None: | |
1613 | c.commit = EmptyCommit(alias=self.rhodecode_vcs_repo.alias) |
|
1613 | c.commit = EmptyCommit(alias=self.rhodecode_vcs_repo.alias) | |
1614 |
|
1614 | |||
1615 | if self.rhodecode_vcs_repo.is_empty(): |
|
1615 | if self.rhodecode_vcs_repo.is_empty(): | |
1616 | default_redirect_url = h.route_path( |
|
1616 | default_redirect_url = h.route_path( | |
1617 | 'repo_summary', repo_name=self.db_repo_name) |
|
1617 | 'repo_summary', repo_name=self.db_repo_name) | |
1618 | else: |
|
1618 | else: | |
1619 | default_redirect_url = h.route_path( |
|
1619 | default_redirect_url = h.route_path( | |
1620 | 'repo_commit', repo_name=self.db_repo_name, commit_id='tip') |
|
1620 | 'repo_commit', repo_name=self.db_repo_name, commit_id='tip') | |
1621 |
|
1621 | |||
1622 | if self.rhodecode_vcs_repo.is_empty(): |
|
1622 | if self.rhodecode_vcs_repo.is_empty(): | |
1623 | # for empty repository we cannot check for current branch, we rely on |
|
1623 | # for empty repository we cannot check for current branch, we rely on | |
1624 | # c.commit.branch instead |
|
1624 | # c.commit.branch instead | |
1625 | _branch_name, _sha_commit_id, is_head = c.commit.branch, '', True |
|
1625 | _branch_name, _sha_commit_id, is_head = c.commit.branch, '', True | |
1626 | else: |
|
1626 | else: | |
1627 | _branch_name, _sha_commit_id, is_head = \ |
|
1627 | _branch_name, _sha_commit_id, is_head = \ | |
1628 | self._is_valid_head(commit_id, self.rhodecode_vcs_repo, |
|
1628 | self._is_valid_head(commit_id, self.rhodecode_vcs_repo, | |
1629 | landing_ref=self.db_repo.landing_ref_name) |
|
1629 | landing_ref=self.db_repo.landing_ref_name) | |
1630 |
|
1630 | |||
1631 | error = self.forbid_non_head(is_head, f_path, json_mode=True) |
|
1631 | error = self.forbid_non_head(is_head, f_path, json_mode=True) | |
1632 | if error: |
|
1632 | if error: | |
1633 | return { |
|
1633 | return { | |
1634 | 'error': error, |
|
1634 | 'error': error, | |
1635 | 'redirect_url': default_redirect_url |
|
1635 | 'redirect_url': default_redirect_url | |
1636 | } |
|
1636 | } | |
1637 | error = self.check_branch_permission(_branch_name, json_mode=True) |
|
1637 | error = self.check_branch_permission(_branch_name, json_mode=True) | |
1638 | if error: |
|
1638 | if error: | |
1639 | return { |
|
1639 | return { | |
1640 | 'error': error, |
|
1640 | 'error': error, | |
1641 | 'redirect_url': default_redirect_url |
|
1641 | 'redirect_url': default_redirect_url | |
1642 | } |
|
1642 | } | |
1643 |
|
1643 | |||
1644 | c.default_message = (_('Edited file {} via RhodeCode Enterprise').format(f_path)) |
|
1644 | c.default_message = (_('Edited file {} via RhodeCode Enterprise').format(f_path)) | |
1645 | c.f_path = f_path |
|
1645 | c.f_path = f_path | |
1646 |
|
1646 | |||
1647 | r_post = self.request.POST |
|
1647 | r_post = self.request.POST | |
1648 |
|
1648 | |||
1649 | message = c.default_message |
|
1649 | message = c.default_message | |
1650 | user_message = r_post.getall('message') |
|
1650 | user_message = r_post.getall('message') | |
1651 | if isinstance(user_message, list) and user_message: |
|
1651 | if isinstance(user_message, list) and user_message: | |
1652 | # we take the first from duplicated results if it's not empty |
|
1652 | # we take the first from duplicated results if it's not empty | |
1653 | message = user_message[0] if user_message[0] else message |
|
1653 | message = user_message[0] if user_message[0] else message | |
1654 |
|
1654 | |||
1655 | data_for_replacement = r_post.getall('files_upload') or [] |
|
1655 | data_for_replacement = r_post.getall('files_upload') or [] | |
1656 | if (objects_count := len(data_for_replacement)) > 1: |
|
1656 | if (objects_count := len(data_for_replacement)) > 1: | |
1657 | return { |
|
1657 | return { | |
1658 | 'error': 'too many files for replacement', |
|
1658 | 'error': 'too many files for replacement', | |
1659 | 'redirect_url': default_redirect_url |
|
1659 | 'redirect_url': default_redirect_url | |
1660 | } |
|
1660 | } | |
1661 | elif not objects_count: |
|
1661 | elif not objects_count: | |
1662 | return { |
|
1662 | return { | |
1663 | 'error': 'missing files', |
|
1663 | 'error': 'missing files', | |
1664 | 'redirect_url': default_redirect_url |
|
1664 | 'redirect_url': default_redirect_url | |
1665 | } |
|
1665 | } | |
1666 |
|
1666 | |||
1667 | content = data_for_replacement[0].file |
|
1667 | content = data_for_replacement[0].file | |
1668 | retrieved_filename = data_for_replacement[0].filename |
|
1668 | retrieved_filename = data_for_replacement[0].filename | |
1669 |
|
1669 | |||
1670 | if retrieved_filename.split('.')[-1] != f_path.split('.')[-1]: |
|
1670 | if retrieved_filename.split('.')[-1] != f_path.split('.')[-1]: | |
1671 | return { |
|
1671 | return { | |
1672 | 'error': 'file extension of uploaded file doesn\'t match the original file\'s extension', |
|
1672 | 'error': 'file extension of uploaded file doesn\'t match the original file\'s extension', | |
1673 | 'redirect_url': default_redirect_url |
|
1673 | 'redirect_url': default_redirect_url | |
1674 | } |
|
1674 | } | |
1675 |
|
1675 | |||
1676 | author = self._rhodecode_db_user.full_contact |
|
1676 | author = self._rhodecode_db_user.full_contact | |
1677 |
|
1677 | |||
1678 | try: |
|
1678 | try: | |
1679 | commit = ScmModel().update_binary_node( |
|
1679 | commit = ScmModel().update_binary_node( | |
1680 | user=self._rhodecode_db_user.user_id, |
|
1680 | user=self._rhodecode_db_user.user_id, | |
1681 | repo=self.db_repo, |
|
1681 | repo=self.db_repo, | |
1682 | message=message, |
|
1682 | message=message, | |
1683 | node={ |
|
1683 | node={ | |
1684 | 'content': content, |
|
1684 | 'content': content, | |
1685 | 'file_path': f_path.encode(), |
|
1685 | 'file_path': f_path.encode(), | |
1686 | }, |
|
1686 | }, | |
1687 | parent_commit=c.commit, |
|
1687 | parent_commit=c.commit, | |
1688 | author=author, |
|
1688 | author=author, | |
1689 | ) |
|
1689 | ) | |
1690 |
|
1690 | |||
1691 | h.flash(_('Successfully committed 1 new file'), category='success') |
|
1691 | h.flash(_('Successfully committed 1 new file'), category='success') | |
1692 |
|
1692 | |||
1693 | default_redirect_url = h.route_path( |
|
1693 | default_redirect_url = h.route_path( | |
1694 | 'repo_commit', repo_name=self.db_repo_name, commit_id=commit.raw_id) |
|
1694 | 'repo_commit', repo_name=self.db_repo_name, commit_id=commit.raw_id) | |
1695 |
|
1695 | |||
1696 | except (NodeError, NodeAlreadyExistsError) as e: |
|
1696 | except (NodeError, NodeAlreadyExistsError) as e: | |
1697 | error = h.escape(e) |
|
1697 | error = h.escape(e) | |
1698 | h.flash(error, category='error') |
|
1698 | h.flash(error, category='error') | |
1699 |
|
1699 | |||
1700 | return { |
|
1700 | return { | |
1701 | 'error': error, |
|
1701 | 'error': error, | |
1702 | 'redirect_url': default_redirect_url |
|
1702 | 'redirect_url': default_redirect_url | |
1703 | } |
|
1703 | } | |
1704 | except Exception: |
|
1704 | except Exception: | |
1705 | log.exception('Error occurred during commit') |
|
1705 | log.exception('Error occurred during commit') | |
1706 | error = _('Error occurred during commit') |
|
1706 | error = _('Error occurred during commit') | |
1707 | h.flash(error, category='error') |
|
1707 | h.flash(error, category='error') | |
1708 | return { |
|
1708 | return { | |
1709 | 'error': error, |
|
1709 | 'error': error, | |
1710 | 'redirect_url': default_redirect_url |
|
1710 | 'redirect_url': default_redirect_url | |
1711 | } |
|
1711 | } | |
1712 |
|
1712 | |||
1713 | return { |
|
1713 | return { | |
1714 | 'error': None, |
|
1714 | 'error': None, | |
1715 | 'redirect_url': default_redirect_url |
|
1715 | 'redirect_url': default_redirect_url | |
1716 | } |
|
1716 | } |
@@ -1,449 +1,456 b'' | |||||
1 | # Copyright (C) 2015-2024 RhodeCode GmbH |
|
1 | # Copyright (C) 2015-2024 RhodeCode GmbH | |
2 | # |
|
2 | # | |
3 | # This program is free software: you can redistribute it and/or modify |
|
3 | # This program is free software: you can redistribute it and/or modify | |
4 | # it under the terms of the GNU Affero General Public License, version 3 |
|
4 | # it under the terms of the GNU Affero General Public License, version 3 | |
5 | # (only), as published by the Free Software Foundation. |
|
5 | # (only), as published by the Free Software Foundation. | |
6 | # |
|
6 | # | |
7 | # This program is distributed in the hope that it will be useful, |
|
7 | # This program is distributed in the hope that it will be useful, | |
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
10 | # GNU General Public License for more details. |
|
10 | # GNU General Public License for more details. | |
11 | # |
|
11 | # | |
12 | # You should have received a copy of the GNU Affero General Public License |
|
12 | # You should have received a copy of the GNU Affero General Public License | |
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
14 | # |
|
14 | # | |
15 | # This program is dual-licensed. If you wish to learn more about the |
|
15 | # This program is dual-licensed. If you wish to learn more about the | |
16 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
16 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
17 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
17 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
18 |
|
18 | |||
19 | import codecs |
|
19 | import codecs | |
20 | import contextlib |
|
20 | import contextlib | |
21 | import functools |
|
21 | import functools | |
22 | import os |
|
22 | import os | |
23 | import logging |
|
23 | import logging | |
24 | import time |
|
24 | import time | |
25 | import typing |
|
25 | import typing | |
26 | import zlib |
|
26 | import zlib | |
27 | import sqlite3 |
|
27 | import sqlite3 | |
28 |
|
28 | |||
29 | from ...ext_json import json |
|
29 | from ...ext_json import json | |
30 | from .lock import GenerationLock |
|
30 | from .lock import GenerationLock | |
31 | from .utils import format_size |
|
31 | from .utils import format_size | |
32 |
|
32 | |||
33 | log = logging.getLogger(__name__) |
|
33 | log = logging.getLogger(__name__) | |
34 |
|
34 | |||
35 | cache_meta = None |
|
35 | cache_meta = None | |
36 |
|
36 | |||
37 | UNKNOWN = -241 |
|
37 | UNKNOWN = -241 | |
38 | NO_VAL = -917 |
|
38 | NO_VAL = -917 | |
39 |
|
39 | |||
40 | MODE_BINARY = 'BINARY' |
|
40 | MODE_BINARY = 'BINARY' | |
41 |
|
41 | |||
42 |
|
42 | |||
43 | EVICTION_POLICY = { |
|
43 | EVICTION_POLICY = { | |
44 | 'none': { |
|
44 | 'none': { | |
45 | 'evict': None, |
|
45 | 'evict': None, | |
46 | }, |
|
46 | }, | |
47 | 'least-recently-stored': { |
|
47 | 'least-recently-stored': { | |
48 | 'evict': 'SELECT {fields} FROM archive_cache ORDER BY store_time', |
|
48 | 'evict': 'SELECT {fields} FROM archive_cache ORDER BY store_time', | |
49 | }, |
|
49 | }, | |
50 | 'least-recently-used': { |
|
50 | 'least-recently-used': { | |
51 | 'evict': 'SELECT {fields} FROM archive_cache ORDER BY access_time', |
|
51 | 'evict': 'SELECT {fields} FROM archive_cache ORDER BY access_time', | |
52 | }, |
|
52 | }, | |
53 | 'least-frequently-used': { |
|
53 | 'least-frequently-used': { | |
54 | 'evict': 'SELECT {fields} FROM archive_cache ORDER BY access_count', |
|
54 | 'evict': 'SELECT {fields} FROM archive_cache ORDER BY access_count', | |
55 | }, |
|
55 | }, | |
56 | } |
|
56 | } | |
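The EVICTION_POLICY mapping above only stores query templates; the `{fields}` placeholder is filled in later by FanoutCache.evict(). A minimal sketch of how one entry becomes a concrete query (the chosen policy and field list mirror what evict() uses further down; nothing here touches a real cache):

    policy = 'least-recently-used'
    template = EVICTION_POLICY[policy]['evict']
    if template is not None:
        query = template.format(fields='key_file_path, full_path, size')
        # -> SELECT key_file_path, full_path, size FROM archive_cache ORDER BY access_time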
57 |
|
57 | |||
58 |
|
58 | |||
59 | class DB: |
|
59 | class DB: | |
60 |
|
60 | |||
61 | def __init__(self): |
|
61 | def __init__(self): | |
62 | self.connection = sqlite3.connect(':memory:') |
|
62 | self.connection = sqlite3.connect(':memory:') | |
63 | self._init_db() |
|
63 | self._init_db() | |
64 |
|
64 | |||
65 | def _init_db(self): |
|
65 | def _init_db(self): | |
66 | qry = ''' |
|
66 | qry = ''' | |
67 | CREATE TABLE IF NOT EXISTS archive_cache ( |
|
67 | CREATE TABLE IF NOT EXISTS archive_cache ( | |
68 | rowid INTEGER PRIMARY KEY, |
|
68 | rowid INTEGER PRIMARY KEY, | |
69 | key_file TEXT, |
|
69 | key_file TEXT, | |
70 | key_file_path TEXT, |
|
70 | key_file_path TEXT, | |
71 | filename TEXT, |
|
71 | filename TEXT, | |
72 | full_path TEXT, |
|
72 | full_path TEXT, | |
73 | store_time REAL, |
|
73 | store_time REAL, | |
74 | access_time REAL, |
|
74 | access_time REAL, | |
75 | access_count INTEGER DEFAULT 0, |
|
75 | access_count INTEGER DEFAULT 0, | |
76 | size INTEGER DEFAULT 0 |
|
76 | size INTEGER DEFAULT 0 | |
77 | ) |
|
77 | ) | |
78 | ''' |
|
78 | ''' | |
79 |
|
79 | |||
80 | self.sql(qry) |
|
80 | self.sql(qry) | |
81 | self.connection.commit() |
|
81 | self.connection.commit() | |
82 |
|
82 | |||
83 | @property |
|
83 | @property | |
84 | def sql(self): |
|
84 | def sql(self): | |
85 | return self.connection.execute |
|
85 | return self.connection.execute | |
86 |
|
86 | |||
87 | def bulk_insert(self, rows): |
|
87 | def bulk_insert(self, rows): | |
88 | qry = ''' |
|
88 | qry = ''' | |
89 | INSERT INTO archive_cache ( |
|
89 | INSERT INTO archive_cache ( | |
90 | rowid, |
|
90 | rowid, | |
91 | key_file, |
|
91 | key_file, | |
92 | key_file_path, |
|
92 | key_file_path, | |
93 | filename, |
|
93 | filename, | |
94 | full_path, |
|
94 | full_path, | |
95 | store_time, |
|
95 | store_time, | |
96 | access_time, |
|
96 | access_time, | |
97 | access_count, |
|
97 | access_count, | |
98 | size |
|
98 | size | |
99 | ) |
|
99 | ) | |
100 | VALUES ( |
|
100 | VALUES ( | |
101 | ?, ?, ?, ?, ?, ?, ?, ?, ? |
|
101 | ?, ?, ?, ?, ?, ?, ?, ?, ? | |
102 | ) |
|
102 | ) | |
103 | ''' |
|
103 | ''' | |
104 | cursor = self.connection.cursor() |
|
104 | cursor = self.connection.cursor() | |
105 | cursor.executemany(qry, rows) |
|
105 | cursor.executemany(qry, rows) | |
106 | self.connection.commit() |
|
106 | self.connection.commit() | |
107 |
|
107 | |||
108 |
|
108 | |||
109 | class FileSystemCache: |
|
109 | class FileSystemCache: | |
110 |
|
110 | |||
111 | def __init__(self, index, directory, **settings): |
|
111 | def __init__(self, index, directory, **settings): | |
112 | self._index = index |
|
112 | self._index = index | |
113 | self._directory = directory |
|
113 | self._directory = directory | |
114 |
|
114 | |||
115 | @property |
|
115 | @property | |
116 | def directory(self): |
|
116 | def directory(self): | |
117 | """Cache directory.""" |
|
117 | """Cache directory.""" | |
118 | return self._directory |
|
118 | return self._directory | |
119 |
|
119 | |||
120 | def _write_file(self, full_path, iterator, mode, encoding=None): |
|
120 | def _write_file(self, full_path, iterator, mode, encoding=None): | |
121 | full_dir, _ = os.path.split(full_path) |
|
121 | full_dir, _ = os.path.split(full_path) | |
122 |
|
122 | |||
123 | for count in range(1, 11): |
|
123 | for count in range(1, 11): | |
124 | with contextlib.suppress(OSError): |
|
124 | with contextlib.suppress(OSError): | |
125 | os.makedirs(full_dir) |
|
125 | os.makedirs(full_dir) | |
126 |
|
126 | |||
127 | try: |
|
127 | try: | |
128 | # Another cache may have deleted the directory before |
|
128 | # Another cache may have deleted the directory before | |
129 | # the file could be opened. |
|
129 | # the file could be opened. | |
130 | writer = open(full_path, mode, encoding=encoding) |
|
130 | writer = open(full_path, mode, encoding=encoding) | |
131 | except OSError: |
|
131 | except OSError: | |
132 | if count == 10: |
|
132 | if count == 10: | |
133 | # Give up after 10 tries to open the file. |
|
133 | # Give up after 10 tries to open the file. | |
134 | raise |
|
134 | raise | |
135 | continue |
|
135 | continue | |
136 |
|
136 | |||
137 | with writer: |
|
137 | with writer: | |
138 | size = 0 |
|
138 | size = 0 | |
139 | for chunk in iterator: |
|
139 | for chunk in iterator: | |
140 | size += len(chunk) |
|
140 | size += len(chunk) | |
141 | writer.write(chunk) |
|
141 | writer.write(chunk) | |
|
142 | writer.flush() | |||
|
143 | # Get the file descriptor | |||
|
144 | fd = writer.fileno() | |||
|
145 | ||||
|
146 | # Sync the file descriptor to disk, helps with NFS cases... | |||
|
147 | os.fsync(fd) | |||
|
148 | log.debug('written new archive cache under %s', full_path) | |||
142 | return size |
|
149 | return size | |
143 |
|
150 | |||
144 | def _get_keyfile(self, key): |
|
151 | def _get_keyfile(self, key): | |
145 | return os.path.join(self._directory, f'{key}.key') |
|
152 | return os.path.join(self._directory, f'{key}.key') | |
146 |
|
153 | |||
147 | def store(self, key, value_reader, metadata): |
|
154 | def store(self, key, value_reader, metadata): | |
148 | filename, full_path = self.random_filename() |
|
155 | filename, full_path = self.random_filename() | |
149 | key_file = self._get_keyfile(key) |
|
156 | key_file = self._get_keyfile(key) | |
150 |
|
157 | |||
151 | # STORE METADATA |
|
158 | # STORE METADATA | |
152 | _metadata = { |
|
159 | _metadata = { | |
153 | "version": "v1", |
|
160 | "version": "v1", | |
154 | "filename": filename, |
|
161 | "filename": filename, | |
155 | "full_path": full_path, |
|
162 | "full_path": full_path, | |
156 | "key_file": key_file, |
|
163 | "key_file": key_file, | |
157 | "store_time": time.time(), |
|
164 | "store_time": time.time(), | |
158 | "access_count": 1, |
|
165 | "access_count": 1, | |
159 | "access_time": 0, |
|
166 | "access_time": 0, | |
160 | "size": 0 |
|
167 | "size": 0 | |
161 | } |
|
168 | } | |
162 | if metadata: |
|
169 | if metadata: | |
163 | _metadata.update(metadata) |
|
170 | _metadata.update(metadata) | |
164 |
|
171 | |||
165 | reader = functools.partial(value_reader.read, 2**22) |
|
172 | reader = functools.partial(value_reader.read, 2**22) | |
166 |
|
173 | |||
167 | iterator = iter(reader, b'') |
|
174 | iterator = iter(reader, b'') | |
168 | size = self._write_file(full_path, iterator, 'xb') |
|
175 | size = self._write_file(full_path, iterator, 'xb') | |
169 | metadata['size'] = size |
|
176 | metadata['size'] = size | |
170 |
|
177 | |||
171 | # after archive is finished, we create a key to save the presence of the binary file |
|
178 | # after archive is finished, we create a key to save the presence of the binary file | |
172 | with open(key_file, 'wb') as f: |
|
179 | with open(key_file, 'wb') as f: | |
173 | f.write(json.dumps(_metadata)) |
|
180 | f.write(json.dumps(_metadata)) | |
174 |
|
181 | |||
175 | return key, size, MODE_BINARY, filename, _metadata |
|
182 | return key, size, MODE_BINARY, filename, _metadata | |
176 |
|
183 | |||
177 | def fetch(self, key, retry=False, retry_attempts=10) -> tuple[typing.BinaryIO, dict]: |
|
184 | def fetch(self, key, retry=False, retry_attempts=10) -> tuple[typing.BinaryIO, dict]: | |
178 |
|
185 | |||
179 | if retry: |
|
186 | if retry: | |
180 | for attempt in range(retry_attempts): |
|
187 | for attempt in range(retry_attempts): | |
181 | if key in self: |
|
188 | if key in self: | |
182 | break |
|
189 | break | |
183 | # we didn't find the key, wait 1s, and re-check |
|
190 | # we didn't find the key, wait 1s, and re-check | |
184 | time.sleep(1) |
|
191 | time.sleep(1) | |
185 |
|
192 | |||
186 | if key not in self: |
|
193 | if key not in self: | |
187 | log.exception('requested %s not found in %s', key, self) |
|
194 | log.exception('requested %s not found in %s', key, self) | |
188 | raise KeyError(key) |
|
195 | raise KeyError(key) | |
189 |
|
196 | |||
190 | key_file = self._get_keyfile(key) |
|
197 | key_file = self._get_keyfile(key) | |
191 | with open(key_file, 'rb') as f: |
|
198 | with open(key_file, 'rb') as f: | |
192 | metadata = json.loads(f.read()) |
|
199 | metadata = json.loads(f.read()) | |
193 |
|
200 | |||
194 | filename = metadata['filename'] |
|
201 | filename = metadata['filename'] | |
195 |
|
202 | |||
196 | try: |
|
203 | try: | |
197 | return open(os.path.join(self.directory, filename), 'rb'), metadata |
|
204 | return open(os.path.join(self.directory, filename), 'rb'), metadata | |
198 | finally: |
|
205 | finally: | |
199 | # update usage stats, count and accessed |
|
206 | # update usage stats, count and accessed | |
200 | metadata["access_count"] = metadata.get("access_count", 0) + 1 |
|
207 | metadata["access_count"] = metadata.get("access_count", 0) + 1 | |
201 | metadata["access_time"] = time.time() |
|
208 | metadata["access_time"] = time.time() | |
202 |
|
209 | |||
203 | with open(key_file, 'wb') as f: |
|
210 | with open(key_file, 'wb') as f: | |
204 | f.write(json.dumps(metadata)) |
|
211 | f.write(json.dumps(metadata)) | |
205 |
|
212 | |||
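A hedged usage sketch for the per-shard store/fetch pair above: store() writes the archive body plus a sidecar `.key` file that marks its presence, `in` only checks for that key file, and fetch() re-opens the payload while bumping the access stats. The directory, key, and payload below are placeholders, not values RhodeCode itself uses:

    import io

    shard = FileSystemCache(index=0, directory='/tmp/archive_cache/shard_000')
    key = 'example-archive-key'                      # hypothetical cache key
    shard.store(key, io.BytesIO(b'archive bytes'), metadata={})

    if key in shard:                                 # True once the .key file exists
        reader, meta = shard.fetch(key, retry=True)  # bumps access_count / access_time
        with reader:
            payload = reader.read()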
206 | def random_filename(self): |
|
213 | def random_filename(self): | |
207 | """Return filename and full-path tuple for file storage. |
|
214 | """Return filename and full-path tuple for file storage. | |
208 |
|
215 | |||
209 | Filename will be a randomly generated 28 character hexadecimal string |
|
216 | Filename will be a randomly generated 28 character hexadecimal string | |
210 | with ".archive_cache" suffixed. Two levels of sub-directories will be used to |
|
217 | with ".archive_cache" suffixed. Two levels of sub-directories will be used to | |
211 | reduce the size of directories. On older filesystems, lookups in |
|
218 | reduce the size of directories. On older filesystems, lookups in | |
212 | directories with many files may be slow. |
|
219 | directories with many files may be slow. | |
213 | """ |
|
220 | """ | |
214 |
|
221 | |||
215 | hex_name = codecs.encode(os.urandom(16), 'hex').decode('utf-8') |
|
222 | hex_name = codecs.encode(os.urandom(16), 'hex').decode('utf-8') | |
216 | sub_dir = os.path.join(hex_name[:2], hex_name[2:4]) |
|
223 | sub_dir = os.path.join(hex_name[:2], hex_name[2:4]) | |
217 | name = hex_name[4:] + '.archive_cache' |
|
224 | name = hex_name[4:] + '.archive_cache' | |
218 | filename = os.path.join(sub_dir, name) |
|
225 | filename = os.path.join(sub_dir, name) | |
219 | full_path = os.path.join(self.directory, filename) |
|
226 | full_path = os.path.join(self.directory, filename) | |
220 | return filename, full_path |
|
227 | return filename, full_path | |
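A rough back-of-the-envelope check of the "two levels of sub-directories" scheme described in the docstring above (the archive count is an arbitrary assumption): two hex characters per level give 256 * 256 = 65,536 leaf directories, so even a very large cache keeps individual directories small.

    leaf_dirs = 16 ** 4        # 65,536 possible 'ab/cd' prefixes
    archives = 1_000_000       # assumed number of cached archives
    print(archives / leaf_dirs)  # ~15 files per directory on average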
221 |
|
228 | |||
222 | def hash(self, key): |
|
229 | def hash(self, key): | |
223 | """Compute portable hash for `key`. |
|
230 | """Compute portable hash for `key`. | |
224 |
|
231 | |||
225 | :param key: key to hash |
|
232 | :param key: key to hash | |
226 | :return: hash value |
|
233 | :return: hash value | |
227 |
|
234 | |||
228 | """ |
|
235 | """ | |
229 | mask = 0xFFFFFFFF |
|
236 | mask = 0xFFFFFFFF | |
230 | return zlib.adler32(key.encode('utf-8')) & mask # noqa |
|
237 | return zlib.adler32(key.encode('utf-8')) & mask # noqa | |
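The masked adler32 value above is what FanoutCache uses to pick a shard (see _get_shard() further down). A minimal sketch with an assumed key and shard count:

    import zlib

    key = 'example-archive-key'    # hypothetical key
    shard_count = 8                # assumed cache_shards setting
    index = (zlib.adler32(key.encode('utf-8')) & 0xFFFFFFFF) % shard_count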
231 |
|
238 | |||
232 | def __contains__(self, key): |
|
239 | def __contains__(self, key): | |
233 | """Return `True` if `key` matching item is found in cache. |
|
240 | """Return `True` if `key` matching item is found in cache. | |
234 |
|
241 | |||
235 | :param key: key matching item |
|
242 | :param key: key matching item | |
236 | :return: True if key matching item |
|
243 | :return: True if key matching item | |
237 |
|
244 | |||
238 | """ |
|
245 | """ | |
239 | key_file = self._get_keyfile(key) |
|
246 | key_file = self._get_keyfile(key) | |
240 | return os.path.exists(key_file) |
|
247 | return os.path.exists(key_file) | |
241 |
|
248 | |||
242 | def __repr__(self): |
|
249 | def __repr__(self): | |
243 | return f'FileSystemCache(index={self._index}, dir={self.directory})' |
|
250 | return f'FileSystemCache(index={self._index}, dir={self.directory})' | |
244 |
|
251 | |||
245 |
|
252 | |||
246 | class FanoutCache: |
|
253 | class FanoutCache: | |
247 | """Cache that shards keys and values.""" |
|
254 | """Cache that shards keys and values.""" | |
248 |
|
255 | |||
249 | def __init__( |
|
256 | def __init__( | |
250 | self, directory=None, **settings |
|
257 | self, directory=None, **settings | |
251 | ): |
|
258 | ): | |
252 | """Initialize cache instance. |
|
259 | """Initialize cache instance. | |
253 |
|
260 | |||
254 | :param str directory: cache directory |
|
261 | :param str directory: cache directory | |
255 | :param settings: settings dict |
|
262 | :param settings: settings dict | |
256 |
|
263 | |||
257 | """ |
|
264 | """ | |
258 | if directory is None: |
|
265 | if directory is None: | |
259 | raise ValueError('directory cannot be None') |
|
266 | raise ValueError('directory cannot be None') | |
260 |
|
267 | |||
261 | directory = str(directory) |
|
268 | directory = str(directory) | |
262 | directory = os.path.expanduser(directory) |
|
269 | directory = os.path.expanduser(directory) | |
263 | directory = os.path.expandvars(directory) |
|
270 | directory = os.path.expandvars(directory) | |
264 | self._directory = directory |
|
271 | self._directory = directory | |
265 |
|
272 | |||
266 | self._count = settings.pop('cache_shards') |
|
273 | self._count = settings.pop('cache_shards') | |
267 | self._locking_url = settings.pop('locking_url') |
|
274 | self._locking_url = settings.pop('locking_url') | |
268 |
|
275 | |||
269 | self._eviction_policy = settings['cache_eviction_policy'] |
|
276 | self._eviction_policy = settings['cache_eviction_policy'] | |
270 | self._cache_size_limit = settings['cache_size_limit'] |
|
277 | self._cache_size_limit = settings['cache_size_limit'] | |
271 |
|
278 | |||
272 | self._shards = tuple( |
|
279 | self._shards = tuple( | |
273 | FileSystemCache( |
|
280 | FileSystemCache( | |
274 | index=num, |
|
281 | index=num, | |
275 | directory=os.path.join(directory, 'shard_%03d' % num), |
|
282 | directory=os.path.join(directory, 'shard_%03d' % num), | |
276 | **settings, |
|
283 | **settings, | |
277 | ) |
|
284 | ) | |
278 | for num in range(self._count) |
|
285 | for num in range(self._count) | |
279 | ) |
|
286 | ) | |
280 | self._hash = self._shards[0].hash |
|
287 | self._hash = self._shards[0].hash | |
281 |
|
288 | |||
282 | @property |
|
289 | @property | |
283 | def directory(self): |
|
290 | def directory(self): | |
284 | """Cache directory.""" |
|
291 | """Cache directory.""" | |
285 | return self._directory |
|
292 | return self._directory | |
286 |
|
293 | |||
287 | def get_lock(self, lock_key): |
|
294 | def get_lock(self, lock_key): | |
288 | return GenerationLock(lock_key, self._locking_url) |
|
295 | return GenerationLock(lock_key, self._locking_url) | |
289 |
|
296 | |||
290 | def _get_shard(self, key) -> FileSystemCache: |
|
297 | def _get_shard(self, key) -> FileSystemCache: | |
291 | index = self._hash(key) % self._count |
|
298 | index = self._hash(key) % self._count | |
292 | shard = self._shards[index] |
|
299 | shard = self._shards[index] | |
293 | return shard |
|
300 | return shard | |
294 |
|
301 | |||
295 | def store(self, key, value_reader, metadata=None): |
|
302 | def store(self, key, value_reader, metadata=None): | |
296 | shard = self._get_shard(key) |
|
303 | shard = self._get_shard(key) | |
297 | return shard.store(key, value_reader, metadata) |
|
304 | return shard.store(key, value_reader, metadata) | |
298 |
|
305 | |||
299 | def fetch(self, key, retry=False, retry_attempts=10): |
|
306 | def fetch(self, key, retry=False, retry_attempts=10): | |
300 | """Return file handle corresponding to `key` from cache. |
|
307 | """Return file handle corresponding to `key` from cache. | |
301 | """ |
|
308 | """ | |
302 | shard = self._get_shard(key) |
|
309 | shard = self._get_shard(key) | |
303 | return shard.fetch(key, retry=retry, retry_attempts=retry_attempts) |
|
310 | return shard.fetch(key, retry=retry, retry_attempts=retry_attempts) | |
304 |
|
311 | |||
305 | def has_key(self, key): |
|
312 | def has_key(self, key): | |
306 | """Return `True` if `key` matching item is found in cache. |
|
313 | """Return `True` if `key` matching item is found in cache. | |
307 |
|
314 | |||
308 | :param key: key for item |
|
315 | :param key: key for item | |
309 | :return: True if key is found |
|
316 | :return: True if key is found | |
310 |
|
317 | |||
311 | """ |
|
318 | """ | |
312 | shard = self._get_shard(key) |
|
319 | shard = self._get_shard(key) | |
313 | return key in shard |
|
320 | return key in shard | |
314 |
|
321 | |||
315 | def __contains__(self, item): |
|
322 | def __contains__(self, item): | |
316 | return self.has_key(item) |
|
323 | return self.has_key(item) | |
317 |
|
324 | |||
318 | def evict(self, policy=None, size_limit=None): |
|
325 | def evict(self, policy=None, size_limit=None): | |
319 | """ |
|
326 | """ | |
320 | Remove old items based on the conditions |
|
327 | Remove old items based on the conditions | |
321 |
|
328 | |||
322 |
|
329 | |||
323 | explanation of this algo: |
|
330 | explanation of this algo: | |
324 | iterate over each shard, then for each shard iterate over the .key files |
|
331 | iterate over each shard, then for each shard iterate over the .key files | |
325 | read the stored key-file metadata. This gives us a full list of keys, their cached archives, sizes, |
|
332 | read the stored key-file metadata. This gives us a full list of keys, their cached archives, sizes, | |
326 | access data, creation times, and access counts. |
|
333 | access data, creation times, and access counts. | |
327 |
|
334 | |||
328 | Store that into a memory DB so we can run different sorting strategies easily. |
|
335 | Store that into a memory DB so we can run different sorting strategies easily. | |
329 | Summing the size is a sum sql query. |
|
336 | Summing the size is a sum sql query. | |
330 |
|
337 | |||
331 | Then we run a sorting strategy based on eviction policy. |
|
338 | Then we run a sorting strategy based on eviction policy. | |
332 | We iterate over sorted keys, and remove each checking if we hit the overall limit. |
|
339 | We iterate over sorted keys, and remove each checking if we hit the overall limit. | |
333 | """ |
|
340 | """ | |
334 |
|
341 | |||
335 | policy = policy or self._eviction_policy |
|
342 | policy = policy or self._eviction_policy | |
336 | size_limit = size_limit or self._cache_size_limit |
|
343 | size_limit = size_limit or self._cache_size_limit | |
337 |
|
344 | |||
338 | select_policy = EVICTION_POLICY[policy]['evict'] |
|
345 | select_policy = EVICTION_POLICY[policy]['evict'] | |
339 |
|
346 | |||
340 | log.debug('Running eviction policy \'%s\', and checking for size limit: %s', |
|
347 | log.debug('Running eviction policy \'%s\', and checking for size limit: %s', | |
341 | policy, format_size(size_limit)) |
|
348 | policy, format_size(size_limit)) | |
342 |
|
349 | |||
343 | if select_policy is None: |
|
350 | if select_policy is None: | |
344 | return 0 |
|
351 | return 0 | |
345 |
|
352 | |||
346 | db = DB() |
|
353 | db = DB() | |
347 |
|
354 | |||
348 | data = [] |
|
355 | data = [] | |
349 | cnt = 1 |
|
356 | cnt = 1 | |
350 | for shard in self._shards: |
|
357 | for shard in self._shards: | |
351 | for key_file in os.listdir(shard.directory): |
|
358 | for key_file in os.listdir(shard.directory): | |
352 | if key_file.endswith('.key'): |
|
359 | if key_file.endswith('.key'): | |
353 | key_file_path = os.path.join(shard.directory, key_file) |
|
360 | key_file_path = os.path.join(shard.directory, key_file) | |
354 | with open(key_file_path, 'rb') as f: |
|
361 | with open(key_file_path, 'rb') as f: | |
355 | metadata = json.loads(f.read()) |
|
362 | metadata = json.loads(f.read()) | |
356 |
|
363 | |||
357 | size = metadata.get('size') |
|
364 | size = metadata.get('size') | |
358 | filename = metadata.get('filename') |
|
365 | filename = metadata.get('filename') | |
359 | full_path = metadata.get('full_path') |
|
366 | full_path = metadata.get('full_path') | |
360 |
|
367 | |||
361 | if not size: |
|
368 | if not size: | |
362 | # in case we don't have size re-calc it... |
|
369 | # in case we don't have size re-calc it... | |
363 | size = os.stat(full_path).st_size |
|
370 | size = os.stat(full_path).st_size | |
364 |
|
371 | |||
365 | data.append([ |
|
372 | data.append([ | |
366 | cnt, |
|
373 | cnt, | |
367 | key_file, |
|
374 | key_file, | |
368 | key_file_path, |
|
375 | key_file_path, | |
369 | filename, |
|
376 | filename, | |
370 | full_path, |
|
377 | full_path, | |
371 | metadata.get('store_time', 0), |
|
378 | metadata.get('store_time', 0), | |
372 | metadata.get('access_time', 0), |
|
379 | metadata.get('access_time', 0), | |
373 | metadata.get('access_count', 0), |
|
380 | metadata.get('access_count', 0), | |
374 | size, |
|
381 | size, | |
375 | ]) |
|
382 | ]) | |
376 | cnt += 1 |
|
383 | cnt += 1 | |
377 |
|
384 | |||
378 | # Insert bulk data using executemany |
|
385 | # Insert bulk data using executemany | |
379 | db.bulk_insert(data) |
|
386 | db.bulk_insert(data) | |
380 |
|
387 | |||
381 | ((total_size,),) = db.sql('SELECT COALESCE(SUM(size), 0) FROM archive_cache').fetchall() |
|
388 | ((total_size,),) = db.sql('SELECT COALESCE(SUM(size), 0) FROM archive_cache').fetchall() | |
382 | log.debug('Analyzed %s keys, occupied: %s', len(data), format_size(total_size)) |
|
389 | log.debug('Analyzed %s keys, occupied: %s', len(data), format_size(total_size)) | |
383 | select_policy_qry = select_policy.format(fields='key_file_path, full_path, size') |
|
390 | select_policy_qry = select_policy.format(fields='key_file_path, full_path, size') | |
384 | sorted_keys = db.sql(select_policy_qry).fetchall() |
|
391 | sorted_keys = db.sql(select_policy_qry).fetchall() | |
385 |
|
392 | |||
386 | removed_items = 0 |
|
393 | removed_items = 0 | |
387 | removed_size = 0 |
|
394 | removed_size = 0 | |
388 | for key, cached_file, size in sorted_keys: |
|
395 | for key, cached_file, size in sorted_keys: | |
389 | # simulate removal impact BEFORE removal |
|
396 | # simulate removal impact BEFORE removal | |
390 | total_size -= size |
|
397 | total_size -= size | |
391 |
|
398 | |||
392 | if total_size <= size_limit: |
|
399 | if total_size <= size_limit: | |
393 | # we obtained what we wanted... |
|
400 | # we obtained what we wanted... | |
394 | break |
|
401 | break | |
395 |
|
402 | |||
396 | os.remove(cached_file) |
|
403 | os.remove(cached_file) | |
397 | os.remove(key) |
|
404 | os.remove(key) | |
398 | removed_items += 1 |
|
405 | removed_items += 1 | |
399 | removed_size += size |
|
406 | removed_size += size | |
400 |
|
407 | |||
401 | log.debug('Removed %s cache archives, and reduced size: %s', removed_items, format_size(removed_size)) |
|
408 | log.debug('Removed %s cache archives, and reduced size: %s', removed_items, format_size(removed_size)) | |
402 | return removed_items |
|
409 | return removed_items | |
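A toy walk-through of the removal loop above, with made-up numbers: each candidate's size is subtracted first, and deletion stops as soon as the simulated total drops to or below the limit, so the entry that crosses the threshold is kept.

    GB = 1024 ** 3
    size_limit, total_size = 10 * GB, 15 * GB
    removed = 0
    for size in [3 * GB, 2 * GB, 4 * GB]:  # policy-sorted candidate sizes (assumed)
        total_size -= size                 # simulate removal impact BEFORE removal
        if total_size <= size_limit:
            break
        removed += 1                       # here: only the first candidate gets deleted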
403 |
|
410 | |||
404 |
|
411 | |||
405 | def get_archival_config(config): |
|
412 | def get_archival_config(config): | |
406 |
|
413 | |||
407 | final_config = { |
|
414 | final_config = { | |
408 |
|
415 | |||
409 | } |
|
416 | } | |
410 |
|
417 | |||
411 | for k, v in config.items(): |
|
418 | for k, v in config.items(): | |
412 | if k.startswith('archive_cache'): |
|
419 | if k.startswith('archive_cache'): | |
413 | final_config[k] = v |
|
420 | final_config[k] = v | |
414 |
|
421 | |||
415 | return final_config |
|
422 | return final_config | |
416 |
|
423 | |||
417 |
|
424 | |||
418 | def get_archival_cache_store(config): |
|
425 | def get_archival_cache_store(config): | |
419 |
|
426 | |||
420 | global cache_meta |
|
427 | global cache_meta | |
421 | if cache_meta is not None: |
|
428 | if cache_meta is not None: | |
422 | return cache_meta |
|
429 | return cache_meta | |
423 |
|
430 | |||
424 | config = get_archival_config(config) |
|
431 | config = get_archival_config(config) | |
425 | backend = config['archive_cache.backend.type'] |
|
432 | backend = config['archive_cache.backend.type'] | |
426 | if backend != 'filesystem': |
|
433 | if backend != 'filesystem': | |
427 | raise ValueError('archive_cache.backend.type only supports "filesystem"') |
|
434 | raise ValueError('archive_cache.backend.type only supports "filesystem"') | |
428 |
|
435 | |||
429 | archive_cache_locking_url = config['archive_cache.locking.url'] |
|
436 | archive_cache_locking_url = config['archive_cache.locking.url'] | |
430 | archive_cache_dir = config['archive_cache.filesystem.store_dir'] |
|
437 | archive_cache_dir = config['archive_cache.filesystem.store_dir'] | |
431 | archive_cache_size_gb = config['archive_cache.filesystem.cache_size_gb'] |
|
438 | archive_cache_size_gb = config['archive_cache.filesystem.cache_size_gb'] | |
432 | archive_cache_shards = config['archive_cache.filesystem.cache_shards'] |
|
439 | archive_cache_shards = config['archive_cache.filesystem.cache_shards'] | |
433 | archive_cache_eviction_policy = config['archive_cache.filesystem.eviction_policy'] |
|
440 | archive_cache_eviction_policy = config['archive_cache.filesystem.eviction_policy'] | |
434 |
|
441 | |||
435 | log.debug('Initializing archival cache instance under %s', archive_cache_dir) |
|
442 | log.debug('Initializing archival cache instance under %s', archive_cache_dir) | |
436 |
|
443 | |||
437 | # check if it's ok to write, and re-create the archive cache |
|
444 | # check if it's ok to write, and re-create the archive cache | |
438 | if not os.path.isdir(archive_cache_dir): |
|
445 | if not os.path.isdir(archive_cache_dir): | |
439 | os.makedirs(archive_cache_dir, exist_ok=True) |
|
446 | os.makedirs(archive_cache_dir, exist_ok=True) | |
440 |
|
447 | |||
441 | d_cache = FanoutCache( |
|
448 | d_cache = FanoutCache( | |
442 | archive_cache_dir, |
|
449 | archive_cache_dir, | |
443 | locking_url=archive_cache_locking_url, |
|
450 | locking_url=archive_cache_locking_url, | |
444 | cache_shards=archive_cache_shards, |
|
451 | cache_shards=archive_cache_shards, | |
445 | cache_size_limit=archive_cache_size_gb * 1024 * 1024 * 1024, |
|
452 | cache_size_limit=archive_cache_size_gb * 1024 * 1024 * 1024, | |
446 | cache_eviction_policy=archive_cache_eviction_policy |
|
453 | cache_eviction_policy=archive_cache_eviction_policy | |
447 | ) |
|
454 | ) | |
448 | cache_meta = d_cache |
|
455 | cache_meta = d_cache | |
449 | return cache_meta |
|
456 | return cache_meta |
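An end-to-end sketch of how this module is wired up, hedged: the config keys mirror the ones read in get_archival_cache_store() above, but every value (paths, sizes, the Redis locking URL) is a placeholder rather than a recommended setting.

    import io

    config = {
        'archive_cache.backend.type': 'filesystem',
        'archive_cache.locking.url': 'redis://localhost:6379/1',      # placeholder
        'archive_cache.filesystem.store_dir': '/tmp/archive_cache',   # placeholder
        'archive_cache.filesystem.cache_size_gb': 10,
        'archive_cache.filesystem.cache_shards': 8,
        'archive_cache.filesystem.eviction_policy': 'least-recently-stored',
    }

    d_cache = get_archival_cache_store(config)
    d_cache.store('example-key', io.BytesIO(b'archive bytes'), metadata={})
    if 'example-key' in d_cache:
        reader, metadata = d_cache.fetch('example-key')
    d_cache.evict()   # trim back to the configured size limit if needed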