@@ -1,4 +1,5 b'' | |||
|
1 | 1 | |
|
2 | import dataclasses | |
|
2 | 3 | # Copyright (C) 2016-2020 RhodeCode GmbH |
|
3 | 4 | # |
|
4 | 5 | # This program is free software: you can redistribute it and/or modify |
@@ -30,8 +31,13 b' from rhodecode.translation import _' | |||
|
30 | 31 | |
|
31 | 32 | log = logging.getLogger(__name__) |
|
32 | 33 | |
|
33 | NavListEntry = collections.namedtuple( | |
|
34 | 'NavListEntry', ['key', 'name', 'url', 'active_list']) | |
|
34 | ||
|
35 | @dataclasses.dataclass | |
|
36 | class NavListEntry: | |
|
37 | key: str | |
|
38 | name: str | |
|
39 | url: str | |
|
40 | active_list: list | |
|
35 | 41 | |
|
36 | 42 | |
|
37 | 43 | class NavEntry(object): |
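Review note: the namedtuple is swapped for a dataclass with the same field order, so positional construction and attribute access keep working, but tuple behaviors (indexing, unpacking) are gone. A minimal sketch of the difference, assuming only what the hunk above shows:

    import collections
    import dataclasses

    NavListEntryNT = collections.namedtuple(
        'NavListEntry', ['key', 'name', 'url', 'active_list'])

    @dataclasses.dataclass
    class NavListEntry:
        key: str
        name: str
        url: str
        active_list: list

    nt = NavListEntryNT('admin', 'Admin', '/admin', ['admin'])
    dc = NavListEntry('admin', 'Admin', '/admin', ['admin'])
    assert nt.key == dc.key   # attribute access is unchanged
    assert nt[0] == 'admin'   # still works on the namedtuple
    # dc[0] would raise TypeError: a dataclass is not a sequence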
@@ -105,7 +111,7 b' class NavigationRegistry(object):' | |||
|
105 | 111 | def __init__(self, labs_active=False): |
|
106 | 112 | self._registered_entries = collections.OrderedDict() |
|
107 | 113 | for item in self.__class__._base_entries: |
|
108 | self._registered_entries[item.key] = item | 
|
114 | self.add_entry(item) | |
|
109 | 115 | |
|
110 | 116 | if labs_active: |
|
111 | 117 | self.add_entry(self._labs_entry) |
@@ -117,7 +123,8 b' class NavigationRegistry(object):' | |||
|
117 | 123 | nav_list = [ |
|
118 | 124 | NavListEntry(i.key, i.get_localized_name(request), |
|
119 | 125 | i.generate_url(request), i.active_list) |
|
120 | for i in self._registered_entries.values()] | 
|
126 | for i in self._registered_entries.values() | |
|
127 | ] | |
|
121 | 128 | return nav_list |
|
122 | 129 | |
|
123 | 130 |
@@ -41,13 +41,12 b' def trigger_user_permission_flush(event)' | |||
|
41 | 41 | automatic flush of permission caches, so the users affected receive new permissions |
|
42 | 42 | Right Away |
|
43 | 43 | """ |
|
44 | invalidate = True | |
|
44 | ||
|
45 | 45 | affected_user_ids = set(event.user_ids) |
|
46 | 46 | for user_id in affected_user_ids: |
|
47 | 47 | for cache_namespace_uid_tmpl in cache_namespaces: |
|
48 | 48 | cache_namespace_uid = cache_namespace_uid_tmpl.format(user_id) |
|
49 | del_keys = rc_cache.clear_cache_namespace( | |
|
50 | 'cache_perms', cache_namespace_uid, invalidate=invalidate) | |
|
49 | del_keys = rc_cache.clear_cache_namespace('cache_perms', cache_namespace_uid, method=rc_cache.CLEAR_INVALIDATE) | |
|
51 | 50 | log.debug('Invalidated %s cache keys for user_id: %s and namespace %s', |
|
52 | 51 | del_keys, user_id, cache_namespace_uid) |
|
53 | 52 |
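Review note: the boolean invalidate flag becomes an explicit method=rc_cache.CLEAR_INVALIDATE argument, which reads as "mark stale" rather than "delete". A self-contained sketch of the per-user loop with the rc_cache call stubbed out (CLEAR_INVALIDATE semantics are assumed, and the namespace template below is hypothetical):

    CLEAR_INVALIDATE = 'invalidate'
    cache_namespaces = ['user_perms.{}']  # hypothetical template list

    def clear_cache_namespace(region, namespace, method):
        # stub: a real backend would mark every key under `namespace` stale
        print(f'{method} {region}:{namespace}')
        return 0  # number of keys touched

    for user_id in {101, 102}:
        for tmpl in cache_namespaces:
            clear_cache_namespace('cache_perms', tmpl.format(user_id),
                                  method=CLEAR_INVALIDATE)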
@@ -28,6 +28,7 b' from pyramid.httpexceptions import HTTPF' | |||
|
28 | 28 | |
|
29 | 29 | from pyramid.renderers import render |
|
30 | 30 | from pyramid.response import Response |
|
31 | from sqlalchemy.orm import aliased | |
|
31 | 32 | |
|
32 | 33 | from rhodecode import events |
|
33 | 34 | from rhodecode.apps._base import BaseAppView, DataGridAppView |
@@ -160,6 +161,7 b' class AdminRepoGroupsView(BaseAppView, D' | |||
|
160 | 161 | .count() |
|
161 | 162 | |
|
162 | 163 | repo_count = count(Repository.repo_id) |
|
164 | OwnerUser = aliased(User) | |
|
163 | 165 | base_q = Session.query( |
|
164 | 166 | RepoGroup.group_name, |
|
165 | 167 | RepoGroup.group_name_hash, |
@@ -167,19 +169,20 b' class AdminRepoGroupsView(BaseAppView, D' | |||
|
167 | 169 | RepoGroup.group_id, |
|
168 | 170 | RepoGroup.personal, |
|
169 | 171 | RepoGroup.updated_on, |
|
170 | User, | |
|
172 | OwnerUser.username.label('owner_username'), | |
|
171 | 173 | repo_count.label('repos_count') |
|
172 | 174 | ) \ |
|
173 | 175 | .filter(or_( |
|
174 | 176 | # generate multiple IN to fix limitation problems |
|
175 | 177 | *in_filter_generator(RepoGroup.group_id, allowed_ids) |
|
176 | 178 | )) \ |
|
177 | .outerjoin(Repository, RepoGroup.group_id == Repository.group_id) \ | 
|
178 | .join(User, RepoGroup.user_id == User.user_id) \ | 
|
179 | .group_by(RepoGroup, User) | |
|
179 | .outerjoin(Repository, RepoGroup.group_id == Repository.group_id) \ | |
|
180 | .join(OwnerUser, RepoGroup.user_id == OwnerUser.user_id) | |
|
181 | ||
|
182 | base_q = base_q.group_by(RepoGroup, OwnerUser) | |
|
180 | 183 | |
|
181 | 184 | if search_q: |
|
182 | like_expression = u'%{}%'.format(safe_unicode(search_q)) | 
|
185 | like_expression = u'%{}%'.format(safe_str(search_q)) | |
|
183 | 186 | base_q = base_q.filter(or_( |
|
184 | 187 | RepoGroup.group_name.ilike(like_expression), |
|
185 | 188 | )) |
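Review note: selecting the whole User entity forced the query to group by every User column; aliasing the owner and selecting just the labeled username keeps the query narrow and lets the same table be joined again under another role. A minimal self-contained sketch of the aliased() pattern, with the models reduced to the columns involved:

    from sqlalchemy import Column, ForeignKey, Integer, String, create_engine
    from sqlalchemy.orm import Session, aliased, declarative_base

    Base = declarative_base()

    class User(Base):
        __tablename__ = 'users'
        user_id = Column(Integer, primary_key=True)
        username = Column(String)

    class RepoGroup(Base):
        __tablename__ = 'repo_groups'
        group_id = Column(Integer, primary_key=True)
        group_name = Column(String)
        user_id = Column(Integer, ForeignKey('users.user_id'))

    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)

    with Session(engine) as session:
        OwnerUser = aliased(User)  # explicit alias for the "owner" role
        q = (session.query(RepoGroup.group_name,
                           OwnerUser.username.label('owner_username'))
             .join(OwnerUser, RepoGroup.user_id == OwnerUser.user_id)
             .group_by(RepoGroup.group_id, OwnerUser.user_id))
        rows = q.all()  # each row exposes .group_name and .owner_username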
@@ -197,7 +200,7 b' class AdminRepoGroupsView(BaseAppView, D' | |||
|
197 | 200 | sort_col = repo_count |
|
198 | 201 | sort_defined = True |
|
199 | 202 | elif order_by == 'user_username': |
|
200 | sort_col = User.username | |
|
203 | sort_col = OwnerUser.username | |
|
201 | 204 | else: |
|
202 | 205 | sort_col = getattr(RepoGroup, order_by, None) |
|
203 | 206 | |
@@ -225,11 +228,10 b' class AdminRepoGroupsView(BaseAppView, D' | |||
|
225 | 228 | "last_changeset_raw": "", |
|
226 | 229 | |
|
227 | 230 | "desc": desc(repo_gr.group_description, repo_gr.personal), |
|
228 | "owner": user_profile(repo_gr.User.username), | 
|
231 | "owner": user_profile(repo_gr.owner_username), | |
|
229 | 232 | "top_level_repos": repo_gr.repos_count, |
|
230 | 233 | "action": repo_group_actions( |
|
231 | 234 | repo_gr.group_id, repo_gr.group_name, repo_gr.repos_count), |
|
232 | ||
|
233 | 235 | } |
|
234 | 236 | |
|
235 | 237 | repo_groups_data.append(row) |
@@ -267,7 +269,7 b' class AdminRepoGroupsView(BaseAppView, D' | |||
|
267 | 269 | |
|
268 | 270 | if parent_group_id and _gr: |
|
269 | 271 | if parent_group_id in [x[0] for x in c.repo_groups]: |
|
270 | parent_group_choice = safe_unicode(parent_group_id) | 
|
272 | parent_group_choice = safe_str(parent_group_id) | |
|
271 | 273 | |
|
272 | 274 | defaults.update({'group_parent_id': parent_group_choice}) |
|
273 | 275 | |
@@ -297,7 +299,7 b' class AdminRepoGroupsView(BaseAppView, D' | |||
|
297 | 299 | self._load_form_data(c) |
|
298 | 300 | # permissions for can create group based on parent_id are checked |
|
299 | 301 | # here in the Form |
|
300 | available_groups = map(lambda k: safe_unicode(k[0]), c.repo_groups) | 
|
302 | available_groups = list(map(lambda k: safe_str(k[0]), c.repo_groups)) | |
|
301 | 303 | repo_group_form = RepoGroupForm( |
|
302 | 304 | self.request.translate, available_groups=available_groups, |
|
303 | 305 | can_create_in_root=can_create)() |
@@ -26,6 +26,7 b' from pyramid.httpexceptions import HTTPF' | |||
|
26 | 26 | |
|
27 | 27 | from pyramid.renderers import render |
|
28 | 28 | from pyramid.response import Response |
|
29 | from sqlalchemy.orm import aliased | |
|
29 | 30 | |
|
30 | 31 | from rhodecode import events |
|
31 | 32 | from rhodecode.apps._base import BaseAppView, DataGridAppView |
@@ -93,6 +94,8 b' class AdminReposView(BaseAppView, DataGr' | |||
|
93 | 94 | ) \ |
|
94 | 95 | .count() |
|
95 | 96 | |
|
97 | RepoFork = aliased(Repository) | |
|
98 | OwnerUser = aliased(User) | |
|
96 | 99 | base_q = Session.query( |
|
97 | 100 | Repository.repo_id, |
|
98 | 101 | Repository.repo_name, |
@@ -101,17 +104,17 b' class AdminReposView(BaseAppView, DataGr' | |||
|
101 | 104 | Repository.repo_state, |
|
102 | 105 | Repository.private, |
|
103 | 106 | Repository.archived, |
|
104 | Repository.fork, | |
|
105 | 107 | Repository.updated_on, |
|
106 | 108 | Repository._changeset_cache, |
|
107 | User, | |
|
109 | RepoFork.repo_name.label('fork_repo_name'), | |
|
110 | OwnerUser.username.label('owner_username'), | |
|
108 | 111 | ) \ |
|
109 | 112 | .filter(or_( |
|
110 | 113 | # generate multiple IN to fix limitation problems |
|
111 | 114 | *in_filter_generator(Repository.repo_id, allowed_ids)) |
|
112 | 115 | ) \ |
|
113 | .join(User, Repository.user_id == User.user_id) \ | 
|
114 | .group_by(Repository, User) | |
|
116 | .outerjoin(RepoFork, Repository.fork_id == RepoFork.repo_id) \ | |
|
117 | .join(OwnerUser, Repository.user_id == OwnerUser.user_id) | |
|
115 | 118 | |
|
116 | 119 | if search_q: |
|
117 | 120 | like_expression = u'%{}%'.format(safe_str(search_q)) |
@@ -119,6 +122,9 b' class AdminReposView(BaseAppView, DataGr' | |||
|
119 | 122 | Repository.repo_name.ilike(like_expression), |
|
120 | 123 | )) |
|
121 | 124 | |
|
125 | #TODO: check if we need group_by here ? | |
|
126 | #base_q = base_q.group_by(Repository, User) | |
|
127 | ||
|
122 | 128 | repos_data_total_filtered_count = base_q.count() |
|
123 | 129 | |
|
124 | 130 | sort_defined = False |
@@ -126,7 +132,7 b' class AdminReposView(BaseAppView, DataGr' | |||
|
126 | 132 | sort_col = func.lower(Repository.repo_name) |
|
127 | 133 | sort_defined = True |
|
128 | 134 | elif order_by == 'user_username': |
|
129 | sort_col = User.username | |
|
135 | sort_col = OwnerUser.username | |
|
130 | 136 | else: |
|
131 | 137 | sort_col = getattr(Repository, order_by, None) |
|
132 | 138 | |
@@ -188,7 +194,7 b' class AdminReposView(BaseAppView, DataGr' | |||
|
188 | 194 | |
|
189 | 195 | if parent_group_id and _gr: |
|
190 | 196 | if parent_group_id in [x[0] for x in c.repo_groups]: |
|
191 | parent_group_choice = safe_unicode(parent_group_id) | 
|
197 | parent_group_choice = safe_str(parent_group_id) | |
|
192 | 198 | |
|
193 | 199 | defaults.update({'repo_group': parent_group_choice}) |
|
194 | 200 |
@@ -362,11 +362,11 b' class UsersView(UserAppView):' | |||
|
362 | 362 | c = self.load_default_context() |
|
363 | 363 | c.user = self.db_user |
|
364 | 364 | |
|
365 | _repos = c.user.repositories | |
|
366 | _repo_groups = c.user.repository_groups | |
|
367 | _user_groups = c.user.user_groups | |
|
368 | _pull_requests = c.user.user_pull_requests | |
|
369 | _artifacts = c.user.artifacts | |
|
365 | _repos = len(c.user.repositories) | |
|
366 | _repo_groups = len(c.user.repository_groups) | |
|
367 | _user_groups = len(c.user.user_groups) | |
|
368 | _pull_requests = len(c.user.user_pull_requests) | |
|
369 | _artifacts = len(c.user.artifacts) | |
|
370 | 370 | |
|
371 | 371 | handle_repos = None |
|
372 | 372 | handle_repo_groups = None |
@@ -378,46 +378,46 b' class UsersView(UserAppView):' | |||
|
378 | 378 | def set_handle_flash_repos(): |
|
379 | 379 | handle = handle_repos |
|
380 | 380 | if handle == 'detach': |
|
381 | h.flash(_('Detached %s repositories') % len(_repos), | 
|
381 | h.flash(_('Detached %s repositories') % _repos, | |
|
382 | 382 | category='success') |
|
383 | 383 | elif handle == 'delete': |
|
384 | h.flash(_('Deleted %s repositories') % len(_repos), | 
|
384 | h.flash(_('Deleted %s repositories') % _repos, | |
|
385 | 385 | category='success') |
|
386 | 386 | |
|
387 | 387 | def set_handle_flash_repo_groups(): |
|
388 | 388 | handle = handle_repo_groups |
|
389 | 389 | if handle == 'detach': |
|
390 | h.flash(_('Detached %s repository groups') % len(_repo_groups), | 
|
390 | h.flash(_('Detached %s repository groups') % _repo_groups, | |
|
391 | 391 | category='success') |
|
392 | 392 | elif handle == 'delete': |
|
393 | h.flash(_('Deleted %s repository groups') % len(_repo_groups), | 
|
393 | h.flash(_('Deleted %s repository groups') % _repo_groups, | |
|
394 | 394 | category='success') |
|
395 | 395 | |
|
396 | 396 | def set_handle_flash_user_groups(): |
|
397 | 397 | handle = handle_user_groups |
|
398 | 398 | if handle == 'detach': |
|
399 | h.flash(_('Detached %s user groups') % len(_user_groups), | 
|
399 | h.flash(_('Detached %s user groups') % _user_groups, | |
|
400 | 400 | category='success') |
|
401 | 401 | elif handle == 'delete': |
|
402 | h.flash(_('Deleted %s user groups') % len(_user_groups), | 
|
402 | h.flash(_('Deleted %s user groups') % _user_groups, | |
|
403 | 403 | category='success') |
|
404 | 404 | |
|
405 | 405 | def set_handle_flash_pull_requests(): |
|
406 | 406 | handle = handle_pull_requests |
|
407 | 407 | if handle == 'detach': |
|
408 | h.flash(_('Detached %s pull requests') % len(_pull_requests), | 
|
408 | h.flash(_('Detached %s pull requests') % _pull_requests, | |
|
409 | 409 | category='success') |
|
410 | 410 | elif handle == 'delete': |
|
411 | h.flash(_('Deleted %s pull requests') % len(_pull_requests), | 
|
411 | h.flash(_('Deleted %s pull requests') % _pull_requests, | |
|
412 | 412 | category='success') |
|
413 | 413 | |
|
414 | 414 | def set_handle_flash_artifacts(): |
|
415 | 415 | handle = handle_artifacts |
|
416 | 416 | if handle == 'detach': |
|
417 | h.flash(_('Detached %s artifacts') % len(_artifacts), | 
|
417 | h.flash(_('Detached %s artifacts') % _artifacts, | |
|
418 | 418 | category='success') |
|
419 | 419 | elif handle == 'delete': |
|
420 | h.flash(_('Deleted %s artifacts') % len(_artifacts), | 
|
420 | h.flash(_('Deleted %s artifacts') % _artifacts, | |
|
421 | 421 | category='success') |
|
422 | 422 | |
|
423 | 423 | handle_user = User.get_first_super_admin() |
@@ -155,7 +155,6 b' class GistView(BaseAppView):' | |||
|
155 | 155 | lifetime_options=[x[0] for x in c.lifetime_values]) |
|
156 | 156 | |
|
157 | 157 | try: |
|
158 | ||
|
159 | 158 | schema_data = schema.deserialize(data) |
|
160 | 159 | |
|
161 | 160 | # convert to safer format with just KEYs so we sure no duplicates |
@@ -263,7 +262,7 b' class GistView(BaseAppView):' | |||
|
263 | 262 | raise HTTPNotFound() |
|
264 | 263 | |
|
265 | 264 | if return_format == 'raw': |
|
266 | content = '\n\n'.join([f.content for f in c.files | |
|
265 | content = b'\n\n'.join([f.content for f in c.files | |
|
267 | 266 | if (f_path is None or f.path == f_path)]) |
|
268 | 267 | response = Response(content) |
|
269 | 268 | response.content_type = 'text/plain' |
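Review note: gist file contents are bytes after the port, and Python 3 refuses to mix str and bytes in a join, so the separator must be bytes too. In isolation:

    files = [b'print(1)\n', b'print(2)\n']
    content = b'\n\n'.join(files)   # ok: bytes separator with bytes items
    try:
        '\n\n'.join(files)          # str separator with bytes items
    except TypeError as err:
        print(err)  # sequence item 0: expected str instance, bytes found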
@@ -70,7 +70,7 b' class HomeView(BaseAppView, DataGridAppV' | |||
|
70 | 70 | if skip_default_user and usr['username'] == UserModel.cls.DEFAULT_USER: |
|
71 | 71 | return False |
|
72 | 72 | return True |
|
73 | _users = filter(maybe_skip_default_user, _users) | |
|
73 | _users = list(filter(maybe_skip_default_user, _users)) | |
|
74 | 74 | |
|
75 | 75 | if include_groups: |
|
76 | 76 | # extend with user groups |
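Review note: the list() wrapper matters because Python 3's filter() returns a one-shot iterator; anything that iterates the result twice or takes its length would silently see an empty sequence on the second pass:

    users = [{'username': 'default'}, {'username': 'alice'}]
    lazy = filter(lambda u: u['username'] != 'default', users)
    print(list(lazy))   # [{'username': 'alice'}]
    print(list(lazy))   # [] -- the iterator is already exhausted
    safe = list(filter(lambda u: u['username'] != 'default', users))
    print(len(safe))    # 1, and reusable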
@@ -229,7 +229,7 b' class HomeView(BaseAppView, DataGridAppV' | |||
|
229 | 229 | 'value': org_query, |
|
230 | 230 | 'value_display': 'user: `{}`'.format(obj.username), |
|
231 | 231 | 'type': 'user', |
|
232 | 'icon_link': h.gravatar_url(obj.email, 30), | |
|
232 | 'icon_link': h.gravatar_url(obj.email, 30, request=self.request), | |
|
233 | 233 | 'url': h.route_path( |
|
234 | 234 | 'user_profile', username=obj.username) |
|
235 | 235 | } |
@@ -63,9 +63,8 b' class RepoGroupSettingsView(RepoGroupApp' | |||
|
63 | 63 | show_empty_group=show_root_location) |
|
64 | 64 | # filter out current repo group |
|
65 | 65 | exclude_group_ids = [c.repo_group.group_id] |
|
66 | c.repo_groups = filter(lambda x: x[0] not in exclude_group_ids, | 
|
67 | c.repo_groups) | |
|
68 | c.repo_groups_choices = map(lambda k: k[0], c.repo_groups) | |
|
66 | c.repo_groups = [x for x in c.repo_groups if x[0] not in exclude_group_ids] | |
|
67 | c.repo_groups_choices = [k[0] for k in c.repo_groups] | |
|
69 | 68 | |
|
70 | 69 | parent_group = c.repo_group.parent_group |
|
71 | 70 | |
@@ -135,7 +134,7 b' class RepoGroupSettingsView(RepoGroupApp' | |||
|
135 | 134 | schema = self._get_schema(c, old_values=old_values) |
|
136 | 135 | |
|
137 | 136 | c.form = RcForm(schema) |
|
138 | pstruct = self.request.POST.items() | |
|
137 | pstruct = list(self.request.POST.items()) | |
|
139 | 138 | |
|
140 | 139 | try: |
|
141 | 140 | schema_data = c.form.validate(pstruct) |
@@ -990,14 +990,14 b' def includeme(config):' | |||
|
990 | 990 | route_name='edit_repo_fields_delete', request_method='POST', |
|
991 | 991 | renderer='rhodecode:templates/admin/repos/repo_edit.mako') |
|
992 | 992 | |
|
993 | # Locking | |
|
993 | # quick actions: locking | |
|
994 | 994 | config.add_route( |
|
995 | name='repo_edit_toggle_locking', | 
|
996 | pattern='/{repo_name:.*?[^/]}/settings/toggle_locking', repo_route=True) | 
|
995 | name='repo_settings_quick_actions', | |
|
996 | pattern='/{repo_name:.*?[^/]}/settings/quick-action', repo_route=True) | |
|
997 | 997 | config.add_view( |
|
998 | 998 | RepoSettingsView, |
|
999 | attr='edit_advanced_toggle_locking', | |
|
1000 | route_name='repo_edit_toggle_locking', request_method='GET', | 
|
999 | attr='repo_settings_quick_actions', | |
|
1000 | route_name='repo_settings_quick_actions', request_method='GET', | |
|
1001 | 1001 | renderer='rhodecode:templates/admin/repos/repo_edit.mako') |
|
1002 | 1002 | |
|
1003 | 1003 | # Remote |
@@ -260,9 +260,10 b' class RepoCommitsView(RepoAppView):' | |||
|
260 | 260 | ignore_whitespace=hide_whitespace_changes, |
|
261 | 261 | context=diff_context) |
|
262 | 262 | |
|
263 | diff_processor = diffs.DiffProcessor( | |
|
264 | vcs_diff, format='newdiff', diff_limit=diff_limit, | 
|
265 | file_limit=file_limit, show_full_diff=c.fulldiff) | 
|
263 | diff_processor = diffs.DiffProcessor(vcs_diff, diff_format='newdiff', | |
|
264 | diff_limit=diff_limit, | |
|
265 | file_limit=file_limit, | |
|
266 | show_full_diff=c.fulldiff) | |
|
266 | 267 | |
|
267 | 268 | _parsed = diff_processor.prepare() |
|
268 | 269 | |
@@ -285,8 +286,8 b' class RepoCommitsView(RepoAppView):' | |||
|
285 | 286 | _diff = self.rhodecode_vcs_repo.get_diff( |
|
286 | 287 | commit1, commit2, |
|
287 | 288 | ignore_whitespace=hide_whitespace_changes, context=diff_context) |
|
288 | diff_processor = diffs.DiffProcessor( | |
|
289 | _diff, format='newdiff', diff_limit=diff_limit, | 
|
289 | diff_processor = diffs.DiffProcessor(_diff, diff_format='newdiff', | |
|
290 | diff_limit=diff_limit, | |
|
290 | 291 | file_limit=file_limit, show_full_diff=c.fulldiff) |
|
291 | 292 | # downloads/raw we only need RAW diff nothing else |
|
292 | 293 | diff = self.path_filter.get_raw_patch(diff_processor) |
@@ -643,17 +644,28 b' class RepoCommitsView(RepoAppView):' | |||
|
643 | 644 | |
|
644 | 645 | Session().commit() |
|
645 | 646 | |
|
646 |
|
|
|
647 | data = { | |
|
647 | 648 | 'store_fid': store_uid, |
|
648 | 649 | 'access_path': h.route_path( |
|
649 | 650 | 'download_file', fid=store_uid), |
|
650 | 651 | 'fqn_access_path': h.route_url( |
|
651 | 652 | 'download_file', fid=store_uid), |
|
653 | # for EE those are replaced by FQN links on repo-only like | |
|
654 | 'repo_access_path': h.route_url( | |
|
655 | 'download_file', fid=store_uid), | |
|
656 | 'repo_fqn_access_path': h.route_url( | |
|
657 | 'download_file', fid=store_uid), | |
|
658 | } | |
|
659 | # this data is a part of CE/EE additional code | |
|
660 | if c.rhodecode_edition_id == 'EE': | |
|
661 | data.update({ | |
|
652 | 662 | 'repo_access_path': h.route_path( |
|
653 | 663 | 'repo_artifacts_get', repo_name=self.db_repo_name, uid=store_uid), |
|
654 | 664 | 'repo_fqn_access_path': h.route_url( |
|
655 | 665 | 'repo_artifacts_get', repo_name=self.db_repo_name, uid=store_uid), |
|
656 | } | |
|
666 | }) | |
|
667 | ||
|
668 | return data | |
|
657 | 669 | |
|
658 | 670 | @LoginRequired() |
|
659 | 671 | @NotAnonymous() |
@@ -766,7 +778,7 b' class RepoCommitsView(RepoAppView):' | |||
|
766 | 778 | 'comment_id': comment.comment_id, |
|
767 | 779 | 'comment_version': comment_history.version, |
|
768 | 780 | 'comment_author_username': comment_history.author.username, |
|
769 | 'comment_author_gravatar': h.gravatar_url(comment_history.author.email, 16), | |
|
781 | 'comment_author_gravatar': h.gravatar_url(comment_history.author.email, 16, request=self.request), | |
|
770 | 782 | 'comment_created_on': h.age_component(comment_history.created_on, |
|
771 | 783 | time_is_local=True), |
|
772 | 784 | } |
@@ -282,9 +282,10 b' class RepoCompareView(RepoAppView):' | |||
|
282 | 282 | path=target_path, path1=source_path, |
|
283 | 283 | ignore_whitespace=hide_whitespace_changes, context=diff_context) |
|
284 | 284 | |
|
285 | diff_processor = diffs.DiffProcessor( | |
|
286 | txt_diff, format='newdiff', diff_limit=diff_limit, | |
|
287 | file_limit=file_limit, show_full_diff=c.fulldiff) | |
|
285 | diff_processor = diffs.DiffProcessor(txt_diff, diff_format='newdiff', | |
|
286 | diff_limit=diff_limit, | |
|
287 | file_limit=file_limit, | |
|
288 | show_full_diff=c.fulldiff) | |
|
288 | 289 | _parsed = diff_processor.prepare() |
|
289 | 290 | |
|
290 | 291 | diffset = codeblocks.DiffSet( |
@@ -17,8 +17,9 b'' | |||
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | import pytz | |
|
20 | ||
|
21 | 21 | import logging |
|
22 | import datetime | |
|
22 | 23 | |
|
23 | 24 | from pyramid.response import Response |
|
24 | 25 | |
@@ -72,8 +73,9 b' class RepoFeedView(RepoAppView):' | |||
|
72 | 73 | self.feed_items_per_page = config['feed_items_per_page'] |
|
73 | 74 | |
|
74 | 75 | def _changes(self, commit): |
|
75 | diff_processor = DiffProcessor( | |
|
76 | commit.diff(), diff_limit=self.feed_diff_limit) | |
|
76 | diff = commit.diff() | |
|
77 | diff_processor = DiffProcessor(diff, diff_format='newdiff', | |
|
78 | diff_limit=self.feed_diff_limit) | |
|
77 | 79 | _parsed = diff_processor.prepare(inline_diff=False) |
|
78 | 80 | limited_diff = isinstance(_parsed, LimitedDiffContainer) |
|
79 | 81 | |
@@ -97,7 +99,7 b' class RepoFeedView(RepoAppView):' | |||
|
97 | 99 | has_hidden_changes=has_hidden_changes |
|
98 | 100 | ) |
|
99 | 101 | |
|
100 | def _set_timezone(self, date, tzinfo=pytz.utc): | 
|
102 | def _set_timezone(self, date, tzinfo=datetime.timezone.utc): | |
|
101 | 103 | if not getattr(date, "tzinfo", None): |
|
102 | 104 | date.replace(tzinfo=tzinfo) |
|
103 | 105 | return date |
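Review note: pytz.utc is replaced by the stdlib datetime.timezone.utc, a drop-in default here. Worth flagging while touching this helper: datetime.replace() returns a new object, so the unassigned date.replace(tzinfo=tzinfo) on the unchanged line above looks like a pre-existing no-op. A working sketch of the helper:

    import datetime

    def set_timezone(date, tzinfo=datetime.timezone.utc):
        # replace() does not mutate; the result must be reassigned
        if not getattr(date, 'tzinfo', None):
            date = date.replace(tzinfo=tzinfo)
        return date

    print(set_timezone(datetime.datetime(2020, 1, 1)).isoformat())
    # -> 2020-01-01T00:00:00+00:00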
@@ -114,7 +116,10 b' class RepoFeedView(RepoAppView):' | |||
|
114 | 116 | return list(collection[-self.feed_items_per_page:]) |
|
115 | 117 | |
|
116 | 118 | def uid(self, repo_id, commit_id): |
|
117 |
return '{}:{}'.format( |
|
|
119 | return '{}:{}'.format( | |
|
120 | md5_safe(repo_id, return_type='str'), | |
|
121 | md5_safe(commit_id, return_type='str') | |
|
122 | ) | |
|
118 | 123 | |
|
119 | 124 | @LoginRequired(auth_token_access=[UserApiKeys.ROLE_FEED]) |
|
120 | 125 | @HasRepoPermissionAnyDecorator( |
@@ -126,7 +131,7 b' class RepoFeedView(RepoAppView):' | |||
|
126 | 131 | self.load_default_context() |
|
127 | 132 | force_recache = self.get_recache_flag() |
|
128 | 133 | |
|
129 | cache_namespace_uid = 'cache_repo_feed.{}'.format(self.db_repo.repo_id) | 
|
134 | cache_namespace_uid = 'repo_feed.{}'.format(self.db_repo.repo_id) | |
|
130 | 135 | condition = not (self.path_filter.is_enabled or force_recache) |
|
131 | 136 | region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid) |
|
132 | 137 | |
@@ -144,7 +149,7 b' class RepoFeedView(RepoAppView):' | |||
|
144 | 149 | for commit in reversed(self._get_commits()): |
|
145 | 150 | date = self._set_timezone(commit.date) |
|
146 | 151 | feed.add_item( |
|
147 | unique_id=self.uid(repo_id, commit.raw_id), | |
|
152 | unique_id=self.uid(str(repo_id), commit.raw_id), | |
|
148 | 153 | title=self._get_title(commit), |
|
149 | 154 | author_name=commit.author, |
|
150 | 155 | description=self._get_description(commit), |
@@ -173,7 +178,7 b' class RepoFeedView(RepoAppView):' | |||
|
173 | 178 | self.load_default_context() |
|
174 | 179 | force_recache = self.get_recache_flag() |
|
175 | 180 | |
|
176 | cache_namespace_uid = 'cache_repo_feed.{}'.format(self.db_repo.repo_id) | 
|
181 | cache_namespace_uid = 'repo_feed.{}'.format(self.db_repo.repo_id) | |
|
177 | 182 | condition = not (self.path_filter.is_enabled or force_recache) |
|
178 | 183 | region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid) |
|
179 | 184 | |
@@ -191,7 +196,7 b' class RepoFeedView(RepoAppView):' | |||
|
191 | 196 | for commit in reversed(self._get_commits()): |
|
192 | 197 | date = self._set_timezone(commit.date) |
|
193 | 198 | feed.add_item( |
|
194 | unique_id=self.uid(repo_id, commit.raw_id), | |
|
199 | unique_id=self.uid(str(repo_id), commit.raw_id), | |
|
195 | 200 | title=self._get_title(commit), |
|
196 | 201 | author_name=commit.author, |
|
197 | 202 | description=self._get_description(commit), |
@@ -21,10 +21,10 b'' | |||
|
21 | 21 | import itertools |
|
22 | 22 | import logging |
|
23 | 23 | import os |
|
24 | import shutil | |
|
25 | import tempfile | |
|
26 | 24 | import collections |
|
27 | import urllib.request, urllib.parse, urllib.error | 
|
25 | import urllib.request | |
|
26 | import urllib.parse | |
|
27 | import urllib.error | |
|
28 | 28 | import pathlib |
|
29 | 29 | |
|
30 | 30 | from pyramid.httpexceptions import HTTPNotFound, HTTPBadRequest, HTTPFound |
@@ -38,12 +38,16 b' from rhodecode.apps._base import RepoApp' | |||
|
38 | 38 | |
|
39 | 39 | from rhodecode.lib import diffs, helpers as h, rc_cache |
|
40 | 40 | from rhodecode.lib import audit_logger |
|
41 | from rhodecode.lib.hash_utils import sha1_safe | |
|
42 | from rhodecode.lib.rc_cache.archive_cache import get_archival_cache_store, get_archival_config, ReentrantLock | |
|
43 | from rhodecode.lib.str_utils import safe_bytes | |
|
41 | 44 | from rhodecode.lib.view_utils import parse_path_ref |
|
42 | 45 | from rhodecode.lib.exceptions import NonRelativePathError |
|
43 | 46 | from rhodecode.lib.codeblocks import ( |
|
44 | 47 | filenode_as_lines_tokens, filenode_as_annotated_lines_tokens) |
|
45 | from rhodecode.lib.utils2 import ( | 
|
46 | convert_line_endings, detect_mode, safe_str, str2bool, safe_int, sha1) | |
|
48 | from rhodecode.lib.utils2 import convert_line_endings, detect_mode | |
|
49 | from rhodecode.lib.type_utils import str2bool | |
|
50 | from rhodecode.lib.str_utils import safe_str, safe_int | |
|
47 | 51 | from rhodecode.lib.auth import ( |
|
48 | 52 | LoginRequired, HasRepoPermissionAnyDecorator, CSRFRequired) |
|
49 | 53 | from rhodecode.lib.vcs import path as vcspath |
@@ -61,6 +65,48 b' from rhodecode.model.db import Repositor' | |||
|
61 | 65 | log = logging.getLogger(__name__) |
|
62 | 66 | |
|
63 | 67 | |
|
68 | def get_archive_name(db_repo_name, commit_sha, ext, subrepos=False, path_sha='', with_hash=True): | |
|
69 | # original backward compat name of archive | |
|
70 | clean_name = safe_str(db_repo_name.replace('/', '_')) | |
|
71 | ||
|
72 | # e.g vcsserver-sub-1-abcfdef-archive-all.zip | |
|
73 | # vcsserver-sub-0-abcfdef-COMMIT_SHA-PATH_SHA.zip | |
|
74 | ||
|
75 | sub_repo = 'sub-1' if subrepos else 'sub-0' | |
|
76 | commit = commit_sha if with_hash else 'archive' | |
|
77 | path_marker = (path_sha if with_hash else '') or 'all' | |
|
78 | archive_name = f'{clean_name}-{sub_repo}-{commit}-{path_marker}{ext}' | |
|
79 | ||
|
80 | return archive_name | |
|
81 | ||
|
82 | ||
|
83 | def get_path_sha(at_path): | |
|
84 | return safe_str(sha1_safe(at_path)[:8]) | |
|
85 | ||
|
86 | ||
|
87 | def _get_archive_spec(fname): | |
|
88 | log.debug('Detecting archive spec for: `%s`', fname) | |
|
89 | ||
|
90 | fileformat = None | |
|
91 | ext = None | |
|
92 | content_type = None | |
|
93 | for a_type, content_type, extension in settings.ARCHIVE_SPECS: | |
|
94 | ||
|
95 | if fname.endswith(extension): | |
|
96 | fileformat = a_type | |
|
97 | log.debug('archive is of type: %s', fileformat) | |
|
98 | ext = extension | |
|
99 | break | |
|
100 | ||
|
101 | if not fileformat: | |
|
102 | raise ValueError() | |
|
103 | ||
|
104 | # left over part of whole fname is the commit | |
|
105 | commit_id = fname[:-len(ext)] | |
|
106 | ||
|
107 | return commit_id, ext, fileformat, content_type | |
|
108 | ||
|
109 | ||
|
64 | 110 | class RepoFilesView(RepoAppView): |
|
65 | 111 | |
|
66 | 112 | @staticmethod |
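Review note: the archive-naming helper moves to module level and switches to a fixed name-subN-commit-pathmarker.ext shape. A runnable copy with safe_str inlined (it is assumed to simply coerce to str):

    def get_archive_name(db_repo_name, commit_sha, ext,
                         subrepos=False, path_sha='', with_hash=True):
        clean_name = str(db_repo_name).replace('/', '_')
        sub_repo = 'sub-1' if subrepos else 'sub-0'
        commit = commit_sha if with_hash else 'archive'
        path_marker = (path_sha if with_hash else '') or 'all'
        return f'{clean_name}-{sub_repo}-{commit}-{path_marker}{ext}'

    print(get_archive_name('grp/vcsserver', 'abcfdef1', '.zip'))
    # -> grp_vcsserver-sub-0-abcfdef1-all.zip
    print(get_archive_name('grp/vcsserver', 'abcfdef1', '.zip', with_hash=False))
    # -> grp_vcsserver-sub-0-archive-all.zip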
@@ -72,12 +118,12 b' class RepoFilesView(RepoAppView):' | |||
|
72 | 118 | branches in the underlying repository. |
|
73 | 119 | """ |
|
74 | 120 | tags_and_branches = itertools.chain( |
|
75 | repo.branches.iterkeys(), | 
|
76 | repo.tags.iterkeys()) | 
|
121 | repo.branches.keys(), | |
|
122 | repo.tags.keys()) | |
|
77 | 123 | tags_and_branches = sorted(tags_and_branches, key=len, reverse=True) |
|
78 | 124 | |
|
79 | 125 | for name in tags_and_branches: |
|
80 | if f_path.startswith('{}/'.format(name)): | 
|
126 | if f_path.startswith(f'{name}/'): | |
|
81 | 127 | f_path = vcspath.relpath(f_path, name) |
|
82 | 128 | break |
|
83 | 129 | return f_path |
@@ -165,19 +211,23 b' class RepoFilesView(RepoAppView):' | |||
|
165 | 211 | if not redirect_after: |
|
166 | 212 | return None |
|
167 | 213 | |
|
214 | add_new = upload_new = "" | |
|
215 | if h.HasRepoPermissionAny( | |
|
216 | 'repository.write', 'repository.admin')(self.db_repo_name): | |
|
168 | 217 | _url = h.route_path( |
|
169 | 218 | 'repo_files_add_file', |
|
170 | 219 | repo_name=self.db_repo_name, commit_id=0, f_path='') |
|
220 | add_new = h.link_to( | |
|
221 | _('add a new file'), _url, class_="alert-link") | |
|
171 | 222 | |
|
172 | if h.HasRepoPermissionAny( | |
|
173 | 'repository.write', 'repository.admin')(self.db_repo_name): | |
|
174 | add_new = h.link_to( | |
|
175 | _('Click here to add a new file.'), _url, class_="alert-link") | |
|
176 | else: | |
|
177 | add_new = "" | |
|
223 | _url_upld = h.route_path( | |
|
224 | 'repo_files_upload_file', | |
|
225 | repo_name=self.db_repo_name, commit_id=0, f_path='') | |
|
226 | upload_new = h.link_to( | |
|
227 | _('upload a new file'), _url_upld, class_="alert-link") | |
|
178 | 228 | |
|
179 | 229 | h.flash(h.literal( |
|
180 | _('There are no files yet. %s') % add_new), category='warning') | |
|
230 | _('There are no files yet. Click here to %s or %s.') % (add_new, upload_new)), category='warning') | |
|
181 | 231 | raise HTTPFound( |
|
182 | 232 | h.route_path('repo_summary', repo_name=self.db_repo_name)) |
|
183 | 233 | |
@@ -189,7 +239,7 b' class RepoFilesView(RepoAppView):' | |||
|
189 | 239 | h.flash(h.escape(safe_str(e)), category='error') |
|
190 | 240 | raise HTTPNotFound() |
|
191 | 241 | |
|
192 | def _get_filenode_or_redirect(self, commit_obj, path): | |
|
242 | def _get_filenode_or_redirect(self, commit_obj, path, pre_load=None): | |
|
193 | 243 | """ |
|
194 | 244 | Returns file_node, if error occurs or given path is directory, |
|
195 | 245 | it'll redirect to top level path |
@@ -197,7 +247,7 b' class RepoFilesView(RepoAppView):' | |||
|
197 | 247 | _ = self.request.translate |
|
198 | 248 | |
|
199 | 249 | try: |
|
200 | file_node = commit_obj.get_node(path) | |
|
250 | file_node = commit_obj.get_node(path, pre_load=pre_load) | |
|
201 | 251 | if file_node.is_dir(): |
|
202 | 252 | raise RepositoryError('The given path is a directory') |
|
203 | 253 | except CommitDoesNotExistError: |
@@ -262,7 +312,7 b' class RepoFilesView(RepoAppView):' | |||
|
262 | 312 | 'with caching: %s[TTL: %ss]' % ( |
|
263 | 313 | repo_id, commit_id, f_path, cache_on, cache_seconds or 0)) |
|
264 | 314 | |
|
265 | cache_namespace_uid = 'cache_repo.{}'.format(repo_id) | 
|
315 | cache_namespace_uid = 'repo.{}'.format(repo_id) | |
|
266 | 316 | region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid) |
|
267 | 317 | |
|
268 | 318 | @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=cache_on) |
@@ -279,28 +329,6 b' class RepoFilesView(RepoAppView):' | |||
|
279 | 329 | rc_cache.FILE_TREE_CACHE_VER, self.db_repo.repo_name_hash, |
|
280 | 330 | self.db_repo.repo_id, commit_id, f_path, full_load, at_rev) |
|
281 | 331 | |
|
282 | def _get_archive_spec(self, fname): | |
|
283 | log.debug('Detecting archive spec for: `%s`', fname) | |
|
284 | ||
|
285 | fileformat = None | |
|
286 | ext = None | |
|
287 | content_type = None | |
|
288 | for a_type, content_type, extension in settings.ARCHIVE_SPECS: | |
|
289 | ||
|
290 | if fname.endswith(extension): | |
|
291 | fileformat = a_type | |
|
292 | log.debug('archive is of type: %s', fileformat) | |
|
293 | ext = extension | |
|
294 | break | |
|
295 | ||
|
296 | if not fileformat: | |
|
297 | raise ValueError() | |
|
298 | ||
|
299 | # left over part of whole fname is the commit | |
|
300 | commit_id = fname[:-len(ext)] | |
|
301 | ||
|
302 | return commit_id, ext, fileformat, content_type | |
|
303 | ||
|
304 | 332 | def create_pure_path(self, *parts): |
|
305 | 333 | # Split paths and sanitize them, removing any ../ etc |
|
306 | 334 | sanitized_path = [ |
@@ -325,22 +353,6 b' class RepoFilesView(RepoAppView):' | |||
|
325 | 353 | |
|
326 | 354 | return lf_enabled |
|
327 | 355 | |
|
328 | def _get_archive_name(self, db_repo_name, commit_sha, ext, subrepos=False, path_sha='', with_hash=True): | |
|
329 | # original backward compat name of archive | |
|
330 | clean_name = safe_str(db_repo_name.replace('/', '_')) | |
|
331 | ||
|
332 | # e.g vcsserver.zip | |
|
333 | # e.g vcsserver-abcdefgh.zip | |
|
334 | # e.g vcsserver-abcdefgh-defghijk.zip | |
|
335 | archive_name = '{}{}{}{}{}{}'.format( | |
|
336 | clean_name, | |
|
337 | '-sub' if subrepos else '', | |
|
338 | commit_sha, | |
|
339 | '-{}'.format('plain') if not with_hash else '', | |
|
340 | '-{}'.format(path_sha) if path_sha else '', | |
|
341 | ext) | |
|
342 | return archive_name | |
|
343 | ||
|
344 | 356 | @LoginRequired() |
|
345 | 357 | @HasRepoPermissionAnyDecorator( |
|
346 | 358 | 'repository.read', 'repository.write', 'repository.admin') |
@@ -360,7 +372,7 b' class RepoFilesView(RepoAppView):' | |||
|
360 | 372 | |
|
361 | 373 | try: |
|
362 | 374 | commit_id, ext, fileformat, content_type = \ |
|
363 | self._get_archive_spec(fname) | 
|
375 | _get_archive_spec(fname) | |
|
364 | 376 | except ValueError: |
|
365 | 377 | return Response(_('Unknown archive type for: `{}`').format( |
|
366 | 378 | h.escape(fname))) |
@@ -383,69 +395,80 b' class RepoFilesView(RepoAppView):' | |||
|
383 | 395 | except Exception: |
|
384 | 396 | return Response(_('No node at path {} for this repository').format(h.escape(at_path))) |
|
385 | 397 | |
|
386 | # path sha is part of subdir | |
|
387 | path_sha = '' | |
|
388 | if at_path != default_at_path: | |
|
389 | path_sha = sha1(at_path)[:8] | |
|
390 | short_sha = '-{}'.format(safe_str(commit.short_id)) | |
|
391 | # used for cache etc | |
|
392 | archive_name = self._get_archive_name( | |
|
393 | self.db_repo_name, commit_sha=short_sha, ext=ext, subrepos=subrepos, | |
|
394 | path_sha=path_sha, with_hash=with_hash) | |
|
398 | path_sha = get_path_sha(at_path) | |
|
399 | ||
|
400 | # used for cache etc, consistent unique archive name | |
|
401 | archive_name_key = get_archive_name( | |
|
402 | self.db_repo_name, commit_sha=commit.short_id, ext=ext, subrepos=subrepos, | |
|
403 | path_sha=path_sha, with_hash=True) | |
|
395 | 404 | |
|
396 | 405 | if not with_hash: |
|
397 | short_sha = '' | |
|
398 | 406 | path_sha = '' |
|
399 | 407 | |
|
400 | 408 | # what end client gets served |
|
401 | response_archive_name = self._get_archive_name( | 
|
402 | self.db_repo_name, commit_sha=short_sha, ext=ext, subrepos=subrepos, | 
|
409 | response_archive_name = get_archive_name( | |
|
410 | self.db_repo_name, commit_sha=commit.short_id, ext=ext, subrepos=subrepos, | |
|
403 | 411 | path_sha=path_sha, with_hash=with_hash) |
|
412 | ||
|
404 | 413 | # remove extension from our archive directory name |
|
405 | 414 | archive_dir_name = response_archive_name[:-len(ext)] |
|
406 | 415 | |
|
407 | use_cached_archive = False | |
|
408 | archive_cache_dir = CONFIG.get('archive_cache_dir') | |
|
409 | archive_cache_enabled = archive_cache_dir and not self.request.GET.get('no_cache') | |
|
410 | cached_archive_path = None | |
|
416 | archive_cache_disable = self.request.GET.get('no_cache') | |
|
417 | ||
|
418 | d_cache = get_archival_cache_store(config=CONFIG) | |
|
419 | # NOTE: we get the config to pass to a call to lazy-init the SAME type of cache on vcsserver | |
|
420 | d_cache_conf = get_archival_config(config=CONFIG) | |
|
411 | 421 | |
|
412 | if archive_cache_enabled: | |
|
413 | # check if we it's ok to write, and re-create the archive cache | |
|
414 | if not os.path.isdir(CONFIG['archive_cache_dir']): | |
|
415 | os.makedirs(CONFIG['archive_cache_dir']) | |
|
416 | ||
|
417 | cached_archive_path = os.path.join( | |
|
418 | CONFIG['archive_cache_dir'], archive_name) | |
|
419 | if os.path.isfile(cached_archive_path): | |
|
420 | log.debug('Found cached archive in %s', cached_archive_path) | |
|
421 | fd, archive = None, cached_archive_path | |
|
422 | reentrant_lock_key = archive_name_key + '.lock' | |
|
423 | with ReentrantLock(d_cache, reentrant_lock_key): | |
|
424 | # This is also a cache key | |
|
425 | use_cached_archive = False | |
|
426 | if archive_name_key in d_cache and not archive_cache_disable: | |
|
427 | reader, tag = d_cache.get(archive_name_key, read=True, tag=True, retry=True) | |
|
422 | 428 | use_cached_archive = True |
|
429 | log.debug('Found cached archive as key=%s tag=%s, serving archive from cache reader=%s', | |
|
430 | archive_name_key, tag, reader.name) | |
|
423 | 431 | else: |
|
424 | log.debug('Archive %s is not yet cached', archive_name) | |
|
432 | reader = None | |
|
433 | log.debug('Archive with key=%s is not yet cached, creating one now...', archive_name_key) | |
|
425 | 434 | |
|
426 | 435 | # generate new archive, as previous was not found in the cache |
|
427 | if not use_cached_archive: | 
|
428 | _dir = os.path.abspath(archive_cache_dir) if archive_cache_dir else None | |
|
429 | fd, archive = tempfile.mkstemp(dir=_dir) | |
|
430 | log.debug('Creating new temp archive in %s', archive) | |
|
436 | if not reader: | |
|
437 | # first remove expired items, before generating a new one :) | |
|
438 | # we do this manually because automatic eviction is disabled | 
|
439 | d_cache.cull(retry=True) | |
|
440 | ||
|
431 | 441 | try: |
|
432 | commit.archive_repo(archive, archive_dir_name=archive_dir_name, | |
|
442 | commit.archive_repo(archive_name_key, archive_dir_name=archive_dir_name, | |
|
433 | 443 | kind=fileformat, subrepos=subrepos, |
|
434 | archive_at_path=at_path) | |
|
444 | archive_at_path=at_path, cache_config=d_cache_conf) | |
|
435 | 445 | except ImproperArchiveTypeError: |
|
436 | 446 | return _('Unknown archive type') |
|
437 | if archive_cache_enabled: | |
|
438 | # if we generated the archive and we have cache enabled | |
|
439 | # let's use this for future | |
|
440 | log.debug('Storing new archive in %s', cached_archive_path) | |
|
441 | shutil.move(archive, cached_archive_path) | |
|
442 | archive = cached_archive_path | |
|
447 | ||
|
448 | reader, tag = d_cache.get(archive_name_key, read=True, tag=True, retry=True) | |
|
449 | ||
|
450 | if not reader: | |
|
451 | raise ValueError('archive cache reader is empty, failed to fetch file from distributed archive cache') | |
|
443 | 452 | |
|
453 | def archive_iterator(_reader): | |
|
454 | while 1: | |
|
455 | data = _reader.read(1024) | |
|
456 | if not data: | |
|
457 | break | |
|
458 | yield data | |
|
459 | ||
|
460 | response = Response(app_iter=archive_iterator(reader)) | |
|
461 | response.content_disposition = f'attachment; filename={response_archive_name}' | |
|
462 | response.content_type = str(content_type) | |
|
463 | ||
|
464 | try: | |
|
465 | return response | |
|
466 | finally: | |
|
444 | 467 | # store download action |
|
445 | 468 | audit_logger.store_web( |
|
446 | 469 | 'repo.archive.download', action_data={ |
|
447 | 470 | 'user_agent': self.request.user_agent, |
|
448 | 'archive_name': archive_name, | |
|
471 | 'archive_name': archive_name_key, | |
|
449 | 472 | 'archive_spec': fname, |
|
450 | 473 | 'archive_cached': use_cached_archive}, |
|
451 | 474 | user=self._rhodecode_user, |
@@ -453,33 +476,13 b' class RepoFilesView(RepoAppView):' | |||
|
453 | 476 | commit=True |
|
454 | 477 | ) |
|
455 | 478 | |
|
456 | def get_chunked_archive(archive_path): | |
|
457 | with open(archive_path, 'rb') as stream: | |
|
458 | while True: | |
|
459 | data = stream.read(16 * 1024) | |
|
460 | if not data: | |
|
461 | if fd: # fd means we used temporary file | |
|
462 | os.close(fd) | |
|
463 | if not archive_cache_enabled: | |
|
464 | log.debug('Destroying temp archive %s', archive_path) | |
|
465 | os.remove(archive_path) | |
|
466 | break | |
|
467 | yield data | |
|
468 | ||
|
469 | response = Response(app_iter=get_chunked_archive(archive)) | |
|
470 | response.content_disposition = str('attachment; filename=%s' % response_archive_name) | |
|
471 | response.content_type = str(content_type) | |
|
472 | ||
|
473 | return response | |
|
474 | ||
|
475 | 479 | def _get_file_node(self, commit_id, f_path): |
|
476 | 480 | if commit_id not in ['', None, 'None', '0' * 12, '0' * 40]: |
|
477 | 481 | commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id) |
|
478 | 482 | try: |
|
479 | 483 | node = commit.get_node(f_path) |
|
480 | 484 | if node.is_dir(): |
|
481 | raise NodeError('%s path is a %s not a file' | 
|
482 | % (node, type(node))) | |
|
485 | raise NodeError(f'{node} path is a {type(node)} not a file') | |
|
483 | 486 | except NodeDoesNotExistError: |
|
484 | 487 | commit = EmptyCommit( |
|
485 | 488 | commit_id=commit_id, |
@@ -489,12 +492,12 b' class RepoFilesView(RepoAppView):' | |||
|
489 | 492 | message=commit.message, |
|
490 | 493 | author=commit.author, |
|
491 | 494 | date=commit.date) |
|
492 | node = FileNode(f_path, '', commit=commit) | |
|
495 | node = FileNode(safe_bytes(f_path), b'', commit=commit) | |
|
493 | 496 | else: |
|
494 | 497 | commit = EmptyCommit( |
|
495 | 498 | repo=self.rhodecode_vcs_repo, |
|
496 | 499 | alias=self.rhodecode_vcs_repo.alias) |
|
497 | node = FileNode(f_path, '', commit=commit) | |
|
500 | node = FileNode(safe_bytes(f_path), b'', commit=commit) | |
|
498 | 501 | return node |
|
499 | 502 | |
|
500 | 503 | @LoginRequired() |
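Review note: FileNode now takes bytes for both the path and the content. A sketch of the boundary this creates, with safe_bytes assumed to be a plain str-to-bytes coercion helper:

    def safe_bytes(value, encoding='utf-8'):
        # assumed behavior: pass bytes through, encode str
        return value if isinstance(value, bytes) else value.encode(encoding)

    f_path = 'docs/index.rst'
    node_args = (safe_bytes(f_path), b'')           # path and content are bytes
    nodes = {safe_bytes(f_path): {'content': b''}}  # same rule for mapping keys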
@@ -551,12 +554,13 b' class RepoFilesView(RepoAppView):' | |||
|
551 | 554 | _diff = diffs.get_gitdiff(node1, node2, |
|
552 | 555 | ignore_whitespace=ignore_whitespace, |
|
553 | 556 | context=line_context) |
|
554 | diff = diffs.DiffProcessor(_diff, format='gitdiff') | 
|
557 | # NOTE: this was using diff_format='gitdiff' | |
|
558 | diff = diffs.DiffProcessor(_diff, diff_format='newdiff') | |
|
555 | 559 | |
|
556 | 560 | response = Response(self.path_filter.get_raw_patch(diff)) |
|
557 | 561 | response.content_type = 'text/plain' |
|
558 | 562 | response.content_disposition = ( |
|
559 | 'attachment; filename=%s_%s_vs_%s.diff' % (f_path, diff1, diff2) | 
|
563 | f'attachment; filename={f_path}_{diff1}_vs_{diff2}.diff' | |
|
560 | 564 | ) |
|
561 | 565 | charset = self._get_default_encoding(c) |
|
562 | 566 | if charset: |
@@ -567,7 +571,8 b' class RepoFilesView(RepoAppView):' | |||
|
567 | 571 | _diff = diffs.get_gitdiff(node1, node2, |
|
568 | 572 | ignore_whitespace=ignore_whitespace, |
|
569 | 573 | context=line_context) |
|
570 | diff = diffs.DiffProcessor(_diff, format='gitdiff') | 
|
574 | # NOTE: this was using diff_format='gitdiff' | |
|
575 | diff = diffs.DiffProcessor(_diff, diff_format='newdiff') | |
|
571 | 576 | |
|
572 | 577 | response = Response(self.path_filter.get_raw_patch(diff)) |
|
573 | 578 | response.content_type = 'text/plain' |
@@ -637,8 +642,7 b' class RepoFilesView(RepoAppView):' | |||
|
637 | 642 | c.annotate = view_name == 'repo_files:annotated' |
|
638 | 643 | # default is false, but .rst/.md files later are auto rendered, we can |
|
639 | 644 | # overwrite auto rendering by setting this GET flag |
|
640 | c.renderer = view_name == 'repo_files:rendered' or \ | 
|
641 | not self.request.GET.get('no-render', False) | |
|
645 | c.renderer = view_name == 'repo_files:rendered' or not self.request.GET.get('no-render', False) | |
|
642 | 646 | |
|
643 | 647 | commit_id, f_path = self._get_commit_and_path() |
|
644 | 648 | |
@@ -675,7 +679,7 b' class RepoFilesView(RepoAppView):' | |||
|
675 | 679 | |
|
676 | 680 | # files or dirs |
|
677 | 681 | try: |
|
678 | c.file = c.commit.get_node(f_path) | |
|
682 | c.file = c.commit.get_node(f_path, pre_load=['is_binary', 'size', 'data']) | |
|
679 | 683 | |
|
680 | 684 | c.file_author = True |
|
681 | 685 | c.file_tree = '' |
@@ -917,7 +921,7 b' class RepoFilesView(RepoAppView):' | |||
|
917 | 921 | 'with caching: %s[TTL: %ss]' % ( |
|
918 | 922 | repo_id, commit_id, f_path, cache_on, cache_seconds or 0)) |
|
919 | 923 | |
|
920 | cache_namespace_uid = 'cache_repo.{}'.format(repo_id) | 
|
924 | cache_namespace_uid = 'repo.{}'.format(repo_id) | |
|
921 | 925 | region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid) |
|
922 | 926 | |
|
923 | 927 | @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=cache_on) |
@@ -950,7 +954,7 b' class RepoFilesView(RepoAppView):' | |||
|
950 | 954 | |
|
951 | 955 | metadata = self._get_nodelist_at_commit( |
|
952 | 956 | self.db_repo_name, self.db_repo.repo_id, commit.raw_id, f_path) |
|
953 | return {'nodes': metadata} | |
|
957 | return {'nodes': [x for x in metadata]} | |
|
954 | 958 | |
|
955 | 959 | def _create_references(self, branches_or_tags, symbolic_reference, f_path, ref_type): |
|
956 | 960 | items = [] |
@@ -967,7 +971,7 b' class RepoFilesView(RepoAppView):' | |||
|
967 | 971 | |
|
968 | 972 | # NOTE(dan): old code we used in "diff" mode compare |
|
969 | 973 | new_f_path = vcspath.join(name, f_path) |
|
970 | return u'%s@%s' % (new_f_path, commit_id) | 
|
974 | return f'{new_f_path}@{commit_id}' | |
|
971 | 975 | |
|
972 | 976 | def _get_node_history(self, commit_obj, f_path, commits=None): |
|
973 | 977 | """ |
@@ -1194,8 +1198,8 b' class RepoFilesView(RepoAppView):' | |||
|
1194 | 1198 | message = self.request.POST.get('message') or c.default_message |
|
1195 | 1199 | try: |
|
1196 | 1200 | nodes = { |
|
1197 | node_path: { | |
|
1198 | 'content': '' | |
|
1201 | safe_bytes(node_path): { | |
|
1202 | 'content': b'' | |
|
1199 | 1203 | } |
|
1200 | 1204 | } |
|
1201 | 1205 | ScmModel().delete_nodes( |
@@ -1273,7 +1277,7 b' class RepoFilesView(RepoAppView):' | |||
|
1273 | 1277 | c.default_message = _('Edited file {} via RhodeCode Enterprise').format(f_path) |
|
1274 | 1278 | c.f_path = f_path |
|
1275 | 1279 | |
|
1276 | old_content = c.file.content | |
|
1280 | old_content = c.file.str_content | |
|
1277 | 1281 | sl = old_content.splitlines(1) |
|
1278 | 1282 | first_line = sl[0] if sl else '' |
|
1279 | 1283 | |
@@ -1283,7 +1287,8 b' class RepoFilesView(RepoAppView):' | |||
|
1283 | 1287 | content = convert_line_endings(r_post.get('content', ''), line_ending_mode) |
|
1284 | 1288 | |
|
1285 | 1289 | message = r_post.get('message') or c.default_message |
|
1286 | org_node_path = c.file.unicode_path | |
|
1290 | ||
|
1291 | org_node_path = c.file.str_path | |
|
1287 | 1292 | filename = r_post['filename'] |
|
1288 | 1293 | |
|
1289 | 1294 | root_path = c.file.dir_path |
@@ -1299,10 +1304,10 b' class RepoFilesView(RepoAppView):' | |||
|
1299 | 1304 | |
|
1300 | 1305 | try: |
|
1301 | 1306 | mapping = { |
|
1302 | c.file.unicode_path: { | 
|
1307 | c.file.bytes_path: { | |
|
1303 | 1308 | 'org_filename': org_node_path, |
|
1304 | 'filename': node_path, | |
|
1305 | 'content': content, | |
|
1309 | 'filename': safe_bytes(node_path), | |
|
1310 | 'content': safe_bytes(content), | |
|
1306 | 1311 | 'lexer': '', |
|
1307 | 1312 | 'op': 'mod', |
|
1308 | 1313 | 'mode': c.file.mode |
@@ -1400,8 +1405,6 b' class RepoFilesView(RepoAppView):' | |||
|
1400 | 1405 | filename = r_post.get('filename') |
|
1401 | 1406 | unix_mode = 0 |
|
1402 | 1407 | |
|
1403 | content = convert_line_endings(r_post.get('content', ''), unix_mode) | |
|
1404 | ||
|
1405 | 1408 | if not filename: |
|
1406 | 1409 | # If there's no commit, redirect to repo summary |
|
1407 | 1410 | if type(c.commit) is EmptyCommit: |
@@ -1417,9 +1420,10 b' class RepoFilesView(RepoAppView):' | |||
|
1417 | 1420 | node_path = pure_path.as_posix().lstrip('/') |
|
1418 | 1421 | |
|
1419 | 1422 | author = self._rhodecode_db_user.full_contact |
|
1423 | content = convert_line_endings(r_post.get('content', ''), unix_mode) | |
|
1420 | 1424 | nodes = { |
|
1421 | node_path: { | |
|
1422 | 'content': content | |
|
1425 | safe_bytes(node_path): { | |
|
1426 | 'content': safe_bytes(content) | |
|
1423 | 1427 | } |
|
1424 | 1428 | } |
|
1425 | 1429 | |
@@ -1518,7 +1522,7 b' class RepoFilesView(RepoAppView):' | |||
|
1518 | 1522 | pure_path = self.create_pure_path(root_path, filename) |
|
1519 | 1523 | node_path = pure_path.as_posix().lstrip('/') |
|
1520 | 1524 | |
|
1521 | nodes[node_path] = { | |
|
1525 | nodes[safe_bytes(node_path)] = { | |
|
1522 | 1526 | 'content': content |
|
1523 | 1527 | } |
|
1524 | 1528 |
@@ -234,8 +234,7 b' class RepoPullRequestsView(RepoAppView, ' | |||
|
234 | 234 | source_repo, source_ref_id, target_ref_id, |
|
235 | 235 | hide_whitespace_changes, diff_context) |
|
236 | 236 | |
|
237 | diff_processor = diffs.DiffProcessor( | |
|
238 | vcs_diff, format='newdiff', diff_limit=diff_limit, | |
|
237 | diff_processor = diffs.DiffProcessor(vcs_diff, diff_format='newdiff', diff_limit=diff_limit, | |
|
239 | 238 | file_limit=file_limit, show_full_diff=fulldiff) |
|
240 | 239 | |
|
241 | 240 | _parsed = diff_processor.prepare() |
@@ -259,8 +258,8 b' class RepoPullRequestsView(RepoAppView, ' | |||
|
259 | 258 | ignore_whitespace=hide_whitespace_changes, |
|
260 | 259 | context=diff_context) |
|
261 | 260 | |
|
262 | diff_processor = diffs.DiffProcessor( | |
|
263 | vcs_diff, format='newdiff', diff_limit=diff_limit, | |
|
261 | diff_processor = diffs.DiffProcessor(vcs_diff, diff_format='newdiff', | |
|
262 | diff_limit=diff_limit, | |
|
264 | 263 | file_limit=file_limit, show_full_diff=fulldiff) |
|
265 | 264 | |
|
266 | 265 | _parsed = diff_processor.prepare() |
@@ -933,7 +932,7 b' class RepoPullRequestsView(RepoAppView, ' | |||
|
933 | 932 | .filter(Repository.fork_id == self.db_repo.parent.repo_id) |
|
934 | 933 | |
|
935 | 934 | if filter_query: |
|
936 | ilike_expression = u'%{}%'.format(safe_unicode(filter_query)) | 
|
935 | ilike_expression = u'%{}%'.format(safe_str(filter_query)) | |
|
937 | 936 | parents_query = parents_query.filter( |
|
938 | 937 | Repository.repo_name.ilike(ilike_expression)) |
|
939 | 938 | parents = parents_query.limit(20).all() |
@@ -249,3 +249,29 b' class RepoSettingsView(RepoAppView):' | |||
|
249 | 249 | category='error') |
|
250 | 250 | raise HTTPFound( |
|
251 | 251 | h.route_path('edit_repo_statistics', repo_name=self.db_repo_name)) |
|
252 | ||
|
253 | @LoginRequired() | |
|
254 | @HasRepoPermissionAnyDecorator('repository.admin') | |
|
255 | def repo_settings_quick_actions(self): | |
|
256 | _ = self.request.translate | |
|
257 | ||
|
258 | set_lock = self.request.GET.get('set_lock') | |
|
259 | set_unlock = self.request.GET.get('set_unlock') | |
|
260 | ||
|
261 | try: | |
|
262 | if set_lock: | |
|
263 | Repository.lock(self.db_repo, self._rhodecode_user.user_id, | |
|
264 | lock_reason=Repository.LOCK_WEB) | |
|
265 | h.flash(_('Locked repository'), category='success') | |
|
266 | elif set_unlock: | |
|
267 | Repository.unlock(self.db_repo) | |
|
268 | h.flash(_('Unlocked repository'), category='success') | |
|
269 | except Exception as e: | |
|
270 | log.exception("Exception during unlocking") | |
|
271 | h.flash(_('An error occurred during unlocking'), category='error') | |
|
272 | ||
|
273 | raise HTTPFound( | |
|
274 | h.route_path('repo_summary', repo_name=self.db_repo_name)) | |
|
275 | ||
|
276 | ||
|
277 |
@@ -63,23 +63,23 b' class SshWrapper(object):' | |||
|
63 | 63 | from rhodecode.model.meta import raw_query_executor, Base |
|
64 | 64 | |
|
65 | 65 | table = Table('user_ssh_keys', Base.metadata, autoload=False) |
|
66 | atime = datetime.datetime.utcnow() | |
|
66 | 67 | stmt = ( |
|
67 | 68 | table.update() |
|
68 | 69 | .where(table.c.ssh_key_id == key_id) |
|
69 | .values(accessed_on=datetime.datetime.utcnow()) | 
|
70 | .returning(table.c.accessed_on, table.c.ssh_key_fingerprint) | |
|
70 | .values(accessed_on=atime) | |
|
71 | # no MySQL Support for .returning :(( | |
|
72 | #.returning(table.c.accessed_on, table.c.ssh_key_fingerprint) | |
|
71 | 73 | ) |
|
72 | 74 | |
|
73 | scalar_res = None | 
|
75 | res_count = None | |
|
74 | 76 | with raw_query_executor() as session: |
|
75 | 77 | result = session.execute(stmt) |
|
76 | 78 | if result.rowcount: |
|
77 | scalar_res = result.first() | 
|
79 | res_count = result.rowcount | |
|
78 | 80 | |
|
79 | if scalar_res: | 
|
80 | atime, ssh_key_fingerprint = scalar_res | |
|
81 | log.debug('Update key id:`%s` fingerprint:`%s` access time', | |
|
82 | key_id, ssh_key_fingerprint) | |
|
81 | if res_count: | |
|
82 | log.debug('Update key id:`%s` access time', key_id) | |
|
83 | 83 | |
|
84 | 84 | def get_user(self, user_id): |
|
85 | 85 | user = AttributeDict() |
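Review note: as the added comment says, MySQL has no UPDATE ... RETURNING, so the code falls back to result.rowcount to learn whether the key row was touched. The portable pattern in isolation:

    import datetime
    from sqlalchemy import (Column, DateTime, Integer, MetaData, Table,
                            create_engine, update)

    metadata = MetaData()
    keys = Table('user_ssh_keys', metadata,
                 Column('ssh_key_id', Integer, primary_key=True),
                 Column('accessed_on', DateTime))

    engine = create_engine('sqlite://')
    metadata.create_all(engine)

    with engine.begin() as conn:
        conn.execute(keys.insert().values(ssh_key_id=1))
        stmt = (update(keys)
                .where(keys.c.ssh_key_id == 1)
                .values(accessed_on=datetime.datetime.utcnow()))
        result = conn.execute(stmt)
        print(result.rowcount)  # 1 -> the row existed and was updated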
@@ -40,8 +40,8 b" SSH_OPTS = 'no-pty,no-port-forwarding,no" | |||
|
40 | 40 | |
|
41 | 41 | def get_all_active_keys(): |
|
42 | 42 | result = UserSshKeys.query() \ |
|
43 | .options(joinedload(UserSshKeys.user)) \ | 
|
44 | .filter(UserSshKeys.user != User.get_default_user()) \ | 
|
43 | .join(User) \ | |
|
44 | .filter(User != User.get_default_user()) \ | |
|
45 | 45 | .filter(User.active == true()) \ |
|
46 | 46 | .all() |
|
47 | 47 | return result |
@@ -55,6 +55,10 b' def _generate_ssh_authorized_keys_file(' | |||
|
55 | 55 | os.path.expanduser(authorized_keys_file_path)) |
|
56 | 56 | tmp_file_dir = os.path.dirname(authorized_keys_file_path) |
|
57 | 57 | |
|
58 | if not os.path.exists(tmp_file_dir): | |
|
59 | log.debug('SSH authorized_keys file dir does not exist, creating one now...') | |
|
60 | os.makedirs(tmp_file_dir) | |
|
61 | ||
|
58 | 62 | all_active_keys = get_all_active_keys() |
|
59 | 63 | |
|
60 | 64 | if allow_shell: |
@@ -65,6 +69,7 b' def _generate_ssh_authorized_keys_file(' | |||
|
65 | 69 | if not os.path.isfile(authorized_keys_file_path): |
|
66 | 70 | log.debug('Creating file at %s', authorized_keys_file_path) |
|
67 | 71 | with open(authorized_keys_file_path, 'w'): |
|
72 | # create a file with write access | |
|
68 | 73 | pass |
|
69 | 74 | |
|
70 | 75 | if not os.access(authorized_keys_file_path, os.R_OK): |
@@ -78,7 +83,7 b' def _generate_ssh_authorized_keys_file(' | |||
|
78 | 83 | dir=tmp_file_dir) |
|
79 | 84 | |
|
80 | 85 | now = datetime.datetime.utcnow().isoformat() |
|
81 | keys_file = os.fdopen(fd, 'wb') | 
|
86 | keys_file = os.fdopen(fd, 'wt') | |
|
82 | 87 | keys_file.write(HEADER.format(len(all_active_keys), now)) |
|
83 | 88 | ini_path = rhodecode.CONFIG['__file__'] |
|
84 | 89 |
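Review note: os.fdopen() now opens the descriptor inherited from mkstemp in text mode ('wt'), matching the str header written below it. The mkstemp-then-wrap pattern in isolation:

    import os
    import tempfile

    fd, path = tempfile.mkstemp(dir='.')
    with os.fdopen(fd, 'wt') as keys_file:  # closing the wrapper closes the fd
        keys_file.write('# 0 keys, generated 2020-01-01T00:00:00\n')
    os.replace(path, 'authorized_keys_test')  # atomic move into place
    os.remove('authorized_keys_test')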
@@ -75,7 +75,7 b' class UserGroupsView(UserGroupAppView):' | |||
|
75 | 75 | 'first_name': user.first_name, |
|
76 | 76 | 'last_name': user.last_name, |
|
77 | 77 | 'username': user.username, |
|
78 | 'icon_link': h.gravatar_url(user.email, 30), | |
|
78 | 'icon_link': h.gravatar_url(user.email, 30, request=self.request), | |
|
79 | 79 | 'value_display': h.person(user.email), |
|
80 | 80 | 'value': user.username, |
|
81 | 81 | 'value_type': 'user', |