@@ -1,4 +1,5 @@
 
+import dataclasses
 # Copyright (C) 2016-2020 RhodeCode GmbH
 #
 # This program is free software: you can redistribute it and/or modify
@@ -30,8 +31,13 @@ from rhodecode.translation import _
 
 log = logging.getLogger(__name__)
 
-NavListEntry = collections.namedtuple(
-    'NavListEntry', ['key', 'name', 'url', 'active_list'])
+
+@dataclasses.dataclass
+class NavListEntry:
+    key: str
+    name: str
+    url: str
+    active_list: list
 
 
 class NavEntry(object):
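Note: the namedtuple-to-dataclass switch keeps both attribute access (entry.key) and positional construction, which is why the NavListEntry(...) call site later in this file needs no change. A minimal standalone sketch of the equivalence (illustrative, not RhodeCode code):

```python
import collections
import dataclasses

# old shape: immutable tuple with named fields
NavListEntryTuple = collections.namedtuple(
    'NavListEntry', ['key', 'name', 'url', 'active_list'])

# new shape: class with type hints; positional construction still works
@dataclasses.dataclass
class NavListEntry:
    key: str
    name: str
    url: str
    active_list: list

old = NavListEntryTuple('admin', 'Admin', '/admin', ['admin'])
new = NavListEntry('admin', 'Admin', '/admin', ['admin'])
assert (old.key, old.url) == (new.key, new.url)

# one behavioural difference: the dataclass is not tuple-unpackable,
# so any `key, name, url, active = entry` call site would need updating
```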
@@ -105,7 +111,7 @@ class NavigationRegistry(object):
     def __init__(self, labs_active=False):
         self._registered_entries = collections.OrderedDict()
         for item in self.__class__._base_entries:
-            self._registered_entries[item.key] = item
+            self.add_entry(item)
 
         if labs_active:
             self.add_entry(self._labs_entry)
@@ -117,7 +123,8 @@ class NavigationRegistry(object):
         nav_list = [
             NavListEntry(i.key, i.get_localized_name(request),
                          i.generate_url(request), i.active_list)
-            for i in self._registered_entries.values()]
+            for i in self._registered_entries.values()
+        ]
         return nav_list
 
 
@@ -41,13 +41,12 @@ def trigger_user_permission_flush(event)
     automatic flush of permission caches, so the users affected receive new permissions
     Right Away
     """
-    invalidate = True
    affected_user_ids = set(event.user_ids)
    for user_id in affected_user_ids:
        for cache_namespace_uid_tmpl in cache_namespaces:
            cache_namespace_uid = cache_namespace_uid_tmpl.format(user_id)
-            del_keys = rc_cache.clear_cache_namespace(
-                'cache_perms', cache_namespace_uid, invalidate=invalidate)
+            del_keys = rc_cache.clear_cache_namespace('cache_perms', cache_namespace_uid, method=rc_cache.CLEAR_INVALIDATE)
            log.debug('Invalidated %s cache keys for user_id: %s and namespace %s',
                      del_keys, user_id, cache_namespace_uid)
 
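Note: the change above replaces a boolean `invalidate` flag with an explicit `method=rc_cache.CLEAR_INVALIDATE` constant; both the function and the constant are RhodeCode internals. The underlying pattern, grouping keys under a per-user namespace so they can all be dropped in one call, can be sketched standalone (illustrative names, not the rc_cache API):

```python
# illustrative namespace-scoped cache, not RhodeCode's rc_cache API
class NamespacedCache:
    def __init__(self):
        self._store = {}  # (namespace, key) -> value

    def set(self, namespace, key, value):
        self._store[(namespace, key)] = value

    def clear_namespace(self, namespace):
        # drop every key in the namespace, return how many were removed
        doomed = [k for k in self._store if k[0] == namespace]
        for k in doomed:
            del self._store[k]
        return len(doomed)

cache = NamespacedCache()
cache_namespaces = ['cache_user_auth.{}', 'cache_user_ip.{}']
for user_id in (2, 3):
    for tmpl in cache_namespaces:
        cache.set(tmpl.format(user_id), 'permissions', {'repo1': 'read'})

# flushing user 2 leaves user 3 untouched
assert cache.clear_namespace('cache_user_auth.2') == 1
```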
@@ -28,6 +28,7 @@ from pyramid.httpexceptions import HTTPF
 
 from pyramid.renderers import render
 from pyramid.response import Response
+from sqlalchemy.orm import aliased
 
 from rhodecode import events
 from rhodecode.apps._base import BaseAppView, DataGridAppView
@@ -160,6 +161,7 @@ class AdminRepoGroupsView(BaseAppView, D
             .count()
 
         repo_count = count(Repository.repo_id)
+        OwnerUser = aliased(User)
         base_q = Session.query(
             RepoGroup.group_name,
             RepoGroup.group_name_hash,
@@ -167,19 +169,20 @@ class AdminRepoGroupsView(BaseAppView, D
             RepoGroup.group_id,
             RepoGroup.personal,
             RepoGroup.updated_on,
-            User,
+            OwnerUser.username.label('owner_username'),
             repo_count.label('repos_count')
         ) \
             .filter(or_(
                 # generate multiple IN to fix limitation problems
                 *in_filter_generator(RepoGroup.group_id, allowed_ids)
             )) \
-            .outerjoin(Repository, Repository.group_id == RepoGroup.group_id) \
-            .join(User, User.user_id == RepoGroup.user_id) \
-            .group_by(RepoGroup, User)
+            .outerjoin(Repository, RepoGroup.group_id == Repository.group_id) \
+            .join(OwnerUser, RepoGroup.user_id == OwnerUser.user_id)
+
+        base_q = base_q.group_by(RepoGroup, OwnerUser)
 
         if search_q:
-            like_expression = u'%{}%'.format(safe_unicode(search_q))
+            like_expression = u'%{}%'.format(safe_str(search_q))
             base_q = base_q.filter(or_(
                 RepoGroup.group_name.ilike(like_expression),
             ))
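Note: `aliased(User)` gives the owner join its own name, and selecting `OwnerUser.username.label('owner_username')` returns a scalar column instead of a full User entity, which is what lets the query stop grouping by the whole `User`. The same device enables the Repository-to-Repository fork self-join in the repos view further down. A self-contained sketch with toy models (not RhodeCode's schema):

```python
from sqlalchemy import Column, ForeignKey, Integer, String, create_engine
from sqlalchemy.orm import Session, aliased, declarative_base

Base = declarative_base()

class User(Base):
    __tablename__ = 'users'
    user_id = Column(Integer, primary_key=True)
    username = Column(String)

class RepoGroup(Base):
    __tablename__ = 'repo_groups'
    group_id = Column(Integer, primary_key=True)
    group_name = Column(String)
    user_id = Column(Integer, ForeignKey('users.user_id'))

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
session = Session(engine)
session.add(User(user_id=1, username='admin'))
session.add(RepoGroup(group_id=1, group_name='docs', user_id=1))
session.commit()

# alias the users table for its "owner" role in this query
OwnerUser = aliased(User)
row = (
    session.query(
        RepoGroup.group_name,
        OwnerUser.username.label('owner_username'),  # scalar, not the entity
    )
    .join(OwnerUser, RepoGroup.user_id == OwnerUser.user_id)
    .one()
)
assert row.owner_username == 'admin'  # the label becomes a row attribute
```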
@@ -197,7 +200,7 @@ class AdminRepoGroupsView(BaseAppView, D
             sort_col = repo_count
             sort_defined = True
         elif order_by == 'user_username':
-            sort_col = User.username
+            sort_col = OwnerUser.username
         else:
             sort_col = getattr(RepoGroup, order_by, None)
 
@@ -225,11 +228,10 @@ class AdminRepoGroupsView(BaseAppView, D
                 "last_changeset_raw": "",
 
                 "desc": desc(repo_gr.group_description, repo_gr.personal),
-                "owner": user_profile(repo_gr.User.username),
+                "owner": user_profile(repo_gr.owner_username),
                 "top_level_repos": repo_gr.repos_count,
                 "action": repo_group_actions(
                     repo_gr.group_id, repo_gr.group_name, repo_gr.repos_count),
-
             }
 
             repo_groups_data.append(row)
@@ -267,7 +269,7 @@ class AdminRepoGroupsView(BaseAppView, D
 
         if parent_group_id and _gr:
             if parent_group_id in [x[0] for x in c.repo_groups]:
-                parent_group_choice = safe_unicode(parent_group_id)
+                parent_group_choice = safe_str(parent_group_id)
 
         defaults.update({'group_parent_id': parent_group_choice})
 
@@ -297,7 +299,7 @@ class AdminRepoGroupsView(BaseAppView, D
         self._load_form_data(c)
         # permissions for can create group based on parent_id are checked
         # here in the Form
-        available_groups = map(lambda k: safe_unicode(k[0]), c.repo_groups)
+        available_groups = list(map(lambda k: safe_str(k[0]), c.repo_groups))
         repo_group_form = RepoGroupForm(
             self.request.translate, available_groups=available_groups,
             can_create_in_root=can_create)()
@@ -26,6 +26,7 @@ from pyramid.httpexceptions import HTTPF
 
 from pyramid.renderers import render
 from pyramid.response import Response
+from sqlalchemy.orm import aliased
 
 from rhodecode import events
 from rhodecode.apps._base import BaseAppView, DataGridAppView
@@ -93,6 +94,8 @@ class AdminReposView(BaseAppView, DataGr
             ) \
             .count()
 
+        RepoFork = aliased(Repository)
+        OwnerUser = aliased(User)
         base_q = Session.query(
             Repository.repo_id,
             Repository.repo_name,
@@ -101,17 +104,17 @@ class AdminReposView(BaseAppView, DataGr
             Repository.repo_state,
             Repository.private,
             Repository.archived,
-            Repository.fork,
             Repository.updated_on,
             Repository._changeset_cache,
-            User,
+            RepoFork.repo_name.label('fork_repo_name'),
+            OwnerUser.username.label('owner_username'),
         ) \
             .filter(or_(
                 # generate multiple IN to fix limitation problems
                 *in_filter_generator(Repository.repo_id, allowed_ids))
             ) \
-            .join(User, User.user_id == Repository.user_id) \
-            .group_by(Repository, User)
+            .outerjoin(RepoFork, Repository.fork_id == RepoFork.repo_id) \
+            .join(OwnerUser, Repository.user_id == OwnerUser.user_id)
 
         if search_q:
             like_expression = u'%{}%'.format(safe_str(search_q))
@@ -119,6 +122,9 @@ class AdminReposView(BaseAppView, DataGr
                 Repository.repo_name.ilike(like_expression),
             ))
 
+        #TODO: check if we need group_by here ?
+        #base_q = base_q.group_by(Repository, User)
+
         repos_data_total_filtered_count = base_q.count()
 
         sort_defined = False
@@ -126,7 +132,7 @@ class AdminReposView(BaseAppView, DataGr
             sort_col = func.lower(Repository.repo_name)
             sort_defined = True
         elif order_by == 'user_username':
-            sort_col = User.username
+            sort_col = OwnerUser.username
         else:
             sort_col = getattr(Repository, order_by, None)
 
@@ -188,7 +194,7 @@ class AdminReposView(BaseAppView, DataGr
 
         if parent_group_id and _gr:
             if parent_group_id in [x[0] for x in c.repo_groups]:
-                parent_group_choice = safe_unicode(parent_group_id)
+                parent_group_choice = safe_str(parent_group_id)
 
         defaults.update({'repo_group': parent_group_choice})
 
@@ -362,11 +362,11 @@ class UsersView(UserAppView):
         c = self.load_default_context()
         c.user = self.db_user
 
-        _repos = c.user.repositories
-        _repo_groups = c.user.repository_groups
-        _user_groups = c.user.user_groups
-        _pull_requests = c.user.user_pull_requests
-        _artifacts = c.user.artifacts
+        _repos = len(c.user.repositories)
+        _repo_groups = len(c.user.repository_groups)
+        _user_groups = len(c.user.user_groups)
+        _pull_requests = len(c.user.user_pull_requests)
+        _artifacts = len(c.user.artifacts)
 
         handle_repos = None
         handle_repo_groups = None
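Note: hoisting `len(...)` into the assignments is correct here because the collections are needed anyway for the detach/delete handling below; the flash messages then reuse the precomputed counts. Worth remembering that `len()` on a lazy relationship loads every row, so when only a number is needed a COUNT query is cheaper. A hedged sketch (toy model names):

```python
from sqlalchemy import func

def repo_count(session, Repository, user_id):
    # lets the database count instead of materializing every Repository row,
    # unlike len(user.repositories) on a lazy relationship
    return session.query(func.count(Repository.repo_id)) \
        .filter(Repository.user_id == user_id) \
        .scalar()
```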
@@ -378,46 +378,46 @@ class UsersView(UserAppView):
         def set_handle_flash_repos():
             handle = handle_repos
             if handle == 'detach':
-                h.flash(_('Detached %s repositories') % len(_repos),
+                h.flash(_('Detached %s repositories') % _repos,
                         category='success')
             elif handle == 'delete':
-                h.flash(_('Deleted %s repositories') % len(_repos),
+                h.flash(_('Deleted %s repositories') % _repos,
                         category='success')
 
         def set_handle_flash_repo_groups():
             handle = handle_repo_groups
             if handle == 'detach':
-                h.flash(_('Detached %s repository groups') % len(_repo_groups),
+                h.flash(_('Detached %s repository groups') % _repo_groups,
                         category='success')
             elif handle == 'delete':
-                h.flash(_('Deleted %s repository groups') % len(_repo_groups),
+                h.flash(_('Deleted %s repository groups') % _repo_groups,
                         category='success')
 
         def set_handle_flash_user_groups():
             handle = handle_user_groups
             if handle == 'detach':
-                h.flash(_('Detached %s user groups') % len(_user_groups),
+                h.flash(_('Detached %s user groups') % _user_groups,
                         category='success')
             elif handle == 'delete':
-                h.flash(_('Deleted %s user groups') % len(_user_groups),
+                h.flash(_('Deleted %s user groups') % _user_groups,
                         category='success')
 
         def set_handle_flash_pull_requests():
             handle = handle_pull_requests
             if handle == 'detach':
-                h.flash(_('Detached %s pull requests') % len(_pull_requests),
+                h.flash(_('Detached %s pull requests') % _pull_requests,
                         category='success')
             elif handle == 'delete':
-                h.flash(_('Deleted %s pull requests') % len(_pull_requests),
+                h.flash(_('Deleted %s pull requests') % _pull_requests,
                         category='success')
 
         def set_handle_flash_artifacts():
             handle = handle_artifacts
             if handle == 'detach':
-                h.flash(_('Detached %s artifacts') % len(_artifacts),
+                h.flash(_('Detached %s artifacts') % _artifacts,
                         category='success')
             elif handle == 'delete':
-                h.flash(_('Deleted %s artifacts') % len(_artifacts),
+                h.flash(_('Deleted %s artifacts') % _artifacts,
                         category='success')
 
         handle_user = User.get_first_super_admin()
@@ -155,7 +155,6 @@ class GistView(BaseAppView):
             lifetime_options=[x[0] for x in c.lifetime_values])
 
         try:
-
             schema_data = schema.deserialize(data)
 
             # convert to safer format with just KEYs so we sure no duplicates
@@ -263,8 +262,8 @@ class GistView(BaseAppView):
             raise HTTPNotFound()
 
         if return_format == 'raw':
-            content = '\n\n'.join([f.content for f in c.files
-                                   if (f_path is None or f.path == f_path)])
+            content = b'\n\n'.join([f.content for f in c.files
+                                    if (f_path is None or f.path == f_path)])
             response = Response(content)
             response.content_type = 'text/plain'
             return response
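Note: gist file contents are bytes under Python 3, so the join separator must be bytes as well; mixing str and bytes raises TypeError. Tiny illustration:

```python
files = [b'first file', b'second file']

content = b'\n\n'.join(files)   # ok: bytes separator with bytes parts
assert content == b'first file\n\nsecond file'

try:
    '\n\n'.join(files)          # str separator with bytes parts
except TypeError as exc:
    print('mixed str/bytes join fails:', exc)
```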
@@ -70,7 +70,7 @@ class HomeView(BaseAppView, DataGridAppV
             if skip_default_user and usr['username'] == UserModel.cls.DEFAULT_USER:
                 return False
             return True
-        _users = filter(maybe_skip_default_user, _users)
+        _users = list(filter(maybe_skip_default_user, _users))
 
         if include_groups:
             # extend with user groups
@@ -229,7 +229,7 @@ class HomeView(BaseAppView, DataGridAppV
                 'value': org_query,
                 'value_display': 'user: `{}`'.format(obj.username),
                 'type': 'user',
-                'icon_link': h.gravatar_url(obj.email, 30),
+                'icon_link': h.gravatar_url(obj.email, 30, request=self.request),
                 'url': h.route_path(
                     'user_profile', username=obj.username)
             }
@@ -63,9 +63,8 @@ class RepoGroupSettingsView(RepoGroupApp
             show_empty_group=show_root_location)
         # filter out current repo group
         exclude_group_ids = [c.repo_group.group_id]
-        c.repo_groups = filter(lambda x: x[0] not in exclude_group_ids,
-                               c.repo_groups)
-        c.repo_groups_choices = map(lambda k: k[0], c.repo_groups)
+        c.repo_groups = [x for x in c.repo_groups if x[0] not in exclude_group_ids]
+        c.repo_groups_choices = [k[0] for k in c.repo_groups]
 
         parent_group = c.repo_group.parent_group
 
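Note: this hunk fixes a genuine Python 3 hazard, not just style. `filter()` returns a one-shot iterator, so the old code's follow-up `map()` over `c.repo_groups` consumed it, leaving nothing for any later pass over the groups. Demonstration:

```python
groups = [(1, 'a'), (2, 'b'), (3, 'c')]
exclude_group_ids = [2]

filtered = filter(lambda x: x[0] not in exclude_group_ids, groups)  # lazy
choices = list(map(lambda k: k[0], filtered))                       # consumes it
assert choices == [1, 3]
assert list(filtered) == []  # exhausted: a second pass sees nothing

# the eager list comprehensions keep both values independently usable
repo_groups = [x for x in groups if x[0] not in exclude_group_ids]
repo_groups_choices = [k[0] for k in repo_groups]
assert repo_groups == [(1, 'a'), (3, 'c')] and repo_groups_choices == [1, 3]
```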
@@ -135,7 +134,7 @@ class RepoGroupSettingsView(RepoGroupApp
         schema = self._get_schema(c, old_values=old_values)
 
         c.form = RcForm(schema)
-        pstruct = self.request.POST.items()
+        pstruct = list(self.request.POST.items())
 
         try:
             schema_data = c.form.validate(pstruct)
@@ -990,14 +990,14 @@ def includeme(config):
         route_name='edit_repo_fields_delete', request_method='POST',
         renderer='rhodecode:templates/admin/repos/repo_edit.mako')
 
-    # Locking
+    # quick actions: locking
     config.add_route(
-        name='repo_edit_toggle_locking',
-        pattern='/{repo_name:.*?[^/]}/settings/toggle_locking', repo_route=True)
+        name='repo_settings_quick_actions',
+        pattern='/{repo_name:.*?[^/]}/settings/quick-action', repo_route=True)
     config.add_view(
         RepoSettingsView,
-        attr='edit_advanced_toggle_locking',
-        route_name='repo_edit_toggle_locking', request_method='GET',
+        attr='repo_settings_quick_actions',
+        route_name='repo_settings_quick_actions', request_method='GET',
         renderer='rhodecode:templates/admin/repos/repo_edit.mako')
 
     # Remote
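Note: the rename touches three coupled places: the route name, the URL pattern, and the view `attr`. In Pyramid, `add_route` registers a named URL pattern and `add_view` binds a callable (here a method of a view class) to that name, so all three must stay in sync. A generic sketch of the pairing (placeholder view class, not RhodeCode's):

```python
from pyramid.config import Configurator
from pyramid.response import Response

class SettingsView:
    def __init__(self, request):
        self.request = request

    def quick_actions(self):
        # the {repo_name} placeholder arrives via request.matchdict
        return Response('quick action for %(repo_name)s' % self.request.matchdict)

config = Configurator()
config.add_route(
    name='repo_settings_quick_actions',
    pattern='/{repo_name}/settings/quick-action')
config.add_view(
    SettingsView, attr='quick_actions',
    route_name='repo_settings_quick_actions', request_method='GET')
app = config.make_wsgi_app()
```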
@@ -260,9 +260,10 @@ class RepoCommitsView(RepoAppView):
                     ignore_whitespace=hide_whitespace_changes,
                     context=diff_context)
 
-                diff_processor = diffs.DiffProcessor(
-                    vcs_diff, format='newdiff', diff_limit=diff_limit,
-                    file_limit=file_limit, show_full_diff=c.fulldiff)
+                diff_processor = diffs.DiffProcessor(vcs_diff, diff_format='newdiff',
+                                                     diff_limit=diff_limit,
+                                                     file_limit=file_limit,
+                                                     show_full_diff=c.fulldiff)
 
                 _parsed = diff_processor.prepare()
 
@@ -285,9 +286,9 @@ class RepoCommitsView(RepoAppView):
             _diff = self.rhodecode_vcs_repo.get_diff(
                 commit1, commit2,
                 ignore_whitespace=hide_whitespace_changes, context=diff_context)
-            diff_processor = diffs.DiffProcessor(
-                _diff, format='newdiff', diff_limit=diff_limit,
+            diff_processor = diffs.DiffProcessor(_diff, diff_format='newdiff',
+                                                 diff_limit=diff_limit,
                 file_limit=file_limit, show_full_diff=c.fulldiff)
             # downloads/raw we only need RAW diff nothing else
             diff = self.path_filter.get_raw_patch(diff_processor)
             c.changes[commit.raw_id] = [None, None, None, None, diff, None, None]
@@ -643,17 +644,28 @@ class RepoCommitsView(RepoAppView):
 
         Session().commit()
 
-        return {
+        data = {
             'store_fid': store_uid,
             'access_path': h.route_path(
                 'download_file', fid=store_uid),
             'fqn_access_path': h.route_url(
                 'download_file', fid=store_uid),
-            'repo_access_path': h.route_path(
-                'repo_artifacts_get', repo_name=self.db_repo_name, uid=store_uid),
+            # for EE those are replaced by FQN links on repo-only like
+            'repo_access_path': h.route_url(
+                'download_file', fid=store_uid),
             'repo_fqn_access_path': h.route_url(
-                'repo_artifacts_get', repo_name=self.db_repo_name, uid=store_uid),
+                'download_file', fid=store_uid),
         }
+        # this data is a part of CE/EE additional code
+        if c.rhodecode_edition_id == 'EE':
+            data.update({
+                'repo_access_path': h.route_path(
+                    'repo_artifacts_get', repo_name=self.db_repo_name, uid=store_uid),
+                'repo_fqn_access_path': h.route_url(
+                    'repo_artifacts_get', repo_name=self.db_repo_name, uid=store_uid),
+            })
+
+        return data
 
     @LoginRequired()
     @NotAnonymous()
@@ -766,7 +778,7 @@ class RepoCommitsView(RepoAppView):
                 'comment_id': comment.comment_id,
                 'comment_version': comment_history.version,
                 'comment_author_username': comment_history.author.username,
-                'comment_author_gravatar': h.gravatar_url(comment_history.author.email, 16),
+                'comment_author_gravatar': h.gravatar_url(comment_history.author.email, 16, request=self.request),
                 'comment_created_on': h.age_component(comment_history.created_on,
                                                       time_is_local=True),
             }
@@ -282,9 +282,10 @@ class RepoCompareView(RepoAppView):
                 path=target_path, path1=source_path,
                 ignore_whitespace=hide_whitespace_changes, context=diff_context)
 
-            diff_processor = diffs.DiffProcessor(
-                txt_diff, format='newdiff', diff_limit=diff_limit,
-                file_limit=file_limit, show_full_diff=c.fulldiff)
+            diff_processor = diffs.DiffProcessor(txt_diff, diff_format='newdiff',
+                                                 diff_limit=diff_limit,
+                                                 file_limit=file_limit,
+                                                 show_full_diff=c.fulldiff)
             _parsed = diff_processor.prepare()
 
             diffset = codeblocks.DiffSet(
@@ -17,8 +17,9 @@
 # This program is dual-licensed. If you wish to learn more about the
 # RhodeCode Enterprise Edition, including its added features, Support services,
 # and proprietary license terms, please see https://rhodecode.com/licenses/
-import pytz
+
 import logging
+import datetime
 
 from pyramid.response import Response
 
@@ -72,8 +73,9 @@ class RepoFeedView(RepoAppView):
         self.feed_items_per_page = config['feed_items_per_page']
 
     def _changes(self, commit):
-        diff_processor = DiffProcessor(
-            commit.diff(), diff_limit=self.feed_diff_limit)
+        diff = commit.diff()
+        diff_processor = DiffProcessor(diff, diff_format='newdiff',
+                                       diff_limit=self.feed_diff_limit)
         _parsed = diff_processor.prepare(inline_diff=False)
         limited_diff = isinstance(_parsed, LimitedDiffContainer)
 
@@ -97,7 +99,7 @@ class RepoFeedView(RepoAppView):
             has_hidden_changes=has_hidden_changes
         )
 
-    def _set_timezone(self, date, tzinfo=pytz.utc):
+    def _set_timezone(self, date, tzinfo=datetime.timezone.utc):
         if not getattr(date, "tzinfo", None):
             date.replace(tzinfo=tzinfo)
         return date
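Note: `datetime.timezone.utc` is a drop-in stdlib replacement for `pytz.utc` when all that is needed is a fixed UTC offset. One caveat, unchanged by this diff: `datetime.replace()` returns a new object, so the `date.replace(tzinfo=tzinfo)` call above discards its result and leaves `date` naive. Sketch:

```python
import datetime

naive = datetime.datetime(2020, 1, 1, 12, 0)
naive.replace(tzinfo=datetime.timezone.utc)  # result discarded
assert naive.tzinfo is None                  # still naive

aware = naive.replace(tzinfo=datetime.timezone.utc)  # keep the result
assert aware.tzinfo is datetime.timezone.utc
assert aware.utcoffset() == datetime.timedelta(0)
```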
@@ -114,7 +116,10 @@ class RepoFeedView(RepoAppView):
             return list(collection[-self.feed_items_per_page:])
 
     def uid(self, repo_id, commit_id):
-        return '{}:{}'.format(md5_safe(repo_id), md5_safe(commit_id))
+        return '{}:{}'.format(
+            md5_safe(repo_id, return_type='str'),
+            md5_safe(commit_id, return_type='str')
+        )
 
     @LoginRequired(auth_token_access=[UserApiKeys.ROLE_FEED])
     @HasRepoPermissionAnyDecorator(
@@ -126,7 +131,7 @@ class RepoFeedView(RepoAppView):
         self.load_default_context()
         force_recache = self.get_recache_flag()
 
-        cache_namespace_uid = 'cache_repo_feed.{}'.format(self.db_repo.repo_id)
+        cache_namespace_uid = 'repo_feed.{}'.format(self.db_repo.repo_id)
         condition = not (self.path_filter.is_enabled or force_recache)
         region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid)
 
@@ -144,7 +149,7 @@ class RepoFeedView(RepoAppView):
             for commit in reversed(self._get_commits()):
                 date = self._set_timezone(commit.date)
                 feed.add_item(
-                    unique_id=self.uid(repo_id, commit.raw_id),
+                    unique_id=self.uid(str(repo_id), commit.raw_id),
                     title=self._get_title(commit),
                     author_name=commit.author,
                     description=self._get_description(commit),
@@ -173,7 +178,7 @@ class RepoFeedView(RepoAppView):
         self.load_default_context()
         force_recache = self.get_recache_flag()
 
-        cache_namespace_uid = 'cache_repo_feed.{}'.format(self.db_repo.repo_id)
+        cache_namespace_uid = 'repo_feed.{}'.format(self.db_repo.repo_id)
         condition = not (self.path_filter.is_enabled or force_recache)
         region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid)
 
@@ -191,7 +196,7 @@ class RepoFeedView(RepoAppView):
             for commit in reversed(self._get_commits()):
                 date = self._set_timezone(commit.date)
                 feed.add_item(
-                    unique_id=self.uid(repo_id, commit.raw_id),
+                    unique_id=self.uid(str(repo_id), commit.raw_id),
                     title=self._get_title(commit),
                     author_name=commit.author,
                     description=self._get_description(commit),
@@ -21,10 +21,10 @@
 import itertools
 import logging
 import os
-import shutil
-import tempfile
 import collections
 import urllib.request
+import urllib.parse
+import urllib.error
 import pathlib
 
 from pyramid.httpexceptions import HTTPNotFound, HTTPBadRequest, HTTPFound
@@ -38,12 +38,16 @@ from rhodecode.apps._base import RepoApp
 
 from rhodecode.lib import diffs, helpers as h, rc_cache
 from rhodecode.lib import audit_logger
+from rhodecode.lib.hash_utils import sha1_safe
+from rhodecode.lib.rc_cache.archive_cache import get_archival_cache_store, get_archival_config, ReentrantLock
+from rhodecode.lib.str_utils import safe_bytes
 from rhodecode.lib.view_utils import parse_path_ref
 from rhodecode.lib.exceptions import NonRelativePathError
 from rhodecode.lib.codeblocks import (
     filenode_as_lines_tokens, filenode_as_annotated_lines_tokens)
-from rhodecode.lib.utils2 import (
-    convert_line_endings, detect_mode, safe_str, str2bool, safe_int, sha1)
+from rhodecode.lib.utils2 import convert_line_endings, detect_mode
+from rhodecode.lib.type_utils import str2bool
+from rhodecode.lib.str_utils import safe_str, safe_int
 from rhodecode.lib.auth import (
     LoginRequired, HasRepoPermissionAnyDecorator, CSRFRequired)
 from rhodecode.lib.vcs import path as vcspath
@@ -61,6 +65,48 @@ from rhodecode.model.db import Repositor
 log = logging.getLogger(__name__)
 
 
+def get_archive_name(db_repo_name, commit_sha, ext, subrepos=False, path_sha='', with_hash=True):
+    # original backward compat name of archive
+    clean_name = safe_str(db_repo_name.replace('/', '_'))
+
+    # e.g vcsserver-sub-1-abcfdef-archive-all.zip
+    #     vcsserver-sub-0-abcfdef-COMMIT_SHA-PATH_SHA.zip
+
+    sub_repo = 'sub-1' if subrepos else 'sub-0'
+    commit = commit_sha if with_hash else 'archive'
+    path_marker = (path_sha if with_hash else '') or 'all'
+    archive_name = f'{clean_name}-{sub_repo}-{commit}-{path_marker}{ext}'
+
+    return archive_name
+
+
+def get_path_sha(at_path):
+    return safe_str(sha1_safe(at_path)[:8])
+
+
+def _get_archive_spec(fname):
+    log.debug('Detecting archive spec for: `%s`', fname)
+
+    fileformat = None
+    ext = None
+    content_type = None
+    for a_type, content_type, extension in settings.ARCHIVE_SPECS:
+
+        if fname.endswith(extension):
+            fileformat = a_type
+            log.debug('archive is of type: %s', fileformat)
+            ext = extension
+            break
+
+    if not fileformat:
+        raise ValueError()
+
+    # left over part of whole fname is the commit
+    commit_id = fname[:-len(ext)]
+
+    return commit_id, ext, fileformat, content_type
+
+
 class RepoFilesView(RepoAppView):
 
     @staticmethod
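Note: a quick check of what the new naming scheme yields, with `safe_str` approximated by a plain `str` call (an assumption; the real helper also normalizes bytes):

```python
def get_archive_name(db_repo_name, commit_sha, ext,
                     subrepos=False, path_sha='', with_hash=True):
    # same logic as the module-level helper above, safe_str simplified to str
    clean_name = str(db_repo_name.replace('/', '_'))
    sub_repo = 'sub-1' if subrepos else 'sub-0'
    commit = commit_sha if with_hash else 'archive'
    path_marker = (path_sha if with_hash else '') or 'all'
    return f'{clean_name}-{sub_repo}-{commit}-{path_marker}{ext}'

# whole-repo archive with hashes in the name
assert get_archive_name('grp/vcsserver', 'abcfdef1', '.zip') == \
    'grp_vcsserver-sub-0-abcfdef1-all.zip'
# path-scoped archive: the path sha disambiguates the cache key
assert get_archive_name('grp/vcsserver', 'abcfdef1', '.zip', path_sha='1a2b3c4d') == \
    'grp_vcsserver-sub-0-abcfdef1-1a2b3c4d.zip'
# stable, hash-free name for "plain" download links
assert get_archive_name('grp/vcsserver', 'abcfdef1', '.zip', with_hash=False) == \
    'grp_vcsserver-sub-0-archive-all.zip'
```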
@@ -72,12 +118,12 @@ class RepoFilesView(RepoAppView):
         branches in the underlying repository.
         """
         tags_and_branches = itertools.chain(
-            repo.branches.iterkeys(),
-            repo.tags.iterkeys())
+            repo.branches.keys(),
+            repo.tags.keys())
         tags_and_branches = sorted(tags_and_branches, key=len, reverse=True)
 
         for name in tags_and_branches:
-            if f_path.startswith('{}/'.format(name)):
+            if f_path.startswith(f'{name}/'):
                 f_path = vcspath.relpath(f_path, name)
                 break
         return f_path
@@ -165,19 +211,23 @@ class RepoFilesView(RepoAppView):
         if not redirect_after:
             return None
 
-        _url = h.route_path(
-            'repo_files_add_file',
-            repo_name=self.db_repo_name, commit_id=0, f_path='')
-
+        add_new = upload_new = ""
         if h.HasRepoPermissionAny(
                 'repository.write', 'repository.admin')(self.db_repo_name):
+            _url = h.route_path(
+                'repo_files_add_file',
+                repo_name=self.db_repo_name, commit_id=0, f_path='')
             add_new = h.link_to(
-                _('Click here to add a new file.'), _url, class_="alert-link")
-        else:
-            add_new = ""
+                _('add a new file'), _url, class_="alert-link")
+
+            _url_upld = h.route_path(
+                'repo_files_upload_file',
+                repo_name=self.db_repo_name, commit_id=0, f_path='')
+            upload_new = h.link_to(
+                _('upload a new file'), _url_upld, class_="alert-link")
 
         h.flash(h.literal(
-            _('There are no files yet. %s') % add_new), category='warning')
+            _('There are no files yet. Click here to %s or %s.') % (add_new, upload_new)), category='warning')
         raise HTTPFound(
             h.route_path('repo_summary', repo_name=self.db_repo_name))
 
@@ -189,7 +239,7 @@ class RepoFilesView(RepoAppView):
             h.flash(h.escape(safe_str(e)), category='error')
             raise HTTPNotFound()
 
-    def _get_filenode_or_redirect(self, commit_obj, path):
+    def _get_filenode_or_redirect(self, commit_obj, path, pre_load=None):
         """
         Returns file_node, if error occurs or given path is directory,
         it'll redirect to top level path
@@ -197,7 +247,7 @@ class RepoFilesView(RepoAppView):
         _ = self.request.translate
 
         try:
-            file_node = commit_obj.get_node(path)
+            file_node = commit_obj.get_node(path, pre_load=pre_load)
             if file_node.is_dir():
                 raise RepositoryError('The given path is a directory')
         except CommitDoesNotExistError:
@@ -262,7 +312,7 @@ class RepoFilesView(RepoAppView):
                   'with caching: %s[TTL: %ss]' % (
                       repo_id, commit_id, f_path, cache_on, cache_seconds or 0))
 
-        cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
+        cache_namespace_uid = 'repo.{}'.format(repo_id)
         region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid)
 
         @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=cache_on)
@@ -279,28 +329,6 @@ class RepoFilesView(RepoAppView):
             rc_cache.FILE_TREE_CACHE_VER, self.db_repo.repo_name_hash,
             self.db_repo.repo_id, commit_id, f_path, full_load, at_rev)
 
-    def _get_archive_spec(self, fname):
-        log.debug('Detecting archive spec for: `%s`', fname)
-
-        fileformat = None
-        ext = None
-        content_type = None
-        for a_type, content_type, extension in settings.ARCHIVE_SPECS:
-
-            if fname.endswith(extension):
-                fileformat = a_type
-                log.debug('archive is of type: %s', fileformat)
-                ext = extension
-                break
-
-        if not fileformat:
-            raise ValueError()
-
-        # left over part of whole fname is the commit
-        commit_id = fname[:-len(ext)]
-
-        return commit_id, ext, fileformat, content_type
-
     def create_pure_path(self, *parts):
         # Split paths and sanitize them, removing any ../ etc
         sanitized_path = [
@@ -325,22 +353,6 @@ class RepoFilesView(RepoAppView):
 
         return lf_enabled
 
-    def _get_archive_name(self, db_repo_name, commit_sha, ext, subrepos=False, path_sha='', with_hash=True):
-        # original backward compat name of archive
-        clean_name = safe_str(db_repo_name.replace('/', '_'))
-
-        # e.g vcsserver.zip
-        # e.g vcsserver-abcdefgh.zip
-        # e.g vcsserver-abcdefgh-defghijk.zip
-        archive_name = '{}{}{}{}{}{}'.format(
-            clean_name,
-            '-sub' if subrepos else '',
-            commit_sha,
-            '-{}'.format('plain') if not with_hash else '',
-            '-{}'.format(path_sha) if path_sha else '',
-            ext)
-        return archive_name
-
     @LoginRequired()
     @HasRepoPermissionAnyDecorator(
         'repository.read', 'repository.write', 'repository.admin')
@@ -360,7 +372,7 @@ class RepoFilesView(RepoAppView):
 
         try:
             commit_id, ext, fileformat, content_type = \
-                self._get_archive_spec(fname)
+                _get_archive_spec(fname)
         except ValueError:
             return Response(_('Unknown archive type for: `{}`').format(
                 h.escape(fname)))
@@ -383,94 +395,86 @@ class RepoFilesView(RepoAppView):
         except Exception:
             return Response(_('No node at path {} for this repository').format(h.escape(at_path)))
 
-        # path sha is part of subdir
-        path_sha = ''
-        if at_path != default_at_path:
-            path_sha = sha1(at_path)[:8]
-        short_sha = '-{}'.format(safe_str(commit.short_id))
-        # used for cache etc
-        archive_name = self._get_archive_name(
-            self.db_repo_name, commit_sha=short_sha, ext=ext, subrepos=subrepos,
-            path_sha=path_sha, with_hash=with_hash)
+        path_sha = get_path_sha(at_path)
+
+        # used for cache etc, consistent unique archive name
+        archive_name_key = get_archive_name(
+            self.db_repo_name, commit_sha=commit.short_id, ext=ext, subrepos=subrepos,
+            path_sha=path_sha, with_hash=True)
 
         if not with_hash:
-            short_sha = ''
             path_sha = ''
 
         # what end client gets served
-        response_archive_name = self._get_archive_name(
-            self.db_repo_name, commit_sha=short_sha, ext=ext, subrepos=subrepos,
+        response_archive_name = get_archive_name(
+            self.db_repo_name, commit_sha=commit.short_id, ext=ext, subrepos=subrepos,
             path_sha=path_sha, with_hash=with_hash)
+
         # remove extension from our archive directory name
         archive_dir_name = response_archive_name[:-len(ext)]
 
-        use_cached_archive = False
-        archive_cache_dir = CONFIG.get('archive_cache_dir')
-        archive_cache_enabled = archive_cache_dir and not self.request.GET.get('no_cache')
-        cached_archive_path = None
+        archive_cache_disable = self.request.GET.get('no_cache')
+
+        d_cache = get_archival_cache_store(config=CONFIG)
+        # NOTE: we get the config to pass to a call to lazy-init the SAME type of cache on vcsserver
+        d_cache_conf = get_archival_config(config=CONFIG)
 
-        if archive_cache_enabled:
-            # check if we it's ok to write, and re-create the archive cache
-            if not os.path.isdir(CONFIG['archive_cache_dir']):
-                os.makedirs(CONFIG['archive_cache_dir'])
-
-            cached_archive_path = os.path.join(
-                CONFIG['archive_cache_dir'], archive_name)
-            if os.path.isfile(cached_archive_path):
-                log.debug('Found cached archive in %s', cached_archive_path)
-                fd, archive = None, cached_archive_path
+        reentrant_lock_key = archive_name_key + '.lock'
+        with ReentrantLock(d_cache, reentrant_lock_key):
+            # This is also a cache key
+            use_cached_archive = False
+            if archive_name_key in d_cache and not archive_cache_disable:
+                reader, tag = d_cache.get(archive_name_key, read=True, tag=True, retry=True)
                 use_cached_archive = True
+                log.debug('Found cached archive as key=%s tag=%s, serving archive from cache reader=%s',
+                          archive_name_key, tag, reader.name)
             else:
-                log.debug('Archive %s is not yet cached', archive_name)
+                reader = None
+                log.debug('Archive with key=%s is not yet cached, creating one now...', archive_name_key)
 
-        # generate new archive, as previous was not found in the cache
-        if not use_cached_archive:
-            _dir = os.path.abspath(archive_cache_dir) if archive_cache_dir else None
-            fd, archive = tempfile.mkstemp(dir=_dir)
-            log.debug('Creating new temp archive in %s', archive)
-            try:
-                commit.archive_repo(archive, archive_dir_name=archive_dir_name,
-                                    kind=fileformat, subrepos=subrepos,
-                                    archive_at_path=at_path)
-            except ImproperArchiveTypeError:
-                return _('Unknown archive type')
-            if archive_cache_enabled:
-                # if we generated the archive and we have cache enabled
-                # let's use this for future
-                log.debug('Storing new archive in %s', cached_archive_path)
-                shutil.move(archive, cached_archive_path)
-                archive = cached_archive_path
-
-        # store download action
-        audit_logger.store_web(
-            'repo.archive.download', action_data={
-                'user_agent': self.request.user_agent,
-                'archive_name': archive_name,
-                'archive_spec': fname,
-                'archive_cached': use_cached_archive},
-            user=self._rhodecode_user,
-            repo=self.db_repo,
-            commit=True
-        )
+            # generate new archive, as previous was not found in the cache
+            if not reader:
+                # first remove expired items, before generating a new one :)
+                # we do this manually because automatic eviction is disabled
+                d_cache.cull(retry=True)
+
+                try:
+                    commit.archive_repo(archive_name_key, archive_dir_name=archive_dir_name,
+                                        kind=fileformat, subrepos=subrepos,
+                                        archive_at_path=at_path, cache_config=d_cache_conf)
+                except ImproperArchiveTypeError:
+                    return _('Unknown archive type')
+
+                reader, tag = d_cache.get(archive_name_key, read=True, tag=True, retry=True)
+
+            if not reader:
+                raise ValueError('archive cache reader is empty, failed to fetch file from distributed archive cache')
 
-        def get_chunked_archive(archive_path):
-            with open(archive_path, 'rb') as stream:
-                while True:
-                    data = stream.read(16 * 1024)
-                    if not data:
-                        if fd:  # fd means we used temporary file
-                            os.close(fd)
-                        if not archive_cache_enabled:
-                            log.debug('Destroying temp archive %s', archive_path)
-                            os.remove(archive_path)
-                        break
-                    yield data
+        def archive_iterator(_reader):
+            while 1:
+                data = _reader.read(1024)
+                if not data:
+                    break
+                yield data
 
-        response = Response(app_iter=get_chunked_archive(archive))
-        response.content_disposition = 'attachment; filename={}'.format(response_archive_name)
+        response = Response(app_iter=archive_iterator(reader))
+        response.content_disposition = f'attachment; filename={response_archive_name}'
         response.content_type = str(content_type)
 
-        return response
+        try:
+            return response
+        finally:
+            # store download action
+            audit_logger.store_web(
+                'repo.archive.download', action_data={
+                    'user_agent': self.request.user_agent,
+                    'archive_name': archive_name_key,
+                    'archive_spec': fname,
+                    'archive_cached': use_cached_archive},
+                user=self._rhodecode_user,
+                repo=self.db_repo,
+                commit=True
+            )
 
     def _get_file_node(self, commit_id, f_path):
         if commit_id not in ['', None, 'None', '0' * 12, '0' * 40]:
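Note: the rewrite swaps per-request temp files for a disk-backed cache keyed by `archive_name_key`, with a reentrant lock so concurrent downloads do not generate the same archive twice, and the response streamed from a cache file reader. `get_archival_cache_store` and `ReentrantLock` are RhodeCode wrappers; the shape of the pattern, sketched directly against the `diskcache` package (an assumption about the backing store):

```python
import io
import diskcache

cache = diskcache.Cache('/tmp/archive-cache-demo')
key = 'repo-sub-0-abcfdef1-all.zip'

with diskcache.Lock(cache, key + '.lock'):  # serialize producers of this key
    if key not in cache:
        # stand-in for commit.archive_repo() writing the archive
        payload = io.BytesIO(b'... archive bytes ...')
        cache.set(key, payload, read=True, tag='archive')  # store from file handle
    # read=True returns an open file object instead of loading the bytes
    reader, tag = cache.get(key, read=True, tag=True, retry=True)

def archive_iterator(_reader, chunk=1024):
    # stream chunk by chunk so large archives never sit fully in memory
    while True:
        data = _reader.read(chunk)
        if not data:
            break
        yield data

assert b''.join(archive_iterator(reader)) == b'... archive bytes ...'
```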
@@ -478,8 +482,7 @@ class RepoFilesView(RepoAppView):
             try:
                 node = commit.get_node(f_path)
                 if node.is_dir():
-                    raise NodeError('%s path is a %s not a file'
-                                    % (node, type(node)))
+                    raise NodeError(f'{node} path is a {type(node)} not a file')
             except NodeDoesNotExistError:
                 commit = EmptyCommit(
                     commit_id=commit_id,
@@ -489,12 +492,12 @@ class RepoFilesView(RepoAppView):
                     message=commit.message,
                     author=commit.author,
                     date=commit.date)
-                node = FileNode(f_path, '', commit=commit)
+                node = FileNode(safe_bytes(f_path), b'', commit=commit)
             else:
                 commit = EmptyCommit(
                     repo=self.rhodecode_vcs_repo,
                     alias=self.rhodecode_vcs_repo.alias)
-                node = FileNode(f_path, '', commit=commit)
+                node = FileNode(safe_bytes(f_path), b'', commit=commit)
             return node
 
         @LoginRequired()
@@ -551,12 +554,13 @@ class RepoFilesView(RepoAppView):
             _diff = diffs.get_gitdiff(node1, node2,
                                       ignore_whitespace=ignore_whitespace,
                                       context=line_context)
-            diff = diffs.DiffProcessor(_diff, format='gitdiff')
+            # NOTE: this was using diff_format='gitdiff'
+            diff = diffs.DiffProcessor(_diff, diff_format='newdiff')
 
             response = Response(self.path_filter.get_raw_patch(diff))
             response.content_type = 'text/plain'
             response.content_disposition = (
-                'attachment; filename=%s_%s_vs_%s.diff' % (f_path, diff1, diff2)
+                f'attachment; filename={f_path}_{diff1}_vs_{diff2}.diff'
             )
             charset = self._get_default_encoding(c)
             if charset:
@@ -567,7 +571,8 @@ class RepoFilesView(RepoAppView):
             _diff = diffs.get_gitdiff(node1, node2,
                                       ignore_whitespace=ignore_whitespace,
                                       context=line_context)
-            diff = diffs.DiffProcessor(_diff, format='gitdiff')
+            # NOTE: this was using diff_format='gitdiff'
+            diff = diffs.DiffProcessor(_diff, diff_format='newdiff')
 
             response = Response(self.path_filter.get_raw_patch(diff))
             response.content_type = 'text/plain'
@@ -637,8 +642,7 @@ class RepoFilesView(RepoAppView):
         c.annotate = view_name == 'repo_files:annotated'
         # default is false, but .rst/.md files later are auto rendered, we can
         # overwrite auto rendering by setting this GET flag
-        c.renderer = view_name == 'repo_files:rendered' or \
-            not self.request.GET.get('no-render', False)
+        c.renderer = view_name == 'repo_files:rendered' or not self.request.GET.get('no-render', False)
 
         commit_id, f_path = self._get_commit_and_path()
 
@@ -675,7 +679,7 @@ class RepoFilesView(RepoAppView):
 
         # files or dirs
         try:
-            c.file = c.commit.get_node(f_path)
+            c.file = c.commit.get_node(f_path, pre_load=['is_binary', 'size', 'data'])
 
             c.file_author = True
             c.file_tree = ''
@@ -917,7 +921,7 @@ class RepoFilesView(RepoAppView):
                   'with caching: %s[TTL: %ss]' % (
                       repo_id, commit_id, f_path, cache_on, cache_seconds or 0))
 
-        cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
+        cache_namespace_uid = 'repo.{}'.format(repo_id)
         region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid)
 
         @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=cache_on)
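rc_cache is RhodeCode's internal wrapper and conditional_cache_on_arguments its own extension, but the per-repository namespace idea can be sketched with plain dogpile.cache, which rc_cache builds on. The backend, TTL and cached function below are assumptions for illustration:

    # Hypothetical sketch using plain dogpile.cache; rc_cache wraps a similar API.
    from dogpile.cache import make_region

    repo_id = 42
    cache_namespace_uid = 'repo.{}'.format(repo_id)  # one namespace per repository

    region = make_region().configure(
        'dogpile.cache.memory',   # assumption: any backend behaves the same here
        expiration_time=300,      # assumption: 5 minute TTL
    )

    @region.cache_on_arguments(namespace=cache_namespace_uid)
    def compute_file_tree(repo_id, commit_id, f_path):
        # the expensive VCS call would go here; cached per (namespace, args) key
        return {'repo_id': repo_id, 'commit_id': commit_id, 'path': f_path}

    print(compute_file_tree(42, 'abc123', 'docs/'))   # computed
    print(compute_file_tree(42, 'abc123', 'docs/'))   # served from cache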
@@ -950,7 +954,7 @@ class RepoFilesView(RepoAppView):
 
         metadata = self._get_nodelist_at_commit(
             self.db_repo_name, self.db_repo.repo_id, commit.raw_id, f_path)
-        return {'nodes': metadata}
+        return {'nodes': [x for x in metadata]}
 
     def _create_references(self, branches_or_tags, symbolic_reference, f_path, ref_type):
         items = []
@@ -967,7 +971,7 @@ class RepoFilesView(RepoAppView):
 
             # NOTE(dan): old code we used in "diff" mode compare
             new_f_path = vcspath.join(name, f_path)
-            return u'%s@%s' % (new_f_path, commit_id)
+            return f'{new_f_path}@{commit_id}'
 
     def _get_node_history(self, commit_obj, f_path, commits=None):
         """
@@ -1194,8 +1198,8 @@ class RepoFilesView(RepoAppView):
         message = self.request.POST.get('message') or c.default_message
         try:
             nodes = {
-                node_path: {
-                    'content': ''
+                safe_bytes(node_path): {
+                    'content': b''
                 }
             }
             ScmModel().delete_nodes(
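safe_bytes comes from RhodeCode's string utilities and is used throughout these hunks to hand bytes paths and contents to the py3 VCS layer. A plausible minimal sketch of such a helper, assuming simple str-to-UTF-8 coercion (not the actual implementation):

    # Hypothetical sketch of a safe_bytes-style helper; the real one lives in
    # rhodecode.lib and may handle more input types.
    def safe_bytes(value, encoding='utf-8') -> bytes:
        """Coerce str to bytes; pass bytes through unchanged."""
        if isinstance(value, bytes):
            return value
        if isinstance(value, str):
            return value.encode(encoding)
        raise TypeError(f'expected str or bytes, got {type(value)!r}')

    assert safe_bytes('a/b.txt') == b'a/b.txt'
    assert safe_bytes(b'raw') == b'raw'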
@@ -1273,7 +1277,7 @@ class RepoFilesView(RepoAppView):
         c.default_message = _('Edited file {} via RhodeCode Enterprise').format(f_path)
         c.f_path = f_path
 
-        old_content = c.file.content
+        old_content = c.file.str_content
         sl = old_content.splitlines(1)
         first_line = sl[0] if sl else ''
 
@@ -1283,7 +1287,8 @@ class RepoFilesView(RepoAppView):
         content = convert_line_endings(r_post.get('content', ''), line_ending_mode)
 
         message = r_post.get('message') or c.default_message
-        org_node_path = c.file.unicode_path
+
+        org_node_path = c.file.str_path
         filename = r_post['filename']
 
         root_path = c.file.dir_path
@@ -1299,10 +1304,10 @@ class RepoFilesView(RepoAppView):
 
         try:
             mapping = {
-                c.file.unicode_path: {
+                c.file.bytes_path: {
                     'org_filename': org_node_path,
-                    'filename': node_path,
-                    'content': content,
+                    'filename': safe_bytes(node_path),
+                    'content': safe_bytes(content),
                     'lexer': '',
                     'op': 'mod',
                     'mode': c.file.mode
@@ -1400,8 +1405,6 @@ class RepoFilesView(RepoAppView):
         filename = r_post.get('filename')
         unix_mode = 0
 
-        content = convert_line_endings(r_post.get('content', ''), unix_mode)
-
         if not filename:
             # If there's no commit, redirect to repo summary
             if type(c.commit) is EmptyCommit:
@@ -1417,9 +1420,10 @@ class RepoFilesView(RepoAppView):
         node_path = pure_path.as_posix().lstrip('/')
 
         author = self._rhodecode_db_user.full_contact
+        content = convert_line_endings(r_post.get('content', ''), unix_mode)
         nodes = {
-            node_path: {
-                'content': content
+            safe_bytes(node_path): {
+                'content': safe_bytes(content)
             }
         }
 
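convert_line_endings, moved after author in this hunk, normalizes the posted content to the requested line-ending mode. A guessed sketch of such a normalizer, assuming mode 0 means LF (the unix_mode constant above), 1 CR and 2 CRLF:

    # Hypothetical sketch of a convert_line_endings-style helper.
    # Assumption: mode 0 = LF ("unix_mode" in the hunk), 1 = CR, 2 = CRLF.
    def convert_line_endings(text: str, mode: int) -> str:
        replacement = {0: '\n', 1: '\r', 2: '\r\n'}[mode]
        # normalize everything to LF first, then expand to the requested ending
        normalized = text.replace('\r\n', '\n').replace('\r', '\n')
        return normalized.replace('\n', replacement)

    assert convert_line_endings('a\r\nb\rc\n', 0) == 'a\nb\nc\n'
    assert convert_line_endings('a\nb', 2) == 'a\r\nb'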
@@ -1518,7 +1522,7 @@ class RepoFilesView(RepoAppView):
         pure_path = self.create_pure_path(root_path, filename)
         node_path = pure_path.as_posix().lstrip('/')
 
-        nodes[node_path] = {
+        nodes[safe_bytes(node_path)] = {
             'content': content
         }
 
@@ -234,8 +234,7 @@ class RepoPullRequestsView(RepoAppView,
             source_repo, source_ref_id, target_ref_id,
             hide_whitespace_changes, diff_context)
 
-        diff_processor = diffs.DiffProcessor(
-            vcs_diff, format='newdiff', diff_limit=diff_limit,
+        diff_processor = diffs.DiffProcessor(vcs_diff, diff_format='newdiff', diff_limit=diff_limit,
             file_limit=file_limit, show_full_diff=fulldiff)
 
         _parsed = diff_processor.prepare()
@@ -259,9 +258,9 @@ class RepoPullRequestsView(RepoAppView,
             ignore_whitespace=hide_whitespace_changes,
             context=diff_context)
 
-        diff_processor = diffs.DiffProcessor(
-            vcs_diff, format='newdiff', diff_limit=diff_limit,
+        diff_processor = diffs.DiffProcessor(vcs_diff, diff_format='newdiff',
+            diff_limit=diff_limit,
             file_limit=file_limit, show_full_diff=fulldiff)
 
         _parsed = diff_processor.prepare()
 
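Both hunks in this file rename DiffProcessor's format= keyword to diff_format=, so every call site must change at once. A generic way to stage such a rename more gently is a deprecation shim; the class below is a hypothetical sketch of the pattern, not RhodeCode's code:

    # Hypothetical migration shim for a keyword rename such as format= -> diff_format=.
    import warnings

    class DiffProcessor:
        def __init__(self, raw_diff, diff_format='newdiff', **deprecated):
            if 'format' in deprecated:  # accept the old spelling for one release
                warnings.warn("'format' is deprecated, use 'diff_format'",
                              DeprecationWarning, stacklevel=2)
                diff_format = deprecated.pop('format')
            if deprecated:
                raise TypeError(f'unexpected arguments: {sorted(deprecated)}')
            self.raw_diff = raw_diff
            self.diff_format = diff_format

    DiffProcessor(b'', format='newdiff')       # warns, still works
    DiffProcessor(b'', diff_format='newdiff')  # the new spelling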
@@ -933,7 +932,7 @@ class RepoPullRequestsView(RepoAppView,
             .filter(Repository.fork_id == self.db_repo.parent.repo_id)
 
         if filter_query:
-            ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
+            ilike_expression = u'%{}%'.format(safe_str(filter_query))
             parents_query = parents_query.filter(
                 Repository.repo_name.ilike(ilike_expression))
         parents = parents_query.limit(20).all()
@@ -249,3 +249,29 @@ class RepoSettingsView(RepoAppView):
                 category='error')
             raise HTTPFound(
                 h.route_path('edit_repo_statistics', repo_name=self.db_repo_name))
+
+    @LoginRequired()
+    @HasRepoPermissionAnyDecorator('repository.admin')
+    def repo_settings_quick_actions(self):
+        _ = self.request.translate
+
+        set_lock = self.request.GET.get('set_lock')
+        set_unlock = self.request.GET.get('set_unlock')
+
+        try:
+            if set_lock:
+                Repository.lock(self.db_repo, self._rhodecode_user.user_id,
+                                lock_reason=Repository.LOCK_WEB)
+                h.flash(_('Locked repository'), category='success')
+            elif set_unlock:
+                Repository.unlock(self.db_repo)
+                h.flash(_('Unlocked repository'), category='success')
+        except Exception as e:
+            log.exception("Exception during unlocking")
+            h.flash(_('An error occurred during unlocking'), category='error')
+
+        raise HTTPFound(
+            h.route_path('repo_summary', repo_name=self.db_repo_name))
+
+
+
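The new quick-actions view dispatches on GET parameters, flashes a message and always redirects. A stripped-down Pyramid sketch of that flash-then-redirect shape (the route name and repo name are placeholders):

    # Hypothetical Pyramid view mirroring the lock/unlock quick action above.
    from pyramid.httpexceptions import HTTPFound

    def repo_quick_actions(request):
        if request.GET.get('set_lock'):
            request.session.flash('Locked repository', queue='success')
        elif request.GET.get('set_unlock'):
            request.session.flash('Unlocked repository', queue='success')
        # NOTE: state changes via GET lean entirely on the admin-only decorator;
        # a POST with a CSRF token would be the stricter design.
        raise HTTPFound(request.route_path('repo_summary', repo_name='some-repo'))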
@@ -63,23 +63,23 @@ class SshWrapper(object):
         from rhodecode.model.meta import raw_query_executor, Base
 
         table = Table('user_ssh_keys', Base.metadata, autoload=False)
+        atime = datetime.datetime.utcnow()
         stmt = (
             table.update()
             .where(table.c.ssh_key_id == key_id)
-            .values(accessed_on=datetime.datetime.utcnow())
-            .returning(table.c.accessed_on, table.c.ssh_key_fingerprint)
+            .values(accessed_on=atime)
+            # no MySQL Support for .returning :((
+            #.returning(table.c.accessed_on, table.c.ssh_key_fingerprint)
         )
 
-        scalar_res = None
+        res_count = None
         with raw_query_executor() as session:
             result = session.execute(stmt)
             if result.rowcount:
-                scalar_res = result.first()
+                res_count = result.rowcount
 
-        if scalar_res:
-            atime, ssh_key_fingerprint = scalar_res
-            log.debug('Update key id:`%s` fingerprint:`%s` access time',
-                      key_id, ssh_key_fingerprint)
+        if res_count:
+            log.debug('Update key id:`%s` access time', key_id)
 
     def get_user(self, user_id):
         user = AttributeDict()
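The hunk drops .returning() because MySQL has no UPDATE ... RETURNING, and falls back to rowcount to learn whether the key row was touched. A self-contained SQLAlchemy Core sketch of that fallback against an in-memory SQLite stand-in (the schema is trimmed to the two relevant columns):

    # Self-contained sketch of the rowcount fallback; schema is illustrative.
    import datetime
    from sqlalchemy import Column, DateTime, Integer, MetaData, Table, create_engine

    metadata = MetaData()
    keys = Table('user_ssh_keys', metadata,
                 Column('ssh_key_id', Integer, primary_key=True),
                 Column('accessed_on', DateTime))

    engine = create_engine('sqlite://')  # in-memory stand-in for the real DB
    metadata.create_all(engine)

    with engine.begin() as conn:
        conn.execute(keys.insert().values(ssh_key_id=1))
        atime = datetime.datetime.utcnow()
        # RETURNING is skipped for portability; rowcount still tells us
        # whether the key row was updated.
        result = conn.execute(
            keys.update().where(keys.c.ssh_key_id == 1).values(accessed_on=atime))
        if result.rowcount:
            print(f'updated access time for key 1 to {atime}')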
@@ -40,8 +40,8 @@ SSH_OPTS = 'no-pty,no-port-forwarding,no
 
 def get_all_active_keys():
     result = UserSshKeys.query() \
-        .options(joinedload(UserSshKeys.user)) \
-        .filter(UserSshKeys.user != User.get_default_user()) \
+        .join(User) \
+        .filter(User != User.get_default_user()) \
         .filter(User.active == true()) \
         .all()
     return result
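The rewritten query joins User explicitly rather than going through the relationship attribute. A generic ORM sketch of that join-then-filter shape with invented stand-in models (the default-user filter is simplified to a username comparison):

    # Hypothetical models illustrating the join + filter pattern from the hunk.
    from sqlalchemy import (Boolean, Column, ForeignKey, Integer, String,
                            create_engine, true)
    from sqlalchemy.orm import Session, declarative_base, relationship

    Base = declarative_base()

    class User(Base):
        __tablename__ = 'users'
        user_id = Column(Integer, primary_key=True)
        username = Column(String)
        active = Column(Boolean, default=True)

    class UserSshKeys(Base):
        __tablename__ = 'user_ssh_keys'
        ssh_key_id = Column(Integer, primary_key=True)
        user_id = Column(Integer, ForeignKey('users.user_id'))
        user = relationship('User')

    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)

    with Session(engine) as session:
        # join on the related table, then filter on its columns
        active_keys = (
            session.query(UserSshKeys)
            .join(User)
            .filter(User.username != 'default')   # stand-in for get_default_user()
            .filter(User.active == true())
            .all())
        print(active_keys)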
@@ -55,6 +55,10 @@ def _generate_ssh_authorized_keys_file(
         os.path.expanduser(authorized_keys_file_path))
     tmp_file_dir = os.path.dirname(authorized_keys_file_path)
 
+    if not os.path.exists(tmp_file_dir):
+        log.debug('SSH authorized_keys file dir does not exist, creating one now...')
+        os.makedirs(tmp_file_dir)
+
     all_active_keys = get_all_active_keys()
 
     if allow_shell:
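The added directory check is a separate exists() test followed by makedirs(), which leaves a small race window if two processes regenerate the file at once. Python's atomic variant, for comparison:

    import os

    tmp_file_dir = '/tmp/rc-ssh-example'  # illustrative path

    # Equivalent to the check in the hunk, but race-free: no window between
    # the exists() test and the makedirs() call.
    os.makedirs(tmp_file_dir, exist_ok=True)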
@@ -65,6 +69,7 @@ def _generate_ssh_authorized_keys_file(
     if not os.path.isfile(authorized_keys_file_path):
         log.debug('Creating file at %s', authorized_keys_file_path)
         with open(authorized_keys_file_path, 'w'):
+            # create a file with write access
             pass
 
     if not os.access(authorized_keys_file_path, os.R_OK):
@@ -78,7 +83,7 @@ def _generate_ssh_authorized_keys_file(
         dir=tmp_file_dir)
 
     now = datetime.datetime.utcnow().isoformat()
-    keys_file = os.fdopen(fd, 'wb')
+    keys_file = os.fdopen(fd, 'wt')
     keys_file.write(HEADER.format(len(all_active_keys), now))
     ini_path = rhodecode.CONFIG['__file__']
 
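os.fdopen(fd, 'wt') wraps the raw descriptor from tempfile.mkstemp in a text-mode file object. A sketch of the surrounding write-temp-then-rename pattern this function appears to follow (paths and header content are invented):

    # Sketch of the mkstemp + fdopen + atomic rename pattern; names invented.
    import datetime
    import os
    import tempfile

    target = os.path.expanduser('~/authorized_keys_example')
    tmp_dir = os.path.dirname(target)

    fd, tmp_path = tempfile.mkstemp('.authorized_keys_write', dir=tmp_dir)
    now = datetime.datetime.utcnow().isoformat()
    with os.fdopen(fd, 'wt') as keys_file:  # 'wt': text mode over the raw descriptor
        keys_file.write(f'# generated {now}\n')

    os.chmod(tmp_path, 0o600)     # authorized_keys must not be group/world readable
    os.replace(tmp_path, target)  # atomic swap into place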
@@ -75,7 +75,7 @@ class UserGroupsView(UserGroupAppView):
                 'first_name': user.first_name,
                 'last_name': user.last_name,
                 'username': user.username,
-                'icon_link': h.gravatar_url(user.email, 30),
+                'icon_link': h.gravatar_url(user.email, 30, request=self.request),
                 'value_display': h.person(user.email),
                 'value': user.username,
                 'value_type': 'user',