fix(file-caching): fixed cases where an old cache, created before the change to operate on bytestrings, was still used
super-admin
r5651:bad147da tip default
@@ -1,987 +1,985
# Copyright (C) 2016-2024 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

import time
import logging
import operator

from pyramid.httpexceptions import HTTPFound, HTTPForbidden, HTTPBadRequest

from rhodecode.lib import helpers as h, diffs, rc_cache
from rhodecode.lib.str_utils import safe_str
from rhodecode.lib.utils import repo_name_slug
from rhodecode.lib.utils2 import (
    StrictAttributeDict,
    str2bool,
    safe_int,
    datetime_to_time,
)
from rhodecode.lib.markup_renderer import MarkupRenderer, relative_links
from rhodecode.lib.vcs.backends.base import EmptyCommit
from rhodecode.lib.vcs.exceptions import RepositoryRequirementError
from rhodecode.model import repo
from rhodecode.model import repo_group
from rhodecode.model import user_group
from rhodecode.model import user
from rhodecode.model.db import User
from rhodecode.model.scm import ScmModel
from rhodecode.model.settings import VcsSettingsModel, IssueTrackerSettingsModel
from rhodecode.model.repo import ReadmeFinder

log = logging.getLogger(__name__)


ADMIN_PREFIX: str = "/_admin"
STATIC_FILE_PREFIX: str = "/_static"

URL_NAME_REQUIREMENTS = {
    # group names can have a slash in them, but they must not end with a slash
    "group_name": r".*?[^/]",
    "repo_group_name": r".*?[^/]",
    # repo names can have a slash in them, but they must not end with a slash
    "repo_name": r".*?[^/]",
    # file path eats up everything at the end
    "f_path": r".*",
    # reference types
    "source_ref_type": r"(branch|book|tag|rev|\%\(source_ref_type\)s)",
    "target_ref_type": r"(branch|book|tag|rev|\%\(target_ref_type\)s)",
}


def add_route_with_slash(config, name, pattern, **kw):
    config.add_route(name, pattern, **kw)
    if not pattern.endswith("/"):
        config.add_route(name + "_slash", pattern + "/", **kw)


def add_route_requirements(route_path, requirements=None):
    """
    Adds regex requirements to pyramid routes using a mapping dict
    e.g::
        add_route_requirements('{repo_name}/settings')
    """
    requirements = requirements or URL_NAME_REQUIREMENTS
    for key, regex in list(requirements.items()):
        route_path = route_path.replace("{%s}" % key, "{%s:%s}" % (key, regex))
    return route_path
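
# Illustrative (not part of the committed file): add_route_requirements()
# rewrites `{name}` placeholders into Pyramid's `{name:regex}` form using the
# URL_NAME_REQUIREMENTS map above, e.g.:
#
#   add_route_requirements("{repo_name}/settings")
#   -> "{repo_name:.*?[^/]}/settings"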


def get_format_ref_id(repo):
    """Returns a `repo` specific reference formatter function"""
    if h.is_svn(repo):
        return _format_ref_id_svn
    else:
        return _format_ref_id


def _format_ref_id(name, raw_id):
    """Default formatting of a given reference `name`"""
    return name


def _format_ref_id_svn(name, raw_id):
    """Special way of formatting a reference for Subversion including path"""
    return f"{name}@{raw_id}"
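
# Illustrative (not part of the committed file): for Subversion the formatter
# embeds the revision in the label, e.g. _format_ref_id_svn("trunk", "1234")
# returns "trunk@1234"; other backends keep just the name via _format_ref_id.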


class TemplateArgs(StrictAttributeDict):
    pass


class BaseAppView(object):
    DONT_CHECKOUT_VIEWS = ["channelstream_connect", "ops_ping"]
    EXTRA_VIEWS_TO_IGNORE = ['login', 'register', 'logout']
    SETUP_2FA_VIEW = 'setup_2fa'
    VERIFY_2FA_VIEW = 'check_2fa'

    def __init__(self, context, request):
        self.request = request
        self.context = context
        self.session = request.session
        if not hasattr(request, "user"):
            # NOTE(marcink): edge case, we ended up in a matched route
            # but probably outside of web-app context, e.g. an API call/VCS call
            if hasattr(request, "vcs_call") or hasattr(request, "rpc_method"):
                log.warning("Unable to process request `%s` in this scope", request)
                raise HTTPBadRequest()

        self._rhodecode_user = request.user  # auth user
        self._rhodecode_db_user = self._rhodecode_user.get_instance()
        self.user_data = self._rhodecode_db_user.user_data if self._rhodecode_db_user else {}
        self._maybe_needs_password_change(
            request.matched_route.name, self._rhodecode_db_user
        )
        self._maybe_needs_2fa_configuration(
            request.matched_route.name, self._rhodecode_db_user
        )
        self._maybe_needs_2fa_check(
            request.matched_route.name, self._rhodecode_db_user
        )

    def _maybe_needs_password_change(self, view_name, user_obj):
        if view_name in self.DONT_CHECKOUT_VIEWS:
            return

        log.debug(
            "Checking if user %s needs password change on view %s", user_obj, view_name
        )

        skip_user_views = [
            "logout",
            "login",
            "check_2fa",
            "my_account_password",
            "my_account_password_update",
        ]

        if not user_obj:
            return

        if user_obj.username == User.DEFAULT_USER:
            return

        now = time.time()
        should_change = self.user_data.get("force_password_change")
        change_after = safe_int(should_change) or 0
        if should_change and now > change_after:
            log.debug("User %s requires password change", user_obj)
            h.flash(
                "You are required to change your password",
                "warning",
                ignore_duplicate=True,
            )

            if view_name not in skip_user_views:
                raise HTTPFound(self.request.route_path("my_account_password"))

    def _maybe_needs_2fa_configuration(self, view_name, user_obj):
        if view_name in self.DONT_CHECKOUT_VIEWS + self.EXTRA_VIEWS_TO_IGNORE:
            return

        if not user_obj:
            return

        if user_obj.needs_2fa_configure and view_name != self.SETUP_2FA_VIEW:
            h.flash(
                "You are required to configure 2FA",
                "warning",
                ignore_duplicate=False,
            )
            # Special case for users created "on the fly" (ldap case for new user)
            user_obj.check_2fa_required = False
            raise HTTPFound(self.request.route_path(self.SETUP_2FA_VIEW))

    def _maybe_needs_2fa_check(self, view_name, user_obj):
        if view_name in self.DONT_CHECKOUT_VIEWS + self.EXTRA_VIEWS_TO_IGNORE:
            return

        if not user_obj:
            return

        if user_obj.check_2fa_required and view_name != self.VERIFY_2FA_VIEW:
            raise HTTPFound(self.request.route_path(self.VERIFY_2FA_VIEW))

    def _log_creation_exception(self, e, repo_name):
        _ = self.request.translate
        reason = None
        if len(e.args) == 2:
            reason = e.args[1]

        if reason == "INVALID_CERTIFICATE":
            log.exception("Exception creating a repository: invalid certificate")
            msg = _("Error creating repository %s: invalid certificate") % repo_name
        else:
            log.exception("Exception creating a repository")
            msg = _("Error creating repository %s") % repo_name
        return msg

    def _get_local_tmpl_context(self, include_app_defaults=True):
        c = TemplateArgs()
        c.auth_user = self.request.user
        # TODO(marcink): migrate the usage of c.rhodecode_user to c.auth_user
        c.rhodecode_user = self.request.user

        if include_app_defaults:
            from rhodecode.lib.base import attach_context_attributes

            attach_context_attributes(c, self.request, self.request.user.user_id)

        c.is_super_admin = c.auth_user.is_admin

        c.can_create_repo = c.is_super_admin
        c.can_create_repo_group = c.is_super_admin
        c.can_create_user_group = c.is_super_admin

        c.is_delegated_admin = False

        if not c.auth_user.is_default and not c.is_super_admin:
            c.can_create_repo = h.HasPermissionAny("hg.create.repository")(
                user=self.request.user
            )
            repositories = c.auth_user.repositories_admin or c.can_create_repo

            c.can_create_repo_group = h.HasPermissionAny("hg.repogroup.create.true")(
                user=self.request.user
            )
            repository_groups = (
                c.auth_user.repository_groups_admin or c.can_create_repo_group
            )

            c.can_create_user_group = h.HasPermissionAny("hg.usergroup.create.true")(
                user=self.request.user
            )
            user_groups = c.auth_user.user_groups_admin or c.can_create_user_group
            # delegated admin can create, or manage some objects
            c.is_delegated_admin = repositories or repository_groups or user_groups
        return c

    def _get_template_context(self, tmpl_args, **kwargs):
        local_tmpl_args = {"defaults": {}, "errors": {}, "c": tmpl_args}
        local_tmpl_args.update(kwargs)
        return local_tmpl_args

    def load_default_context(self):
        """
        example:

        def load_default_context(self):
            c = self._get_local_tmpl_context()
            c.custom_var = 'foobar'

            return c
        """
        raise NotImplementedError("Needs implementation in view class")


class RepoAppView(BaseAppView):
    def __init__(self, context, request):
        super().__init__(context, request)
        self.db_repo = request.db_repo
        self.db_repo_name = self.db_repo.repo_name
        self.db_repo_pull_requests = ScmModel().get_pull_requests(self.db_repo)
        self.db_repo_artifacts = ScmModel().get_artifacts(self.db_repo)
        self.db_repo_patterns = IssueTrackerSettingsModel(repo=self.db_repo)

    def _handle_missing_requirements(self, error):
        log.error(
            "Requirements are missing for repository %s: %s",
            self.db_repo_name,
            safe_str(error),
        )

    def _prepare_and_set_clone_url(self, c):
        username = ""
        if self._rhodecode_user.username != User.DEFAULT_USER:
            username = self._rhodecode_user.username

        _def_clone_uri = c.clone_uri_tmpl
        _def_clone_uri_id = c.clone_uri_id_tmpl
        _def_clone_uri_ssh = c.clone_uri_ssh_tmpl

        c.clone_repo_url = self.db_repo.clone_url(
            user=username, uri_tmpl=_def_clone_uri
        )
        c.clone_repo_url_id = self.db_repo.clone_url(
            user=username, uri_tmpl=_def_clone_uri_id
        )
        c.clone_repo_url_ssh = self.db_repo.clone_url(
            uri_tmpl=_def_clone_uri_ssh, ssh=True
        )

    def _get_local_tmpl_context(self, include_app_defaults=True):
        _ = self.request.translate
        c = super()._get_local_tmpl_context(include_app_defaults=include_app_defaults)

        # register common vars for this type of view
        c.rhodecode_db_repo = self.db_repo
        c.repo_name = self.db_repo_name
        c.repository_pull_requests = self.db_repo_pull_requests
        c.repository_artifacts = self.db_repo_artifacts
        c.repository_is_user_following = ScmModel().is_following_repo(
            self.db_repo_name, self._rhodecode_user.user_id
        )
        self.path_filter = PathFilter(None)

        c.repository_requirements_missing = {}
        try:
            self.rhodecode_vcs_repo = self.db_repo.scm_instance()
            # NOTE(marcink):
            # comparison to None since, if it's an object, __bool__ is expensive
            # to calculate
            if self.rhodecode_vcs_repo is not None:
                path_perms = self.rhodecode_vcs_repo.get_path_permissions(
                    c.auth_user.username
                )
                self.path_filter = PathFilter(path_perms)
        except RepositoryRequirementError as e:
            c.repository_requirements_missing = {"error": str(e)}
            self._handle_missing_requirements(e)
            self.rhodecode_vcs_repo = None

        c.path_filter = self.path_filter  # used by atom_feed_entry.mako

        if self.rhodecode_vcs_repo is None:
            # unable to fetch this repo as vcs instance, report back to user
            log.debug(
                "Repository was not found on filesystem, check if it exists or is not damaged"
            )
            h.flash(
                _(
                    "The repository `%(repo_name)s` cannot be loaded from the filesystem. "
                    "Please check that it exists and is not damaged."
                )
                % {"repo_name": c.repo_name},
                category="error",
                ignore_duplicate=True,
            )
            if c.repository_requirements_missing:
                route = self.request.matched_route.name
                if route.startswith(("edit_repo", "repo_summary")):
                    # allow summary and edit repo on missing requirements
                    return c

                raise HTTPFound(
                    h.route_path("repo_summary", repo_name=self.db_repo_name)
                )

            else:  # redirect if we don't show missing requirements
                raise HTTPFound(h.route_path("home"))

        c.has_origin_repo_read_perm = False
        if self.db_repo.fork:
            c.has_origin_repo_read_perm = h.HasRepoPermissionAny(
                "repository.write", "repository.read", "repository.admin"
            )(self.db_repo.fork.repo_name, "summary fork link")

        return c

    def _get_f_path_unchecked(self, matchdict, default=None):
        """
        Should only be used by redirects, everything else should call _get_f_path
        """
        f_path = matchdict.get("f_path")
        if f_path:
            # fix for multiple initial slashes that cause errors for GIT
            return f_path.lstrip("/")

        return default

    def _get_f_path(self, matchdict, default=None):
        f_path_match = self._get_f_path_unchecked(matchdict, default)
        return self.path_filter.assert_path_permissions(f_path_match)

    def _get_general_setting(self, target_repo, settings_key, default=False):
        settings_model = VcsSettingsModel(repo=target_repo)
        settings = settings_model.get_general_settings()
        return settings.get(settings_key, default)

    def _get_repo_setting(self, target_repo, settings_key, default=False):
        settings_model = VcsSettingsModel(repo=target_repo)
        settings = settings_model.get_repo_settings_inherited()
        return settings.get(settings_key, default)

-    def _get_readme_data(self, db_repo, renderer_type, commit_id=None, path="/"):
+    def _get_readme_data(self, db_repo, renderer_type, commit_id=None, path="/", nodes=None):
        log.debug("Looking for README file at path %s", path)
        if commit_id:
            landing_commit_id = commit_id
        else:
            landing_commit = db_repo.get_landing_commit()
            if isinstance(landing_commit, EmptyCommit):
                return None, None
            landing_commit_id = landing_commit.raw_id

        cache_namespace_uid = f"repo.{db_repo.repo_id}"
        region = rc_cache.get_or_create_region(
            "cache_repo", cache_namespace_uid, use_async_runner=False
        )
        start = time.time()

        @region.conditional_cache_on_arguments(namespace=cache_namespace_uid)
        def generate_repo_readme(
-            repo_id, _commit_id, _repo_name, _readme_search_path, _renderer_type
+            _repo_id, _commit_id, _repo_name, _readme_search_path, _renderer_type
        ):
-            readme_data = None
-            readme_filename = None
+            _readme_data = None
+            _readme_filename = None

            commit = db_repo.get_commit(_commit_id)
            log.debug("Searching for a README file at commit %s.", _commit_id)
-            readme_node = ReadmeFinder(_renderer_type).search(
-                commit, path=_readme_search_path
-            )
+            readme_node = ReadmeFinder(_renderer_type).search(commit, path=_readme_search_path, nodes=nodes)

            if readme_node:
                log.debug("Found README node: %s", readme_node)

                relative_urls = {
                    "raw": h.route_path(
                        "repo_file_raw",
                        repo_name=_repo_name,
                        commit_id=commit.raw_id,
                        f_path=readme_node.path,
                    ),
                    "standard": h.route_path(
                        "repo_files",
                        repo_name=_repo_name,
                        commit_id=commit.raw_id,
                        f_path=readme_node.path,
                    ),
                }

-                readme_data = self._render_readme_or_none(
+                _readme_data = self._render_readme_or_none(
                    commit, readme_node, relative_urls
                )
-                readme_filename = readme_node.str_path
+                _readme_filename = readme_node.str_path

-            return readme_data, readme_filename
+            return _readme_data, _readme_filename

        readme_data, readme_filename = generate_repo_readme(
            db_repo.repo_id,
            landing_commit_id,
            db_repo.repo_name,
            path,
-            renderer_type,
+            renderer_type
        )

        compute_time = time.time() - start
        log.debug(
            "Repo README for path %s generated and computed in %.4fs",
            path,
            compute_time,
        )
        return readme_data, readme_filename
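
# Illustrative sketch (not part of the committed file): generate_repo_readme()
# above is memoized through rc_cache's dogpile.cache-style region, where the
# cache key is derived from the decorated function plus its argument values.
# A minimal standalone analogue, assuming plain dogpile.cache:
#
#   from dogpile.cache import make_region
#
#   region = make_region().configure("dogpile.cache.memory")
#
#   @region.cache_on_arguments(namespace="repo.1")
#   def generate_repo_readme(repo_id, commit_id, repo_name, search_path, renderer_type):
#       return expensive_readme_lookup(commit_id, search_path)  # hypothetical helper
#
# A call with the same argument values is served from the cache; changing any
# value (e.g. a new commit_id) computes and stores a fresh entry.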

    def _render_readme_or_none(self, commit, readme_node, relative_urls):
        log.debug("Found README file `%s`, rendering...", readme_node.path)
        renderer = MarkupRenderer()
        try:
            html_source = renderer.render(
                readme_node.str_content, filename=readme_node.path
            )
            if relative_urls:
                return relative_links(html_source, relative_urls)
            return html_source
        except Exception:
            log.exception("Exception while trying to render the README")

    def get_recache_flag(self):
        for flag_name in ["force_recache", "force-recache", "no-cache"]:
            flag_val = self.request.GET.get(flag_name)
            if str2bool(flag_val):
                return True
        return False
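
# Illustrative (not part of the committed file): get_recache_flag() lets a
# client force a cache bypass from the query string, e.g.:
#
#   GET /repo_name/summary?force_recache=1   -> True
#   GET /repo_name/summary?no-cache=true     -> True
#   GET /repo_name/summary                   -> False
#
# str2bool() accepts the usual truthy spellings ("1", "true", ...), so any of
# the three flag names with such a value triggers recaching.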

    def get_commit_preload_attrs(cls):
        pre_load = [
            "author",
            "branch",
            "date",
            "message",
            "parents",
            "obsolete",
            "phase",
            "hidden",
        ]
        return pre_load


class PathFilter(object):
    # Expects an instance of BasePathPermissionChecker or None
    def __init__(self, permission_checker):
        self.permission_checker = permission_checker

    def assert_path_permissions(self, path):
        if self.path_access_allowed(path):
            return path
        raise HTTPForbidden()

    def path_access_allowed(self, path):
        log.debug("Checking ACL permissions for PathFilter for `%s`", path)
        if self.permission_checker:
            has_access = path and self.permission_checker.has_access(path)
            log.debug(
                "ACL Permissions checker enabled, ACL Check has_access: %s", has_access
            )
            return has_access

        log.debug("ACL permissions checker not enabled, skipping...")
        return True

    def filter_patchset(self, patchset):
        if not self.permission_checker or not patchset:
            return patchset, False
        had_filtered = False
        filtered_patchset = []
        for patch in patchset:
            filename = patch.get("filename", None)
            if not filename or self.permission_checker.has_access(filename):
                filtered_patchset.append(patch)
            else:
                had_filtered = True
        if had_filtered:
            if isinstance(patchset, diffs.LimitedDiffContainer):
                filtered_patchset = diffs.LimitedDiffContainer(
                    patchset.diff_limit, patchset.cur_diff_size, filtered_patchset
                )
            return filtered_patchset, True
        else:
            return patchset, False

    def render_patchset_filtered(
        self, diffset, patchset, source_ref=None, target_ref=None
    ):
        filtered_patchset, has_hidden_changes = self.filter_patchset(patchset)
        result = diffset.render_patchset(
            filtered_patchset, source_ref=source_ref, target_ref=target_ref
        )
        result.has_hidden_changes = has_hidden_changes
        return result

    def get_raw_patch(self, diff_processor):
        if self.permission_checker is None:
            return diff_processor.as_raw()
        elif self.permission_checker.has_full_access:
            return diff_processor.as_raw()
        else:
            return "# Repository has user-specific filters, raw patch generation is disabled."

    @property
    def is_enabled(self):
        return self.permission_checker is not None
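
# Illustrative usage sketch (not part of the committed file), assuming
# `permission_checker` implements BasePathPermissionChecker:
#
#   path_filter = PathFilter(permission_checker)
#   if path_filter.is_enabled:
#       # raises HTTPForbidden unless the ACL grants access to the path
#       safe_path = path_filter.assert_path_permissions("docs/README.rst")
#   filtered, has_hidden = path_filter.filter_patchset(patchset)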


class RepoGroupAppView(BaseAppView):
    def __init__(self, context, request):
        super().__init__(context, request)
        self.db_repo_group = request.db_repo_group
        self.db_repo_group_name = self.db_repo_group.group_name

    def _get_local_tmpl_context(self, include_app_defaults=True):
        _ = self.request.translate
        c = super()._get_local_tmpl_context(include_app_defaults=include_app_defaults)
        c.repo_group = self.db_repo_group
        return c

    def _revoke_perms_on_yourself(self, form_result):
        _updates = [
            u
            for u in form_result["perm_updates"]
            if self._rhodecode_user.user_id == int(u[0])
        ]
        _additions = [
            u
            for u in form_result["perm_additions"]
            if self._rhodecode_user.user_id == int(u[0])
        ]
        _deletions = [
            u
            for u in form_result["perm_deletions"]
            if self._rhodecode_user.user_id == int(u[0])
        ]
        admin_perm = "group.admin"
        if (
            _updates
            and _updates[0][1] != admin_perm
            or _additions
            and _additions[0][1] != admin_perm
            or _deletions
            and _deletions[0][1] != admin_perm
        ):
            return True
        return False


class UserGroupAppView(BaseAppView):
    def __init__(self, context, request):
        super().__init__(context, request)
        self.db_user_group = request.db_user_group
        self.db_user_group_name = self.db_user_group.users_group_name


class UserAppView(BaseAppView):
    def __init__(self, context, request):
        super().__init__(context, request)
        self.db_user = request.db_user
        self.db_user_id = self.db_user.user_id

        _ = self.request.translate
        if not request.db_user_supports_default:
            if self.db_user.username == User.DEFAULT_USER:
                h.flash(
                    _("Editing user `{}` is disabled.".format(User.DEFAULT_USER)),
                    category="warning",
                )
                raise HTTPFound(h.route_path("users"))


class DataGridAppView(object):
    """
    Common class providing re-usable grid rendering components
    """

    def _extract_ordering(self, request, column_map=None):
        column_map = column_map or {}
        column_index = safe_int(request.GET.get("order[0][column]"))
        order_dir = request.GET.get("order[0][dir]", "desc")
        order_by = request.GET.get("columns[%s][data][sort]" % column_index, "name_raw")

        # translate datatable to DB columns
        order_by = column_map.get(order_by) or order_by

        search_q = request.GET.get("search[value]")
        return search_q, order_by, order_dir

    def _extract_chunk(self, request):
        start = safe_int(request.GET.get("start"), 0)
        length = safe_int(request.GET.get("length"), 25)
        draw = safe_int(request.GET.get("draw"))
        return draw, start, length

    def _get_order_col(self, order_by, model):
        if isinstance(order_by, str):
            try:
                return operator.attrgetter(order_by)(model)
            except AttributeError:
                return None
        else:
            return order_by
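
# Illustrative (not part of the committed file): _extract_ordering() and
# _extract_chunk() parse DataTables-style server-side parameters, e.g. for a
# request like:
#
#   ?draw=2&start=25&length=25
#   &order[0][column]=1&order[0][dir]=asc
#   &columns[1][data][sort]=username&search[value]=admin
#
# _extract_chunk() yields (draw=2, start=25, length=25) and
# _extract_ordering() yields ("admin", "username", "asc"), with column_map
# optionally translating datatable column names to DB columns.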


class BaseReferencesView(RepoAppView):
    """
    Base for reference views for branches, tags and bookmarks.
    """

    def load_default_context(self):
        c = self._get_local_tmpl_context()
        return c

    def load_refs_context(self, ref_items, partials_template):
        _render = self.request.get_partial_renderer(partials_template)
        pre_load = ["author", "date", "message", "parents"]

        is_svn = h.is_svn(self.rhodecode_vcs_repo)
        is_hg = h.is_hg(self.rhodecode_vcs_repo)

        format_ref_id = get_format_ref_id(self.rhodecode_vcs_repo)

        closed_refs = {}
        if is_hg:
            closed_refs = self.rhodecode_vcs_repo.branches_closed

        data = []
        for ref_name, commit_id in ref_items:
            commit = self.rhodecode_vcs_repo.get_commit(
                commit_id=commit_id, pre_load=pre_load
            )
            closed = ref_name in closed_refs

            # TODO: johbo: Unify generation of reference links
            use_commit_id = "/" in ref_name or is_svn

            if use_commit_id:
                files_url = h.route_path(
                    "repo_files",
                    repo_name=self.db_repo_name,
                    f_path=ref_name if is_svn else "",
                    commit_id=commit_id,
                    _query=dict(at=ref_name),
                )

            else:
                files_url = h.route_path(
                    "repo_files",
                    repo_name=self.db_repo_name,
                    f_path=ref_name if is_svn else "",
                    commit_id=ref_name,
                    _query=dict(at=ref_name),
                )

            data.append(
                {
                    "name": _render("name", ref_name, files_url, closed),
                    "name_raw": ref_name,
                    "closed": closed,
                    "date": _render("date", commit.date),
                    "date_raw": datetime_to_time(commit.date),
                    "author": _render("author", commit.author),
                    "commit": _render(
                        "commit", commit.message, commit.raw_id, commit.idx
                    ),
                    "commit_raw": commit.idx,
                    "compare": _render(
                        "compare", format_ref_id(ref_name, commit.raw_id)
                    ),
                }
            )

        return data


class RepoRoutePredicate(object):
    def __init__(self, val, config):
        self.val = val

    def text(self):
        return f"repo_route = {self.val}"

    phash = text

    def __call__(self, info, request):
        if hasattr(request, "vcs_call"):
            # skip vcs calls
            return

        repo_name = info["match"]["repo_name"]

        repo_name_parts = repo_name.split("/")
        repo_slugs = [x for x in (repo_name_slug(x) for x in repo_name_parts)]

        if repo_name_parts != repo_slugs:
            # short-skip if the repo name doesn't follow the slug rule
            log.warning(
                "repo_name: %s is different than slug %s", repo_name_parts, repo_slugs
            )
            return False

        repo_model = repo.RepoModel()

        by_name_match = repo_model.get_by_repo_name(repo_name, cache=False)

        def redirect_if_creating(route_info, db_repo):
            skip_views = ["edit_repo_advanced_delete"]
            route = route_info["route"]
            # we should skip the delete view so we can actually "remove" repositories
            # if they get stuck in the creating state.
            if route.name in skip_views:
                return

            if db_repo.repo_state in [repo.Repository.STATE_PENDING]:
                repo_creating_url = request.route_path(
                    "repo_creating", repo_name=db_repo.repo_name
                )
                raise HTTPFound(repo_creating_url)

        if by_name_match:
            # register this as request object we can re-use later
            request.db_repo = by_name_match
            request.db_repo_name = request.db_repo.repo_name

            redirect_if_creating(info, by_name_match)
            return True

        by_id_match = repo_model.get_repo_by_id(repo_name)
        if by_id_match:
            request.db_repo = by_id_match
            request.db_repo_name = request.db_repo.repo_name
            redirect_if_creating(info, by_id_match)
            return True

        return False
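
# Illustrative (not part of the committed file): once registered in
# includeme() below, the predicate both gates route matching and pre-loads
# request.db_repo / request.db_repo_name, e.g. (hypothetical route):
#
#   config.add_route(
#       name="repo_summary", pattern="/{repo_name:.*?[^/]}", repo_route=True)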
795
793
796
794
797 class RepoForbidArchivedRoutePredicate(object):
795 class RepoForbidArchivedRoutePredicate(object):
798 def __init__(self, val, config):
796 def __init__(self, val, config):
799 self.val = val
797 self.val = val
800
798
801 def text(self):
799 def text(self):
802 return f"repo_forbid_archived = {self.val}"
800 return f"repo_forbid_archived = {self.val}"
803
801
804 phash = text
802 phash = text
805
803
806 def __call__(self, info, request):
804 def __call__(self, info, request):
807 _ = request.translate
805 _ = request.translate
808 rhodecode_db_repo = request.db_repo
806 rhodecode_db_repo = request.db_repo
809
807
810 log.debug(
808 log.debug(
811 "%s checking if archived flag for repo for %s",
809 "%s checking if archived flag for repo for %s",
812 self.__class__.__name__,
810 self.__class__.__name__,
813 rhodecode_db_repo.repo_name,
811 rhodecode_db_repo.repo_name,
814 )
812 )
815
813
816 if rhodecode_db_repo.archived:
814 if rhodecode_db_repo.archived:
817 log.warning(
815 log.warning(
818 "Current view is not supported for archived repo:%s",
816 "Current view is not supported for archived repo:%s",
819 rhodecode_db_repo.repo_name,
817 rhodecode_db_repo.repo_name,
820 )
818 )
821
819
822 h.flash(
820 h.flash(
823 h.literal(_("Action not supported for archived repository.")),
821 h.literal(_("Action not supported for archived repository.")),
824 category="warning",
822 category="warning",
825 )
823 )
826 summary_url = request.route_path(
824 summary_url = request.route_path(
827 "repo_summary", repo_name=rhodecode_db_repo.repo_name
825 "repo_summary", repo_name=rhodecode_db_repo.repo_name
828 )
826 )
829 raise HTTPFound(summary_url)
827 raise HTTPFound(summary_url)
830 return True
828 return True
831
829
832
830
833 class RepoTypeRoutePredicate(object):
831 class RepoTypeRoutePredicate(object):
834 def __init__(self, val, config):
832 def __init__(self, val, config):
835 self.val = val or ["hg", "git", "svn"]
833 self.val = val or ["hg", "git", "svn"]
836
834
837 def text(self):
835 def text(self):
838 return f"repo_accepted_type = {self.val}"
836 return f"repo_accepted_type = {self.val}"
839
837
840 phash = text
838 phash = text
841
839
842 def __call__(self, info, request):
840 def __call__(self, info, request):
843 if hasattr(request, "vcs_call"):
841 if hasattr(request, "vcs_call"):
844 # skip vcs calls
842 # skip vcs calls
845 return
843 return
846
844
847 rhodecode_db_repo = request.db_repo
845 rhodecode_db_repo = request.db_repo
848
846
849 log.debug(
847 log.debug(
850 "%s checking repo type for %s in %s",
848 "%s checking repo type for %s in %s",
851 self.__class__.__name__,
849 self.__class__.__name__,
852 rhodecode_db_repo.repo_type,
850 rhodecode_db_repo.repo_type,
853 self.val,
851 self.val,
854 )
852 )
855
853
        if rhodecode_db_repo.repo_type in self.val:
            return True
        else:
            log.warning(
                "Current view is not supported for repo type: %s",
                rhodecode_db_repo.repo_type,
            )
            return False


class RepoGroupRoutePredicate(object):
    def __init__(self, val, config):
        self.val = val

    def text(self):
        return f"repo_group_route = {self.val}"

    phash = text

    def __call__(self, info, request):
        if hasattr(request, "vcs_call"):
            # skip vcs calls
            return

        repo_group_name = info["match"]["repo_group_name"]

        repo_group_name_parts = repo_group_name.split("/")
        repo_group_slugs = [
            x for x in [repo_name_slug(x) for x in repo_group_name_parts]
        ]
        if repo_group_name_parts != repo_group_slugs:
            # short-skip if the repo group name doesn't follow the slug rule
            log.warning(
                "repo_group_name: %s is different than slug %s",
                repo_group_name_parts,
                repo_group_slugs,
            )
            return False

        repo_group_model = repo_group.RepoGroupModel()
        by_name_match = repo_group_model.get_by_group_name(repo_group_name, cache=False)

        if by_name_match:
            # register this as request object we can re-use later
            request.db_repo_group = by_name_match
            request.db_repo_group_name = request.db_repo_group.group_name
            return True

        return False


class UserGroupRoutePredicate(object):
    def __init__(self, val, config):
        self.val = val

    def text(self):
        return f"user_group_route = {self.val}"

    phash = text

    def __call__(self, info, request):
        if hasattr(request, "vcs_call"):
            # skip vcs calls
            return

        user_group_id = info["match"]["user_group_id"]
        user_group_model = user_group.UserGroup()
        by_id_match = user_group_model.get(user_group_id, cache=False)

        if by_id_match:
            # register this as request object we can re-use later
            request.db_user_group = by_id_match
            return True

        return False


class UserRoutePredicateBase(object):
    supports_default = None

    def __init__(self, val, config):
        self.val = val

    def text(self):
        raise NotImplementedError()

    def __call__(self, info, request):
        if hasattr(request, "vcs_call"):
            # skip vcs calls
            return

        user_id = info["match"]["user_id"]
        user_model = user.User()
        by_id_match = user_model.get(user_id, cache=False)

        if by_id_match:
            # register this as request object we can re-use later
            request.db_user = by_id_match
            request.db_user_supports_default = self.supports_default
            return True

        return False


class UserRoutePredicate(UserRoutePredicateBase):
    supports_default = False

    def text(self):
        return f"user_route = {self.val}"

    phash = text


class UserRouteWithDefaultPredicate(UserRoutePredicateBase):
    supports_default = True

    def text(self):
        return f"user_with_default_route = {self.val}"

    phash = text


def includeme(config):
    config.add_route_predicate("repo_route", RepoRoutePredicate)
    config.add_route_predicate("repo_accepted_types", RepoTypeRoutePredicate)
    config.add_route_predicate(
        "repo_forbid_when_archived", RepoForbidArchivedRoutePredicate
    )
    config.add_route_predicate("repo_group_route", RepoGroupRoutePredicate)
    config.add_route_predicate("user_group_route", UserGroupRoutePredicate)
    config.add_route_predicate("user_route_with_default", UserRouteWithDefaultPredicate)
    config.add_route_predicate("user_route", UserRoutePredicate)
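

# A minimal, illustrative sketch (not taken from this file) of how a custom
# predicate registered above is activated: passing its registered name as a
# keyword to config.add_route attaches it to that route, and its __call__
# then resolves and caches request.db_repo and friends. The route name and
# pattern below are hypothetical examples.
def _example_routes(config):
    config.add_route(
        name="example_repo_summary",     # hypothetical route name
        pattern="/{repo_name:.*?[^/]}",  # hypothetical pattern
        repo_route=True,                 # `val` handed to RepoRoutePredicate
    )
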
@@ -1,1704 +1,1601
# Copyright (C) 2011-2024 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

import itertools
import logging
import os
import collections
import urllib.request
import urllib.parse
import urllib.error
import pathlib
import time
import random

from pyramid.httpexceptions import HTTPNotFound, HTTPBadRequest, HTTPFound

from pyramid.renderers import render
from pyramid.response import Response

import rhodecode
from rhodecode.apps._base import RepoAppView


from rhodecode.lib import diffs, helpers as h, rc_cache
from rhodecode.lib import audit_logger
from rhodecode.lib.hash_utils import sha1_safe
from rhodecode.lib.archive_cache import (
    get_archival_cache_store,
    get_archival_config,
    ArchiveCacheGenerationLock,
    archive_iterator,
)
from rhodecode.lib.str_utils import safe_bytes, convert_special_chars
from rhodecode.lib.view_utils import parse_path_ref
from rhodecode.lib.exceptions import NonRelativePathError
from rhodecode.lib.codeblocks import filenode_as_lines_tokens, filenode_as_annotated_lines_tokens
from rhodecode.lib.utils2 import convert_line_endings, detect_mode
from rhodecode.lib.type_utils import str2bool
from rhodecode.lib.str_utils import safe_str, safe_int, header_safe_str
from rhodecode.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator, CSRFRequired
from rhodecode.lib.vcs import path as vcspath
from rhodecode.lib.vcs.backends.base import EmptyCommit
from rhodecode.lib.vcs.conf import settings
from rhodecode.lib.vcs.nodes import FileNode
from rhodecode.lib.vcs.exceptions import (
    RepositoryError,
    CommitDoesNotExistError,
    EmptyRepositoryError,
    ImproperArchiveTypeError,
    VCSError,
    NodeAlreadyExistsError,
    NodeDoesNotExistError,
    CommitError,
    NodeError,
)

from rhodecode.model.scm import ScmModel
from rhodecode.model.db import Repository

log = logging.getLogger(__name__)


def get_archive_name(db_repo_id, db_repo_name, commit_sha, ext, subrepos=False, path_sha="", with_hash=True):
    # original backward-compat name of archive
    clean_name = safe_str(convert_special_chars(db_repo_name).replace("/", "_"))

    # e.g vcsserver-id-abcd-sub-1-abcfdef-archive-all.zip
    # vcsserver-id-abcd-sub-0-abcfdef-COMMIT_SHA-PATH_SHA.zip
    id_sha = sha1_safe(str(db_repo_id))[:4]
    sub_repo = "sub-1" if subrepos else "sub-0"
    commit = commit_sha if with_hash else "archive"
    path_marker = (path_sha if with_hash else "") or "all"
    archive_name = f"{clean_name}-id-{id_sha}-{sub_repo}-{commit}-{path_marker}{ext}"

    return archive_name


def get_path_sha(at_path):
    return safe_str(sha1_safe(at_path)[:8])


def _get_archive_spec(fname):
    log.debug("Detecting archive spec for: `%s`", fname)

    fileformat = None
    ext = None
    content_type = None
    for a_type, content_type, extension in settings.ARCHIVE_SPECS:
        if fname.endswith(extension):
            fileformat = a_type
            log.debug("archive is of type: %s", fileformat)
            ext = extension
            break

    if not fileformat:
        raise ValueError()

    # the leftover part of the whole fname is the commit id
    commit_id = fname[: -len(ext)]

    return commit_id, ext, fileformat, content_type

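# A quick illustration of the naming scheme above, with made-up inputs. The
# 4-char id digest depends on sha1_safe, so it is shown symbolically:
#
#   get_archive_name(1, "my/repo", commit_sha="abcdef12", ext=".zip")
#     -> "my_repo-id-<4ch>-sub-0-abcdef12-all.zip"
#
# _get_archive_spec reverses the extension split for an incoming fname:
#
#   _get_archive_spec("abcdef12.zip")
#     -> ("abcdef12", ".zip", <zip fileformat>, <zip content-type>)
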
class RepoFilesView(RepoAppView):
    @staticmethod
    def adjust_file_path_for_svn(f_path, repo):
        """
        Computes the relative path of `f_path`.

        This is mainly based on prefix matching of the recognized tags and
        branches in the underlying repository.
        """
        tags_and_branches = itertools.chain(repo.branches.keys(), repo.tags.keys())
        tags_and_branches = sorted(tags_and_branches, key=len, reverse=True)

        for name in tags_and_branches:
            if f_path.startswith(f"{name}/"):
                f_path = vcspath.relpath(f_path, name)
                break
        return f_path

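    # Worked example (hypothetical SVN layout): with tags_and_branches
    # containing ["tags/v1.0", "trunk"], sorted longest-first, a path
    # "tags/v1.0/setup.py" matches the "tags/v1.0/" prefix and is rewritten
    # to "setup.py", while "trunk/docs/index.rst" becomes "docs/index.rst".
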
    def load_default_context(self):
        c = self._get_local_tmpl_context(include_app_defaults=True)
        c.rhodecode_repo = self.rhodecode_vcs_repo
        c.enable_downloads = self.db_repo.enable_downloads
        return c

    def _ensure_not_locked(self, commit_id="tip"):
        _ = self.request.translate

        repo = self.db_repo
        if repo.enable_locking and repo.locked[0]:
            h.flash(
                _("This repository has been locked by %s on %s")
                % (h.person_by_id(repo.locked[0]), h.format_date(h.time_to_datetime(repo.locked[1]))),
                "warning",
            )
            files_url = h.route_path("repo_files:default_path", repo_name=self.db_repo_name, commit_id=commit_id)
            raise HTTPFound(files_url)

    def forbid_non_head(self, is_head, f_path, commit_id="tip", json_mode=False):
        _ = self.request.translate

        if not is_head:
            message = _("Cannot modify file. " "Given commit `{}` is not head of a branch.").format(commit_id)
            h.flash(message, category="warning")

            if json_mode:
                return message

            files_url = h.route_path("repo_files", repo_name=self.db_repo_name, commit_id=commit_id, f_path=f_path)
            raise HTTPFound(files_url)

    def check_branch_permission(self, branch_name, commit_id="tip", json_mode=False):
        _ = self.request.translate

        rule, branch_perm = self._rhodecode_user.get_rule_and_branch_permission(self.db_repo_name, branch_name)
        if branch_perm and branch_perm not in ["branch.push", "branch.push_force"]:
            message = _("Branch `{}` changes forbidden by rule {}.").format(h.escape(branch_name), h.escape(rule))
            h.flash(message, "warning")

            if json_mode:
                return message

            files_url = h.route_path("repo_files:default_path", repo_name=self.db_repo_name, commit_id=commit_id)

            raise HTTPFound(files_url)

    def _get_commit_and_path(self):
        default_commit_id = self.db_repo.landing_ref_name
        default_f_path = "/"

        commit_id = self.request.matchdict.get("commit_id", default_commit_id)
        f_path = self._get_f_path(self.request.matchdict, default_f_path)

        bytes_path = safe_bytes(f_path)
        return commit_id, f_path, bytes_path

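    # Note on bytes_path above: after the bytestring fixes, vcs nodes are
    # addressed by bytestring paths, so the textual f_path from the URL is
    # converted once via safe_bytes, and that bytes variant is what later
    # get_node() calls operate on. Illustration (assuming safe_bytes of a str
    # is a plain UTF-8 encode):
    #
    #   safe_bytes("docs/readme.md") -> b"docs/readme.md"
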
    @classmethod
    def _get_default_encoding(cls, c):
        enc_list = getattr(c, "default_encodings", [])
        return enc_list[0] if enc_list else "UTF-8"

    def _get_commit_or_redirect(self, commit_id, redirect_after=True):
        """
        This is a safe way to get a commit. If an error occurs, it redirects
        to tip with a proper message.

        :param commit_id: id of commit to fetch
        :param redirect_after: toggle redirection
        """
        _ = self.request.translate

        try:
            return self.rhodecode_vcs_repo.get_commit(commit_id)
        except EmptyRepositoryError:
            if not redirect_after:
                return None

            add_new = upload_new = ""
            if h.HasRepoPermissionAny("repository.write", "repository.admin")(self.db_repo_name):
                _url = h.route_path("repo_files_add_file", repo_name=self.db_repo_name, commit_id=0, f_path="")
                add_new = h.link_to(_("add a new file"), _url, class_="alert-link")

                _url_upld = h.route_path("repo_files_upload_file", repo_name=self.db_repo_name, commit_id=0, f_path="")
                upload_new = h.link_to(_("upload a new file"), _url_upld, class_="alert-link")

            h.flash(
                h.literal(_("There are no files yet. Click here to %s or %s.") % (add_new, upload_new)),
                category="warning",
            )
            raise HTTPFound(h.route_path("repo_summary", repo_name=self.db_repo_name))

        except (CommitDoesNotExistError, LookupError):
            msg = _("No such commit exists for this repository. Commit: {}").format(commit_id)
            h.flash(msg, category="error")
            raise HTTPNotFound()
        except RepositoryError as e:
            h.flash(h.escape(safe_str(e)), category="error")
            raise HTTPNotFound()

    def _get_filenode_or_redirect(self, commit_obj, path, pre_load=None):
        """
        Returns file_node; if an error occurs or the given path is a
        directory, it redirects to the top-level path.
        """
        _ = self.request.translate

        try:
            file_node = commit_obj.get_node(path, pre_load=pre_load)
            if file_node.is_dir():
                raise RepositoryError("The given path is a directory")
        except CommitDoesNotExistError:
            log.exception("No such commit exists for this repository")
            h.flash(_("No such commit exists for this repository"), category="error")
            raise HTTPNotFound()
        except RepositoryError as e:
            log.warning("Repository error while fetching filenode `%s`. Err:%s", path, e)
            h.flash(h.escape(safe_str(e)), category="error")
            raise HTTPNotFound()

        return file_node

    def _is_valid_head(self, commit_id, repo, landing_ref):
        branch_name = sha_commit_id = ""
        is_head = False
        log.debug("Checking if commit_id `%s` is a head for %s.", commit_id, repo)

        for _branch_name, branch_commit_id in repo.branches.items():
            # simple case: we pass in a branch name, it's a HEAD
            if commit_id == _branch_name:
                is_head = True
                branch_name = _branch_name
                sha_commit_id = branch_commit_id
                break
            # case when we pass in a full sha commit_id, which is a head
            elif commit_id == branch_commit_id:
                is_head = True
                branch_name = _branch_name
                sha_commit_id = branch_commit_id
                break

        if h.is_svn(repo) and not repo.is_empty():
            # Note: Subversion only has one head.
            if commit_id == repo.get_commit(commit_idx=-1).raw_id:
                is_head = True
            return branch_name, sha_commit_id, is_head

        # we checked branches, so we only need to try to get the branch/commit_sha
        if repo.is_empty():
            is_head = True
            branch_name = landing_ref
            sha_commit_id = EmptyCommit().raw_id
        else:
            commit = repo.get_commit(commit_id=commit_id)
            if commit:
                branch_name = commit.branch
                sha_commit_id = commit.raw_id

        return branch_name, sha_commit_id, is_head

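    # Illustration of _is_valid_head with assumed data: if repo.branches ==
    # {"default": "aaaa1111..."}, then both _is_valid_head("default", ...) and
    # _is_valid_head("aaaa1111...", ...) return ("default", "aaaa1111...",
    # True), while any other resolvable commit_id yields
    # (commit.branch, commit.raw_id, False).
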
    def _get_tree_at_commit(self, c, commit_id, f_path, full_load=False, at_rev=None):
        repo_id = self.db_repo.repo_id
        force_recache = self.get_recache_flag()

        cache_seconds = rhodecode.ConfigGet().get_int("rc_cache.cache_repo.expiration_time")
        cache_on = not force_recache and cache_seconds > 0

        log.debug(
            "Computing FILE TREE for repo_id %s commit_id `%s` and path `%s` "
            "with caching: %s[TTL: %ss]" % (repo_id, commit_id, f_path, cache_on, cache_seconds or 0)
        )

        cache_namespace_uid = f"repo.{rc_cache.FILE_TREE_CACHE_VER}.{repo_id}"
        region = rc_cache.get_or_create_region("cache_repo", cache_namespace_uid)

        @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=cache_on)
        def compute_file_tree(_name_hash, _repo_id, _commit_id, _f_path, _full_load, _at_rev):
            log.debug("Generating cached file tree for repo_id: %s, %s, %s", _repo_id, _commit_id, _f_path)

            c.full_load = _full_load
            return render(
                "rhodecode:templates/files/files_browser_tree.mako",
                self._get_template_context(c),
                self.request,
                _at_rev,
            )

        return compute_file_tree(
            self.db_repo.repo_name_hash, self.db_repo.repo_id, commit_id, f_path, full_load, at_rev
        )

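    # The caching pattern above derives the cache key from the decorated
    # function's arguments, so repo_name_hash, repo_id, commit_id, f_path,
    # full_load and at_rev all become part of the key, while condition=cache_on
    # transparently bypasses the cache when the force-recache flag is set or
    # the configured TTL is zero. A sketch of the same pattern, with an
    # assumed region object:
    #
    #   @region.conditional_cache_on_arguments(namespace=ns, condition=use_cache)
    #   def expensive(arg1, arg2):
    #       return compute(arg1, arg2)  # executed only on cache miss
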
    def create_pure_path(self, *parts):
        # Split paths and sanitize them, removing any ../ etc
        sanitized_path = [x for x in pathlib.PurePath(*parts).parts if x not in [".", ".."]]

        pure_path = pathlib.PurePath(*sanitized_path)
        return pure_path

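    # Example of the sanitization above:
    #
    #   create_pure_path("docs", "../..", "etc", "passwd").parts
    #     -> ("docs", "etc", "passwd")  # "." and ".." segments are dropped
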
    def _is_lf_enabled(self, target_repo):
        lf_enabled = False

        lf_key_for_vcs_map = {"hg": "extensions_largefiles", "git": "vcs_git_lfs_enabled"}

        lf_key_for_vcs = lf_key_for_vcs_map.get(target_repo.repo_type)

        if lf_key_for_vcs:
            lf_enabled = self._get_repo_setting(target_repo, lf_key_for_vcs)

        return lf_enabled

    @LoginRequired()
    @HasRepoPermissionAnyDecorator("repository.read", "repository.write", "repository.admin")
    def repo_archivefile(self):
        # archive cache config
        from rhodecode import CONFIG

        _ = self.request.translate
        self.load_default_context()

        subrepos = self.request.GET.get("subrepos") == "true"
        with_hash = str2bool(self.request.GET.get("with_hash", "1"))

        default_at_path = "/"
        fname = self.request.matchdict["fname"]
        at_path = self.request.GET.get("at_path") or default_at_path

        if not self.db_repo.enable_downloads:
            return Response(_("Downloads disabled"))

        try:
            commit_id, ext, file_format, content_type = _get_archive_spec(fname)
        except ValueError:
            return Response(_("Unknown archive type for: `{}`").format(h.escape(fname)))

        try:
            commit = self.rhodecode_vcs_repo.get_commit(commit_id)
        except CommitDoesNotExistError:
            return Response(_("Unknown commit_id {}").format(h.escape(commit_id)))
        except EmptyRepositoryError:
            return Response(_("Empty repository"))

        # we used a ref or a shorter version; let's redirect the client to use the explicit hash
        if commit_id != commit.raw_id:
            fname = f"{commit.raw_id}{ext}"
            raise HTTPFound(self.request.current_route_path(fname=fname))

        try:
            at_path = commit.get_node(safe_bytes(at_path)).path or default_at_path
        except Exception:
            return Response(_("No node at path {} for this repository").format(h.escape(at_path)))

        path_sha = get_path_sha(at_path)

        # used for cache etc, consistent unique archive name
        archive_name_key = get_archive_name(
            self.db_repo.repo_id,
            self.db_repo_name,
            commit_sha=commit.short_id,
            ext=ext,
            subrepos=subrepos,
            path_sha=path_sha,
            with_hash=True,
        )

        if not with_hash:
            path_sha = ""

        # what the end client gets served
        response_archive_name = get_archive_name(
            self.db_repo.repo_id,
            self.db_repo_name,
            commit_sha=commit.short_id,
            ext=ext,
            subrepos=subrepos,
            path_sha=path_sha,
            with_hash=with_hash,
        )

        # remove extension from our archive directory name
        archive_dir_name = response_archive_name[: -len(ext)]

        archive_cache_disable = self.request.GET.get("no_cache")

        d_cache = get_archival_cache_store(config=CONFIG)

        # NOTE: we get the config to pass to a call to lazy-init the SAME type of cache on vcsserver
        d_cache_conf = get_archival_config(config=CONFIG)

        # This is also a cache key, and lock key
        reentrant_lock_key = archive_name_key + ".lock"

        use_cached_archive = False
        if not archive_cache_disable and archive_name_key in d_cache:
            reader, metadata = d_cache.fetch(archive_name_key)

            use_cached_archive = True
            log.debug(
                "Found cached archive as key=%s tag=%s, serving archive from cache reader=%s",
                archive_name_key,
                metadata,
                reader.name,
            )
        else:
            reader = None
            log.debug("Archive with key=%s is not yet cached, creating one now...", archive_name_key)

        if not reader:
            # generate a new archive, as the previous one was not found in the cache
            try:
                with d_cache.get_lock(reentrant_lock_key):
                    try:
                        commit.archive_repo(
                            archive_name_key,
                            archive_dir_name=archive_dir_name,
                            kind=file_format,
                            subrepos=subrepos,
                            archive_at_path=at_path,
                            cache_config=d_cache_conf,
                        )
                    except ImproperArchiveTypeError:
                        return _("Unknown archive type")

            except ArchiveCacheGenerationLock:
                retry_after = round(random.uniform(0.3, 3.0), 1)
                time.sleep(retry_after)

                location = self.request.url
                response = Response(
                    f"archive {archive_name_key} generation in progress, Retry-After={retry_after}, Location={location}"
                )
                response.headers["Retry-After"] = str(retry_after)
                response.status_code = 307  # temporary redirect

                response.location = location
                return response

            reader, metadata = d_cache.fetch(archive_name_key, retry=True, retry_attempts=30)

        response = Response(app_iter=archive_iterator(reader))
        response.content_disposition = f"attachment; filename={response_archive_name}"
        response.content_type = str(content_type)

        try:
            return response
        finally:
            # store download action
            audit_logger.store_web(
                "repo.archive.download",
                action_data={
                    "user_agent": self.request.user_agent,
                    "archive_name": archive_name_key,
                    "archive_spec": fname,
                    "archive_cached": use_cached_archive,
                },
                user=self._rhodecode_user,
                repo=self.db_repo,
                commit=True,
            )

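    # Client-side view of the lock handling above (a sketch): while one
    # request holds the generation lock, concurrent downloads of the same
    # archive receive a 307 Temporary Redirect back to the same URL with a
    # Retry-After of 0.3-3.0 seconds, so a well-behaved client simply waits
    # and re-requests until the cached archive can be fetched.
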
    def _get_file_node(self, commit_id, f_path):
        if commit_id not in ["", None, "None", "0" * 12, "0" * 40]:
            commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
            try:
                node = commit.get_node(safe_bytes(f_path))
                if node.is_dir():
                    raise NodeError(f"{node} path is a {type(node)} not a file")
            except NodeDoesNotExistError:
                commit = EmptyCommit(
                    commit_id=commit_id,
                    idx=commit.idx,
                    repo=commit.repository,
                    alias=commit.repository.alias,
                    message=commit.message,
                    author=commit.author,
                    date=commit.date,
                )
                node = FileNode(safe_bytes(f_path), b"", commit=commit)
        else:
            commit = EmptyCommit(repo=self.rhodecode_vcs_repo, alias=self.rhodecode_vcs_repo.alias)
            node = FileNode(safe_bytes(f_path), b"", commit=commit)
        return node

    @LoginRequired()
    @HasRepoPermissionAnyDecorator("repository.read", "repository.write", "repository.admin")
    def repo_files_diff(self):
        c = self.load_default_context()
        f_path = self._get_f_path(self.request.matchdict)
        diff1 = self.request.GET.get("diff1", "")
        diff2 = self.request.GET.get("diff2", "")

        path1, diff1 = parse_path_ref(diff1, default_path=f_path)

        ignore_whitespace = str2bool(self.request.GET.get("ignorews"))
        line_context = self.request.GET.get("context", 3)

        if not any((diff1, diff2)):
            h.flash('Need query parameter "diff1" or "diff2" to generate a diff.', category="error")
            raise HTTPBadRequest()

        c.action = self.request.GET.get("diff")
        if c.action not in ["download", "raw"]:
            compare_url = h.route_path(
                "repo_compare",
                repo_name=self.db_repo_name,
                source_ref_type="rev",
                source_ref=diff1,
                target_repo=self.db_repo_name,
                target_ref_type="rev",
                target_ref=diff2,
                _query=dict(f_path=f_path),
            )
            # redirect to new view if we render diff
            raise HTTPFound(compare_url)

        try:
            node1 = self._get_file_node(diff1, path1)
            node2 = self._get_file_node(diff2, f_path)
        except (RepositoryError, NodeError):
            log.exception("Exception while trying to get node from repository")
            raise HTTPFound(h.route_path("repo_files", repo_name=self.db_repo_name, commit_id="tip", f_path=f_path))

        if all(isinstance(node.commit, EmptyCommit) for node in (node1, node2)):
            raise HTTPNotFound()

        c.commit_1 = node1.commit
        c.commit_2 = node2.commit

        if c.action == "download":
            _diff = diffs.get_gitdiff(node1, node2, ignore_whitespace=ignore_whitespace, context=line_context)
            # NOTE: this was using diff_format='gitdiff'
            diff = diffs.DiffProcessor(_diff, diff_format="newdiff")

            response = Response(self.path_filter.get_raw_patch(diff))
            response.content_type = "text/plain"
            response.content_disposition = f"attachment; filename={f_path}_{diff1}_vs_{diff2}.diff"
            charset = self._get_default_encoding(c)
            if charset:
                response.charset = charset
            return response

        elif c.action == "raw":
            _diff = diffs.get_gitdiff(node1, node2, ignore_whitespace=ignore_whitespace, context=line_context)
            # NOTE: this was using diff_format='gitdiff'
            diff = diffs.DiffProcessor(_diff, diff_format="newdiff")

            response = Response(self.path_filter.get_raw_patch(diff))
            response.content_type = "text/plain"
            charset = self._get_default_encoding(c)
            if charset:
                response.charset = charset
            return response

        # in case we ever end up here
        raise HTTPNotFound()

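    # The raw/download modes above are selected purely via the query string,
    # e.g. (commit ids are placeholders):
    #
    #   ?diff1=<commit_a>&diff2=<commit_b>&diff=raw       -> inline text/plain patch
    #   ?diff1=<commit_a>&diff2=<commit_b>&diff=download  -> patch as attachment
    #
    # any other `diff` value redirects to the repo_compare view.
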
    @LoginRequired()
    @HasRepoPermissionAnyDecorator("repository.read", "repository.write", "repository.admin")
    def repo_files_diff_2way_redirect(self):
        """
        Kept only to make OLD links work
        """
        f_path = self._get_f_path_unchecked(self.request.matchdict)
        diff1 = self.request.GET.get("diff1", "")
        diff2 = self.request.GET.get("diff2", "")

        if not any((diff1, diff2)):
            h.flash('Need query parameter "diff1" or "diff2" to generate a diff.', category="error")
            raise HTTPBadRequest()

        compare_url = h.route_path(
            "repo_compare",
            repo_name=self.db_repo_name,
            source_ref_type="rev",
            source_ref=diff1,
            target_ref_type="rev",
            target_ref=diff2,
            _query=dict(
                f_path=f_path,
                diffmode="sideside",
                target_repo=self.db_repo_name,
            ),
        )
        raise HTTPFound(compare_url)

    @LoginRequired()
    def repo_files_default_commit_redirect(self):
        """
        Special page that redirects to the landing page of files based on the default
        commit for repository
        """
        c = self.load_default_context()
        ref_name = c.rhodecode_db_repo.landing_ref_name
        landing_url = h.repo_files_by_ref_url(
            c.rhodecode_db_repo.repo_name,
            c.rhodecode_db_repo.repo_type,
            f_path="",
            ref_name=ref_name,
            commit_id="tip",
            query=dict(at=ref_name),
        )

        raise HTTPFound(landing_url)

    @LoginRequired()
    @HasRepoPermissionAnyDecorator("repository.read", "repository.write", "repository.admin")
    def repo_files(self):
        c = self.load_default_context()

        view_name = getattr(self.request.matched_route, "name", None)

        c.annotate = view_name == "repo_files:annotated"
        # default is false, but .rst/.md files later are auto rendered, we can
        # overwrite auto rendering by setting this GET flag
        c.renderer = view_name == "repo_files:rendered" or not self.request.GET.get("no-render", False)

        commit_id, f_path, bytes_path = self._get_commit_and_path()

        c.commit = self._get_commit_or_redirect(commit_id)
        c.branch = self.request.GET.get("branch", None)
        c.f_path = f_path
        at_rev = self.request.GET.get("at")

        # files or dirs
        try:
            c.file = c.commit.get_node(bytes_path, pre_load=["is_binary", "size", "data"])

            c.file_author = True
            c.file_tree = ""

            # prev link
            try:
                prev_commit = c.commit.prev(c.branch)
                c.prev_commit = prev_commit
                c.url_prev = h.route_path(
                    "repo_files", repo_name=self.db_repo_name, commit_id=prev_commit.raw_id, f_path=f_path
                )
                if c.branch:
                    c.url_prev += f"?branch={c.branch}"
            except (CommitDoesNotExistError, VCSError):
                c.url_prev = "#"
                c.prev_commit = EmptyCommit()

            # next link
            try:
                next_commit = c.commit.next(c.branch)
                c.next_commit = next_commit
                c.url_next = h.route_path(
                    "repo_files", repo_name=self.db_repo_name, commit_id=next_commit.raw_id, f_path=f_path
                )
                if c.branch:
                    c.url_next += f"?branch={c.branch}"
            except (CommitDoesNotExistError, VCSError):
                c.url_next = "#"
                c.next_commit = EmptyCommit()

            # load file content
            if c.file.is_file():
                c.lf_node = {}

                has_lf_enabled = self._is_lf_enabled(self.db_repo)
                if has_lf_enabled:
                    c.lf_node = c.file.get_largefile_node()

                c.file_source_page = "true"
                c.file_last_commit = c.file.last_commit

                c.file_size_too_big = c.file.size > c.visual.cut_off_limit_file

                if not (c.file_size_too_big or c.file.is_binary):
                    if c.annotate:  # annotation has precedence over renderer
                        c.annotated_lines = filenode_as_annotated_lines_tokens(c.file)
                    else:
                        c.renderer = c.renderer and h.renderer_from_filename(c.file.path)
                        if not c.renderer:
                            c.lines = filenode_as_lines_tokens(c.file)

                _branch_name, _sha_commit_id, is_head = self._is_valid_head(
                    commit_id, self.rhodecode_vcs_repo, landing_ref=self.db_repo.landing_ref_name
                )
                c.on_branch_head = is_head

                branch = c.commit.branch if (c.commit.branch and "/" not in c.commit.branch) else None
                c.branch_or_raw_id = branch or c.commit.raw_id
                c.branch_name = c.commit.branch or h.short_id(c.commit.raw_id)

                author = c.file_last_commit.author
                c.authors = [[h.email(author), h.person(author, "username_or_name_or_email"), 1]]

            else:  # load tree content (dir content) at path
                c.file_source_page = "false"
                c.authors = []

                dir_node = c.file
                c.file_nodes = dir_node.commit.get_nodes(dir_node.bytes_path, pre_load=dir_node.default_pre_load)
                # this loads a simple tree without metadata to speed things up;
                # later, via ajax, we call repo_nodetree_full and fetch the whole tree
                c.file_tree = self._get_tree_at_commit(c, c.commit.raw_id, f_path, at_rev=at_rev)

                c.readme_data, c.readme_file = self._get_readme_data(
                    self.db_repo, c.visual.default_renderer, c.commit.raw_id, bytes_path, nodes=c.file_nodes
                )

        except RepositoryError as e:
            h.flash(h.escape(safe_str(e)), category="error")
            raise HTTPNotFound()

        if self.request.environ.get("HTTP_X_PJAX"):
            html = render("rhodecode:templates/files/files_pjax.mako", self._get_template_context(c), self.request)
        else:
            html = render("rhodecode:templates/files/files.mako", self._get_template_context(c), self.request)
        return Response(html)

749 @HasRepoPermissionAnyDecorator("repository.read", "repository.write", "repository.admin")
756 'repository.read', 'repository.write', 'repository.admin')
757 def repo_files_annotated_previous(self):
750 def repo_files_annotated_previous(self):
758 self.load_default_context()
751 self.load_default_context()
759
752
760 commit_id, bytes_path, bytes_path = self._get_commit_and_path()
753 commit_id, bytes_path, bytes_path = self._get_commit_and_path()
761 commit = self._get_commit_or_redirect(commit_id)
754 commit = self._get_commit_or_redirect(commit_id)
762 prev_commit_id = commit.raw_id
755 prev_commit_id = commit.raw_id
763 line_anchor = self.request.GET.get('line_anchor')
756 line_anchor = self.request.GET.get("line_anchor")
764 is_file = False
757 is_file = False
765 try:
758 try:
766 _file = commit.get_node(bytes_path)
759 _file = commit.get_node(bytes_path)
767 is_file = _file.is_file()
760 is_file = _file.is_file()
768 except (NodeDoesNotExistError, CommitDoesNotExistError, VCSError):
761 except (NodeDoesNotExistError, CommitDoesNotExistError, VCSError):
769 pass
762 pass
770
763
        if is_file:
            history = commit.get_path_history(bytes_path)
            prev_commit_id = history[1].raw_id if len(history) > 1 else prev_commit_id
        prev_url = h.route_path(
            "repo_files:annotated",
            repo_name=self.db_repo_name,
            commit_id=prev_commit_id,
            f_path=bytes_path,
            _anchor=f"L{line_anchor}",
        )

        raise HTTPFound(prev_url)

    @LoginRequired()
    @HasRepoPermissionAnyDecorator("repository.read", "repository.write", "repository.admin")
    def repo_nodetree_full(self):
        """
        Returns the rendered HTML of the file tree, including commit date,
        author and commit_id, for the given combination of repo, commit_id
        and file path.
        """
        c = self.load_default_context()

        commit_id, f_path, bytes_path = self._get_commit_and_path()
        commit = self._get_commit_or_redirect(commit_id)
        try:
            dir_node = commit.get_node(bytes_path)
        except RepositoryError as e:
            return Response(f"error: {h.escape(safe_str(e))}")

        if dir_node.is_file():
            return Response("")

        c.file = dir_node
        c.file_nodes = dir_node.commit.get_nodes(dir_node.bytes_path, pre_load=dir_node.default_pre_load)
        c.commit = commit
        at_rev = self.request.GET.get("at")

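        # full_load=True renders the tree including the per-node commit date,
        # author and commit_id that the fast initial render skips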
        html = self._get_tree_at_commit(c, commit.raw_id, dir_node.path, full_load=True, at_rev=at_rev)

        return Response(html)

    def _get_attachement_headers(self, f_path):
        f_name = safe_str(f_path.split(Repository.NAME_SEP)[-1])
        safe_path = f_name.replace('"', '\\"')
        encoded_path = urllib.parse.quote(f_name)
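
        # build both the plain quoted filename and the RFC 5987 percent-encoded
        # filename* parameter, so old and new clients each get a usable name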
        headers = f"attachment; " f'filename="{safe_path}"; ' f"filename*=UTF-8''{encoded_path}"

        return header_safe_str(headers)

    @LoginRequired()
    @HasRepoPermissionAnyDecorator("repository.read", "repository.write", "repository.admin")
    def repo_file_raw(self):
        """
        Action for "show as raw"; some mimetypes, such as images and icons,
        are rendered inline instead of being served as attachments.
        """
        c = self.load_default_context()

        commit_id, f_path, bytes_path = self._get_commit_and_path()
        commit = self._get_commit_or_redirect(commit_id)
        file_node = self._get_filenode_or_redirect(commit, bytes_path)

        raw_mimetype_mapping = {
            # map original mimetype to a mimetype used for "show as raw"
            # you can also provide a content-disposition to override the
            # default "attachment" disposition.
            # orig_type: (new_type, new_dispo)
            # show images inline:
            # Do not re-add SVG: it is unsafe and permits XSS attacks. One can
            # for example render an SVG with javascript inside or even render
            # HTML.
            "image/x-icon": ("image/x-icon", "inline"),
            "image/png": ("image/png", "inline"),
            "image/gif": ("image/gif", "inline"),
            "image/jpeg": ("image/jpeg", "inline"),
            "application/pdf": ("application/pdf", "inline"),
        }

        mimetype = file_node.mimetype
        try:
            mimetype, disposition = raw_mimetype_mapping[mimetype]
        except KeyError:
            # we don't know anything special about this, handle it safely
            if file_node.is_binary:
                # do same as download raw for binary files
                mimetype, disposition = "application/octet-stream", "attachment"
            else:
                # do not just use the original mimetype, but force text/plain,
                # otherwise it would serve text/html and that might be unsafe.
                # Note: underlying vcs library fakes text/plain mimetype if the
                # mimetype can not be determined and it thinks it is not
                # binary. This might lead to erroneous text display in some
                # cases, but helps in other cases, like with text files
                # without extension.
                mimetype, disposition = "text/plain", "inline"

        if disposition == "attachment":
            disposition = self._get_attachement_headers(f_path)

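        # stream_bytes() yields the file content incrementally; passing it as
        # app_iter lets the response stream large files without buffering them
        # fully in memory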
        stream_content = file_node.stream_bytes()

        response = Response(app_iter=stream_content)
        response.content_disposition = disposition
        response.content_type = mimetype

        charset = self._get_default_encoding(c)
        if charset:
            response.charset = charset

        return response

    @LoginRequired()
    @HasRepoPermissionAnyDecorator("repository.read", "repository.write", "repository.admin")
    def repo_file_download(self):
        c = self.load_default_context()

        commit_id, f_path, bytes_path = self._get_commit_and_path()
        commit = self._get_commit_or_redirect(commit_id)
        file_node = self._get_filenode_or_redirect(commit, bytes_path)

        if self.request.GET.get("lf"):
            # only if lf get flag is passed, we download this file
            # as LFS/Largefile
            lf_node = file_node.get_largefile_node()
            if lf_node:
                # overwrite our pointer with the REAL large-file
                file_node = lf_node

        disposition = self._get_attachement_headers(f_path)

        stream_content = file_node.stream_bytes()

        response = Response(app_iter=stream_content)
        response.content_disposition = disposition
        response.content_type = file_node.mimetype

        charset = self._get_default_encoding(c)
        if charset:
            response.charset = charset

        return response

    def _get_nodelist_at_commit(self, repo_name, repo_id, commit_id, f_path):
        cache_seconds = rhodecode.ConfigGet().get_int("rc_cache.cache_repo.expiration_time")
        cache_on = cache_seconds > 0
        log.debug(
            "Computing FILE SEARCH for repo_id %s commit_id `%s` and path `%s` "
            "with caching: %s[TTL: %ss]" % (repo_id, commit_id, f_path, cache_on, cache_seconds or 0)
        )

        cache_namespace_uid = f"repo.{repo_id}"
        region = rc_cache.get_or_create_region("cache_repo", cache_namespace_uid)

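        # compute_file_search below is memoized per (name_hash, repo_id,
        # commit_id, path) when caching is enabled; the otherwise-unused
        # _name_hash argument appears to be part of the key precisely so a
        # renamed repository stops hitting stale cached entries (assumption)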
        @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=cache_on)
        def compute_file_search(_name_hash, _repo_id, _commit_id, _f_path):
            log.debug("Generating cached nodelist for repo_id:%s, %s, %s", _repo_id, commit_id, f_path)
            try:
                _d, _f = ScmModel().get_quick_filter_nodes(repo_name, _commit_id, _f_path)
            except (RepositoryError, CommitDoesNotExistError, Exception) as e:
                log.exception(safe_str(e))
                h.flash(h.escape(safe_str(e)), category="error")
                raise HTTPFound(h.route_path("repo_files", repo_name=self.db_repo_name, commit_id="tip", f_path="/"))

            return _d + _f

        result = compute_file_search(self.db_repo.repo_name_hash, self.db_repo.repo_id, commit_id, f_path)
        return filter(lambda n: self.path_filter.path_access_allowed(n["name"]), result)

    @LoginRequired()
    @HasRepoPermissionAnyDecorator("repository.read", "repository.write", "repository.admin")
    def repo_nodelist(self):
        self.load_default_context()

        commit_id, f_path, bytes_path = self._get_commit_and_path()
        commit = self._get_commit_or_redirect(commit_id)

        metadata = self._get_nodelist_at_commit(self.db_repo_name, self.db_repo.repo_id, commit.raw_id, f_path)
        return {"nodes": [x for x in metadata]}

    def _create_references(self, branches_or_tags, symbolic_reference, f_path, ref_type):
        items = []
        for name, commit_id in branches_or_tags.items():
            sym_ref = symbolic_reference(commit_id, name, f_path, ref_type)
            items.append((sym_ref, name, ref_type))
        return items

    def _symbolic_reference(self, commit_id, name, f_path, ref_type):
        return commit_id

    def _symbolic_reference_svn(self, commit_id, name, f_path, ref_type):
        return commit_id

        # NOTE(dan): old code we used in "diff" mode compare
        new_f_path = vcspath.join(name, f_path)
        return f"{new_f_path}@{commit_id}"
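        # the two lines above are unreachable dead code, retained on purpose as
        # a reference for the old "diff" mode behaviour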

    def _get_node_history(self, commit_obj, f_path, commits=None):
        """
        get commit history for given node

        :param commit_obj: commit to calculate history
        :param f_path: path for node to calculate history for
        :param commits: if passed, don't calculate history and take
            commits defined in this list
        """
        _ = self.request.translate

        # calculate history based on tip
        tip = self.rhodecode_vcs_repo.get_commit()
        if commits is None:
            pre_load = ["author", "branch"]
            try:
                commits = tip.get_path_history(safe_bytes(f_path), pre_load=pre_load)
            except (NodeDoesNotExistError, CommitError):
                # this node is not present at tip!
                commits = commit_obj.get_path_history(safe_bytes(f_path), pre_load=pre_load)

        history = []
        commits_group = ([], _("Changesets"))
        for commit in commits:
            branch = " (%s)" % commit.branch if commit.branch else ""
            n_desc = f"r{commit.idx}:{commit.short_id}{branch}"
            commits_group[0].append((commit.raw_id, n_desc, "sha"))
        history.append(commits_group)

        symbolic_reference = self._symbolic_reference

        if self.rhodecode_vcs_repo.alias == "svn":
            adjusted_f_path = RepoFilesView.adjust_file_path_for_svn(f_path, self.rhodecode_vcs_repo)
            if adjusted_f_path != f_path:
                log.debug(
                    'Recognized svn tag or branch in file "%s", using svn specific symbolic references', f_path
                )
                f_path = adjusted_f_path
                symbolic_reference = self._symbolic_reference_svn

        branches = self._create_references(self.rhodecode_vcs_repo.branches, symbolic_reference, f_path, "branch")
        branches_group = (branches, _("Branches"))

        tags = self._create_references(self.rhodecode_vcs_repo.tags, symbolic_reference, f_path, "tag")
        tags_group = (tags, _("Tags"))

        history.append(branches_group)
        history.append(tags_group)

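        # resulting shape: [(items, label), ...] with one group each for
        # changesets, branches and tags; every item is a
        # (reference, display_name, ref_type) tuple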
        return history, commits

    @LoginRequired()
    @HasRepoPermissionAnyDecorator("repository.read", "repository.write", "repository.admin")
    def repo_file_history(self):
        self.load_default_context()

        commit_id, f_path, bytes_path = self._get_commit_and_path()
        commit = self._get_commit_or_redirect(commit_id)
        file_node = self._get_filenode_or_redirect(commit, bytes_path)

        if file_node.is_file():
            file_history, _hist = self._get_node_history(commit, f_path)

            res = []
            for section_items, section in file_history:
                items = []
                for obj_id, obj_text, obj_type in section_items:
                    at_rev = ""
                    if obj_type in ["branch", "bookmark", "tag"]:
                        at_rev = obj_text
                    entry = {"id": obj_id, "text": obj_text, "type": obj_type, "at_rev": at_rev}

                    items.append(entry)

                res.append({"text": section, "children": items})

            data = {"more": False, "results": res}
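            # payload shaped for the history dropdown, e.g.
            # {"more": False, "results": [{"text": "Changesets", "children":
            #     [{"id": "<sha>", "text": "r1:abc123", "type": "sha", "at_rev": ""}]}]}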
            return data

        log.warning("Cannot fetch history for directory")
        raise HTTPBadRequest()

    @LoginRequired()
    @HasRepoPermissionAnyDecorator("repository.read", "repository.write", "repository.admin")
    def repo_file_authors(self):
        c = self.load_default_context()

        commit_id, f_path, bytes_path = self._get_commit_and_path()
        commit = self._get_commit_or_redirect(commit_id)
        file_node = self._get_filenode_or_redirect(commit, bytes_path)

        if not file_node.is_file():
            raise HTTPBadRequest()

        c.file_last_commit = file_node.last_commit
        if self.request.GET.get("annotate") == "1":
            # use _hist from annotation if annotation mode is on
            commit_ids = {x[1] for x in file_node.annotate}
            _hist = (self.rhodecode_vcs_repo.get_commit(commit_id) for commit_id in commit_ids)
        else:
            _f_history, _hist = self._get_node_history(commit, f_path)
        c.file_author = False

        unique = collections.OrderedDict()
        for commit in _hist:
            author = commit.author
            if author not in unique:
                unique[commit.author] = [
                    h.email(author),
                    h.person(author, "username_or_name_or_email"),
                    1,  # counter
                ]

            else:
                # increase counter
                unique[commit.author][2] += 1

        c.authors = [val for val in unique.values()]

        return self._get_template_context(c)

    @LoginRequired()
    @HasRepoPermissionAnyDecorator("repository.write", "repository.admin")
    def repo_files_check_head(self):
        self.load_default_context()

        commit_id, f_path, bytes_path = self._get_commit_and_path()
        _branch_name, _sha_commit_id, is_head = self._is_valid_head(
            commit_id, self.rhodecode_vcs_repo, landing_ref=self.db_repo.landing_ref_name
        )

        new_path = self.request.POST.get("path")
        operation = self.request.POST.get("operation")
        path_exist = ""

        if new_path and operation in ["create", "upload"]:
            new_f_path = os.path.join(f_path.lstrip("/"), new_path)
            try:
                commit_obj = self.rhodecode_vcs_repo.get_commit(commit_id)
                # NOTE(dan): construct whole path without leading /
                file_node = commit_obj.get_node(safe_bytes(new_f_path))
                if file_node:
                    path_exist = new_f_path
            except (EmptyRepositoryError, NodeDoesNotExistError):
                pass

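        # the JSON below lets the client-side editor warn about non-head
        # commits and already-existing paths before committing, e.g.
        # {"branch": "default", "sha": "<sha>", "is_head": True, "path_exists": ""}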
        return {"branch": _branch_name, "sha": _sha_commit_id, "is_head": is_head, "path_exists": path_exist}

    @LoginRequired()
    @HasRepoPermissionAnyDecorator("repository.write", "repository.admin")
    def repo_files_remove_file(self):
        _ = self.request.translate
        c = self.load_default_context()
        commit_id, f_path, bytes_path = self._get_commit_and_path()

        self._ensure_not_locked()
        _branch_name, _sha_commit_id, is_head = self._is_valid_head(
            commit_id, self.rhodecode_vcs_repo, landing_ref=self.db_repo.landing_ref_name
        )

        self.forbid_non_head(is_head, f_path)
        self.check_branch_permission(_branch_name)

        c.commit = self._get_commit_or_redirect(commit_id)
        c.file = self._get_filenode_or_redirect(c.commit, bytes_path)

        c.default_message = _("Deleted file {} via RhodeCode Enterprise").format(f_path)
        c.f_path = f_path

        return self._get_template_context(c)

    @LoginRequired()
    @HasRepoPermissionAnyDecorator("repository.write", "repository.admin")
    @CSRFRequired()
    def repo_files_delete_file(self):
        _ = self.request.translate

        c = self.load_default_context()
        commit_id, f_path, bytes_path = self._get_commit_and_path()

        self._ensure_not_locked()
        _branch_name, _sha_commit_id, is_head = self._is_valid_head(
            commit_id, self.rhodecode_vcs_repo, landing_ref=self.db_repo.landing_ref_name
        )

        self.forbid_non_head(is_head, f_path)
        self.check_branch_permission(_branch_name)

        c.commit = self._get_commit_or_redirect(commit_id)
        c.file = self._get_filenode_or_redirect(c.commit, bytes_path)

        c.default_message = _("Deleted file {} via RhodeCode Enterprise").format(f_path)
        c.f_path = f_path
        node_path = f_path
        author = self._rhodecode_db_user.full_contact
        message = self.request.POST.get("message") or c.default_message
        try:
            nodes = {safe_bytes(node_path): {"content": b""}}
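            # delete_nodes() keys off the bytes path; the empty bytestring is
            # only a content placeholder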
            ScmModel().delete_nodes(
                user=self._rhodecode_db_user.user_id,
                repo=self.db_repo,
                message=message,
                nodes=nodes,
                parent_commit=c.commit,
                author=author,
            )

            h.flash(_("Successfully deleted file `{}`").format(h.escape(f_path)), category="success")
        except Exception:
            log.exception("Error during commit operation")
            h.flash(_("Error occurred during commit"), category="error")
        raise HTTPFound(h.route_path("repo_commit", repo_name=self.db_repo_name, commit_id="tip"))

    @LoginRequired()
    @HasRepoPermissionAnyDecorator("repository.write", "repository.admin")
    def repo_files_edit_file(self):
        _ = self.request.translate
        c = self.load_default_context()
        commit_id, f_path, bytes_path = self._get_commit_and_path()

        self._ensure_not_locked()
        _branch_name, _sha_commit_id, is_head = self._is_valid_head(
            commit_id, self.rhodecode_vcs_repo, landing_ref=self.db_repo.landing_ref_name
        )

        self.forbid_non_head(is_head, f_path, commit_id=commit_id)
        self.check_branch_permission(_branch_name, commit_id=commit_id)

        c.commit = self._get_commit_or_redirect(commit_id)
        c.file = self._get_filenode_or_redirect(c.commit, bytes_path)

        if c.file.is_binary:
            files_url = h.route_path(
                "repo_files", repo_name=self.db_repo_name, commit_id=c.commit.raw_id, f_path=f_path
            )
            raise HTTPFound(files_url)

        c.default_message = _("Edited file {} via RhodeCode Enterprise").format(f_path)
        c.f_path = f_path

        return self._get_template_context(c)

    @LoginRequired()
    @HasRepoPermissionAnyDecorator("repository.write", "repository.admin")
    @CSRFRequired()
    def repo_files_update_file(self):
        _ = self.request.translate
        c = self.load_default_context()
        commit_id, f_path, bytes_path = self._get_commit_and_path()

        self._ensure_not_locked()

        c.commit = self._get_commit_or_redirect(commit_id)
        c.file = self._get_filenode_or_redirect(c.commit, bytes_path)

        if c.file.is_binary:
            raise HTTPFound(
                h.route_path("repo_files", repo_name=self.db_repo_name, commit_id=c.commit.raw_id, f_path=f_path)
            )

        _branch_name, _sha_commit_id, is_head = self._is_valid_head(
            commit_id, self.rhodecode_vcs_repo, landing_ref=self.db_repo.landing_ref_name
        )

        self.forbid_non_head(is_head, f_path, commit_id=commit_id)
        self.check_branch_permission(_branch_name, commit_id=commit_id)

        c.default_message = _("Edited file {} via RhodeCode Enterprise").format(f_path)
        c.f_path = f_path

        old_content = c.file.str_content
        sl = old_content.splitlines(1)
        first_line = sl[0] if sl else ""

        r_post = self.request.POST
        # line endings: 0 - Unix, 1 - Mac, 2 - DOS
        line_ending_mode = detect_mode(first_line, 0)
        content = convert_line_endings(r_post.get("content", ""), line_ending_mode)
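        # note: the conversion above targets the EOL style detected on the
        # file's first line, so an edit does not silently rewrite every line ending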

        message = r_post.get("message") or c.default_message

        org_node_path = c.file.str_path
        filename = r_post["filename"]

        root_path = c.file.dir_path
        pure_path = self.create_pure_path(root_path, filename)
        node_path = pure_path.as_posix()

        default_redirect_url = h.route_path("repo_commit", repo_name=self.db_repo_name, commit_id=commit_id)
        if content == old_content and node_path == org_node_path:
            h.flash(_("No changes detected on {}").format(h.escape(org_node_path)), category="warning")
            raise HTTPFound(default_redirect_url)

        try:
            mapping = {
                c.file.bytes_path: {
                    "org_filename": org_node_path,
                    "filename": safe_bytes(node_path),
                    "content": safe_bytes(content),
                    "lexer": "",
                    "op": "mod",
                    "mode": c.file.mode,
                }
            }
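            # the mapping above is keyed by the current bytes path; "filename"
            # may differ from "org_filename" when the edit also renames the
            # file, and op="mod" marks this entry as a modification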

            commit = ScmModel().update_nodes(
                user=self._rhodecode_db_user.user_id,
                repo=self.db_repo,
                message=message,
                nodes=mapping,
                parent_commit=c.commit,
            )

            h.flash(_("Successfully committed changes to file `{}`").format(h.escape(f_path)), category="success")
            default_redirect_url = h.route_path("repo_commit", repo_name=self.db_repo_name, commit_id=commit.raw_id)

        except Exception:
            log.exception("Error occurred during commit")
            h.flash(_("Error occurred during commit"), category="error")

        raise HTTPFound(default_redirect_url)

    @LoginRequired()
    @HasRepoPermissionAnyDecorator("repository.write", "repository.admin")
    def repo_files_add_file(self):
        _ = self.request.translate
        c = self.load_default_context()
        commit_id, f_path, bytes_path = self._get_commit_and_path()

        self._ensure_not_locked()

        # Check if we need to use this page to upload binary
        upload_binary = str2bool(self.request.params.get("upload_binary", False))

        c.commit = self._get_commit_or_redirect(commit_id, redirect_after=False)
        if c.commit is None:
            c.commit = EmptyCommit(alias=self.rhodecode_vcs_repo.alias)

        if self.rhodecode_vcs_repo.is_empty():
            # for empty repository we cannot check for current branch, we rely on
            # c.commit.branch instead
            _branch_name, _sha_commit_id, is_head = c.commit.branch, "", True
        else:
            _branch_name, _sha_commit_id, is_head = self._is_valid_head(
                commit_id, self.rhodecode_vcs_repo, landing_ref=self.db_repo.landing_ref_name
            )

        self.forbid_non_head(is_head, f_path, commit_id=commit_id)
        self.check_branch_permission(_branch_name, commit_id=commit_id)

        c.default_message = (
            (_("Added file via RhodeCode Enterprise"))
            if not upload_binary
            else (_("Edited file {} via RhodeCode Enterprise").format(f_path))
        )
        c.f_path = f_path.lstrip("/")  # ensure not relative path
        c.replace_binary = upload_binary

        return self._get_template_context(c)

    @LoginRequired()
    @HasRepoPermissionAnyDecorator("repository.write", "repository.admin")
    @CSRFRequired()
    def repo_files_create_file(self):
        _ = self.request.translate
        c = self.load_default_context()
        commit_id, f_path, bytes_path = self._get_commit_and_path()

        self._ensure_not_locked()

        c.commit = self._get_commit_or_redirect(commit_id, redirect_after=False)
        if c.commit is None:
            c.commit = EmptyCommit(alias=self.rhodecode_vcs_repo.alias)

        # calculate redirect URL
        if self.rhodecode_vcs_repo.is_empty():
            default_redirect_url = h.route_path("repo_summary", repo_name=self.db_repo_name)
        else:
            default_redirect_url = h.route_path("repo_commit", repo_name=self.db_repo_name, commit_id="tip")

        if self.rhodecode_vcs_repo.is_empty():
            # for empty repository we cannot check for current branch, we rely on
            # c.commit.branch instead
            _branch_name, _sha_commit_id, is_head = c.commit.branch, "", True
        else:
            _branch_name, _sha_commit_id, is_head = self._is_valid_head(
                commit_id, self.rhodecode_vcs_repo, landing_ref=self.db_repo.landing_ref_name
            )

        self.forbid_non_head(is_head, f_path, commit_id=commit_id)
        self.check_branch_permission(_branch_name, commit_id=commit_id)

        c.default_message = _("Added file via RhodeCode Enterprise")
        c.f_path = f_path

        r_post = self.request.POST
        message = r_post.get("message") or c.default_message
        filename = r_post.get("filename")
        unix_mode = 0

        if not filename:
            # If there's no commit, redirect to repo summary
            if type(c.commit) is EmptyCommit:
                redirect_url = h.route_path("repo_summary", repo_name=self.db_repo_name)
            else:
                redirect_url = default_redirect_url
            h.flash(_("No filename specified"), category="warning")
            raise HTTPFound(redirect_url)

        root_path = f_path
        pure_path = self.create_pure_path(root_path, filename)
        node_path = pure_path.as_posix().lstrip("/")

        author = self._rhodecode_db_user.full_contact
        content = convert_line_endings(r_post.get("content", ""), unix_mode)
        nodes = {safe_bytes(node_path): {"content": safe_bytes(content)}}

        try:
            commit = ScmModel().create_nodes(
                user=self._rhodecode_db_user.user_id,
                repo=self.db_repo,
                message=message,
                nodes=nodes,
                parent_commit=c.commit,
                author=author,
            )

            h.flash(_("Successfully committed new file `{}`").format(h.escape(node_path)), category="success")

            default_redirect_url = h.route_path("repo_commit", repo_name=self.db_repo_name, commit_id=commit.raw_id)

        except NonRelativePathError:
            log.exception("Non Relative path found")
            h.flash(
                _("The location specified must be a relative path and must not contain .. in the path"),
                category="warning",
            )
            raise HTTPFound(default_redirect_url)
        except (NodeError, NodeAlreadyExistsError) as e:
            h.flash(h.escape(safe_str(e)), category="error")
        except Exception:
            log.exception("Error occurred during commit")
            h.flash(_("Error occurred during commit"), category="error")

        raise HTTPFound(default_redirect_url)

    @LoginRequired()
    @HasRepoPermissionAnyDecorator("repository.write", "repository.admin")
    @CSRFRequired()
    def repo_files_upload_file(self):
        _ = self.request.translate
        c = self.load_default_context()
        commit_id, f_path, bytes_path = self._get_commit_and_path()

        self._ensure_not_locked()

        c.commit = self._get_commit_or_redirect(commit_id, redirect_after=False)
        if c.commit is None:
            c.commit = EmptyCommit(alias=self.rhodecode_vcs_repo.alias)

        # calculate redirect URL
        if self.rhodecode_vcs_repo.is_empty():
            default_redirect_url = h.route_path("repo_summary", repo_name=self.db_repo_name)
        else:
            default_redirect_url = h.route_path("repo_commit", repo_name=self.db_repo_name, commit_id="tip")

        if self.rhodecode_vcs_repo.is_empty():
            # for empty repository we cannot check for current branch, we rely on
            # c.commit.branch instead
            _branch_name, _sha_commit_id, is_head = c.commit.branch, "", True
        else:
            _branch_name, _sha_commit_id, is_head = self._is_valid_head(
                commit_id, self.rhodecode_vcs_repo, landing_ref=self.db_repo.landing_ref_name
            )

        error = self.forbid_non_head(is_head, f_path, json_mode=True)
        if error:
            return {"error": error, "redirect_url": default_redirect_url}
        error = self.check_branch_permission(_branch_name, json_mode=True)
        if error:
            return {"error": error, "redirect_url": default_redirect_url}

        c.default_message = _("Added file via RhodeCode Enterprise")
        c.f_path = f_path

        r_post = self.request.POST

        message = c.default_message
        user_message = r_post.getall("message")
        if isinstance(user_message, list) and user_message:
            # we take the first from duplicated results if it's not empty
            message = user_message[0] if user_message[0] else message

        nodes = {}

        for file_obj in r_post.getall("files_upload") or []:
            content = file_obj.file
            filename = file_obj.filename

            root_path = f_path
            pure_path = self.create_pure_path(root_path, filename)
            node_path = pure_path.as_posix().lstrip("/")

            nodes[safe_bytes(node_path)] = {"content": content}

        if not nodes:
            error = "missing files"
            return {"error": error, "redirect_url": default_redirect_url}

        author = self._rhodecode_db_user.full_contact

        try:
            commit = ScmModel().create_nodes(
                user=self._rhodecode_db_user.user_id,
                repo=self.db_repo,
                message=message,
                nodes=nodes,
                parent_commit=c.commit,
                author=author,
            )
            if len(nodes) == 1:
                flash_message = _("Successfully committed 1 new file")
            else:
                flash_message = _("Successfully committed {} new files").format(len(nodes))

            h.flash(flash_message, category="success")

            default_redirect_url = h.route_path("repo_commit", repo_name=self.db_repo_name, commit_id=commit.raw_id)

        except NonRelativePathError:
            log.exception("Non Relative path found")
            error = _("The location specified must be a relative path and must not contain .. in the path")
            h.flash(error, category="warning")

            return {"error": error, "redirect_url": default_redirect_url}
        except (NodeError, NodeAlreadyExistsError) as e:
            error = h.escape(e)
            h.flash(error, category="error")

            return {"error": error, "redirect_url": default_redirect_url}
        except Exception:
            log.exception("Error occurred during commit")
            error = _("Error occurred during commit")
            h.flash(error, category="error")
            return {"error": error, "redirect_url": default_redirect_url}

        return {"error": None, "redirect_url": default_redirect_url}

    @LoginRequired()
    @HasRepoPermissionAnyDecorator("repository.write", "repository.admin")
    @CSRFRequired()
    def repo_files_replace_file(self):
        _ = self.request.translate
        c = self.load_default_context()
        commit_id, f_path, bytes_path = self._get_commit_and_path()

        self._ensure_not_locked()

        c.commit = self._get_commit_or_redirect(commit_id, redirect_after=False)
        if c.commit is None:
            c.commit = EmptyCommit(alias=self.rhodecode_vcs_repo.alias)

        if self.rhodecode_vcs_repo.is_empty():
            default_redirect_url = h.route_path("repo_summary", repo_name=self.db_repo_name)
        else:
            default_redirect_url = h.route_path("repo_commit", repo_name=self.db_repo_name, commit_id="tip")

        if self.rhodecode_vcs_repo.is_empty():
            # for empty repository we cannot check for current branch, we rely on
            # c.commit.branch instead
            _branch_name, _sha_commit_id, is_head = c.commit.branch, "", True
        else:
            _branch_name, _sha_commit_id, is_head = self._is_valid_head(
                commit_id, self.rhodecode_vcs_repo, landing_ref=self.db_repo.landing_ref_name
            )
1618
1537
1619 error = self.forbid_non_head(is_head, f_path, json_mode=True)
1538 error = self.forbid_non_head(is_head, f_path, json_mode=True)
1620 if error:
1539 if error:
1621 return {
1540 return {"error": error, "redirect_url": default_redirect_url}
1622 'error': error,
1623 'redirect_url': default_redirect_url
1624 }
1625 error = self.check_branch_permission(_branch_name, json_mode=True)
1541 error = self.check_branch_permission(_branch_name, json_mode=True)
1626 if error:
1542 if error:
1627 return {
1543 return {"error": error, "redirect_url": default_redirect_url}
1628 'error': error,
1629 'redirect_url': default_redirect_url
1630 }
1631
1544
1632 c.default_message = (_('Edited file {} via RhodeCode Enterprise').format(f_path))
1545 c.default_message = _("Edited file {} via RhodeCode Enterprise").format(f_path)
1633 c.f_path = f_path
1546 c.f_path = f_path
1634
1547
1635 r_post = self.request.POST
1548 r_post = self.request.POST
1636
1549
1637 message = c.default_message
1550 message = c.default_message
1638 user_message = r_post.getall('message')
1551 user_message = r_post.getall("message")
1639 if isinstance(user_message, list) and user_message:
1552 if isinstance(user_message, list) and user_message:
1640 # we take the first from duplicated results if it's not empty
1553 # we take the first from duplicated results if it's not empty
1641 message = user_message[0] if user_message[0] else message
1554 message = user_message[0] if user_message[0] else message
1642
1555
1643 data_for_replacement = r_post.getall('files_upload') or []
1556 data_for_replacement = r_post.getall("files_upload") or []
1644 if (objects_count := len(data_for_replacement)) > 1:
1557 if (objects_count := len(data_for_replacement)) > 1:
1645 return {
1558 return {"error": "too many files for replacement", "redirect_url": default_redirect_url}
1646 'error': 'too many files for replacement',
1647 'redirect_url': default_redirect_url
1648 }
1649 elif not objects_count:
1559 elif not objects_count:
1650 return {
1560 return {"error": "missing files", "redirect_url": default_redirect_url}
1651 'error': 'missing files',
1652 'redirect_url': default_redirect_url
1653 }
1654
1561
1655 content = data_for_replacement[0].file
1562 content = data_for_replacement[0].file
1656 retrieved_filename = data_for_replacement[0].filename
1563 retrieved_filename = data_for_replacement[0].filename
1657
1564
1658 if retrieved_filename.split('.')[-1] != f_path.split('.')[-1]:
1565 if retrieved_filename.split(".")[-1] != f_path.split(".")[-1]:
1659 return {
1566 return {
1660 'error': 'file extension of uploaded file doesn\'t match an original file\'s extension',
1567 "error": "file extension of uploaded file doesn't match an original file's extension",
1661 'redirect_url': default_redirect_url
1568 "redirect_url": default_redirect_url,
1662 }
1569 }
1663
1570
1664 author = self._rhodecode_db_user.full_contact
1571 author = self._rhodecode_db_user.full_contact
1665
1572
1666 try:
1573 try:
1667 commit = ScmModel().update_binary_node(
1574 commit = ScmModel().update_binary_node(
1668 user=self._rhodecode_db_user.user_id,
1575 user=self._rhodecode_db_user.user_id,
1669 repo=self.db_repo,
1576 repo=self.db_repo,
1670 message=message,
1577 message=message,
1671 node={
1578 node={
1672 'content': content,
1579 "content": content,
1673 'file_path': f_path.encode(),
1580 "file_path": f_path.encode(),
1674 },
1581 },
1675 parent_commit=c.commit,
1582 parent_commit=c.commit,
1676 author=author,
1583 author=author,
1677 )
1584 )
1678
1585
1679 h.flash(_('Successfully committed 1 new file'), category='success')
1586 h.flash(_("Successfully committed 1 new file"), category="success")
1680
1587
1681 default_redirect_url = h.route_path(
1588 default_redirect_url = h.route_path("repo_commit", repo_name=self.db_repo_name, commit_id=commit.raw_id)
1682 'repo_commit', repo_name=self.db_repo_name, commit_id=commit.raw_id)
1683
1589
1684 except (NodeError, NodeAlreadyExistsError) as e:
1590 except (NodeError, NodeAlreadyExistsError) as e:
1685 error = h.escape(e)
1591 error = h.escape(e)
1686 h.flash(error, category='error')
1592 h.flash(error, category="error")
1687
1593
1688 return {
1594 return {"error": error, "redirect_url": default_redirect_url}
1689 'error': error,
1690 'redirect_url': default_redirect_url
1691 }
1692 except Exception:
1595 except Exception:
1693 log.exception('Error occurred during commit')
1596 log.exception("Error occurred during commit")
1694 error = _('Error occurred during commit')
1597 error = _("Error occurred during commit")
1695 h.flash(error, category='error')
1598 h.flash(error, category="error")
1696 return {
1599 return {"error": error, "redirect_url": default_redirect_url}
1697 'error': error,
1698 'redirect_url': default_redirect_url
1699 }
1700
1600
1701 return {
1601 return {"error": None, "redirect_url": default_redirect_url}
1702 'error': None,
1703 'redirect_url': default_redirect_url
1704 }
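
# NOTE (editor's illustration, not part of the original module): both commit
# endpoints above answer with the same minimal JSON contract, which the caller
# uses to decide where to navigate next:
#
#   {"error": None, "redirect_url": "<url of the freshly created commit>"}    # success
#   {"error": "<escaped message>", "redirect_url": "<summary or tip url>"}    # failure
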
@@ -1,119 +1,119
# Copyright (C) 2015-2024 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

import logging
import threading

from dogpile.cache import register_backend

from . import region_meta
from .utils import (
    ActiveRegionCache,
    InvalidationContext,
    backend_key_generator,
    clear_cache_namespace,
    get_default_cache_settings,
    get_or_create_region,
    make_region,
    str2bool,
)

module_name = 'rhodecode'

register_backend(
    "dogpile.cache.rc.memory_lru", f"{module_name}.lib.rc_cache.backends",
    "LRUMemoryBackend")

register_backend(
    "dogpile.cache.rc.file_namespace", f"{module_name}.lib.rc_cache.backends",
    "FileNamespaceBackend")

register_backend(
    "dogpile.cache.rc.redis", f"{module_name}.lib.rc_cache.backends",
    "RedisPickleBackend")

register_backend(
    "dogpile.cache.rc.redis_msgpack", f"{module_name}.lib.rc_cache.backends",
    "RedisMsgPackBackend")


log = logging.getLogger(__name__)


FILE_TREE_CACHE_VER = 'v6'
LICENSE_CACHE_VER = 'v3'
PERMISSIONS_CACHE_VER = 'v2'

CLEAR_DELETE = 'delete'
CLEAR_INVALIDATE = 'invalidate'


def async_creation_runner(cache, cache_key, creator, mutex):

    def runner():
        try:
            value = creator()
            cache.set(cache_key, value)
        finally:
            mutex.release()

    thread = threading.Thread(target=runner)
    thread.start()
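
# Editor's sketch (assumed wiring, not in the original file): dogpile.cache
# calls a runner with (cache, cache_key, creator, mutex) when a cached value
# exists but has expired; the stale value keeps being served while `creator`
# recomputes in the background thread started above. A minimal region using it:
#
#   from dogpile.cache import make_region as dp_make_region
#   region = dp_make_region(async_creation_runner=async_creation_runner)
#   region.configure('dogpile.cache.memory', expiration_time=5)
#   value = region.get_or_create('some_key', creator=lambda: 42)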


def configure_dogpile_cache(settings):
    cache_dir = settings.get('cache_dir')
    if cache_dir:
        region_meta.dogpile_config_defaults['cache_dir'] = cache_dir

    rc_cache_data = get_default_cache_settings(settings, prefixes=['rc_cache.'])

    # inspect available namespaces
    avail_regions = set()
    for key in rc_cache_data.keys():
        namespace_name = key.split('.', 1)[0]
        if namespace_name in avail_regions:
            continue

        avail_regions.add(namespace_name)
        log.debug('dogpile: found following cache regions: %s', namespace_name)

        new_region = make_region(
            name=namespace_name,
            function_key_generator=None,
            async_creation_runner=None
        )

        new_region.configure_from_config(settings, f'rc_cache.{namespace_name}.')
        new_region.function_key_generator = backend_key_generator(new_region.actual_backend)

        async_creator = str2bool(settings.pop(f'rc_cache.{namespace_name}.async_creator', 'false'))
        if async_creator:
            log.debug('configuring region %s with async creator', new_region)
            new_region.async_creation_runner = async_creation_runner

        if log.isEnabledFor(logging.DEBUG):
            region_args = dict(backend=new_region.actual_backend,
                               region_invalidator=new_region.region_invalidator.__class__)
            log.debug('dogpile: registering a new region key=`%s` args=%s', namespace_name, region_args)

        region_meta.dogpile_cache_regions[namespace_name] = new_region
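
# Editor's sketch of settings this function consumes (key names assumed from
# the `rc_cache.<region>.<option>` convention parsed above; values are
# examples only):
#
#   settings = {
#       'cache_dir': '/var/cache/rhodecode',
#       'rc_cache.cache_repo.backend': 'dogpile.cache.rc.file_namespace',
#       'rc_cache.cache_repo.expiration_time': '2592000',
#       'rc_cache.cache_repo.async_creator': 'true',   # opt in to async_creation_runner
#   }
#   configure_dogpile_cache(settings)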


def includeme(config):
    configure_dogpile_cache(config.registry.settings)
@@ -1,987 +1,988
# Copyright (C) 2011-2024 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/


"""
Some simple helper functions
"""

import collections
import datetime
import dateutil.relativedelta
import logging
import re
import sys
import time
import urllib.request
import urllib.parse
import urllib.error
import urlobject
import uuid
import getpass
import socket
import errno
import random
import functools
from contextlib import closing

import pygments.lexers
import sqlalchemy
import sqlalchemy.event
import sqlalchemy.engine.url
import sqlalchemy.exc
import sqlalchemy.sql
import webob
from pyramid.settings import asbool

import rhodecode
from rhodecode.translation import _, _pluralize
from rhodecode.lib.str_utils import safe_str, safe_int, safe_bytes
from rhodecode.lib.hash_utils import md5, md5_safe, sha1, sha1_safe
from rhodecode.lib.type_utils import aslist, str2bool, StrictAttributeDict, AttributeDict


def __get_lem(extra_mapping=None):
    """
    Get language extension map based on what's inside pygments lexers
    """
    d = collections.defaultdict(lambda: [])

    def __clean(s):
        s = s.lstrip('*')
        s = s.lstrip('.')

        if s.find('[') != -1:
            exts = []
            start, stop = s.find('['), s.find(']')

            for suffix in s[start + 1:stop]:
                exts.append(s[:s.find('[')] + suffix)
            return [e.lower() for e in exts]
        else:
            return [s.lower()]

    for lx, t in sorted(pygments.lexers.LEXERS.items()):
        m = list(map(__clean, t[-2]))
        if m:
            m = functools.reduce(lambda x, y: x + y, m)
            for ext in m:
                desc = lx.replace('Lexer', '')
                d[ext].append(desc)

    data = dict(d)

    extra_mapping = extra_mapping or {}
    if extra_mapping:
        for k, v in list(extra_mapping.items()):
            if k not in data:
                # register new mapping2lexer
                data[k] = [v]

    return data


def convert_line_endings(line: str, mode) -> str:
    """
    Converts a given line's line ending according to the given mode

    Available modes are::
        0 - Unix
        1 - Mac
        2 - DOS

    :param line: given line to convert
    :param mode: mode to convert to
    :return: converted line according to mode
    """
    if mode == 0:
        line = line.replace('\r\n', '\n')
        line = line.replace('\r', '\n')
    elif mode == 1:
        line = line.replace('\r\n', '\r')
        line = line.replace('\n', '\r')
    elif mode == 2:
        line = re.sub('\r(?!\n)|(?<!\r)\n', '\r\n', line)
    return line
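
# Editor's example of the converter above:
#
#   >>> convert_line_endings('foo\r\nbar\r', 0)   # to Unix
#   'foo\nbar\n'
#   >>> convert_line_endings('foo\nbar', 2)       # to DOS
#   'foo\r\nbar'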


def detect_mode(line: str, default) -> int:
    """
    Detects the line break of a given line; if no line break can be found,
    the given default value is returned.

    :param line: str line
    :param default: default
    :return: line-end value, one of 0 - Unix, 1 - Mac, 2 - DOS
    """
    if line.endswith('\r\n'):
        return 2
    elif line.endswith('\n'):
        return 0
    elif line.endswith('\r'):
        return 1
    else:
        return default
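
# Editor's example of the detection above:
#
#   >>> detect_mode('some line\r\n', default=0)
#   2
#   >>> detect_mode('no line break at all', default=0)
#   0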


def remove_suffix(s, suffix):
    if s.endswith(suffix):
        s = s[:-1 * len(suffix)]
    return s


def remove_prefix(s, prefix):
    if s.startswith(prefix):
        s = s[len(prefix):]
    return s
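
# Editor's note: these are pre-Python-3.9 equivalents of str.removesuffix()
# and str.removeprefix(). For example:
#
#   >>> remove_suffix('repo.git', '.git')
#   'repo'
#   >>> remove_prefix('branches/stable', 'branches/')
#   'stable'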


def find_calling_context(ignore_modules=None, depth=4, output_writer=None, indent=True):
    """
    How to find the calling context:
    look through the calling stack and return the frame which called
    this function and is part of a core module (i.e. rhodecode.*).

    :param ignore_modules: list of modules to ignore eg. ['rhodecode.lib']
    :param depth:
    :param output_writer:
    :param indent:

    usage::

        from rhodecode.lib.utils2 import find_calling_context

        calling_context = find_calling_context(ignore_modules=[
            'rhodecode.lib.caching_query',
            'rhodecode.model.settings',
        ])

    """
    import inspect
    ignore_modules = ignore_modules or []  # guard against the default None
    if not output_writer:
        try:
            from rich import print as pprint
        except ImportError:
            pprint = print
        output_writer = pprint

    frame = inspect.currentframe()
    cc = []
    try:
        for i in range(depth):  # current frame + 3 callers
            frame = frame.f_back
            if not frame:
                break

            info = inspect.getframeinfo(frame)
            name = frame.f_globals.get('__name__')
            if name not in ignore_modules:
                cc.insert(0, f'CALL_CONTEXT:{i}: file {info.filename}:{info.lineno} -> {info.function}')
    finally:
        # Avoids a reference cycle
        del frame

    output_writer('* INFO: This code was called from: *')
    for cnt, frm_info in enumerate(cc):
        if not indent:
            cnt = 1
        output_writer(' ' * cnt + frm_info)


def ping_connection(connection, branch):
    if branch:
        # "branch" refers to a sub-connection of a connection,
        # we don't want to bother pinging on these.
        return

    # turn off "close with result". This flag is only used with
    # "connectionless" execution, otherwise will be False in any case
    save_should_close_with_result = connection.should_close_with_result
    connection.should_close_with_result = False

    try:
        # run a SELECT 1. use a core select() so that
        # the SELECT of a scalar value without a table is
        # appropriately formatted for the backend
        connection.scalar(sqlalchemy.sql.select([1]))
    except sqlalchemy.exc.DBAPIError as err:
        # catch SQLAlchemy's DBAPIError, which is a wrapper
        # for the DBAPI's exception. It includes a .connection_invalidated
        # attribute which specifies if this connection is a "disconnect"
        # condition, which is based on inspection of the original exception
        # by the dialect in use.
        if err.connection_invalidated:
            # run the same SELECT again - the connection will re-validate
            # itself and establish a new connection. The disconnect detection
            # here also causes the whole connection pool to be invalidated
            # so that all stale connections are discarded.
            connection.scalar(sqlalchemy.sql.select([1]))
        else:
            raise
    finally:
        # restore "close with result"
        connection.should_close_with_result = save_should_close_with_result


def engine_from_config(configuration, prefix='sqlalchemy.', **kwargs):
    """Custom engine_from_config function."""
    log = logging.getLogger('sqlalchemy.engine')
    use_ping_connection = asbool(configuration.pop('sqlalchemy.db1.ping_connection', None))
    debug = asbool(configuration.pop('sqlalchemy.db1.debug_query', None))

    engine = sqlalchemy.engine_from_config(configuration, prefix, **kwargs)

    def color_sql(sql):
        color_seq = '\033[1;33m'  # This is yellow: code 33
        normal = '\x1b[0m'
        return ''.join([color_seq, sql, normal])

    if use_ping_connection:
        log.debug('Adding ping_connection on the engine config.')
        sqlalchemy.event.listen(engine, "engine_connect", ping_connection)

    if debug:
        # attach events only for debug configuration
        def before_cursor_execute(conn, cursor, statement,
                                  parameters, context, executemany):
            setattr(conn, 'query_start_time', time.time())
            log.info(color_sql(">>>>> STARTING QUERY >>>>>"))
            find_calling_context(ignore_modules=[
                'rhodecode.lib.caching_query',
                'rhodecode.model.settings',
            ], output_writer=log.info)

        def after_cursor_execute(conn, cursor, statement,
                                 parameters, context, executemany):
            delattr(conn, 'query_start_time')

        sqlalchemy.event.listen(engine, "before_cursor_execute", before_cursor_execute)
        sqlalchemy.event.listen(engine, "after_cursor_execute", after_cursor_execute)

    return engine
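
# Editor's sketch of a configuration this factory accepts (flag names taken
# from the pops above; values are examples only):
#
#   config = {
#       'sqlalchemy.url': 'sqlite:///:memory:',
#       'sqlalchemy.db1.ping_connection': 'true',   # attach ping_connection()
#       'sqlalchemy.db1.debug_query': 'false',      # skip the query-timing listeners
#   }
#   engine = engine_from_config(config, prefix='sqlalchemy.')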


def get_encryption_key(config) -> bytes:
    secret = config.get('rhodecode.encrypted_values.secret')
    default = config['beaker.session.secret']
    enc_key = secret or default

    return safe_bytes(enc_key)


def age(prevdate, now=None, show_short_version=False, show_suffix=True, short_format=False):
    """
    Turns a datetime into an age string.
    If show_short_version is True, this generates a shorter string with
    an approximate age; e.g. '1 day ago' rather than '1 day and 23 hours ago'.

    *IMPORTANT*
    The code of this function is written in a special way so it's easier to
    backport it to javascript. If you mean to update it, please also update
    the `jquery.timeago-extension.js` file.

    :param prevdate: datetime object
    :param now: current time; if not defined we use
        `datetime.datetime.now()`
    :param show_short_version: if it should approximate the date and
        return a shorter string
    :param show_suffix:
    :param short_format: show short format, eg 2D instead of 2 days
    :rtype: str
    :returns: words describing age
    """

    def _get_relative_delta(now, prevdate):
        base = dateutil.relativedelta.relativedelta(now, prevdate)
        return {
            'year': base.years,
            'month': base.months,
            'day': base.days,
            'hour': base.hours,
            'minute': base.minutes,
            'second': base.seconds,
        }

    def _is_leap_year(year):
        return year % 4 == 0 and (year % 100 != 0 or year % 400 == 0)

    def get_month(prevdate):
        return prevdate.month

    def get_year(prevdate):
        return prevdate.year

    now = now or datetime.datetime.now()
    order = ['year', 'month', 'day', 'hour', 'minute', 'second']
    deltas = {}
    future = False

    if prevdate > now:
        now_old = now
        now = prevdate
        prevdate = now_old
        future = True
    if future:
        prevdate = prevdate.replace(microsecond=0)
    # Get date parts deltas
    for part in order:
        rel_delta = _get_relative_delta(now, prevdate)
        deltas[part] = rel_delta[part]

    # Fix negative offsets (there is 1 second between 10:59:59 and 11:00:00,
    # not 1 hour, -59 minutes and -59 seconds)
    offsets = [[5, 60], [4, 60], [3, 24]]
    for element in offsets:  # seconds, minutes, hours
        num = element[0]
        length = element[1]

        part = order[num]
        carry_part = order[num - 1]

        if deltas[part] < 0:
            deltas[part] += length
            deltas[carry_part] -= 1

    # Same thing for days except that the increment depends on the (variable)
    # number of days in the month
    month_lengths = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
    if deltas['day'] < 0:
        if get_month(prevdate) == 2 and _is_leap_year(get_year(prevdate)):
            deltas['day'] += 29
        else:
            deltas['day'] += month_lengths[get_month(prevdate) - 1]

        deltas['month'] -= 1

    if deltas['month'] < 0:
        deltas['month'] += 12
        deltas['year'] -= 1

    # Format the result
    if short_format:
        fmt_funcs = {
            'year': lambda d: '%dy' % d,
            'month': lambda d: '%dm' % d,
            'day': lambda d: '%dd' % d,
            'hour': lambda d: '%dh' % d,
            'minute': lambda d: '%dmin' % d,
            'second': lambda d: '%dsec' % d,
        }
    else:
        fmt_funcs = {
            'year': lambda d: _pluralize('${num} year', '${num} years', d, mapping={'num': d}).interpolate(),
            'month': lambda d: _pluralize('${num} month', '${num} months', d, mapping={'num': d}).interpolate(),
            'day': lambda d: _pluralize('${num} day', '${num} days', d, mapping={'num': d}).interpolate(),
            'hour': lambda d: _pluralize('${num} hour', '${num} hours', d, mapping={'num': d}).interpolate(),
            'minute': lambda d: _pluralize('${num} minute', '${num} minutes', d, mapping={'num': d}).interpolate(),
            'second': lambda d: _pluralize('${num} second', '${num} seconds', d, mapping={'num': d}).interpolate(),
        }

    i = 0
    for part in order:
        value = deltas[part]
        if value != 0:

            if i < 5:
                sub_part = order[i + 1]
                sub_value = deltas[sub_part]
            else:
                sub_value = 0

            if sub_value == 0 or show_short_version:
                _val = fmt_funcs[part](value)
                if future:
                    if show_suffix:
                        return _('in ${ago}', mapping={'ago': _val})
                    else:
                        return _(_val)

                else:
                    if show_suffix:
                        return _('${ago} ago', mapping={'ago': _val})
                    else:
                        return _(_val)

            val = fmt_funcs[part](value)
            val_detail = fmt_funcs[sub_part](sub_value)
            mapping = {'val': val, 'detail': val_detail}

            if short_format:
                datetime_tmpl = _('${val}, ${detail}', mapping=mapping)
                if show_suffix:
                    datetime_tmpl = _('${val}, ${detail} ago', mapping=mapping)
                    if future:
                        datetime_tmpl = _('in ${val}, ${detail}', mapping=mapping)
            else:
                datetime_tmpl = _('${val} and ${detail}', mapping=mapping)
                if show_suffix:
                    datetime_tmpl = _('${val} and ${detail} ago', mapping=mapping)
                    if future:
                        datetime_tmpl = _('in ${val} and ${detail}', mapping=mapping)

            return datetime_tmpl
        i += 1
    return _('just now')
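
# Editor's example (output shown as it reads once the translation string is
# rendered and interpolated):
#
#   >>> import datetime
#   >>> _now = datetime.datetime(2024, 1, 10, 12, 0, 0)
#   >>> age(datetime.datetime(2024, 1, 9, 11, 0, 0), now=_now)
#   '1 day and 1 hour ago'
#   >>> age(datetime.datetime(2024, 1, 9, 11, 0, 0), now=_now, show_short_version=True)
#   '1 day ago'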


def age_from_seconds(seconds):
    seconds = safe_int(seconds) or 0
    prevdate = time_to_datetime(time.time() + seconds)
    return age(prevdate, show_suffix=False, show_short_version=True)


def cleaned_uri(uri):
    """
    Quotes '[' and ']' in the uri, since according to RFC 3986 we cannot
    use such chars in a uri.

    :param uri:
    :return: uri without these chars
    """
    return urllib.parse.quote(uri, safe='@$:/')


def credentials_filter(uri):
    """
    Returns a url with removed credentials

    :param uri:
    """
    import urlobject
    if isinstance(uri, rhodecode.lib.encrypt.InvalidDecryptedValue):
        return 'InvalidDecryptionKey'

    url_obj = urlobject.URLObject(cleaned_uri(uri))
    url_obj = url_obj.without_password().without_username()

    return url_obj
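
# Editor's example of stripping credentials from a clone URL:
#
#   >>> str(credentials_filter('https://user:secret@example.com/repo'))
#   'https://example.com/repo'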


def get_host_info(request):
    """
    Generates host info; to obtain the full url, e.g. https://server.com,
    use `{scheme}://{netloc}`
    """
    if not request:
        return {}

    qualified_home_url = request.route_url('home')
    parsed_url = urlobject.URLObject(qualified_home_url)
    decoded_path = safe_str(urllib.parse.unquote(parsed_url.path.rstrip('/')))

    return {
        'scheme': parsed_url.scheme,
        'netloc': parsed_url.netloc+decoded_path,
        'hostname': parsed_url.hostname,
    }


def get_clone_url(request, uri_tmpl, repo_name, repo_id, repo_type, **override):
    qualified_home_url = request.route_url('home')
    parsed_url = urlobject.URLObject(qualified_home_url)
    decoded_path = safe_str(urllib.parse.unquote(parsed_url.path.rstrip('/')))

    args = {
        'scheme': parsed_url.scheme,
        'user': '',
        'sys_user': getpass.getuser(),
        # path if we use proxy-prefix
        'netloc': parsed_url.netloc+decoded_path,
        'hostname': parsed_url.hostname,
        'prefix': decoded_path,
        'repo': repo_name,
        'repoid': str(repo_id),
        'repo_type': repo_type
    }
    args.update(override)
    args['user'] = urllib.parse.quote(safe_str(args['user']))

    for k, v in list(args.items()):
        tmpl_key = '{%s}' % k
        uri_tmpl = uri_tmpl.replace(tmpl_key, v)

    # special case for SVN clone url
    if repo_type == 'svn':
        uri_tmpl = uri_tmpl.replace('ssh://', 'svn+ssh://')

    # remove the leading @ sign if it's present (case of an empty user)
    url_obj = urlobject.URLObject(uri_tmpl)
    url = url_obj.with_netloc(url_obj.netloc.lstrip('@'))

    return safe_str(url)


def get_commit_safe(repo, commit_id=None, commit_idx=None, pre_load=None,
                    maybe_unreachable=False, reference_obj=None):
    """
    Safe version of get_commit: if the commit doesn't exist for a
    repository, it returns a Dummy one instead.

    :param repo: repository instance
    :param commit_id: commit id as str
    :param commit_idx: numeric commit index
    :param pre_load: optional list of commit attributes to load
    :param maybe_unreachable: translate unreachable commits on git repos
    :param reference_obj: explicitly search via a reference obj in git. E.g. "branch:123" would mean branch "123"
    """
    # TODO(skreft): remove these circular imports
    from rhodecode.lib.vcs.backends.base import BaseRepository, EmptyCommit
    from rhodecode.lib.vcs.exceptions import RepositoryError
    if not isinstance(repo, BaseRepository):
        raise Exception(f'You must pass a Repository object as the first argument, got {type(repo)}')

    try:
        commit = repo.get_commit(
            commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load,
            maybe_unreachable=maybe_unreachable, reference_obj=reference_obj)
    except (RepositoryError, LookupError):
        commit = EmptyCommit()
    return commit


def datetime_to_time(dt):
    if dt:
        return time.mktime(dt.timetuple())


def time_to_datetime(tm):
    if tm:
        if isinstance(tm, str):
            try:
                tm = float(tm)
            except ValueError:
                return
        return datetime.datetime.fromtimestamp(tm)


def time_to_utcdatetime(tm):
    if tm:
        if isinstance(tm, str):
            try:
                tm = float(tm)
            except ValueError:
                return
        return datetime.datetime.utcfromtimestamp(tm)


MENTIONS_REGEX = re.compile(
    # ^@ or @ without any special chars in front
    r'(?:^@|[^a-zA-Z0-9\-\_\.]@)'
    # main body starts with a letter, then can be . - _
    r'([a-zA-Z0-9]{1}[a-zA-Z0-9\-\_\.]+)',
    re.VERBOSE | re.MULTILINE)


def extract_mentioned_users(s):
    """
    Returns unique usernames from the given string s that have an @mention

    :param s: string to get mentions from
    """
    usrs = set()
    for username in MENTIONS_REGEX.findall(s):
        usrs.add(username)

    return sorted(list(usrs), key=lambda k: k.lower())
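
# Editor's example of the mention extraction above (results are sorted
# case-insensitively):
#
#   >>> extract_mentioned_users('ping @john and @Jane.doe about this')
#   ['Jane.doe', 'john']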


def fix_PATH(os_=None):
    """
    Gets the currently active python path and appends it to the PATH variable,
    to fix issues with subprocess calls and different python versions
    """
    if os_ is None:
        import os
    else:
        os = os_

    cur_path = os.path.split(sys.executable)[0]
    os_path = os.environ['PATH']
    if not os.environ['PATH'].startswith(cur_path):
        os.environ['PATH'] = f'{cur_path}:{os_path}'


def obfuscate_url_pw(engine):
    _url = engine or ''
    try:
        _url = sqlalchemy.engine.url.make_url(engine)
    except Exception:
        pass
    return repr(_url)
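
# Editor's example (the exact masking depends on the SQLAlchemy version;
# recent releases render the password as ***):
#
#   >>> obfuscate_url_pw('postgresql://rhodecode:secret@localhost/rhodecode')
#   'postgresql://rhodecode:***@localhost/rhodecode'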


def get_server_url(environ):
    req = webob.Request(environ)
    return req.host_url + req.script_name


def unique_id(hexlen=32):
    alphabet = "23456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghjklmnpqrstuvwxyz"
    return suuid(truncate_to=hexlen, alphabet=alphabet)


def suuid(url=None, truncate_to=22, alphabet=None):
    """
    Generate and return a short URL-safe UUID.

    If the url parameter is provided, set the namespace to the provided
    URL and generate a UUID.

    :param url: url to get the uuid for
    :param truncate_to: truncate the basic 22-char UUID to a shorter version

    The IDs won't be universally unique any longer, but the probability of
    a collision will still be very low.
    """
    # Define our alphabet.
    _ALPHABET = alphabet or "23456789ABCDEFGHJKLMNPQRSTUVWXYZ"

    # If no URL is given, generate a random UUID.
    if url is None:
        unique_id = uuid.uuid4().int
    else:
        unique_id = uuid.uuid3(uuid.NAMESPACE_URL, url).int

    alphabet_length = len(_ALPHABET)
    output = []
    while unique_id > 0:
        digit = unique_id % alphabet_length
        output.append(_ALPHABET[digit])
        unique_id = int(unique_id / alphabet_length)
    return "".join(output)[:truncate_to]
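
# Editor's example: ids are random unless a URL is given, in which case the
# result is deterministic (uuid3 in the URL namespace):
#
#   >>> len(unique_id(hexlen=16)) <= 16
#   True
#   >>> suuid(url='https://example.com') == suuid(url='https://example.com')
#   True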


def get_current_rhodecode_user(request=None):
    """
    Gets the rhodecode user from the request
    """
    import pyramid.threadlocal
    pyramid_request = request or pyramid.threadlocal.get_current_request()

    # web case
    if pyramid_request and hasattr(pyramid_request, 'user'):
        return pyramid_request.user

    # api case
    if pyramid_request and hasattr(pyramid_request, 'rpc_user'):
        return pyramid_request.rpc_user

    return None


def action_logger_generic(action, namespace=''):
    """
    A generic logger for actions useful to the system overview; tries to find
    an acting user for the context of the call, otherwise reports an unknown user.

    :param action: logging message eg 'comment 5 deleted'
    :type action: string

    :param namespace: namespace of the logging message eg. 'repo.comments'
    :type namespace: string

    """

    logger_name = 'rhodecode.actions'

    if namespace:
        logger_name += '.' + namespace

    log = logging.getLogger(logger_name)

    # get a user if we can
    user = get_current_rhodecode_user()

    logfunc = log.info

    if not user:
        user = '<unknown user>'
        logfunc = log.warning

    logfunc(f'Logging action by {user}: {action}')
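
# Editor's example: the record below lands on the
# 'rhodecode.actions.repo.comments' logger, at WARNING level when no acting
# user can be resolved:
#
#   action_logger_generic('comment 5 deleted', namespace='repo.comments')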
719 def escape_split(text, sep=',', maxsplit=-1):
720 def escape_split(text, sep=',', maxsplit=-1):
720 r"""
721 r"""
721 Allows for escaping of the separator: e.g. arg='foo\, bar'
722 Allows for escaping of the separator: e.g. arg='foo\, bar'
722
723
723 It should be noted that the way bash et. al. do command line parsing, those
724 It should be noted that the way bash et. al. do command line parsing, those
724 single quotes are required.
725 single quotes are required.
725 """
726 """
726 escaped_sep = r'\%s' % sep
727 escaped_sep = r'\%s' % sep
727
728
728 if escaped_sep not in text:
729 if escaped_sep not in text:
729 return text.split(sep, maxsplit)
730 return text.split(sep, maxsplit)
730
731
731 before, _mid, after = text.partition(escaped_sep)
732 before, _mid, after = text.partition(escaped_sep)
732 startlist = before.split(sep, maxsplit) # a regular split is fine here
733 startlist = before.split(sep, maxsplit) # a regular split is fine here
733 unfinished = startlist[-1]
734 unfinished = startlist[-1]
734 startlist = startlist[:-1]
735 startlist = startlist[:-1]
735
736
736 # recurse because there may be more escaped separators
737 # recurse because there may be more escaped separators
737 endlist = escape_split(after, sep, maxsplit)
738 endlist = escape_split(after, sep, maxsplit)
738
739
739 # finish building the escaped value. we use endlist[0] because the first
740 # finish building the escaped value. we use endlist[0] because the first
740 # part of the string sent in recursion is the rest of the escaped value.
741 # part of the string sent in recursion is the rest of the escaped value.
741 unfinished += sep + endlist[0]
742 unfinished += sep + endlist[0]
742
743
743 return startlist + [unfinished] + endlist[1:] # put together all the parts
744 return startlist + [unfinished] + endlist[1:] # put together all the parts
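# A worked example of the recursion above (doctest-style sketch; the leading
# space in the second element follows from a plain str.split on ','):
#
# >>> escape_split(r'foo\, bar, baz')
# ['foo, bar', ' baz']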
744
745
745
746
746 class OptionalAttr(object):
747 class OptionalAttr(object):
747 """
748 """
748 Special Optional variant that references another attribute by name. Example::
749 Special Optional variant that references another attribute by name. Example::
749
750
750 def test(apiuser, userid=Optional(OAttr('apiuser'))):
751 def test(apiuser, userid=Optional(OAttr('apiuser'))):
751 user = Optional.extract(userid)
752 user = Optional.extract(userid)
752 # calls
753 # calls
753
754
754 """
755 """
755
756
756 def __init__(self, attr_name):
757 def __init__(self, attr_name):
757 self.attr_name = attr_name
758 self.attr_name = attr_name
758
759
759 def __repr__(self):
760 def __repr__(self):
760 return '<OptionalAttr:%s>' % self.attr_name
761 return '<OptionalAttr:%s>' % self.attr_name
761
762
762 def __call__(self):
763 def __call__(self):
763 return self
764 return self
764
765
765
766
766 # alias
767 # alias
767 OAttr = OptionalAttr
768 OAttr = OptionalAttr
768
769
769
770
770 class Optional(object):
771 class Optional(object):
771 """
772 """
772 Defines an optional parameter::
773 Defines an optional parameter::
773
774
774 param = param.getval() if isinstance(param, Optional) else param
775 param = param.getval() if isinstance(param, Optional) else param
775 param = param() if isinstance(param, Optional) else param
776 param = param() if isinstance(param, Optional) else param
776
777
777 is equivalent to::
778 is equivalent to::
778
779
779 param = Optional.extract(param)
780 param = Optional.extract(param)
780
781
781 """
782 """
782
783
783 def __init__(self, type_):
784 def __init__(self, type_):
784 self.type_ = type_
785 self.type_ = type_
785
786
786 def __repr__(self):
787 def __repr__(self):
787 return '<Optional:%s>' % self.type_.__repr__()
788 return '<Optional:%s>' % self.type_.__repr__()
788
789
789 def __call__(self):
790 def __call__(self):
790 return self.getval()
791 return self.getval()
791
792
792 def getval(self):
793 def getval(self):
793 """
794 """
794 returns value from this Optional instance
795 returns value from this Optional instance
795 """
796 """
796 if isinstance(self.type_, OAttr):
797 if isinstance(self.type_, OAttr):
797 # use params name
798 # use params name
798 return self.type_.attr_name
799 return self.type_.attr_name
799 return self.type_
800 return self.type_
800
801
801 @classmethod
802 @classmethod
802 def extract(cls, val):
803 def extract(cls, val):
803 """
804 """
804 Extracts value from Optional() instance
805 Extracts value from Optional() instance
805
806
806 :param val:
807 :param val:
807 :return: original value if it's not Optional instance else
808 :return: original value if it's not Optional instance else
808 value of instance
809 value of instance
809 """
810 """
810 if isinstance(val, cls):
811 if isinstance(val, cls):
811 return val.getval()
812 return val.getval()
812 return val
813 return val
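# Usage sketch (assumed, not part of this changeset): extract() unwraps
# Optional instances and passes plain values through untouched, while an
# OAttr default resolves to the wrapped attribute name:
#
# >>> Optional.extract(Optional('default'))
# 'default'
# >>> Optional.extract('explicit')
# 'explicit'
# >>> Optional.extract(Optional(OAttr('apiuser')))
# 'apiuser'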
813
814
814
815
815 def glob2re(pat):
816 def glob2re(pat):
816 import fnmatch
817 import fnmatch
817 return fnmatch.translate(pat)
818 return fnmatch.translate(pat)
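# For reference, fnmatch.translate produces an anchored regular expression
# (output shown for CPython 3.8+; earlier versions differ slightly):
#
# >>> glob2re('*.py')
# '(?s:.*\\.py)\\Z'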
818
819
819
820
820 def parse_byte_string(size_str):
821 def parse_byte_string(size_str):
821 match = re.match(r'(\d+)(MB|KB)', size_str, re.IGNORECASE)
822 match = re.match(r'(\d+)(MB|KB)', size_str, re.IGNORECASE)
822 if not match:
823 if not match:
823 raise ValueError(f'Given size: {size_str} is invalid, please make sure '
824 raise ValueError(f'Given size: {size_str} is invalid, please make sure '
824 f'to use the format <num>(MB|KB)')
825 f'to use the format <num>(MB|KB)')
825
826
826 _parts = match.groups()
827 _parts = match.groups()
827 num, type_ = _parts
828 num, type_ = _parts
828 return int(num) * {'mb': 1024*1024, 'kb': 1024}[type_.lower()]
829 return int(num) * {'mb': 1024*1024, 'kb': 1024}[type_.lower()]
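# Examples (units are case-insensitive thanks to re.IGNORECASE above):
#
# >>> parse_byte_string('10MB')
# 10485760
# >>> parse_byte_string('512kb')
# 524288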
829
830
830
831
831 class CachedProperty(object):
832 class CachedProperty(object):
832 """
833 """
833 Lazy attribute, with an option to invalidate the cache by calling a method
834 Lazy attribute, with an option to invalidate the cache by calling a method
834
835
835 >>> class Foo(object):
836 >>> class Foo(object):
836 ...
837 ...
837 ... @CachedProperty
838 ... @CachedProperty
838 ... def heavy_func(self):
839 ... def heavy_func(self):
839 ... return 'super-calculation'
840 ... return 'super-calculation'
840 ...
841 ...
841 ... foo = Foo()
842 ... foo = Foo()
842 ... foo.heavy_func # first computation
843 ... foo.heavy_func # first computation
843 ... foo.heavy_func # fetched from cache
844 ... foo.heavy_func # fetched from cache
844 ... foo._invalidate_prop_cache('heavy_func')
845 ... foo._invalidate_prop_cache('heavy_func')
845
846
846 # at this point accessing foo.heavy_func will be re-computed
847 # at this point accessing foo.heavy_func will be re-computed
847 """
848 """
848
849
849 def __init__(self, func, func_name=None):
850 def __init__(self, func, func_name=None):
850
851
851 if func_name is None:
852 if func_name is None:
852 func_name = func.__name__
853 func_name = func.__name__
853 self.data = (func, func_name)
854 self.data = (func, func_name)
854 functools.update_wrapper(self, func)
855 functools.update_wrapper(self, func)
855
856
856 def __get__(self, inst, class_):
857 def __get__(self, inst, class_):
857 if inst is None:
858 if inst is None:
858 return self
859 return self
859
860
860 func, func_name = self.data
861 func, func_name = self.data
861 value = func(inst)
862 value = func(inst)
862 inst.__dict__[func_name] = value
863 inst.__dict__[func_name] = value
863 if '_invalidate_prop_cache' not in inst.__dict__:
864 if '_invalidate_prop_cache' not in inst.__dict__:
864 inst.__dict__['_invalidate_prop_cache'] = functools.partial(
865 inst.__dict__['_invalidate_prop_cache'] = functools.partial(
865 self._invalidate_prop_cache, inst)
866 self._invalidate_prop_cache, inst)
866 return value
867 return value
867
868
868 def _invalidate_prop_cache(self, inst, name):
869 def _invalidate_prop_cache(self, inst, name):
869 inst.__dict__.pop(name, None)
870 inst.__dict__.pop(name, None)
870
871
871
872
872 def retry(func=None, exception=Exception, n_tries=5, delay=5, backoff=1, logger=True):
873 def retry(func=None, exception=Exception, n_tries=5, delay=5, backoff=1, logger=True):
873 """
874 """
874 Retry decorator with exponential backoff.
875 Retry decorator with exponential backoff.
875
876
876 Parameters
877 Parameters
877 ----------
878 ----------
878 func : typing.Callable, optional
879 func : typing.Callable, optional
879 Callable on which the decorator is applied, by default None
880 Callable on which the decorator is applied, by default None
880 exception : Exception or tuple of Exceptions, optional
881 exception : Exception or tuple of Exceptions, optional
881 Exception(s) that invoke retry, by default Exception
882 Exception(s) that invoke retry, by default Exception
882 n_tries : int, optional
883 n_tries : int, optional
883 Number of tries before giving up, by default 5
884 Number of tries before giving up, by default 5
884 delay : int, optional
885 delay : int, optional
885 Initial delay between retries in seconds, by default 5
886 Initial delay between retries in seconds, by default 5
886 backoff : int, optional
887 backoff : int, optional
887 Backoff multiplier, e.g. a value of 2 doubles the delay on each retry, by default 1
888 Backoff multiplier, e.g. a value of 2 doubles the delay on each retry, by default 1
888 logger : bool, optional
889 logger : bool, optional
889 Option to log via logging (True) or print (False), by default True
890 Option to log via logging (True) or print (False), by default True
890
891
891 Returns
892 Returns
892 -------
893 -------
893 typing.Callable
894 typing.Callable
894 Decorated callable that calls itself when exception(s) occur.
895 Decorated callable that calls itself when exception(s) occur.
895
896
896 Examples
897 Examples
897 --------
898 --------
898 >>> import random
899 >>> import random
899 >>> @retry(exception=Exception, n_tries=3)
900 >>> @retry(exception=Exception, n_tries=3)
900 ... def test_random(text):
901 ... def test_random(text):
901 ... x = random.random()
902 ... x = random.random()
902 ... if x < 0.5:
903 ... if x < 0.5:
903 ... raise Exception("Fail")
904 ... raise Exception("Fail")
904 ... else:
905 ... else:
905 ... print("Success: ", text)
906 ... print("Success: ", text)
906 >>> test_random("It works!")
907 >>> test_random("It works!")
907 """
908 """
908
909
909 if func is None:
910 if func is None:
910 return functools.partial(
911 return functools.partial(
911 retry,
912 retry,
912 exception=exception,
913 exception=exception,
913 n_tries=n_tries,
914 n_tries=n_tries,
914 delay=delay,
915 delay=delay,
915 backoff=backoff,
916 backoff=backoff,
916 logger=logger,
917 logger=logger,
917 )
918 )
918
919
919 @functools.wraps(func)
920 @functools.wraps(func)
920 def wrapper(*args, **kwargs):
921 def wrapper(*args, **kwargs):
921 _n_tries, n_delay = n_tries, delay
922 _n_tries, n_delay = n_tries, delay
922 log = logging.getLogger('rhodecode.retry')
923 log = logging.getLogger('rhodecode.retry')
923
924
924 while _n_tries > 1:
925 while _n_tries > 1:
925 try:
926 try:
926 return func(*args, **kwargs)
927 return func(*args, **kwargs)
927 except exception as e:
928 except exception as e:
928 e_details = repr(e)
929 e_details = repr(e)
929 msg = "Exception on calling func {func}: {e}, " \
930 msg = "Exception on calling func {func}: {e}, " \
930 "Retrying in {n_delay} seconds..."\
931 "Retrying in {n_delay} seconds..."\
931 .format(func=func, e=e_details, n_delay=n_delay)
932 .format(func=func, e=e_details, n_delay=n_delay)
932 if logger:
933 if logger:
933 log.warning(msg)
934 log.warning(msg)
934 else:
935 else:
935 print(msg)
936 print(msg)
936 time.sleep(n_delay)
937 time.sleep(n_delay)
937 _n_tries -= 1
938 _n_tries -= 1
938 n_delay *= backoff
939 n_delay *= backoff
939
940
940 return func(*args, **kwargs)
941 return func(*args, **kwargs)
941
942
942 return wrapper
943 return wrapper
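# Retry timing sketch (assumed values): with n_tries=5, delay=5, backoff=2 a
# persistently failing call sleeps 5s, 10s, 20s and 40s between attempts; the
# fifth attempt happens outside the loop, so its exception propagates:
#
# >>> @retry(exception=IOError, n_tries=5, delay=5, backoff=2)
# ... def flaky():
# ...     raise IOError("still down")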
943
944
944
945
945 def user_agent_normalizer(user_agent_raw, safe=True):
946 def user_agent_normalizer(user_agent_raw, safe=True):
946 log = logging.getLogger('rhodecode.user_agent_normalizer')
947 log = logging.getLogger('rhodecode.user_agent_normalizer')
947 ua = (user_agent_raw or '').strip().lower()
948 ua = (user_agent_raw or '').strip().lower()
948 ua = ua.replace('"', '')
949 ua = ua.replace('"', '')
949
950
950 try:
951 try:
951 if 'mercurial/proto-1.0' in ua:
952 if 'mercurial/proto-1.0' in ua:
952 ua = ua.replace('mercurial/proto-1.0', '')
953 ua = ua.replace('mercurial/proto-1.0', '')
953 ua = ua.replace('(', '').replace(')', '').strip()
954 ua = ua.replace('(', '').replace(')', '').strip()
954 ua = ua.replace('mercurial ', 'mercurial/')
955 ua = ua.replace('mercurial ', 'mercurial/')
955 elif ua.startswith('git'):
956 elif ua.startswith('git'):
956 parts = ua.split(' ')
957 parts = ua.split(' ')
957 if parts:
958 if parts:
958 ua = parts[0]
959 ua = parts[0]
959 ua = re.sub(r'\.windows\.\d', '', ua).strip()
960 ua = re.sub(r'\.windows\.\d', '', ua).strip()
960
961
961 return ua
962 return ua
962 except Exception:
963 except Exception:
963 log.exception('Failed to parse scm user-agent')
964 log.exception('Failed to parse scm user-agent')
964 if not safe:
965 if not safe:
965 raise
966 raise
966
967
967 return ua
968 return ua
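# Normalization examples (representative raw user-agent strings, assumed):
#
# >>> user_agent_normalizer('git/2.43.0.windows.1')
# 'git/2.43.0'
# >>> user_agent_normalizer('mercurial/proto-1.0 (Mercurial 6.5)')
# 'mercurial/6.5'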
968
969
969
970
970 def get_available_port(min_port=40000, max_port=55555, use_range=False):
971 def get_available_port(min_port=40000, max_port=55555, use_range=False):
971 hostname = ''
972 hostname = ''
972 for _check_port in range(min_port, max_port):
973 for _check_port in range(min_port, max_port):
973 pick_port = 0
974 pick_port = 0
974 if use_range:
975 if use_range:
975 pick_port = random.randint(min_port, max_port)
976 pick_port = random.randint(min_port, max_port)
976
977
977 with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as s:
978 with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as s:
978 try:
979 try:
979 s.bind((hostname, pick_port))
980 s.bind((hostname, pick_port))
980 s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
981 s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
981 return s.getsockname()[1]
982 return s.getsockname()[1]
982 except socket.error as e:
983 except socket.error as e:
983 if e.args[0] in [errno.EADDRINUSE, errno.ECONNREFUSED]:
984 if e.args[0] in [errno.EADDRINUSE, errno.ECONNREFUSED]:
984 continue
985 continue
985 raise
986 raise
986 except OSError:
987 except OSError:
987 continue
988 continue
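# Usage sketch: by default the code binds port 0, letting the OS pick any free
# port (min_port/max_port then only bound the number of attempts); with
# use_range=True it instead probes random ports within [min_port, max_port]:
#
# >>> port = get_available_port(min_port=40000, max_port=40100, use_range=True)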
@@ -1,453 +1,450
1 # Copyright (C) 2014-2024 RhodeCode GmbH
1 # Copyright (C) 2014-2024 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 """
19 """
20 GIT commit module
20 GIT commit module
21 """
21 """
22
22
23 import io
23 import io
24 import configparser
24 import configparser
25 import logging
25 import logging
26 from itertools import chain
26 from itertools import chain
27
27
28 from zope.cachedescriptors.property import Lazy as LazyProperty
28 from zope.cachedescriptors.property import Lazy as LazyProperty
29
29
30 from rhodecode.lib.datelib import utcdate_fromtimestamp
30 from rhodecode.lib.datelib import utcdate_fromtimestamp
31 from rhodecode.lib.str_utils import safe_bytes, safe_str
31 from rhodecode.lib.str_utils import safe_bytes, safe_str
32 from rhodecode.lib.vcs.backends import base
32 from rhodecode.lib.vcs.backends import base
33 from rhodecode.lib.vcs.exceptions import CommitError, NodeDoesNotExistError
33 from rhodecode.lib.vcs.exceptions import CommitError, NodeDoesNotExistError
34 from rhodecode.lib.vcs.nodes import (
34 from rhodecode.lib.vcs.nodes import (
35 FileNode,
35 FileNode,
36 DirNode,
36 DirNode,
37 NodeKind,
37 NodeKind,
38 RootNode,
38 RootNode,
39 SubModuleNode,
39 SubModuleNode,
40 LargeFileNode,
40 LargeFileNode,
41 )
41 )
42 from rhodecode.lib.vcs_common import FILEMODE_LINK
42 from rhodecode.lib.vcs_common import FILEMODE_LINK
43
43
44 log = logging.getLogger(__name__)
44 log = logging.getLogger(__name__)
45
45
46
46
47 class GitCommit(base.BaseCommit):
47 class GitCommit(base.BaseCommit):
48 """
48 """
49 Represents state of the repository at single commit id.
49 Represents state of the repository at single commit id.
50 """
50 """
51
51
52 _filter_pre_load = [
52 _filter_pre_load = [
53 # done through a more complex tree walk on parents
53 # done through a more complex tree walk on parents
54 "affected_files",
54 "affected_files",
55 # done through subprocess not remote call
55 # done through subprocess not remote call
56 "children",
56 "children",
57 # done through a more complex tree walk on parents
57 # done through a more complex tree walk on parents
58 "status",
58 "status",
59 # mercurial specific property not supported here
59 # mercurial specific property not supported here
60 "obsolete",
60 "obsolete",
61 # mercurial specific property not supported here
61 # mercurial specific property not supported here
62 "phase",
62 "phase",
63 # mercurial specific property not supported here
63 # mercurial specific property not supported here
64 "hidden",
64 "hidden",
65 ]
65 ]
66
66
67 def __init__(self, repository, raw_id, idx, pre_load=None):
67 def __init__(self, repository, raw_id, idx, pre_load=None):
68 self.repository = repository
68 self.repository = repository
69 self._remote = repository._remote
69 self._remote = repository._remote
70 # TODO: johbo: Tweak of raw_id should not be necessary
70 # TODO: johbo: Tweak of raw_id should not be necessary
71 self.raw_id = safe_str(raw_id)
71 self.raw_id = safe_str(raw_id)
72 self.idx = idx
72 self.idx = idx
73
73
74 self._set_bulk_properties(pre_load)
74 self._set_bulk_properties(pre_load)
75
75
76 # caches
76 # caches
77 self.nodes = {}
77 self.nodes = {}
78 self._path_mode_cache = {} # path stats cache, e.g filemode etc
78 self._path_mode_cache = {} # path stats cache, e.g filemode etc
79 self._path_type_cache = {} # path type dir/file/link etc cache
79 self._path_type_cache = {} # path type dir/file/link etc cache
80
80
81 self._submodules = None
81 self._submodules = None
82
82
83 def _set_bulk_properties(self, pre_load):
83 def _set_bulk_properties(self, pre_load):
84 if not pre_load:
84 if not pre_load:
85 return
85 return
86 pre_load = [entry for entry in pre_load if entry not in self._filter_pre_load]
86 pre_load = [entry for entry in pre_load if entry not in self._filter_pre_load]
87 if not pre_load:
87 if not pre_load:
88 return
88 return
89
89
90 result = self._remote.bulk_request(self.raw_id, pre_load)
90 result = self._remote.bulk_request(self.raw_id, pre_load)
91 for attr, value in result.items():
91 for attr, value in result.items():
92 if attr in ["author", "message"]:
92 if attr in ["author", "message"]:
93 if value:
93 if value:
94 value = safe_str(value)
94 value = safe_str(value)
95 elif attr == "date":
95 elif attr == "date":
96 value = utcdate_fromtimestamp(*value)
96 value = utcdate_fromtimestamp(*value)
97 elif attr == "parents":
97 elif attr == "parents":
98 value = self._make_commits(value)
98 value = self._make_commits(value)
99 elif attr == "branch":
99 elif attr == "branch":
100 value = self._set_branch(value)
100 value = self._set_branch(value)
101 self.__dict__[attr] = value
101 self.__dict__[attr] = value
102
102
103 @LazyProperty
103 @LazyProperty
104 def _commit(self):
104 def _commit(self):
105 return self._remote[self.raw_id]
105 return self._remote[self.raw_id]
106
106
107 @LazyProperty
107 @LazyProperty
108 def _tree_id(self):
108 def _tree_id(self):
109 return self._remote[self._commit["tree"]]["id"]
109 return self._remote[self._commit["tree"]]["id"]
110
110
111 @LazyProperty
111 @LazyProperty
112 def id(self):
112 def id(self):
113 return self.raw_id
113 return self.raw_id
114
114
115 @LazyProperty
115 @LazyProperty
116 def short_id(self):
116 def short_id(self):
117 return self.raw_id[:12]
117 return self.raw_id[:12]
118
118
119 @LazyProperty
119 @LazyProperty
120 def message(self):
120 def message(self):
121 return safe_str(self._remote.message(self.id))
121 return safe_str(self._remote.message(self.id))
122
122
123 @LazyProperty
123 @LazyProperty
124 def committer(self):
124 def committer(self):
125 return safe_str(self._remote.author(self.id))
125 return safe_str(self._remote.author(self.id))
126
126
127 @LazyProperty
127 @LazyProperty
128 def author(self):
128 def author(self):
129 return safe_str(self._remote.author(self.id))
129 return safe_str(self._remote.author(self.id))
130
130
131 @LazyProperty
131 @LazyProperty
132 def date(self):
132 def date(self):
133 unix_ts, tz = self._remote.date(self.raw_id)
133 unix_ts, tz = self._remote.date(self.raw_id)
134 return utcdate_fromtimestamp(unix_ts, tz)
134 return utcdate_fromtimestamp(unix_ts, tz)
135
135
136 @LazyProperty
136 @LazyProperty
137 def status(self):
137 def status(self):
138 """
138 """
139 Returns modified, added and deleted files for the current commit
139 Returns modified, added and deleted files for the current commit
140 """
140 """
141 added, modified, deleted = self._changes_cache
141 added, modified, deleted = self._changes_cache
142 return list(modified), list(added), list(deleted)
142 return list(modified), list(added), list(deleted)
143
143
144 @LazyProperty
144 @LazyProperty
145 def tags(self):
145 def tags(self):
146 tags = [safe_str(name) for name, commit_id in self.repository.tags.items() if commit_id == self.raw_id]
146 tags = [safe_str(name) for name, commit_id in self.repository.tags.items() if commit_id == self.raw_id]
147 return tags
147 return tags
148
148
149 @LazyProperty
149 @LazyProperty
150 def commit_branches(self):
150 def commit_branches(self):
151 branches = []
151 branches = []
152 for name, commit_id in self.repository.branches.items():
152 for name, commit_id in self.repository.branches.items():
153 if commit_id == self.raw_id:
153 if commit_id == self.raw_id:
154 branches.append(name)
154 branches.append(name)
155 return branches
155 return branches
156
156
157 def _set_branch(self, branches):
157 def _set_branch(self, branches):
158 if branches:
158 if branches:
159 # actually commit can have multiple branches in git
159 # actually commit can have multiple branches in git
160 return safe_str(branches[0])
160 return safe_str(branches[0])
161
161
162 @LazyProperty
162 @LazyProperty
163 def branch(self):
163 def branch(self):
164 branches = self._remote.branch(self.raw_id)
164 branches = self._remote.branch(self.raw_id)
165 return self._set_branch(branches)
165 return self._set_branch(branches)
166
166
167 def _get_path_tree_id_and_type(self, path: bytes):
167 def _get_path_tree_id_and_type(self, path: bytes):
168
168
169 if path in self._path_type_cache:
169 if path in self._path_type_cache:
170 return self._path_type_cache[path]
170 return self._path_type_cache[path]
171
171
172 if path == b"":
172 if path == b"":
173 self._path_type_cache[b""] = [self._tree_id, NodeKind.DIR]
173 self._path_type_cache[b""] = [self._tree_id, NodeKind.DIR]
174 return self._path_type_cache[path]
174 return self._path_type_cache[path]
175
175
176 tree_id, tree_type, tree_mode = self._remote.tree_and_type_for_path(self.raw_id, path)
176 tree_id, tree_type, tree_mode = self._remote.tree_and_type_for_path(self.raw_id, path)
177 if tree_id is None:
177 if tree_id is None:
178 raise self.no_node_at_path(path)
178 raise self.no_node_at_path(path)
179
179
180 self._path_type_cache[path] = [tree_id, tree_type]
180 self._path_type_cache[path] = [tree_id, tree_type]
181 self._path_mode_cache[path] = tree_mode
181 self._path_mode_cache[path] = tree_mode
182
182
183 return self._path_type_cache[path]
183 return self._path_type_cache[path]
184
184
185 def _get_kind(self, path):
185 def _get_kind(self, path):
186 path = self._fix_path(path)
186 path = self._fix_path(path)
187 _, path_type = self._get_path_tree_id_and_type(path)
187 _, path_type = self._get_path_tree_id_and_type(path)
188 return path_type
188 return path_type
189
189
190 def _assert_is_path(self, path):
190 def _assert_is_path(self, path):
191 path = self._fix_path(path)
191 path = self._fix_path(path)
192 if self._get_kind(path) != NodeKind.FILE:
192 if self._get_kind(path) != NodeKind.FILE:
193 raise CommitError(f"File at path={path} does not exist for commit {self.raw_id}")
193 raise CommitError(f"File at path={path} does not exist for commit {self.raw_id}")
194 return path
194 return path
195
195
196 def _get_file_nodes(self):
196 def _get_file_nodes(self):
197 return chain(*(t[2] for t in self.walk()))
197 return chain(*(t[2] for t in self.walk()))
198
198
199 @LazyProperty
199 @LazyProperty
200 def parents(self):
200 def parents(self):
201 """
201 """
202 Returns list of parent commits.
202 Returns list of parent commits.
203 """
203 """
204 parent_ids = self._remote.parents(self.id)
204 parent_ids = self._remote.parents(self.id)
205 return self._make_commits(parent_ids)
205 return self._make_commits(parent_ids)
206
206
207 @LazyProperty
207 @LazyProperty
208 def children(self):
208 def children(self):
209 """
209 """
210 Returns list of child commits.
210 Returns list of child commits.
211 """
211 """
212
212
213 children = self._remote.children(self.raw_id)
213 children = self._remote.children(self.raw_id)
214 return self._make_commits(children)
214 return self._make_commits(children)
215
215
216 def _make_commits(self, commit_ids):
216 def _make_commits(self, commit_ids):
217 def commit_maker(_commit_id):
217 def commit_maker(_commit_id):
218 return self.repository.get_commit(commit_id=_commit_id)
218 return self.repository.get_commit(commit_id=_commit_id)
219
219
220 return [commit_maker(commit_id) for commit_id in commit_ids]
220 return [commit_maker(commit_id) for commit_id in commit_ids]
221
221
222 def get_file_mode(self, path: bytes):
222 def get_file_mode(self, path: bytes):
223 """
223 """
224 Returns stat mode of the file at the given `path`.
224 Returns stat mode of the file at the given `path`.
225 """
225 """
226 path = self._assert_is_path(path)
226 path = self._assert_is_path(path)
227
227
228 # ensure path is traversed
228 # ensure path is traversed
229 self._get_path_tree_id_and_type(path)
229 self._get_path_tree_id_and_type(path)
230
230
231 return self._path_mode_cache[path]
231 return self._path_mode_cache[path]
232
232
233 def is_link(self, path: bytes):
233 def is_link(self, path: bytes):
234 path = self._assert_is_path(path)
234 path = self._assert_is_path(path)
235 if path not in self._path_mode_cache:
235 if path not in self._path_mode_cache:
236 self._path_mode_cache[path] = self._remote.fctx_flags(self.raw_id, path)
236 self._path_mode_cache[path] = self._remote.fctx_flags(self.raw_id, path)
237
237
238 return self._path_mode_cache[path] == FILEMODE_LINK
238 return self._path_mode_cache[path] == FILEMODE_LINK
239
239
240 def is_node_binary(self, path):
240 def is_node_binary(self, path):
241 tree_id, _ = self._get_path_tree_id_and_type(path)
241 tree_id, _ = self._get_path_tree_id_and_type(path)
242 return self._remote.is_binary(tree_id)
242 return self._remote.is_binary(tree_id)
243
243
244 def node_md5_hash(self, path):
244 def node_md5_hash(self, path):
245 path = self._assert_is_path(path)
245 path = self._assert_is_path(path)
246 return self._remote.md5_hash(self.raw_id, path)
246 return self._remote.md5_hash(self.raw_id, path)
247
247
248 def get_file_content(self, path):
248 def get_file_content(self, path):
249 """
249 """
250 Returns content of the file at given `path`.
250 Returns content of the file at given `path`.
251 """
251 """
252 tree_id, _ = self._get_path_tree_id_and_type(path)
252 tree_id, _ = self._get_path_tree_id_and_type(path)
253 return self._remote.blob_as_pretty_string(tree_id)
253 return self._remote.blob_as_pretty_string(tree_id)
254
254
255 def get_file_content_streamed(self, path):
255 def get_file_content_streamed(self, path):
256 tree_id, _ = self._get_path_tree_id_and_type(path)
256 tree_id, _ = self._get_path_tree_id_and_type(path)
257 stream_method = getattr(self._remote, "stream:blob_as_pretty_string")
257 stream_method = getattr(self._remote, "stream:blob_as_pretty_string")
258 return stream_method(tree_id)
258 return stream_method(tree_id)
259
259
260 def get_file_size(self, path):
260 def get_file_size(self, path):
261 """
261 """
262 Returns size of the file at given `path`.
262 Returns size of the file at given `path`.
263 """
263 """
264 tree_id, _ = self._get_path_tree_id_and_type(path)
264 tree_id, _ = self._get_path_tree_id_and_type(path)
265 return self._remote.blob_raw_length(tree_id)
265 return self._remote.blob_raw_length(tree_id)
266
266
267 def get_path_history(self, path, limit=None, pre_load=None):
267 def get_path_history(self, path, limit=None, pre_load=None):
268 """
268 """
269 Returns history of file as reversed list of `GitCommit` objects for
269 Returns history of file as reversed list of `GitCommit` objects for
270 which file at given `path` has been modified.
270 which file at given `path` has been modified.
271 """
271 """
272 path = self._assert_is_path(path)
272 path = self._assert_is_path(path)
273 history = self._remote.node_history(self.raw_id, path, limit)
273 history = self._remote.node_history(self.raw_id, path, limit)
274 return [self.repository.get_commit(commit_id=commit_id, pre_load=pre_load) for commit_id in history]
274 return [self.repository.get_commit(commit_id=commit_id, pre_load=pre_load) for commit_id in history]
275
275
276 def get_file_annotate(self, path, pre_load=None):
276 def get_file_annotate(self, path, pre_load=None):
277 """
277 """
278 Returns a generator of four-element tuples with
278 Returns a generator of four-element tuples with
279 lineno, commit_id, commit lazy loader and line
279 lineno, commit_id, commit lazy loader and line
280 """
280 """
281
281
282 result = self._remote.node_annotate(self.raw_id, path)
282 result = self._remote.node_annotate(self.raw_id, path)
283
283
284 for ln_no, commit_id, content in result:
284 for ln_no, commit_id, content in result:
285 yield (
285 yield (
286 ln_no,
286 ln_no,
287 commit_id,
287 commit_id,
288 lambda: self.repository.get_commit(commit_id=commit_id, pre_load=pre_load),
288 lambda: self.repository.get_commit(commit_id=commit_id, pre_load=pre_load),
289 content,
289 content,
290 )
290 )
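# Consumption sketch (assumed commit object and path): the third element is a
# lazy loader, so the full commit is only fetched if the caller needs it:
#
# >>> for line_no, sha, commit_loader, line in commit.get_file_annotate(b'README.rst'):
# ...     annotated_commit = commit_loader()  # resolves the GitCommit on demand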
291
291
292 def get_nodes(self, path: bytes, pre_load=None):
292 def get_nodes(self, path: bytes, pre_load=None):
293
293
294 if self._get_kind(path) != NodeKind.DIR:
294 if self._get_kind(path) != NodeKind.DIR:
295 raise CommitError(f"Directory does not exist for commit {self.raw_id} at '{path}'")
295 raise CommitError(f"Directory does not exist for commit {self.raw_id} at '{path}'")
296 path = self._fix_path(path)
296 path = self._fix_path(path)
297
297
298 # call and check tree_id for this path
299 tree_id, _ = self._get_path_tree_id_and_type(path)
300
301 path_nodes = []
298 path_nodes = []
302
299
303 for bytes_name, stat_, tree_item_id, node_kind in self._remote.tree_items(tree_id):
300 for obj_name, stat_, tree_item_id, node_kind, pre_load_data in self._remote.get_nodes(self.raw_id, path, pre_load):
304 if node_kind is None:
301 if node_kind is None:
305 raise CommitError(f"Requested object type={node_kind} cannot be determined")
302 raise CommitError(f"Requested object type={node_kind} cannot be determined")
306
303
307 if path != b"":
304 if path == b"":
308 obj_path = b"/".join((path, bytes_name))
305 obj_path = obj_name
309 else:
306 else:
310 obj_path = bytes_name
307 obj_path = b"/".join((path, obj_name))
311
308
312 # cache file mode for git, since we have it already
309 # cache file mode for git, since we have it already
313 if obj_path not in self._path_mode_cache:
310 if obj_path not in self._path_mode_cache:
314 self._path_mode_cache[obj_path] = stat_
311 self._path_mode_cache[obj_path] = stat_
315
312
316 # cache type
313 # cache type
317 if obj_path not in self._path_type_cache:
314 if obj_path not in self._path_type_cache:
318 self._path_type_cache[obj_path] = [tree_item_id, node_kind]
315 self._path_type_cache[obj_path] = [tree_item_id, node_kind]
319
316
320 entry = None
317 entry = None
321 if obj_path in self.nodes:
318 if obj_path in self.nodes:
322 entry = self.nodes[obj_path]
319 entry = self.nodes[obj_path]
323 else:
320 else:
324 if node_kind == NodeKind.SUBMODULE:
321 if node_kind == NodeKind.SUBMODULE:
325 url = self._get_submodule_url(b"/".join((path, bytes_name)))
322 url = self._get_submodule_url(obj_path)
326 entry = SubModuleNode(bytes_name, url=url, commit=tree_item_id, alias=self.repository.alias)
323 entry = SubModuleNode(obj_name, url=url, commit=tree_item_id, alias=self.repository.alias)
327 elif node_kind == NodeKind.DIR:
324 elif node_kind == NodeKind.DIR:
328 entry = DirNode(safe_bytes(obj_path), commit=self)
325 entry = DirNode(safe_bytes(obj_path), commit=self)
329 elif node_kind == NodeKind.FILE:
326 elif node_kind == NodeKind.FILE:
330 entry = FileNode(safe_bytes(obj_path), commit=self, mode=stat_, pre_load=pre_load)
327 entry = FileNode(safe_bytes(obj_path), commit=self, mode=stat_, pre_load_data=pre_load_data)
331
328
332 if entry:
329 if entry:
333 self.nodes[obj_path] = entry
330 self.nodes[obj_path] = entry
334 path_nodes.append(entry)
331 path_nodes.append(entry)
335
332
336 path_nodes.sort()
333 path_nodes.sort()
337 return path_nodes
334 return path_nodes
338
335
339 def get_node(self, path: bytes, pre_load=None):
336 def get_node(self, path: bytes, pre_load=None):
340 path = self._fix_path(path)
337 path = self._fix_path(path)
341
338
342 # use cached, if we have one
339 # use cached, if we have one
343 if path in self.nodes:
340 if path in self.nodes:
344 return self.nodes[path]
341 return self.nodes[path]
345
342
346 try:
343 try:
347 tree_id, path_type = self._get_path_tree_id_and_type(path)
344 tree_id, path_type = self._get_path_tree_id_and_type(path)
348 except CommitError:
345 except CommitError:
349 raise NodeDoesNotExistError(f"Cannot find one of parents' directories for a given path: {path}")
346 raise NodeDoesNotExistError(f"Cannot find one of parents' directories for a given path: {path}")
350
347
351 if path == b"":
348 if path == b"":
352 node = RootNode(commit=self)
349 node = RootNode(commit=self)
353 else:
350 else:
354 if path_type == NodeKind.SUBMODULE:
351 if path_type == NodeKind.SUBMODULE:
355 url = self._get_submodule_url(path)
352 url = self._get_submodule_url(path)
356 node = SubModuleNode(path, url=url, commit=tree_id, alias=self.repository.alias)
353 node = SubModuleNode(path, url=url, commit=tree_id, alias=self.repository.alias)
357 elif path_type == NodeKind.DIR:
354 elif path_type == NodeKind.DIR:
358 node = DirNode(safe_bytes(path), commit=self)
355 node = DirNode(safe_bytes(path), commit=self)
359 elif path_type == NodeKind.FILE:
356 elif path_type == NodeKind.FILE:
360 node = FileNode(safe_bytes(path), commit=self, pre_load=pre_load)
357 node = FileNode(safe_bytes(path), commit=self, pre_load=pre_load)
361 self._path_mode_cache[path] = node.mode
358 self._path_mode_cache[path] = node.mode
362 else:
359 else:
363 raise self.no_node_at_path(path)
360 raise self.no_node_at_path(path)
364
361
365 # cache node
362 # cache node
366 self.nodes[path] = node
363 self.nodes[path] = node
367 return self.nodes[path]
364 return self.nodes[path]
368
365
369 def get_largefile_node(self, path: bytes):
366 def get_largefile_node(self, path: bytes):
370 tree_id, _ = self._get_path_tree_id_and_type(path)
367 tree_id, _ = self._get_path_tree_id_and_type(path)
371 pointer_spec = self._remote.is_large_file(tree_id)
368 pointer_spec = self._remote.is_large_file(tree_id)
372
369
373 if pointer_spec:
370 if pointer_spec:
374 # content of that file regular FileNode is the hash of largefile
371 # content of that file regular FileNode is the hash of largefile
375 file_id = pointer_spec.get("oid_hash")
372 file_id = pointer_spec.get("oid_hash")
376 if not self._remote.in_largefiles_store(file_id):
373 if not self._remote.in_largefiles_store(file_id):
377 log.warning(f'Largefile oid={file_id} not found in store')
374 log.warning(f'Largefile oid={file_id} not found in store')
378 return None
375 return None
379
376
380 lf_path = self._remote.store_path(file_id)
377 lf_path = self._remote.store_path(file_id)
381 return LargeFileNode(safe_bytes(lf_path), commit=self, org_path=path)
378 return LargeFileNode(safe_bytes(lf_path), commit=self, org_path=path)
382
379
383 @LazyProperty
380 @LazyProperty
384 def affected_files(self) -> list[bytes]:
381 def affected_files(self) -> list[bytes]:
385 """
382 """
386 Gets fast-accessible file changes for the given commit
383 Gets fast-accessible file changes for the given commit
387 """
384 """
388 added, modified, deleted = self._changes_cache
385 added, modified, deleted = self._changes_cache
389 return list(added.union(modified).union(deleted))
386 return list(added.union(modified).union(deleted))
390
387
391 @LazyProperty
388 @LazyProperty
392 def _changes_cache(self) -> tuple[set, set, set]:
389 def _changes_cache(self) -> tuple[set, set, set]:
393 added = set()
390 added = set()
394 modified = set()
391 modified = set()
395 deleted = set()
392 deleted = set()
396
393
397 parents = self.parents
394 parents = self.parents
398 if not self.parents:
395 if not self.parents:
399 parents = [base.EmptyCommit()]
396 parents = [base.EmptyCommit()]
400 for parent in parents:
397 for parent in parents:
401 if isinstance(parent, base.EmptyCommit):
398 if isinstance(parent, base.EmptyCommit):
402 oid = None
399 oid = None
403 else:
400 else:
404 oid = parent.raw_id
401 oid = parent.raw_id
405 _added, _modified, _deleted = self._remote.tree_changes(oid, self.raw_id)
402 _added, _modified, _deleted = self._remote.tree_changes(oid, self.raw_id)
406 added = added | set(_added)
403 added = added | set(_added)
407 modified = modified | set(_modified)
404 modified = modified | set(_modified)
408 deleted = deleted | set(_deleted)
405 deleted = deleted | set(_deleted)
409
406
410 return added, modified, deleted
407 return added, modified, deleted
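# Note on the loop above: tree_changes is run against every parent and the
# results are unioned, so for a merge commit the sets contain anything that
# differs from at least one parent. Usage sketch (assumed commit object):
#
# >>> added, modified, deleted = commit._changes_cache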
411
408
412 def _get_paths_for_status(self, status):
409 def _get_paths_for_status(self, status):
413 """
410 """
414 Returns sorted list of paths for given ``status``.
411 Returns sorted list of paths for given ``status``.
415
412
416 :param status: one of: *added*, *modified* or *deleted*
413 :param status: one of: *added*, *modified* or *deleted*
417 """
414 """
418 added, modified, deleted = self._changes_cache
415 added, modified, deleted = self._changes_cache
419 return sorted({"added": list(added), "modified": list(modified), "deleted": list(deleted)}[status])
416 return sorted({"added": list(added), "modified": list(modified), "deleted": list(deleted)}[status])
420
417
421 @LazyProperty
418 @LazyProperty
422 def added_paths(self):
419 def added_paths(self):
423 return [n for n in self._get_paths_for_status("added")]
420 return [n for n in self._get_paths_for_status("added")]
424
421
425 @LazyProperty
422 @LazyProperty
426 def changed_paths(self):
423 def changed_paths(self):
427 return [n for n in self._get_paths_for_status("modified")]
424 return [n for n in self._get_paths_for_status("modified")]
428
425
429 @LazyProperty
426 @LazyProperty
430 def removed_paths(self):
427 def removed_paths(self):
431 return [n for n in self._get_paths_for_status("deleted")]
428 return [n for n in self._get_paths_for_status("deleted")]
432
429
433 def _get_submodule_url(self, submodule_path: bytes):
430 def _get_submodule_url(self, submodule_path: bytes):
434 git_modules_path = b".gitmodules"
431 git_modules_path = b".gitmodules"
435
432
436 if self._submodules is None:
433 if self._submodules is None:
437 self._submodules = {}
434 self._submodules = {}
438
435
439 try:
436 try:
440 submodules_node = self.get_node(git_modules_path)
437 submodules_node = self.get_node(git_modules_path)
441 except NodeDoesNotExistError:
438 except NodeDoesNotExistError:
442 return None
439 return None
443
440
444 parser = configparser.RawConfigParser()
441 parser = configparser.RawConfigParser()
445 parser.read_file(io.StringIO(submodules_node.str_content))
442 parser.read_file(io.StringIO(submodules_node.str_content))
446
443
447 for section in parser.sections():
444 for section in parser.sections():
448 path = parser.get(section, "path")
445 path = parser.get(section, "path")
449 url = parser.get(section, "url")
446 url = parser.get(section, "url")
450 if path and url:
447 if path and url:
451 self._submodules[safe_bytes(path).strip(b"/")] = url
448 self._submodules[safe_bytes(path).strip(b"/")] = url
452
449
453 return self._submodules.get(submodule_path.strip(b"/"))
450 return self._submodules.get(submodule_path.strip(b"/"))
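# Resolution sketch (assumed .gitmodules content, not from this changeset):
# given a commit whose .gitmodules contains
#
#     [submodule "vendor/lib"]
#         path = vendor/lib
#         url = https://example.com/vendor/lib.git
#
# the parser above yields {b'vendor/lib': 'https://example.com/vendor/lib.git'},
# so _get_submodule_url(b'vendor/lib') returns that URL.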
@@ -1,397 +1,397
1 # Copyright (C) 2014-2024 RhodeCode GmbH
1 # Copyright (C) 2014-2024 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 """
19 """
20 HG commit module
20 HG commit module
21 """
21 """
22 import os
22 import os
23 import logging
23 import logging
24
24
25 from zope.cachedescriptors.property import Lazy as LazyProperty
25 from zope.cachedescriptors.property import Lazy as LazyProperty
26
26
27 from rhodecode.lib.datelib import utcdate_fromtimestamp
27 from rhodecode.lib.datelib import utcdate_fromtimestamp
28 from rhodecode.lib.str_utils import safe_bytes, safe_str
28 from rhodecode.lib.str_utils import safe_bytes, safe_str
29 from rhodecode.lib.vcs.backends import base
29 from rhodecode.lib.vcs.backends import base
30 from rhodecode.lib.vcs.exceptions import CommitError
30 from rhodecode.lib.vcs.exceptions import CommitError
31 from rhodecode.lib.vcs.nodes import (
31 from rhodecode.lib.vcs.nodes import (
32 DirNode,
32 DirNode,
33 FileNode,
33 FileNode,
34 NodeKind,
34 NodeKind,
35 RootNode,
35 RootNode,
36 SubModuleNode,
36 SubModuleNode,
37 LargeFileNode,
37 LargeFileNode,
38 )
38 )
39 from rhodecode.lib.vcs_common import FILEMODE_LINK
39 from rhodecode.lib.vcs_common import FILEMODE_LINK
40
40
41 log = logging.getLogger(__name__)
41 log = logging.getLogger(__name__)
42
42
43
43
44 class MercurialCommit(base.BaseCommit):
44 class MercurialCommit(base.BaseCommit):
45 """
45 """
46 Represents state of the repository at the single commit.
46 Represents state of the repository at the single commit.
47 """
47 """
48
48
49 _filter_pre_load = [
49 _filter_pre_load = [
50 # git specific property not supported here
50 # git specific property not supported here
51 "_commit",
51 "_commit",
52 ]
52 ]
53
53
54 def __init__(self, repository, raw_id, idx, pre_load=None):
54 def __init__(self, repository, raw_id, idx, pre_load=None):
55 raw_id = safe_str(raw_id)
55 raw_id = safe_str(raw_id)
56
56
57 self.repository = repository
57 self.repository = repository
58 self._remote = repository._remote
58 self._remote = repository._remote
59
59
60 self.raw_id = raw_id
60 self.raw_id = raw_id
61 self.idx = idx
61 self.idx = idx
62
62
63 self._set_bulk_properties(pre_load)
63 self._set_bulk_properties(pre_load)
64
64
65 # caches
65 # caches
66 self.nodes = {}
66 self.nodes = {}
67 self._path_mode_cache = {} # path stats cache, e.g filemode etc
67 self._path_mode_cache = {} # path stats cache, e.g filemode etc
68 self._path_type_cache = {} # path type dir/file/link etc cache
68 self._path_type_cache = {} # path type dir/file/link etc cache
69
69
70 def _set_bulk_properties(self, pre_load):
70 def _set_bulk_properties(self, pre_load):
71 if not pre_load:
71 if not pre_load:
72 return
72 return
73 pre_load = [entry for entry in pre_load if entry not in self._filter_pre_load]
73 pre_load = [entry for entry in pre_load if entry not in self._filter_pre_load]
74 if not pre_load:
74 if not pre_load:
75 return
75 return
76
76
77 result = self._remote.bulk_request(self.raw_id, pre_load)
77 result = self._remote.bulk_request(self.raw_id, pre_load)
78
78
79 for attr, value in result.items():
79 for attr, value in result.items():
80 if attr in ["author", "branch", "message"]:
80 if attr in ["author", "branch", "message"]:
81 value = safe_str(value)
81 value = safe_str(value)
82 elif attr == "affected_files":
82 elif attr == "affected_files":
83 value = list(map(safe_str, value))
83 value = list(map(safe_str, value))
84 elif attr == "date":
84 elif attr == "date":
85 value = utcdate_fromtimestamp(*value)
85 value = utcdate_fromtimestamp(*value)
86 elif attr in ["children", "parents"]:
86 elif attr in ["children", "parents"]:
87 value = self._make_commits(value)
87 value = self._make_commits(value)
88 elif attr in ["phase"]:
88 elif attr in ["phase"]:
89 value = self._get_phase_text(value)
89 value = self._get_phase_text(value)
90 self.__dict__[attr] = value
90 self.__dict__[attr] = value
91
91
92 @LazyProperty
92 @LazyProperty
93 def tags(self):
93 def tags(self):
94 tags = [name for name, commit_id in self.repository.tags.items() if commit_id == self.raw_id]
94 tags = [name for name, commit_id in self.repository.tags.items() if commit_id == self.raw_id]
95 return tags
95 return tags
96
96
97 @LazyProperty
97 @LazyProperty
98 def branch(self):
98 def branch(self):
99 return safe_str(self._remote.ctx_branch(self.raw_id))
99 return safe_str(self._remote.ctx_branch(self.raw_id))
100
100
101 @LazyProperty
101 @LazyProperty
102 def bookmarks(self):
102 def bookmarks(self):
103 bookmarks = [name for name, commit_id in self.repository.bookmarks.items() if commit_id == self.raw_id]
103 bookmarks = [name for name, commit_id in self.repository.bookmarks.items() if commit_id == self.raw_id]
104 return bookmarks
104 return bookmarks
105
105
106 @LazyProperty
106 @LazyProperty
107 def message(self):
107 def message(self):
108 return safe_str(self._remote.ctx_description(self.raw_id))
108 return safe_str(self._remote.ctx_description(self.raw_id))
109
109
110 @LazyProperty
110 @LazyProperty
111 def committer(self):
111 def committer(self):
112 return safe_str(self.author)
112 return safe_str(self.author)
113
113
114 @LazyProperty
114 @LazyProperty
115 def author(self):
115 def author(self):
116 return safe_str(self._remote.ctx_user(self.raw_id))
116 return safe_str(self._remote.ctx_user(self.raw_id))
117
117
118 @LazyProperty
118 @LazyProperty
119 def date(self):
119 def date(self):
120 return utcdate_fromtimestamp(*self._remote.ctx_date(self.raw_id))
120 return utcdate_fromtimestamp(*self._remote.ctx_date(self.raw_id))
121
121
122 @LazyProperty
122 @LazyProperty
123 def status(self):
123 def status(self):
124 """
124 """
125 Returns modified, added and deleted files for the current commit
125 Returns modified, added and deleted files for the current commit
126 """
126 """
127 modified, added, deleted, *_ = self._remote.ctx_status(self.raw_id)
127 modified, added, deleted, *_ = self._remote.ctx_status(self.raw_id)
128 return modified, added, deleted
128 return modified, added, deleted
129
129
130 @LazyProperty
130 @LazyProperty
131 def id(self):
131 def id(self):
132 if self.last:
132 if self.last:
133 return "tip"
133 return "tip"
134 return self.short_id
134 return self.short_id
135
135
136 @LazyProperty
136 @LazyProperty
137 def short_id(self):
137 def short_id(self):
138 return self.raw_id[:12]
138 return self.raw_id[:12]
139
139
140 def _make_commits(self, commit_ids, pre_load=None):
140 def _make_commits(self, commit_ids, pre_load=None):
141 return [self.repository.get_commit(commit_id=commit_id, pre_load=pre_load) for commit_id in commit_ids]
141 return [self.repository.get_commit(commit_id=commit_id, pre_load=pre_load) for commit_id in commit_ids]
142
142
143 @LazyProperty
143 @LazyProperty
144 def parents(self):
144 def parents(self):
145 """
145 """
146 Returns list of parent commits.
146 Returns list of parent commits.
147 """
147 """
148 parents = self._remote.ctx_parents(self.raw_id)
148 parents = self._remote.ctx_parents(self.raw_id)
149 return self._make_commits(parents)
149 return self._make_commits(parents)
150
150
151 def _get_phase_text(self, phase_id):
151 def _get_phase_text(self, phase_id):
152 return {
152 return {
153 0: "public",
153 0: "public",
154 1: "draft",
154 1: "draft",
155 2: "secret",
155 2: "secret",
156 }.get(phase_id) or ""
156 }.get(phase_id) or ""
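# Mapping behaviour: known Mercurial phase ids resolve to their names, and
# anything else collapses to an empty string:
#
# >>> commit._get_phase_text(1)
# 'draft'
# >>> commit._get_phase_text(99)
# ''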
157
157
158 @LazyProperty
158 @LazyProperty
159 def phase(self):
159 def phase(self):
160 phase_id = self._remote.ctx_phase(self.raw_id)
160 phase_id = self._remote.ctx_phase(self.raw_id)
161 phase_text = self._get_phase_text(phase_id)
161 phase_text = self._get_phase_text(phase_id)
162
162
163 return safe_str(phase_text)
163 return safe_str(phase_text)
164
164
165 @LazyProperty
165 @LazyProperty
166 def obsolete(self):
166 def obsolete(self):
167 obsolete = self._remote.ctx_obsolete(self.raw_id)
167 obsolete = self._remote.ctx_obsolete(self.raw_id)
168 return obsolete
168 return obsolete
169
169
170 @LazyProperty
170 @LazyProperty
171 def hidden(self):
171 def hidden(self):
172 hidden = self._remote.ctx_hidden(self.raw_id)
172 hidden = self._remote.ctx_hidden(self.raw_id)
173 return hidden
173 return hidden
174
174
175 @LazyProperty
175 @LazyProperty
176 def children(self):
176 def children(self):
177 """
177 """
178 Returns list of child commits.
178 Returns list of child commits.
179 """
179 """
180 children = self._remote.ctx_children(self.raw_id)
180 children = self._remote.ctx_children(self.raw_id)
181 return self._make_commits(children)
181 return self._make_commits(children)
182
182
183 def _get_kind(self, path):
183 def _get_kind(self, path):
184 path = self._fix_path(path)
184 path = self._fix_path(path)
185 path_type = self._get_path_type(path)
185 path_type = self._get_path_type(path)
186 return path_type
186 return path_type
187
187
188 def _assert_is_path(self, path) -> str | bytes:
188 def _assert_is_path(self, path) -> str | bytes:
189 path = self._fix_path(path)
189 path = self._fix_path(path)
190
190
191 if self._get_kind(path) != NodeKind.FILE:
191 if self._get_kind(path) != NodeKind.FILE:
192 raise CommitError(f"File at path={path} does not exist for commit {self.raw_id}")
192 raise CommitError(f"File at path={path} does not exist for commit {self.raw_id}")
193
193
194 return path
194 return path
195
195
196 def get_file_mode(self, path: bytes):
196 def get_file_mode(self, path: bytes):
197 """
197 """
198 Returns stat mode of the file at the given ``path``.
198 Returns stat mode of the file at the given ``path``.
199 """
199 """
200 path = self._assert_is_path(path)
200 path = self._assert_is_path(path)
201 if path not in self._path_mode_cache:
201 if path not in self._path_mode_cache:
202 self._path_mode_cache[path] = self._remote.fctx_flags(self.raw_id, path)
202 self._path_mode_cache[path] = self._remote.fctx_flags(self.raw_id, path)
203
203
204 return self._path_mode_cache[path]
204 return self._path_mode_cache[path]
205
205
206 def is_link(self, path: bytes):
206 def is_link(self, path: bytes):
207 path = self._assert_is_path(path)
207 path = self._assert_is_path(path)
208 if path not in self._path_mode_cache:
208 if path not in self._path_mode_cache:
209 self._path_mode_cache[path] = self._remote.fctx_flags(self.raw_id, path)
209 self._path_mode_cache[path] = self._remote.fctx_flags(self.raw_id, path)
210
210
211 return self._path_mode_cache[path] == FILEMODE_LINK
211 return self._path_mode_cache[path] == FILEMODE_LINK
212
212
213 def is_node_binary(self, path):
213 def is_node_binary(self, path):
214 path = self._assert_is_path(path)
214 path = self._assert_is_path(path)
215 return self._remote.is_binary(self.raw_id, path)
215 return self._remote.is_binary(self.raw_id, path)
216
216
217 def node_md5_hash(self, path):
217 def node_md5_hash(self, path):
218 path = self._assert_is_path(path)
218 path = self._assert_is_path(path)
219 return self._remote.md5_hash(self.raw_id, path)
219 return self._remote.md5_hash(self.raw_id, path)
220
220
221 def get_file_content(self, path):
221 def get_file_content(self, path):
222 """
222 """
223 Returns content of the file at given ``path``.
223 Returns content of the file at given ``path``.
224 """
224 """
225 path = self._assert_is_path(path)
225 path = self._assert_is_path(path)
226 return self._remote.fctx_node_data(self.raw_id, path)
226 return self._remote.fctx_node_data(self.raw_id, path)
227
227
228 def get_file_content_streamed(self, path):
228 def get_file_content_streamed(self, path):
229 path = self._assert_is_path(path)
229 path = self._assert_is_path(path)
230 stream_method = getattr(self._remote, "stream:fctx_node_data")
230 stream_method = getattr(self._remote, "stream:fctx_node_data")
231 return stream_method(self.raw_id, path)
231 return stream_method(self.raw_id, path)
232
232
233 def get_file_size(self, path):
233 def get_file_size(self, path):
234 """
234 """
235 Returns size of the file at given ``path``.
235 Returns size of the file at given ``path``.
236 """
236 """
237 path = self._assert_is_path(path)
237 path = self._assert_is_path(path)
238 return self._remote.fctx_size(self.raw_id, path)
238 return self._remote.fctx_size(self.raw_id, path)
239
239
240 def get_path_history(self, path, limit=None, pre_load=None):
240 def get_path_history(self, path, limit=None, pre_load=None):
241 """
241 """
242 Returns history of file as reversed list of `MercurialCommit` objects
242 Returns history of file as reversed list of `MercurialCommit` objects
243 for which file at given ``path`` has been modified.
243 for which file at given ``path`` has been modified.
244 """
244 """
245 path = self._assert_is_path(path)
245 path = self._assert_is_path(path)
246 history = self._remote.node_history(self.raw_id, path, limit)
246 history = self._remote.node_history(self.raw_id, path, limit)
247 return [self.repository.get_commit(commit_id=commit_id, pre_load=pre_load) for commit_id in history]
247 return [self.repository.get_commit(commit_id=commit_id, pre_load=pre_load) for commit_id in history]
248
248
249 def get_file_annotate(self, path, pre_load=None):
249 def get_file_annotate(self, path, pre_load=None):
250 """
250 """
251 Returns a generator of four-element tuples with
251 Returns a generator of four-element tuples with
252 lineno, commit_id, commit lazy loader and line
252 lineno, commit_id, commit lazy loader and line
253 """
253 """
254 result = self._remote.fctx_annotate(self.raw_id, path)
254 result = self._remote.fctx_annotate(self.raw_id, path)
255
255
256 for ln_no, commit_id, content in result:
256 for ln_no, commit_id, content in result:
257 yield (
257 yield (
258 ln_no,
258 ln_no,
259 commit_id,
259 commit_id,
260 lambda: self.repository.get_commit(commit_id=commit_id, pre_load=pre_load),
260 lambda: self.repository.get_commit(commit_id=commit_id, pre_load=pre_load),
261 content,
261 content,
262 )
262 )
263
263
    def get_nodes(self, path: bytes, pre_load=None):
        """
        Returns combined ``DirNode`` and ``FileNode`` objects list representing
        state of commit at the given ``path``. If node at the given ``path``
        is not an instance of ``DirNode``, CommitError would be raised.
        """

        if self._get_kind(path) != NodeKind.DIR:
            raise CommitError(f"Directory does not exist for idx {self.raw_id} at '{path}'")
        path = self._fix_path(path)

        path_nodes = []

        for obj_path, node_kind, flags, pre_load_data in self._remote.get_nodes(self.raw_id, path, pre_load):

            if node_kind is None:
                raise CommitError(f"Requested object type={node_kind} cannot be mapped to a proper type")

            stat_ = flags
            # cache file mode
            if obj_path not in self._path_mode_cache:
                self._path_mode_cache[obj_path] = stat_

            # cache type, keyed by path (not by kind)
            if obj_path not in self._path_type_cache:
                self._path_type_cache[obj_path] = node_kind

            entry = None
            if obj_path in self.nodes:
                entry = self.nodes[obj_path]
            else:
                if node_kind == NodeKind.DIR:
                    entry = DirNode(safe_bytes(obj_path), commit=self)
                elif node_kind == NodeKind.FILE:
                    entry = FileNode(safe_bytes(obj_path), commit=self, mode=stat_, pre_load=pre_load, pre_load_data=pre_load_data)
            if entry:
                self.nodes[obj_path] = entry
                path_nodes.append(entry)

        for obj_path, (location, commit, scm_type) in self._submodules.items():

            if os.path.dirname(obj_path) == path:
                entry = SubModuleNode(obj_path, url=location, commit=commit, alias=scm_type)
                self.nodes[obj_path] = entry
                path_nodes.append(entry)

        path_nodes.sort()
        return path_nodes

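    # A minimal sketch of the assumed ``pre_load`` contract: a list of
    # attribute names the vcsserver can answer in one round-trip. The names
    # below mirror what _store_pre_load() in the nodes module accepts; treat
    # the exact list as an assumption, not a definitive API surface.
    #
    #     entries = commit.get_nodes(b"docs", pre_load=["size", "flags", "is_binary", "md5"])
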
    def get_node(self, path: bytes, pre_load=None):
        """
        Returns `Node` object from the given `path`. If there is no node at
        the given `path`, `NodeDoesNotExistError` would be raised.
        """
        path = self._fix_path(path)

        # use cached, if we have one
        if path in self.nodes:
            return self.nodes[path]

        path_type = self._get_path_type(path)
        if path == b"":
            node = RootNode(commit=self)
        else:
            if path_type == NodeKind.DIR:
                node = DirNode(safe_bytes(path), commit=self)
            elif path_type == NodeKind.FILE:
                node = FileNode(safe_bytes(path), commit=self, pre_load=pre_load)
                self._path_mode_cache[path] = node.mode
            else:
                raise self.no_node_at_path(path)
        # cache node
        self.nodes[path] = node
        return self.nodes[path]

    def _get_path_type(self, path: bytes):
        if path in self._path_type_cache:
            return self._path_type_cache[path]

        if path == b"":
            self._path_type_cache[b""] = NodeKind.DIR
            return NodeKind.DIR

        path_type, flags = self._remote.get_path_type(self.raw_id, path)

        if not path_type:
            raise self.no_node_at_path(path)

        self._path_type_cache[path] = path_type
        self._path_mode_cache[path] = flags

        return self._path_type_cache[path]

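    # Usage sketch (hypothetical repository contents): paths are bytestrings
    # relative to the repository root, matching the bytes-oriented fix this
    # module received.
    #
    #     readme = commit.get_node(b"README.rst")  # FileNode
    #     docs = commit.get_node(b"docs")          # DirNode
    #     root = commit.get_node(b"")              # RootNode
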
    def get_largefile_node(self, path: bytes):
        pointer_spec = self._remote.is_large_file(self.raw_id, path)
        if pointer_spec:
            # content of the regular FileNode at that path is the hash of the largefile
            file_id = self.get_file_content(path).strip()

            if self._remote.in_largefiles_store(file_id):
                lf_path = self._remote.store_path(file_id)
                return LargeFileNode(safe_bytes(lf_path), commit=self, org_path=path)
            elif self._remote.in_user_cache(file_id):
                lf_path = self._remote.store_path(file_id)
                self._remote.link(file_id, path)
                return LargeFileNode(safe_bytes(lf_path), commit=self, org_path=path)

    @LazyProperty
    def _submodules(self):
        """
        Returns a dictionary with submodule information from the substate file
        of the hg repository.
        """
        return self._remote.ctx_substate(self.raw_id)

    @LazyProperty
    def affected_files(self) -> list[bytes]:
        """
        Gets a fast-accessible list of changed files for the given commit.
        """
        return self._remote.ctx_files(self.raw_id)

    @LazyProperty
    def added_paths(self):
        return [n for n in self.status[1]]

    @LazyProperty
    def changed_paths(self):
        return [n for n in self.status[0]]

    @LazyProperty
    def removed_paths(self):
        return [n for n in self.status[2]]
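
    # The three properties above assume the usual Mercurial status ordering,
    # i.e. ``self.status`` indexes as (modified, added, removed); a minimal
    # sketch of consuming them together:
    #
    #     touched = set(commit.changed_paths) | set(commit.added_paths) | set(commit.removed_paths)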
@@ -1,278 +1,278
# Copyright (C) 2014-2024 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

"""
SVN commit module
"""
import logging
import dateutil.parser
from zope.cachedescriptors.property import Lazy as LazyProperty

from rhodecode.lib.str_utils import safe_bytes, safe_str
from rhodecode.lib.vcs import nodes, path as vcspath
from rhodecode.lib.vcs.backends import base
from rhodecode.lib.vcs.exceptions import CommitError
from vcsserver.lib.vcs_common import NodeKind, FILEMODE_EXECUTABLE, FILEMODE_DEFAULT, FILEMODE_LINK

_SVN_PROP_TRUE = "*"

log = logging.getLogger(__name__)


class SubversionCommit(base.BaseCommit):
    """
    Subversion specific implementation of commits

    .. attribute:: branch

        The Subversion backend does not support assigning branches to
        specific commits. This attribute always has the value `None`.

    """

    def __init__(self, repository, commit_id):
        self.repository = repository
        self.idx = self.repository._get_commit_idx(commit_id)
        self._svn_rev = self.idx + 1
        self._remote = repository._remote
        # TODO: handling of raw_id should be a method on repository itself,
        # which knows how to translate commit index and commit id
        self.raw_id = commit_id
        self.short_id = commit_id
        self.id = f"r{commit_id}"

        self.nodes = {}
        self._path_mode_cache = {}  # path stats cache, e.g. filemode etc.
        self._path_type_cache = {}  # path type dir/file/link etc. cache
        self.tags = []

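    # Identifier sketch (hypothetical values, assuming commit_id "42" maps to
    # index 41): the attributes above end up as idx=41, _svn_rev=42,
    # raw_id="42" and id="r42". The off-by-one between idx and _svn_rev
    # reflects SVN revision numbers starting at 1.
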
    @property
    def author(self):
        return safe_str(self._properties.get("svn:author"))

    @property
    def date(self):
        return _date_from_svn_properties(self._properties)

    @property
    def message(self):
        return safe_str(self._properties.get("svn:log"))

    @LazyProperty
    def _properties(self):
        return self._remote.revision_properties(self._svn_rev)

    @LazyProperty
    def parents(self):
        parent_idx = self.idx - 1
        if parent_idx >= 0:
            parent = self.repository.get_commit(commit_idx=parent_idx)
            return [parent]
        return []

    @LazyProperty
    def children(self):
        child_idx = self.idx + 1
        if child_idx < len(self.repository.commit_ids):
            child = self.repository.get_commit(commit_idx=child_idx)
            return [child]
        return []

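    # The revision-properties dict consumed above is assumed to look roughly
    # like this (standard SVN revision properties; values illustrative):
    #
    #     {"svn:author": "alice", "svn:date": "2024-01-01T12:00:00.000000Z", "svn:log": "fix typo"}
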
    def _calculate_file_mode(self, path: bytes):
        # Note: Subversion flags files which are executable with a special
        # property `svn:executable` which is set to the value ``"*"``.
        if self._get_file_property(path, "svn:executable") == _SVN_PROP_TRUE:
            return FILEMODE_EXECUTABLE
        else:
            return FILEMODE_DEFAULT

    def get_file_mode(self, path: bytes):
        path = self._fix_path(path)

        if path not in self._path_mode_cache:
            self._path_mode_cache[path] = self._calculate_file_mode(path)

        return self._path_mode_cache[path]

    def _get_path_type(self, path: bytes):
        if path in self._path_type_cache:
            return self._path_type_cache[path]

        if path == b"":
            self._path_type_cache[b""] = NodeKind.DIR
            return NodeKind.DIR

        path_type = self._remote.get_node_type(self._svn_rev, path)

        if not path_type:
            raise self.no_node_at_path(path)

        # flags = None
        self._path_type_cache[path] = path_type
        # self._path_mode_cache[path] = flags

        return self._path_type_cache[path]

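    # Mode-resolution sketch: a path carrying the ``svn:executable`` property
    # (value "*") resolves to FILEMODE_EXECUTABLE, anything else to
    # FILEMODE_DEFAULT; assuming the usual octal values those constants carry:
    #
    #     commit.get_file_mode(b"scripts/run.sh")  # -> FILEMODE_EXECUTABLE (e.g. 0o100755)
    #     commit.get_file_mode(b"README.rst")      # -> FILEMODE_DEFAULT   (e.g. 0o100644)
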
    def is_link(self, path: bytes):
        # Note: Subversion has a flag for special files, the content of the
        # file contains the type of that file.
        if self._get_file_property(path, "svn:special") == _SVN_PROP_TRUE:
            return self.get_file_content(path).startswith(b"link")
        return False

    def is_node_binary(self, path):
        path = self._fix_path(path)
        return self._remote.is_binary(self._svn_rev, safe_str(path))

    def node_md5_hash(self, path):
        path = self._fix_path(path)
        return self._remote.md5_hash(self._svn_rev, safe_str(path))

    def _get_file_property(self, path, name):
        file_properties = self._remote.node_properties(safe_str(path), self._svn_rev)
        return file_properties.get(name)

    def get_file_content(self, path):
        path = self._fix_path(path)
        return self._remote.get_file_content(self._svn_rev, safe_str(path))

    def get_file_content_streamed(self, path):
        path = self._fix_path(path)

        stream_method = getattr(self._remote, "stream:get_file_content")
        return stream_method(self._svn_rev, safe_str(path))

    def get_file_size(self, path):
        path = self._fix_path(path)
        return self._remote.get_file_size(self._svn_rev, safe_str(path))

    def get_path_history(self, path, limit=None, pre_load=None):
        path = self._fix_path(path)
        history = self._remote.node_history(self._svn_rev, safe_str(path), limit)
        return [self.repository.get_commit(commit_id=str(svn_rev)) for svn_rev in history]

    def get_file_annotate(self, path, pre_load=None):
        result = self._remote.file_annotate(safe_str(path), self._svn_rev)

        for zero_based_line_no, svn_rev, content in result:
            commit_id = str(svn_rev)
            line_no = zero_based_line_no + 1
            # bind commit_id per iteration to avoid late-binding once the
            # generator is consumed lazily
            yield line_no, commit_id, lambda commit_id=commit_id: self.repository.get_commit(commit_id=commit_id), content

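    # Streaming sketch: the stream variant above is assumed to yield byte
    # chunks, so large files never need to be materialized in memory:
    #
    #     with open("dump.bin", "wb") as out:
    #         for chunk in commit.get_file_content_streamed(b"big/archive.tar"):
    #             out.write(chunk)
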
    def get_node(self, path: bytes, pre_load=None):
        path = self._fix_path(path)

        # use cached, if we have one
        if path in self.nodes:
            return self.nodes[path]

        path_type = self._get_path_type(path)
        if path == b"":
            node = nodes.RootNode(commit=self)
        else:
            if path_type == NodeKind.DIR:
                node = nodes.DirNode(safe_bytes(path), commit=self)
            elif path_type == NodeKind.FILE:
                node = nodes.FileNode(safe_bytes(path), commit=self, pre_load=pre_load)
                self._path_mode_cache[path] = node.mode
            else:
                raise self.no_node_at_path(path)

        self.nodes[path] = node
        return self.nodes[path]

    def get_nodes(self, path: bytes, pre_load=None):
        if self._get_kind(path) != nodes.NodeKind.DIR:
            raise CommitError(f"Directory does not exist for commit {self.raw_id} at '{path}'")
        path = self._fix_path(path)

        path_nodes = []

        for obj_path, node_kind, pre_load_data in self._remote.get_nodes(self._svn_rev, path, pre_load):

            if node_kind is None:
                raise CommitError(f"Requested object type={node_kind} cannot be determined")

            # TODO: implement it ??
            stat_ = None
            # # cache file mode
            # if obj_path not in self._path_mode_cache:
            #     self._path_mode_cache[obj_path] = stat_

            # cache type, keyed by path (not by kind)
            if obj_path not in self._path_type_cache:
                self._path_type_cache[obj_path] = node_kind

            entry = None
            if obj_path in self.nodes:
                entry = self.nodes[obj_path]
            else:
                if node_kind == NodeKind.DIR:
                    entry = nodes.DirNode(safe_bytes(obj_path), commit=self)
                elif node_kind == NodeKind.FILE:
                    entry = nodes.FileNode(safe_bytes(obj_path), commit=self, mode=stat_, pre_load=pre_load, pre_load_data=pre_load_data)
            if entry:
                self.nodes[obj_path] = entry
                path_nodes.append(entry)

        path_nodes.sort()
        return path_nodes

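    # Usage sketch: listing the repository root returns DirNode/FileNode
    # entries already sorted by Node ordering (kind first, then path):
    #
    #     for node in commit.get_nodes(b""):
    #         print(node.kind, node.path)
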
    def _get_kind(self, path):
        path = self._fix_path(path)
        path_type = self._get_path_type(path)
        return path_type

    @LazyProperty
    def _changes_cache(self):
        return self._remote.revision_changes(self._svn_rev)

    @LazyProperty
    def affected_files(self) -> list[bytes]:
        changed_files = set()
        for files in self._changes_cache.values():
            changed_files.update(files)
        return list(changed_files)

    @LazyProperty
    def id(self):
        return self.raw_id

    @LazyProperty
    def added_paths(self):
        return [n for n in self._changes_cache["added"]]

    @LazyProperty
    def changed_paths(self):
        return [n for n in self._changes_cache["changed"]]

    @LazyProperty
    def removed_paths(self):
        return [n for n in self._changes_cache["removed"]]


def _date_from_svn_properties(properties):
    """
    Parses the date out of given svn properties.

    :return: :class:`datetime.datetime` instance. The object is naive.
    """

    aware_date = dateutil.parser.parse(properties.get("svn:date"))
    # final_date = aware_date.astimezone(dateutil.tz.tzlocal())
    final_date = aware_date
    return final_date.replace(tzinfo=None)
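

# Date-parsing sketch (self-contained; the input string is illustrative of
# the ``svn:date`` format handled above):
#
#     import dateutil.parser
#     aware = dateutil.parser.parse("2024-01-01T12:00:00.000000Z")  # tz-aware UTC
#     naive = aware.replace(tzinfo=None)                            # what callers receive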
@@ -1,783 +1,788
# Copyright (C) 2014-2024 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

"""
Module holding everything related to vcs nodes, with vcs2 architecture.
"""

import functools
import os
import stat

from zope.cachedescriptors.property import Lazy as LazyProperty

from rhodecode.config.conf import LANGUAGES_EXTENSIONS_MAP
from rhodecode.lib.str_utils import safe_str, safe_bytes
from rhodecode.lib.hash_utils import md5
from rhodecode.lib.vcs import path as vcspath
from rhodecode.lib.vcs.backends.base import EmptyCommit
from rhodecode.lib.vcs.conf.mtypes import get_mimetypes_db
from rhodecode.lib.vcs.exceptions import NodeError
from rhodecode.lib.vcs_common import NodeKind, FILEMODE_DEFAULT

LARGEFILE_PREFIX = ".hglf"


class NodeState:
    ADDED = "added"
    CHANGED = "changed"
    NOT_CHANGED = "not changed"
    REMOVED = "removed"


# TODO: not sure if that should be bytes or str ?
# most probably bytes because content should be bytes and we check it
BIN_BYTE_MARKER = b"\0"


@functools.total_ordering
class Node(object):
    """
    Simplest class representing file or directory on repository. SCM backends
    should use ``FileNode`` and ``DirNode`` subclasses rather than ``Node``
    directly.

    Node's ``path`` cannot start with slash as we operate on *relative* paths
    only. Moreover, every single node is identified by the ``path`` attribute,
    so it cannot end with slash either. Otherwise, the path could lead to mistakes.
    """

    # RTLO marker allows swapping text, and certain
    # security attacks could be used with this
    RTLO_MARKER = "\u202e"

    commit = None

    def __init__(self, path: bytes, kind):
        self._validate_path(path)  # can throw exception if path is invalid

        self.bytes_path: bytes = path.rstrip(b"/")  # store for mixed encoding, and raw version
        self.str_path: str = safe_str(self.bytes_path)  # we store paths as str
        self.path: str = self.str_path

        if self.bytes_path == b"" and kind != NodeKind.DIR:
            raise NodeError("Only DirNode and its subclasses may be initialized with empty path")
        self.kind = kind

        if self.is_root() and not self.is_dir():
            raise NodeError("Root node cannot be FILE kind")

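    # Path-contract sketch (hypothetical paths): only relative bytestring
    # paths are accepted; a leading slash or a str path raises, while
    # trailing slashes are stripped on init:
    #
    #     Node(b"docs/readme.rst", NodeKind.FILE)  # ok
    #     Node(b"/docs", NodeKind.DIR)             # NodeError: leading slash
    #     Node("docs", NodeKind.DIR)               # TypeError: bytes required
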
    def __eq__(self, other):
        if type(self) is not type(other):
            return False
        for attr in ["name", "path", "kind"]:
            if getattr(self, attr) != getattr(other, attr):
                return False
        if self.is_file():
            # FileNode compare, we need to fallback to content compare
            return None
        else:
            # For DirNode's check without entering each dir
            self_nodes_paths = list(sorted(n.path for n in self.nodes))
            # note: compare against *other*'s nodes, not our own
            other_nodes_paths = list(sorted(n.path for n in other.nodes))
            if self_nodes_paths != other_nodes_paths:
                return False
            return True

    def __lt__(self, other):
        if self.kind < other.kind:
            return True
        if self.kind > other.kind:
            return False
        if self.path < other.path:
            return True
        if self.path > other.path:
            return False

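    # Ordering sketch: with functools.total_ordering, nodes compare by kind
    # first and path second, which is what get_nodes() relies on when it
    # calls path_nodes.sort(). Equal nodes fall through to an implicit None,
    # which FileNode.__lt__ treats as "fall back to content comparison".
    #
    #     sorted([file_node, dir_node])  # groups by kind, then sorts by path
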
    def __repr__(self):
        maybe_path = getattr(self, "path", "UNKNOWN_PATH")
        return f"<{self.__class__.__name__} {maybe_path!r}>"

    def __str__(self):
        return self.name

    def _validate_path(self, path: bytes):
        self._assert_bytes(path)

        if path.startswith(b"/"):
            raise NodeError(
                f"Cannot initialize Node objects with slash at "
                f"the beginning as only relative paths are supported. "
                f"Got {path}"
            )

    @classmethod
    def _assert_bytes(cls, value):
        if not isinstance(value, bytes):
            raise TypeError(f"Bytes required as input, got {type(value)} of {value}.")

    @LazyProperty
    def parent(self):
        parent_path: bytes = self.get_parent_path()
        if parent_path:
            if self.commit:
                return self.commit.get_node(parent_path)
            return DirNode(parent_path)
        return None

    @LazyProperty
    def has_rtlo(self):
        """Detects if a path has right-to-left-override marker"""
        return self.RTLO_MARKER in self.str_path

    @LazyProperty
    def dir_path(self):
        """
        Returns name of the directory from full path of this vcs node. Empty
        string is returned if there's no directory in the path.
        """
        _parts = self.path.rstrip("/").rsplit("/", 1)
        if len(_parts) == 2:
            return _parts[0]
        return ""

    @LazyProperty
    def name(self):
        """
        Returns the name of the node, i.e. only the last part of its path.
        """
        return self.str_path.rstrip("/").split("/")[-1]

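    # Path-splitting sketch (hypothetical path): for a node at
    # b"docs/config/index.rst" the derived attributes above resolve to
    # dir_path == "docs/config" and name == "index.rst"; for a top-level
    # b"README.rst", dir_path is "" and name is "README.rst".
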
    @property
    def kind(self):
        return self._kind

    @kind.setter
    def kind(self, kind):
        if hasattr(self, "_kind"):
            raise NodeError("Cannot change node's kind")
        else:
            self._kind = kind
            # Post setter check (path's trailing slash)
            if self.str_path.endswith("/"):
                raise NodeError("Node's path cannot end with slash")

    def get_parent_path(self) -> bytes:
        """
        Returns node's parent path or empty string if node is root.
        """
        if self.is_root():
            return b""
        parent_path = vcspath.dirname(self.bytes_path.rstrip(b"/")) + b"/"

        return safe_bytes(parent_path)

    def is_file(self):
        """
        Returns ``True`` if node's kind is ``NodeKind.FILE``, ``False``
        otherwise.
        """
        return self.kind == NodeKind.FILE

    def is_dir(self):
        """
        Returns ``True`` if node's kind is ``NodeKind.DIR``, ``False``
        otherwise.
        """
        return self.kind == NodeKind.DIR

    def is_root(self):
        """
        Returns ``True`` if node is a root node and ``False`` otherwise.
        """
        return self.kind == NodeKind.DIR and self.path == ""

    def is_submodule(self):
        """
        Returns ``True`` if node's kind is ``NodeKind.SUBMODULE``, ``False``
        otherwise.
        """
        return self.kind == NodeKind.SUBMODULE

    def is_largefile(self):
        """
        Returns ``True`` if node's kind is ``NodeKind.LARGE_FILE``, ``False``
        otherwise.
        """
        return self.kind == NodeKind.LARGE_FILE

    def is_link(self):
        if self.commit:
            return self.commit.is_link(self.bytes_path)
        return False


class FileNode(Node):
    """
    Class representing file nodes.

    :attribute: path: path to the node, relative to repository's root
    :attribute: content: if given, arbitrarily sets content of the file
    :attribute: commit: if given, content is lazily fetched from it the first time it is accessed
    :attribute: mode: stat mode for a node. Default is `FILEMODE_DEFAULT`.
    """

    _filter_pre_load = []

    def __init__(self, path: bytes, content: bytes | None = None, commit=None, mode=None, pre_load=None, pre_load_data=None):
        """
        Only one of ``content`` and ``commit`` may be given. Passing both
        would raise ``NodeError`` exception.

        :param path: relative path to the node
        :param content: content may be passed to constructor
        :param commit: if given, will use it to lazily fetch content
        :param mode: ST_MODE (i.e. 0100644)
        """
        if content and commit:
            raise NodeError("Cannot use both content and commit")

        super().__init__(path, kind=NodeKind.FILE)

        self.commit = commit
        if content and not isinstance(content, bytes):
            # File content is one thing that inherently must be bytes
            # we support passing str too, and convert the content
            content = safe_bytes(content)
        self._content = content
        self._mode = mode or FILEMODE_DEFAULT
        if pre_load_data:
            self._store_pre_load(pre_load_data)
        else:
            self._set_bulk_properties(pre_load)

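    # Construction sketch (hypothetical values): a FileNode is backed either
    # by inline content or by a commit, never both:
    #
    #     inline = FileNode(b"notes.txt", content=b"hello")          # content held in memory
    #     lazy = FileNode(b"notes.txt", commit=some_commit)          # content fetched on access
    #     FileNode(b"notes.txt", content=b"x", commit=some_commit)   # NodeError
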
    def __eq__(self, other):
        eq = super().__eq__(other)
        if eq is not None:
            return eq
        return self.content == other.content

    def __hash__(self):
        raw_id = getattr(self.commit, "raw_id", "")
        return hash((self.path, raw_id))

    def __lt__(self, other):
        lt = super().__lt__(other)
        if lt is not None:
            return lt
        return self.content < other.content

    def __repr__(self):
        short_id = getattr(self.commit, "short_id", "")
        return f"<{self.__class__.__name__} path={self.str_path!r}, short_id={short_id}>"

    def _set_bulk_properties(self, pre_load):
        if not pre_load:
            return
        pre_load = [entry for entry in pre_load if entry not in self._filter_pre_load]
        if not pre_load:
            return

        remote = self.commit.get_remote()
        result = remote.bulk_file_request(self.commit.raw_id, self.bytes_path, pre_load)

        self._store_pre_load(result.items())

    def _store_pre_load(self, pre_load_data):
        for attr, value in pre_load_data:
            if attr == "flags":
                self.__dict__["mode"] = safe_str(value)
            elif attr == "size":
                self.__dict__["size"] = value
            elif attr == "data":
                self.__dict__["_content"] = value
            elif attr == "is_binary":
                self.__dict__["is_binary"] = value
            elif attr == "md5":
                self.__dict__["md5"] = value
            else:
                raise ValueError(f"Unsupported attr in bulk_property: {attr}")

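    # Shape sketch: ``pre_load_data`` is assumed to be an iterable of
    # (attribute, value) pairs matching the keys handled above, e.g.
    #
    #     node._store_pre_load([("size", 42), ("flags", "644"), ("is_binary", False)])
    #
    # Writing into ``__dict__`` pre-seeds the LazyProperty descriptors of the
    # same names, so later attribute access can skip the remote round-trip.
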
    @LazyProperty
    def mode(self):
        """
        Lazily returns the mode of the FileNode. If `commit` is not set, it
        uses the value given at initialization or `FILEMODE_DEFAULT` (default).
        """
        if self.commit:
            mode = self.commit.get_file_mode(self.bytes_path)
        else:
            mode = self._mode
        return mode

    @LazyProperty
    def raw_bytes(self) -> bytes:
        """
        Lazily returns the raw bytes of the FileNode.
        """
        if self.commit:
            if self._content is None:
                self._content = self.commit.get_file_content(self.bytes_path)
            content = self._content
        else:
            content = self._content
        return content

    def content_uncached(self):
        """
        Returns content of the FileNode, bypassing any cache.
        """
        if self.commit:
            content = self.commit.get_file_content(self.bytes_path)
        else:
            content = self._content
        return content

    def stream_bytes(self):
        """
        Returns an iterator that will stream the content of the file directly from
        vcsserver without loading it to memory.
        """
        if self.commit:
            return self.commit.get_file_content_streamed(self.bytes_path)
        raise NodeError("Cannot retrieve stream_bytes without related commit attribute")

    def metadata_uncached(self):
        """
        Returns md5, binary flag of the file node, without any cache usage.
        """

        content = self.content_uncached()

        is_binary = bool(content and BIN_BYTE_MARKER in content)
        size = 0
        if content:
            size = len(content)

        return is_binary, md5(content), size, content

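    # Usage sketch: metadata_uncached() always recomputes from fresh content,
    # which is exactly what cache-invalidation paths want:
    #
    #     is_binary, md5_sum, size, content = node.metadata_uncached()
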
    @LazyProperty
    def content(self) -> bytes:
        """
        Lazily returns content of the FileNode.
        """
        content = self.raw_bytes
        if content and not isinstance(content, bytes):
            raise ValueError(f"Content is of type {type(content)} instead of bytes")
        return content

    @LazyProperty
    def str_content(self) -> str:
        return safe_str(self.raw_bytes)

    @LazyProperty
    def size(self):
        if self.commit:
            return self.commit.get_file_size(self.bytes_path)
        raise NodeError("Cannot retrieve size of the file without related commit attribute")

    @LazyProperty
    def message(self):
        if self.commit:
            return self.last_commit.message
        raise NodeError("Cannot retrieve message of the file without related commit attribute")

    @LazyProperty
    def last_commit(self):
        if self.commit:
            pre_load = ["author", "date", "message", "parents"]
            return self.commit.get_path_commit(self.bytes_path, pre_load=pre_load)
        raise NodeError("Cannot retrieve last commit of the file without related commit attribute")

    def get_mimetype(self):
        """
        Mimetype is calculated based on the file's content. If ``_mimetype``
        attribute is available, it will be returned (backends which store
        mimetypes or can easily recognize them, should set this private
        attribute to indicate that type should *NOT* be calculated).
        """

        if hasattr(self, "_mimetype"):
            if isinstance(self._mimetype, (tuple, list)) and len(self._mimetype) == 2:
                return self._mimetype
            else:
                raise NodeError("given _mimetype attribute must be a 2 element list or tuple")

        db = get_mimetypes_db()
        mtype, encoding = db.guess_type(self.name)

        if mtype is None:
            if not self.is_largefile() and self.is_binary:
                mtype = "application/octet-stream"
                encoding = None
            else:
                mtype = "text/plain"
                encoding = None

        # try with pygments
        try:
            from pygments.lexers import get_lexer_for_filename

            mt = get_lexer_for_filename(self.name).mimetypes
        except Exception:
            mt = None

        if mt:
            mtype = mt[0]

        return mtype, encoding

    @LazyProperty
    def mimetype(self):
        """
        Wrapper around full mimetype info. It returns only the type of the
        fetched mimetype without the encoding part. Use the get_mimetype
        function to fetch the full set of (type, encoding).
        """
        return self.get_mimetype()[0]

    @LazyProperty
    def mimetype_main(self):
        return self.mimetype.split("/")[0]

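    # Resolution sketch (illustrative values): for a file named "setup.py"
    # the mimetypes database typically yields ("text/x-python", None), so
    # mimetype == "text/x-python" and mimetype_main == "text"; unknown binary
    # content falls back to "application/octet-stream".
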
    @classmethod
    def get_lexer(cls, filename, content=None):
        from pygments import lexers

        extension = filename.split(".")[-1]
        lexer = None

        try:
            lexer = lexers.guess_lexer_for_filename(filename, content, stripnl=False)
        except lexers.ClassNotFound:
            pass

        # try our EXTENSION_MAP
        if not lexer:
            try:
                lexer_class = LANGUAGES_EXTENSIONS_MAP.get(extension)
                if lexer_class:
                    lexer = lexers.get_lexer_by_name(lexer_class[0])
            except lexers.ClassNotFound:
                pass

        if not lexer:
            lexer = lexers.TextLexer(stripnl=False)

        return lexer

    @LazyProperty
    def lexer(self):
        """
        Returns pygments lexer class. Would try to guess lexer taking file's
        content, name and mimetype.
        """
        # TODO: this is more proper, but super heavy on investigating the type based on the content
        # self.get_lexer(self.name, self.content)

        return self.get_lexer(self.name)

    @LazyProperty
    def lexer_alias(self):
        """
        Returns first alias of the lexer guessed for this file.
        """
        return self.lexer.aliases[0]

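    # Fallback-chain sketch (illustrative results): pygments filename
    # guessing is tried first, then the LANGUAGES_EXTENSIONS_MAP lookup,
    # then a plain TextLexer:
    #
    #     FileNode.get_lexer("app.py")         # typically a PythonLexer (via pygments)
    #     FileNode.get_lexer("notes.unknown")  # TextLexer fallback
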
499 @LazyProperty
504 @LazyProperty
500 def history(self):
505 def history(self):
501 """
506 """
502 Returns a list of commit for this file in which the file was changed
507 Returns a list of commit for this file in which the file was changed
503 """
508 """
504 if self.commit is None:
509 if self.commit is None:
505 raise NodeError("Unable to get commit for this FileNode")
510 raise NodeError("Unable to get commit for this FileNode")
506 return self.commit.get_path_history(self.bytes_path)
511 return self.commit.get_path_history(self.bytes_path)
507
512
508 @LazyProperty
513 @LazyProperty
509 def annotate(self):
514 def annotate(self):
510 """
515 """
511 Returns a list of three-element tuples of (lineno, commit, line)
516 Returns a list of three-element tuples of (lineno, commit, line)
512 """
517 """
513 if self.commit is None:
518 if self.commit is None:
514 raise NodeError("Unable to get commit for this FileNode")
519 raise NodeError("Unable to get commit for this FileNode")
515 pre_load = ["author", "date", "message", "parents"]
520 pre_load = ["author", "date", "message", "parents"]
516 return self.commit.get_file_annotate(self.bytes_path, pre_load=pre_load)
521 return self.commit.get_file_annotate(self.bytes_path, pre_load=pre_load)
517
522
518 @LazyProperty
523 @LazyProperty
519 def is_binary(self):
524 def is_binary(self):
520 """
525 """
521 Returns True if file has binary content.
526 Returns True if file has binary content.
522 """
527 """
523 if self.commit:
528 if self.commit:
524 return self.commit.is_node_binary(self.bytes_path)
529 return self.commit.is_node_binary(self.bytes_path)
525 else:
530 else:
526 raw_bytes = self._content
531 raw_bytes = self._content
527 return bool(raw_bytes and BIN_BYTE_MARKER in raw_bytes)
532 return bool(raw_bytes and BIN_BYTE_MARKER in raw_bytes)
528
533
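# Standalone sketch of the commit-less branch above; it assumes that
# BIN_BYTE_MARKER (defined elsewhere in this module) is the NUL byte.
def looks_binary(raw_bytes: bytes) -> bool:
    # presence of a NUL byte is the usual heuristic for binary content
    return bool(raw_bytes and b"\x00" in raw_bytes)

assert looks_binary(b"\x89PNG\r\n\x1a\n\x00\x00")  # PNG data contains NULs
assert not looks_binary(b"plain text\n")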
529 @LazyProperty
534 @LazyProperty
530 def md5(self):
535 def md5(self):
531 """
536 """
532 Returns md5 of the file node.
537 Returns md5 of the file node.
533 """
538 """
534
539
535 if self.commit:
540 if self.commit:
536 return self.commit.node_md5_hash(self.bytes_path)
541 return self.commit.node_md5_hash(self.bytes_path)
537 else:
542 else:
538 raw_bytes = self._content
543 raw_bytes = self._content
539 # TODO: this sucks, we're computing md5 on potentially super big stream data...
544 # TODO: this sucks, we're computing md5 on potentially super big stream data...
540 return md5(raw_bytes)
545 return md5(raw_bytes)
541
546
542 @LazyProperty
547 @LazyProperty
543 def extension(self):
548 def extension(self):
544 """Returns filenode extension"""
549 """Returns filenode extension"""
545 return self.name.split(".")[-1]
550 return self.name.split(".")[-1]
546
551
547 @property
552 @property
548 def is_executable(self):
553 def is_executable(self):
549 """
554 """
550 Returns ``True`` if file has executable flag turned on.
555 Returns ``True`` if file has executable flag turned on.
551 """
556 """
552 return bool(self.mode & stat.S_IXUSR)
557 return bool(self.mode & stat.S_IXUSR)
553
558
554 def get_largefile_node(self):
559 def get_largefile_node(self):
555 """
560 """
556 Try to return a Mercurial FileNode from this node. It performs
561 Try to return a Mercurial FileNode from this node. It performs
557 internal checks inside the largefile store; if the file exists there,
562 internal checks inside the largefile store; if the file exists there,
558 it creates a special LargeFileNode instance that can fetch content
563 it creates a special LargeFileNode instance that can fetch content
559 from the LF store.
564 from the LF store.
560 """
565 """
561 if self.commit:
566 if self.commit:
562 return self.commit.get_largefile_node(self.bytes_path)
567 return self.commit.get_largefile_node(self.bytes_path)
563
568
564 def count_lines(self, content: str | bytes, count_empty=False):
569 def count_lines(self, content: str | bytes, count_empty=False):
565 if isinstance(content, str):
570 if isinstance(content, str):
566 newline_marker = "\n"
571 newline_marker = "\n"
567 elif isinstance(content, bytes):
572 elif isinstance(content, bytes):
568 newline_marker = b"\n"
573 newline_marker = b"\n"
569 else:
574 else:
570 raise ValueError(f"content must be bytes or str, got {type(content)} instead")
575 raise ValueError(f"content must be bytes or str, got {type(content)} instead")
571
576
572 if count_empty:
577 if count_empty:
573 all_lines = 0
578 all_lines = 0
574 empty_lines = 0
579 empty_lines = 0
575 for line in content.splitlines(True):
580 for line in content.splitlines(True):
576 if line == newline_marker:
581 if line == newline_marker:
577 empty_lines += 1
582 empty_lines += 1
578 all_lines += 1
583 all_lines += 1
579
584
580 return all_lines, all_lines - empty_lines
585 return all_lines, all_lines - empty_lines
581 else:
586 else:
582 # fast method
587 # fast method
583 empty_lines = all_lines = content.count(newline_marker)
588 empty_lines = all_lines = content.count(newline_marker)
584 if all_lines == 0 and content:
589 if all_lines == 0 and content:
585 # one-line without a newline
590 # one-line without a newline
586 empty_lines = all_lines = 1
591 empty_lines = all_lines = 1
587
592
588 return all_lines, empty_lines
593 return all_lines, empty_lines
589
594
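# Behavior sketch for count_lines(): with count_empty=True the second value
# is the number of non-blank lines, while the fast path only mirrors the
# newline count. Assumes `node` is any FileNode instance (the method does
# not actually use self).
text = "first\n\nsecond\n"
all_lines, non_blank = node.count_lines(text, count_empty=True)
assert (all_lines, non_blank) == (3, 2)

# bytes input behaves the same, the marker just switches to b"\n"
all_lines, _ = node.count_lines(b"first\n\nsecond\n", count_empty=True)
assert all_lines == 3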
590 def lines(self, count_empty=False):
595 def lines(self, count_empty=False):
591 all_lines, empty_lines = 0, 0
596 all_lines, empty_lines = 0, 0
592
597
593 if not self.is_binary:
598 if not self.is_binary:
594 content = self.content
599 content = self.content
595 all_lines, empty_lines = self.count_lines(content, count_empty=count_empty)
600 all_lines, empty_lines = self.count_lines(content, count_empty=count_empty)
596 return all_lines, empty_lines
601 return all_lines, empty_lines
597
602
598
603
599 class DirNode(Node):
604 class DirNode(Node):
600 """
605 """
601 DirNode stores a list of files and directories within this node.
606 DirNode stores a list of files and directories within this node.
602 Nodes may be used standalone, but within a repository context they
607 Nodes may be used standalone, but within a repository context they
603 lazily fetch data from the same repository's commit.
608 lazily fetch data from the same repository's commit.
604 """
609 """
605
610
606 def __init__(self, path, nodes=(), commit=None, default_pre_load=None):
611 def __init__(self, path, nodes=(), commit=None, default_pre_load=None):
607 """
612 """
608 Only one of ``nodes`` and ``commit`` may be given. Passing both
613 Only one of ``nodes`` and ``commit`` may be given. Passing both
609 raises a ``NodeError`` exception.
614 raises a ``NodeError`` exception.
610
615
611 :param path: relative path to the node
616 :param path: relative path to the node
612 :param nodes: content may be passed to constructor
617 :param nodes: content may be passed to constructor
613 :param commit: if given, will use it to lazily fetch content
618 :param commit: if given, will use it to lazily fetch content
614 """
619 """
615 if nodes and commit:
620 if nodes and commit:
616 raise NodeError("Cannot use both nodes and commit")
621 raise NodeError("Cannot use both nodes and commit")
617 super().__init__(path, NodeKind.DIR)
622 super().__init__(path, NodeKind.DIR)
618 self.commit = commit
623 self.commit = commit
619 self._nodes = nodes
624 self._nodes = nodes
620 self.default_pre_load = default_pre_load or ["is_binary", "size"]
625 self.default_pre_load = default_pre_load or ["is_binary", "size"]
621
626
622 def __iter__(self):
627 def __iter__(self):
623 yield from self.nodes
628 yield from self.nodes
624
629
625 def __eq__(self, other):
630 def __eq__(self, other):
626 eq = super().__eq__(other)
631 eq = super().__eq__(other)
627 if eq is not None:
632 if eq is not None:
628 return eq
633 return eq
629 # check without entering each dir
634 # check without entering each dir
630 self_nodes_paths = list(sorted(n.path for n in self.nodes))
635 self_nodes_paths = list(sorted(n.path for n in self.nodes))
631 other_nodes_paths = list(sorted(n.path for n in other.nodes))
636 other_nodes_paths = list(sorted(n.path for n in other.nodes))
632 return self_nodes_paths == other_nodes_paths
637 return self_nodes_paths == other_nodes_paths
633
638
634 def __lt__(self, other):
639 def __lt__(self, other):
635 lt = super().__lt__(other)
640 lt = super().__lt__(other)
636 if lt is not None:
641 if lt is not None:
637 return lt
642 return lt
638 # check without entering each dir
643 # check without entering each dir
639 self_nodes_paths = list(sorted(n.path for n in self.nodes))
644 self_nodes_paths = list(sorted(n.path for n in self.nodes))
640 other_nodes_paths = list(sorted(n.path for n in other.nodes))
645 other_nodes_paths = list(sorted(n.path for n in other.nodes))
641 return self_nodes_paths < other_nodes_paths
646 return self_nodes_paths < other_nodes_paths
642
647
643 @LazyProperty
648 @LazyProperty
644 def content(self):
649 def content(self):
645 raise NodeError(f"{self} represents a dir and has no `content` attribute")
650 raise NodeError(f"{self} represents a dir and has no `content` attribute")
646
651
647 @LazyProperty
652 @LazyProperty
648 def nodes(self):
653 def nodes(self):
649 if self.commit:
654 if self.commit:
650 nodes = self.commit.get_nodes(self.bytes_path, pre_load=self.default_pre_load)
655 nodes = self.commit.get_nodes(self.bytes_path, pre_load=self.default_pre_load)
651 else:
656 else:
652 nodes = self._nodes
657 nodes = self._nodes
653 return sorted(nodes)
658 return sorted(nodes)
654
659
655 @LazyProperty
660 @LazyProperty
656 def files(self):
661 def files(self):
657 return sorted(node for node in self.nodes if node.is_file())
662 return sorted(node for node in self.nodes if node.is_file())
658
663
659 @LazyProperty
664 @LazyProperty
660 def dirs(self):
665 def dirs(self):
661 return sorted(node for node in self.nodes if node.is_dir())
666 return sorted(node for node in self.nodes if node.is_dir())
662
667
663 @LazyProperty
668 @LazyProperty
664 def state(self):
669 def state(self):
665 raise NodeError("Cannot access state of DirNode")
670 raise NodeError("Cannot access state of DirNode")
666
671
667 @LazyProperty
672 @LazyProperty
668 def size(self):
673 def size(self):
669 size = 0
674 size = 0
670 for root, dirs, files in self.commit.walk(self.bytes_path):
675 for root, dirs, files in self.commit.walk(self.bytes_path):
671 for f in files:
676 for f in files:
672 size += f.size
677 size += f.size
673
678
674 return size
679 return size
675
680
676 @LazyProperty
681 @LazyProperty
677 def last_commit(self):
682 def last_commit(self):
678 if self.commit:
683 if self.commit:
679 pre_load = ["author", "date", "message", "parents"]
684 pre_load = ["author", "date", "message", "parents"]
680 return self.commit.get_path_commit(self.bytes_path, pre_load=pre_load)
685 return self.commit.get_path_commit(self.bytes_path, pre_load=pre_load)
681 raise NodeError("Cannot retrieve last commit of the file without related commit attribute")
686 raise NodeError("Cannot retrieve last commit of the file without related commit attribute")
682
687
683 def __repr__(self):
688 def __repr__(self):
684 short_id = getattr(self.commit, "short_id", "")
689 short_id = getattr(self.commit, "short_id", "")
685 return f"<{self.__class__.__name__} path={self.str_path!r}, short_id={short_id}>"
690 return f"<{self.__class__.__name__} path={self.str_path!r}, short_id={short_id}>"
686
691
687
692
688 class RootNode(DirNode):
693 class RootNode(DirNode):
689 """
694 """
690 DirNode being the root node of the repository.
695 DirNode being the root node of the repository.
691 """
696 """
692
697
693 def __init__(self, nodes=(), commit=None):
698 def __init__(self, nodes=(), commit=None):
694 super().__init__(path=b"", nodes=nodes, commit=commit)
699 super().__init__(path=b"", nodes=nodes, commit=commit)
695
700
696 def __repr__(self):
701 def __repr__(self):
697 short_id = getattr(self.commit, "short_id", "")
702 short_id = getattr(self.commit, "short_id", "")
698 return f"<{self.__class__.__name__} path={self.str_path!r}, short_id={short_id}>"
703 return f"<{self.__class__.__name__} path={self.str_path!r}, short_id={short_id}>"
699
704
700
705
701 class SubModuleNode(Node):
706 class SubModuleNode(Node):
702 """
707 """
703 represents a SubModule of Git or SubRepo of Mercurial
708 represents a SubModule of Git or SubRepo of Mercurial
704 """
709 """
705
710
706 is_binary = False
711 is_binary = False
707 size = 0
712 size = 0
708
713
709 def __init__(self, name, url=None, commit=None, alias=None):
714 def __init__(self, name, url=None, commit=None, alias=None):
710 self.path: bytes = name
715 self.path: bytes = name
711 self.str_path: str = safe_str(self.path) # we store paths as str
716 self.str_path: str = safe_str(self.path) # we store paths as str
712 self.kind = NodeKind.SUBMODULE
717 self.kind = NodeKind.SUBMODULE
713 self.alias = alias
718 self.alias = alias
714
719
715 # we have to use EmptyCommit here since this can point to svn/git/hg
720 # we have to use EmptyCommit here since this can point to svn/git/hg
716 # submodules that we cannot fetch from the repository
721 # submodules that we cannot fetch from the repository
717 self.commit = EmptyCommit(safe_str(commit), alias=alias)
722 self.commit = EmptyCommit(safe_str(commit), alias=alias)
718 self.url = safe_str(url) or self._extract_submodule_url()
723 self.url = safe_str(url) or self._extract_submodule_url()
719
724
720 def __repr__(self):
725 def __repr__(self):
721 short_id = getattr(self.commit, "short_id", "")
726 short_id = getattr(self.commit, "short_id", "")
722 return f"<{self.__class__.__name__} {self.str_path!r} @ {short_id}>"
727 return f"<{self.__class__.__name__} {self.str_path!r} @ {short_id}>"
723
728
724 def _extract_submodule_url(self):
729 def _extract_submodule_url(self):
725 # TODO: find a way to parse gits submodule file and extract the linking URL
730 # TODO: find a way to parse gits submodule file and extract the linking URL
726 return safe_str(self.path)
731 return safe_str(self.path)
727
732
728 @LazyProperty
733 @LazyProperty
729 def name(self):
734 def name(self):
730 """
735 """
731 Returns the name of the node; if the node is a path,
736 Returns the name of the node; if the node is a path,
732 only the last part is returned.
737 only the last part is returned.
733 """
738 """
734 org = self.str_path.rstrip("/").split("/")[-1]
739 org = self.str_path.rstrip("/").split("/")[-1]
735 return f"{org} @ {self.commit.short_id}"
740 return f"{org} @ {self.commit.short_id}"
736
741
737
742
738 class LargeFileNode(FileNode):
743 class LargeFileNode(FileNode):
739 def __init__(self, path, url=None, commit=None, alias=None, org_path=None):
744 def __init__(self, path, url=None, commit=None, alias=None, org_path=None):
740 self._validate_path(path) # can throw exception if path is invalid
745 self._validate_path(path) # can throw exception if path is invalid
741 self.org_path = org_path # as stored in VCS as LF pointer
746 self.org_path = org_path # as stored in VCS as LF pointer
742
747
743 self.bytes_path = path.rstrip(b"/") # store for __repr__
748 self.bytes_path = path.rstrip(b"/") # store for __repr__
744 self.str_path = safe_str(self.bytes_path)
749 self.str_path = safe_str(self.bytes_path)
745 self.path = self.str_path
750 self.path = self.str_path
746
751
747 self.kind = NodeKind.LARGE_FILE
752 self.kind = NodeKind.LARGE_FILE
748 self.alias = alias
753 self.alias = alias
749 self._content = b""
754 self._content = b""
750
755
751 def _validate_path(self, path: bytes):
756 def _validate_path(self, path: bytes):
752 """
757 """
753 We override the path check since the LargeFileNode path is system-absolute; we only assert that it is bytes.
758 We override the path check since the LargeFileNode path is system-absolute; we only assert that it is bytes.
754 """
759 """
755 self._assert_bytes(path)
760 self._assert_bytes(path)
756
761
757 def __repr__(self):
762 def __repr__(self):
758 return f"<{self.__class__.__name__} {self.org_path} -> {self.str_path!r}>"
763 return f"<{self.__class__.__name__} {self.org_path} -> {self.str_path!r}>"
759
764
760 @LazyProperty
765 @LazyProperty
761 def size(self):
766 def size(self):
762 return os.stat(self.path).st_size
767 return os.stat(self.path).st_size
763
768
764 @LazyProperty
769 @LazyProperty
765 def raw_bytes(self):
770 def raw_bytes(self):
766 with open(self.path, "rb") as f:
771 with open(self.path, "rb") as f:
767 content = f.read()
772 content = f.read()
768 return content
773 return content
769
774
770 @LazyProperty
775 @LazyProperty
771 def name(self):
776 def name(self):
772 """
777 """
773 Overwrites name to be the org lf path
778 Overwrites name to be the org lf path
774 """
779 """
775 return self.org_path
780 return self.org_path
776
781
777 def stream_bytes(self):
782 def stream_bytes(self):
778 with open(self.path, "rb") as stream:
783 with open(self.path, "rb") as stream:
779 while True:
784 while True:
780 data = stream.read(16 * 1024)
785 data = stream.read(16 * 1024)
781 if not data:
786 if not data:
782 break
787 break
783 yield data
788 yield data
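# Usage sketch: stream_bytes() yields 16 KiB chunks, so large-file content
# can be copied without loading it into memory. `lf_node` (a LargeFileNode)
# and the target path are illustrative assumptions.
with open("/tmp/largefile-copy.bin", "wb") as target:
    for chunk in lf_node.stream_bytes():
        target.write(chunk)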
@@ -1,1218 +1,1220
1 # Copyright (C) 2010-2024 RhodeCode GmbH
1 # Copyright (C) 2010-2024 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 import os
19 import os
20 import re
20 import re
21 import shutil
21 import shutil
22 import time
22 import time
23 import logging
23 import logging
24 import traceback
24 import traceback
25 import datetime
25 import datetime
26
26
27 from pyramid.threadlocal import get_current_request
27 from pyramid.threadlocal import get_current_request
28 from sqlalchemy.orm import aliased
28 from sqlalchemy.orm import aliased
29 from zope.cachedescriptors.property import Lazy as LazyProperty
29 from zope.cachedescriptors.property import Lazy as LazyProperty
30
30
31 from rhodecode import events
31 from rhodecode import events
32 from rhodecode.lib.auth import HasUserGroupPermissionAny
32 from rhodecode.lib.auth import HasUserGroupPermissionAny
33 from rhodecode.lib.caching_query import FromCache
33 from rhodecode.lib.caching_query import FromCache
34 from rhodecode.lib.exceptions import AttachedForksError, AttachedPullRequestsError, AttachedArtifactsError
34 from rhodecode.lib.exceptions import AttachedForksError, AttachedPullRequestsError, AttachedArtifactsError
35 from rhodecode.lib import hooks_base
35 from rhodecode.lib import hooks_base
36 from rhodecode.lib.str_utils import safe_bytes
36 from rhodecode.lib.str_utils import safe_bytes
37 from rhodecode.lib.user_log_filter import user_log_filter
37 from rhodecode.lib.user_log_filter import user_log_filter
38 from rhodecode.lib.utils import make_db_config
38 from rhodecode.lib.utils import make_db_config
39 from rhodecode.lib.utils2 import (
39 from rhodecode.lib.utils2 import (
40 safe_str, remove_prefix, obfuscate_url_pw,
40 safe_str, remove_prefix, obfuscate_url_pw,
41 get_current_rhodecode_user, safe_int, action_logger_generic)
41 get_current_rhodecode_user, safe_int, action_logger_generic)
42 from rhodecode.lib.vcs.backends import get_backend
42 from rhodecode.lib.vcs.backends import get_backend
43 from rhodecode.lib.vcs.nodes import NodeKind
43 from rhodecode.lib.vcs.nodes import NodeKind
44 from rhodecode.model import BaseModel
44 from rhodecode.model import BaseModel
45 from rhodecode.model.db import (
45 from rhodecode.model.db import (
46 _hash_key, func, case, joinedload, or_, in_filter_generator,
46 _hash_key, func, case, joinedload, or_, in_filter_generator,
47 Session, Repository, UserRepoToPerm, UserGroupRepoToPerm,
47 Session, Repository, UserRepoToPerm, UserGroupRepoToPerm,
48 UserRepoGroupToPerm, UserGroupRepoGroupToPerm, User, Permission,
48 UserRepoGroupToPerm, UserGroupRepoGroupToPerm, User, Permission,
49 Statistics, UserGroup, RepoGroup, RepositoryField, UserLog)
49 Statistics, UserGroup, RepoGroup, RepositoryField, UserLog)
50 from rhodecode.model.permission import PermissionModel
50 from rhodecode.model.permission import PermissionModel
51 from rhodecode.model.settings import VcsSettingsModel
51 from rhodecode.model.settings import VcsSettingsModel
52
52
53 log = logging.getLogger(__name__)
53 log = logging.getLogger(__name__)
54
54
55
55
56 class RepoModel(BaseModel):
56 class RepoModel(BaseModel):
57
57
58 cls = Repository
58 cls = Repository
59
59
60 def _get_user_group(self, users_group):
60 def _get_user_group(self, users_group):
61 return self._get_instance(UserGroup, users_group,
61 return self._get_instance(UserGroup, users_group,
62 callback=UserGroup.get_by_group_name)
62 callback=UserGroup.get_by_group_name)
63
63
64 def _get_repo_group(self, repo_group):
64 def _get_repo_group(self, repo_group):
65 return self._get_instance(RepoGroup, repo_group,
65 return self._get_instance(RepoGroup, repo_group,
66 callback=RepoGroup.get_by_group_name)
66 callback=RepoGroup.get_by_group_name)
67
67
68 def _create_default_perms(self, repository, private):
68 def _create_default_perms(self, repository, private):
69 # create default permission
69 # create default permission
70 default = 'repository.read'
70 default = 'repository.read'
71 def_user = User.get_default_user()
71 def_user = User.get_default_user()
72 for p in def_user.user_perms:
72 for p in def_user.user_perms:
73 if p.permission.permission_name.startswith('repository.'):
73 if p.permission.permission_name.startswith('repository.'):
74 default = p.permission.permission_name
74 default = p.permission.permission_name
75 break
75 break
76
76
77 default_perm = 'repository.none' if private else default
77 default_perm = 'repository.none' if private else default
78
78
79 repo_to_perm = UserRepoToPerm()
79 repo_to_perm = UserRepoToPerm()
80 repo_to_perm.permission = Permission.get_by_key(default_perm)
80 repo_to_perm.permission = Permission.get_by_key(default_perm)
81
81
82 repo_to_perm.repository = repository
82 repo_to_perm.repository = repository
83 repo_to_perm.user = def_user
83 repo_to_perm.user = def_user
84
84
85 return repo_to_perm
85 return repo_to_perm
86
86
87 def get(self, repo_id):
87 def get(self, repo_id):
88 repo = self.sa.query(Repository) \
88 repo = self.sa.query(Repository) \
89 .filter(Repository.repo_id == repo_id)
89 .filter(Repository.repo_id == repo_id)
90
90
91 return repo.scalar()
91 return repo.scalar()
92
92
93 def get_repo(self, repository):
93 def get_repo(self, repository):
94 return self._get_repo(repository)
94 return self._get_repo(repository)
95
95
96 def get_by_repo_name(self, repo_name, cache=False):
96 def get_by_repo_name(self, repo_name, cache=False):
97 repo = self.sa.query(Repository) \
97 repo = self.sa.query(Repository) \
98 .filter(Repository.repo_name == repo_name)
98 .filter(Repository.repo_name == repo_name)
99
99
100 if cache:
100 if cache:
101 name_key = _hash_key(repo_name)
101 name_key = _hash_key(repo_name)
102 repo = repo.options(
102 repo = repo.options(
103 FromCache("sql_cache_short", f"get_repo_{name_key}"))
103 FromCache("sql_cache_short", f"get_repo_{name_key}"))
104 return repo.scalar()
104 return repo.scalar()
105
105
106 def _extract_id_from_repo_name(self, repo_name):
106 def _extract_id_from_repo_name(self, repo_name):
107 if repo_name.startswith('/'):
107 if repo_name.startswith('/'):
108 repo_name = repo_name.lstrip('/')
108 repo_name = repo_name.lstrip('/')
109 by_id_match = re.match(r'^_(\d+)', repo_name)
109 by_id_match = re.match(r'^_(\d+)', repo_name)
110 if by_id_match:
110 if by_id_match:
111 return by_id_match.groups()[0]
111 return by_id_match.groups()[0]
112
112
113 def get_repo_by_id(self, repo_name):
113 def get_repo_by_id(self, repo_name):
114 """
114 """
115 Looks up a repository by the id embedded in special URLs.
115 Looks up a repository by the id embedded in special URLs.
116 An example URL is _11/repo_name.
116 An example URL is _11/repo_name.
117
117
118 :param repo_name:
118 :param repo_name:
119 :return: repo object if matched else None
119 :return: repo object if matched else None
120 """
120 """
121 _repo_id = None
121 _repo_id = None
122 try:
122 try:
123 _repo_id = self._extract_id_from_repo_name(repo_name)
123 _repo_id = self._extract_id_from_repo_name(repo_name)
124 if _repo_id:
124 if _repo_id:
125 return self.get(_repo_id)
125 return self.get(_repo_id)
126 except Exception:
126 except Exception:
127 log.exception('Failed to extract repo_name from URL')
127 log.exception('Failed to extract repo_name from URL')
128 if _repo_id:
128 if _repo_id:
129 Session().rollback()
129 Session().rollback()
130
130
131 return None
131 return None
132
132
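# Behavior sketch of the `_<id>` permalink parsing implemented above: only
# a leading underscore-number prefix matches, and the rest of the URL is
# ignored. This is a standalone re-implementation for illustration only.
import re

def extract_id(repo_name):
    by_id_match = re.match(r'^_(\d+)', repo_name.lstrip('/'))
    return by_id_match.groups()[0] if by_id_match else None

assert extract_id('_11/repo_name') == '11'
assert extract_id('/_42') == '42'
assert extract_id('regular/repo') is None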
133 def get_repos_for_root(self, root, traverse=False):
133 def get_repos_for_root(self, root, traverse=False):
134 if traverse:
134 if traverse:
135 like_expression = '{}%'.format(safe_str(root))
135 like_expression = '{}%'.format(safe_str(root))
136 repos = Repository.query().filter(
136 repos = Repository.query().filter(
137 Repository.repo_name.like(like_expression)).all()
137 Repository.repo_name.like(like_expression)).all()
138 else:
138 else:
139 if root and not isinstance(root, RepoGroup):
139 if root and not isinstance(root, RepoGroup):
140 raise ValueError(
140 raise ValueError(
141 'Root must be an instance '
141 'Root must be an instance '
142 'of RepoGroup, got: {} instead'.format(type(root)))
142 'of RepoGroup, got: {} instead'.format(type(root)))
143 repos = Repository.query().filter(Repository.group == root).all()
143 repos = Repository.query().filter(Repository.group == root).all()
144 return repos
144 return repos
145
145
146 def get_url(self, repo, request=None, permalink=False):
146 def get_url(self, repo, request=None, permalink=False):
147 if not request:
147 if not request:
148 request = get_current_request()
148 request = get_current_request()
149
149
150 if not request:
150 if not request:
151 return
151 return
152
152
153 if permalink:
153 if permalink:
154 return request.route_url(
154 return request.route_url(
155 'repo_summary', repo_name='_{}'.format(safe_str(repo.repo_id)))
155 'repo_summary', repo_name='_{}'.format(safe_str(repo.repo_id)))
156 else:
156 else:
157 return request.route_url(
157 return request.route_url(
158 'repo_summary', repo_name=safe_str(repo.repo_name))
158 'repo_summary', repo_name=safe_str(repo.repo_name))
159
159
160 def get_commit_url(self, repo, commit_id, request=None, permalink=False):
160 def get_commit_url(self, repo, commit_id, request=None, permalink=False):
161 if not request:
161 if not request:
162 request = get_current_request()
162 request = get_current_request()
163
163
164 if not request:
164 if not request:
165 return
165 return
166
166
167 if permalink:
167 if permalink:
168 return request.route_url(
168 return request.route_url(
169 'repo_commit', repo_name=safe_str(repo.repo_id),
169 'repo_commit', repo_name=safe_str(repo.repo_id),
170 commit_id=commit_id)
170 commit_id=commit_id)
171
171
172 else:
172 else:
173 return request.route_url(
173 return request.route_url(
174 'repo_commit', repo_name=safe_str(repo.repo_name),
174 'repo_commit', repo_name=safe_str(repo.repo_name),
175 commit_id=commit_id)
175 commit_id=commit_id)
176
176
177 def get_repo_log(self, repo, filter_term):
177 def get_repo_log(self, repo, filter_term):
178 repo_log = UserLog.query()\
178 repo_log = UserLog.query()\
179 .filter(or_(UserLog.repository_id == repo.repo_id,
179 .filter(or_(UserLog.repository_id == repo.repo_id,
180 UserLog.repository_name == repo.repo_name))\
180 UserLog.repository_name == repo.repo_name))\
181 .options(joinedload(UserLog.user))\
181 .options(joinedload(UserLog.user))\
182 .options(joinedload(UserLog.repository))\
182 .options(joinedload(UserLog.repository))\
183 .order_by(UserLog.action_date.desc())
183 .order_by(UserLog.action_date.desc())
184
184
185 repo_log = user_log_filter(repo_log, filter_term)
185 repo_log = user_log_filter(repo_log, filter_term)
186 return repo_log
186 return repo_log
187
187
188 @classmethod
188 @classmethod
189 def update_commit_cache(cls, repositories=None):
189 def update_commit_cache(cls, repositories=None):
190 if not repositories:
190 if not repositories:
191 repositories = Repository.getAll()
191 repositories = Repository.getAll()
192 for repo in repositories:
192 for repo in repositories:
193 repo.update_commit_cache()
193 repo.update_commit_cache()
194
194
195 def get_repos_as_dict(self, repo_list=None, admin=False,
195 def get_repos_as_dict(self, repo_list=None, admin=False,
196 super_user_actions=False, short_name=None):
196 super_user_actions=False, short_name=None):
197
197
198 _render = get_current_request().get_partial_renderer(
198 _render = get_current_request().get_partial_renderer(
199 'rhodecode:templates/data_table/_dt_elements.mako')
199 'rhodecode:templates/data_table/_dt_elements.mako')
200 c = _render.get_call_context()
200 c = _render.get_call_context()
201 h = _render.get_helpers()
201 h = _render.get_helpers()
202
202
203 def quick_menu(repo_name):
203 def quick_menu(repo_name):
204 return _render('quick_menu', repo_name)
204 return _render('quick_menu', repo_name)
205
205
206 def repo_lnk(name, rtype, rstate, private, archived, fork_repo_name):
206 def repo_lnk(name, rtype, rstate, private, archived, fork_repo_name):
207 if short_name is not None:
207 if short_name is not None:
208 short_name_var = short_name
208 short_name_var = short_name
209 else:
209 else:
210 short_name_var = not admin
210 short_name_var = not admin
211 return _render('repo_name', name, rtype, rstate, private, archived, fork_repo_name,
211 return _render('repo_name', name, rtype, rstate, private, archived, fork_repo_name,
212 short_name=short_name_var, admin=False)
212 short_name=short_name_var, admin=False)
213
213
214 def last_change(last_change):
214 def last_change(last_change):
215 if admin and isinstance(last_change, datetime.datetime) and not last_change.tzinfo:
215 if admin and isinstance(last_change, datetime.datetime) and not last_change.tzinfo:
216 ts = time.time()
216 ts = time.time()
217 utc_offset = (datetime.datetime.fromtimestamp(ts)
217 utc_offset = (datetime.datetime.fromtimestamp(ts)
218 - datetime.datetime.utcfromtimestamp(ts)).total_seconds()
218 - datetime.datetime.utcfromtimestamp(ts)).total_seconds()
219 last_change = last_change + datetime.timedelta(seconds=utc_offset)
219 last_change = last_change + datetime.timedelta(seconds=utc_offset)
220
220
221 return _render("last_change", last_change)
221 return _render("last_change", last_change)
222
222
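# Standalone sketch of the offset math used in last_change() above: it
# shifts a naive datetime by the server's local UTC offset, assuming the
# stored timestamps are naive UTC values.
import time
import datetime

ts = time.time()
utc_offset = (datetime.datetime.fromtimestamp(ts)
              - datetime.datetime.utcfromtimestamp(ts)).total_seconds()
local_view = datetime.datetime.utcfromtimestamp(ts) + datetime.timedelta(seconds=utc_offset)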
223 def rss_lnk(repo_name):
223 def rss_lnk(repo_name):
224 return _render("rss", repo_name)
224 return _render("rss", repo_name)
225
225
226 def atom_lnk(repo_name):
226 def atom_lnk(repo_name):
227 return _render("atom", repo_name)
227 return _render("atom", repo_name)
228
228
229 def last_rev(repo_name, cs_cache):
229 def last_rev(repo_name, cs_cache):
230 return _render('revision', repo_name, cs_cache.get('revision'),
230 return _render('revision', repo_name, cs_cache.get('revision'),
231 cs_cache.get('raw_id'), cs_cache.get('author'),
231 cs_cache.get('raw_id'), cs_cache.get('author'),
232 cs_cache.get('message'), cs_cache.get('date'))
232 cs_cache.get('message'), cs_cache.get('date'))
233
233
234 def desc(desc):
234 def desc(desc):
235 return _render('repo_desc', desc, c.visual.stylify_metatags)
235 return _render('repo_desc', desc, c.visual.stylify_metatags)
236
236
237 def state(repo_state):
237 def state(repo_state):
238 return _render("repo_state", repo_state)
238 return _render("repo_state", repo_state)
239
239
240 def repo_actions(repo_name):
240 def repo_actions(repo_name):
241 return _render('repo_actions', repo_name, super_user_actions)
241 return _render('repo_actions', repo_name, super_user_actions)
242
242
243 def user_profile(username):
243 def user_profile(username):
244 return _render('user_profile', username)
244 return _render('user_profile', username)
245
245
246 repos_data = []
246 repos_data = []
247 for repo in repo_list:
247 for repo in repo_list:
248 # NOTE(marcink): because we use only the raw column we need to load it like that
248 # NOTE(marcink): because we use only the raw column we need to load it like that
249 changeset_cache = Repository._load_changeset_cache(
249 changeset_cache = Repository._load_changeset_cache(
250 repo.repo_id, repo._changeset_cache)
250 repo.repo_id, repo._changeset_cache)
251
251
252 row = {
252 row = {
253 "menu": quick_menu(repo.repo_name),
253 "menu": quick_menu(repo.repo_name),
254
254
255 "name": repo_lnk(repo.repo_name, repo.repo_type, repo.repo_state,
255 "name": repo_lnk(repo.repo_name, repo.repo_type, repo.repo_state,
256 repo.private, repo.archived, repo.fork_repo_name),
256 repo.private, repo.archived, repo.fork_repo_name),
257
257
258 "desc": desc(h.escape(repo.description)),
258 "desc": desc(h.escape(repo.description)),
259
259
260 "last_change": last_change(repo.updated_on),
260 "last_change": last_change(repo.updated_on),
261
261
262 "last_changeset": last_rev(repo.repo_name, changeset_cache),
262 "last_changeset": last_rev(repo.repo_name, changeset_cache),
263 "last_changeset_raw": changeset_cache.get('revision'),
263 "last_changeset_raw": changeset_cache.get('revision'),
264
264
265 "owner": user_profile(repo.owner_username),
265 "owner": user_profile(repo.owner_username),
266
266
267 "state": state(repo.repo_state),
267 "state": state(repo.repo_state),
268 "rss": rss_lnk(repo.repo_name),
268 "rss": rss_lnk(repo.repo_name),
269 "atom": atom_lnk(repo.repo_name),
269 "atom": atom_lnk(repo.repo_name),
270 }
270 }
271 if admin:
271 if admin:
272 row.update({
272 row.update({
273 "action": repo_actions(repo.repo_name),
273 "action": repo_actions(repo.repo_name),
274 })
274 })
275 repos_data.append(row)
275 repos_data.append(row)
276
276
277 return repos_data
277 return repos_data
278
278
279 def get_repos_data_table(
279 def get_repos_data_table(
280 self, draw, start, limit,
280 self, draw, start, limit,
281 search_q, order_by, order_dir,
281 search_q, order_by, order_dir,
282 auth_user, repo_group_id):
282 auth_user, repo_group_id):
283 from rhodecode.model.scm import RepoList
283 from rhodecode.model.scm import RepoList
284
284
285 _perms = ['repository.read', 'repository.write', 'repository.admin']
285 _perms = ['repository.read', 'repository.write', 'repository.admin']
286
286
287 repos = Repository.query() \
287 repos = Repository.query() \
288 .filter(Repository.group_id == repo_group_id) \
288 .filter(Repository.group_id == repo_group_id) \
289 .all()
289 .all()
290 auth_repo_list = RepoList(
290 auth_repo_list = RepoList(
291 repos, perm_set=_perms,
291 repos, perm_set=_perms,
292 extra_kwargs=dict(user=auth_user))
292 extra_kwargs=dict(user=auth_user))
293
293
294 allowed_ids = [-1]
294 allowed_ids = [-1]
295 for repo in auth_repo_list:
295 for repo in auth_repo_list:
296 allowed_ids.append(repo.repo_id)
296 allowed_ids.append(repo.repo_id)
297
297
298 repos_data_total_count = Repository.query() \
298 repos_data_total_count = Repository.query() \
299 .filter(Repository.group_id == repo_group_id) \
299 .filter(Repository.group_id == repo_group_id) \
300 .filter(or_(
300 .filter(or_(
301 # generate multiple IN to fix limitation problems
301 # generate multiple IN to fix limitation problems
302 *in_filter_generator(Repository.repo_id, allowed_ids))
302 *in_filter_generator(Repository.repo_id, allowed_ids))
303 ) \
303 ) \
304 .count()
304 .count()
305
305
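# Hedged sketch of what in_filter_generator() has to produce for the query
# above: the id list chunked into several column.in_(...) criteria that are
# OR-ed together to stay below backend bind-parameter limits. The chunk
# size of 300 is illustrative, not the helper's actual value.
def in_filter_chunks(column, ids, chunk_size=300):
    for i in range(0, len(ids), chunk_size):
        yield column.in_(ids[i:i + chunk_size])

# usage mirrors the filter above:
#   .filter(or_(*in_filter_chunks(Repository.repo_id, allowed_ids)))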
306 RepoFork = aliased(Repository)
306 RepoFork = aliased(Repository)
307 OwnerUser = aliased(User)
307 OwnerUser = aliased(User)
308 base_q = Session.query(
308 base_q = Session.query(
309 Repository.repo_id,
309 Repository.repo_id,
310 Repository.repo_name,
310 Repository.repo_name,
311 Repository.description,
311 Repository.description,
312 Repository.repo_type,
312 Repository.repo_type,
313 Repository.repo_state,
313 Repository.repo_state,
314 Repository.private,
314 Repository.private,
315 Repository.archived,
315 Repository.archived,
316 Repository.updated_on,
316 Repository.updated_on,
317 Repository._changeset_cache,
317 Repository._changeset_cache,
318 RepoFork.repo_name.label('fork_repo_name'),
318 RepoFork.repo_name.label('fork_repo_name'),
319 OwnerUser.username.label('owner_username'),
319 OwnerUser.username.label('owner_username'),
320 ) \
320 ) \
321 .filter(Repository.group_id == repo_group_id) \
321 .filter(Repository.group_id == repo_group_id) \
322 .filter(or_(
322 .filter(or_(
323 # generate multiple IN to fix limitation problems
323 # generate multiple IN to fix limitation problems
324 *in_filter_generator(Repository.repo_id, allowed_ids))
324 *in_filter_generator(Repository.repo_id, allowed_ids))
325 ) \
325 ) \
326 .outerjoin(RepoFork, Repository.fork_id == RepoFork.repo_id) \
326 .outerjoin(RepoFork, Repository.fork_id == RepoFork.repo_id) \
327 .join(OwnerUser, Repository.user_id == OwnerUser.user_id)
327 .join(OwnerUser, Repository.user_id == OwnerUser.user_id)
328
328
329 repos_data_total_filtered_count = base_q.count()
329 repos_data_total_filtered_count = base_q.count()
330
330
331 sort_defined = False
331 sort_defined = False
332 if order_by == 'repo_name':
332 if order_by == 'repo_name':
333 sort_col = func.lower(Repository.repo_name)
333 sort_col = func.lower(Repository.repo_name)
334 sort_defined = True
334 sort_defined = True
335 elif order_by == 'user_username':
335 elif order_by == 'user_username':
336 sort_col = User.username
336 sort_col = User.username
337 else:
337 else:
338 sort_col = getattr(Repository, order_by, None)
338 sort_col = getattr(Repository, order_by, None)
339
339
340 if sort_defined or sort_col:
340 if sort_defined or sort_col:
341 if order_dir == 'asc':
341 if order_dir == 'asc':
342 sort_col = sort_col.asc()
342 sort_col = sort_col.asc()
343 else:
343 else:
344 sort_col = sort_col.desc()
344 sort_col = sort_col.desc()
345
345
346 base_q = base_q.order_by(sort_col)
346 base_q = base_q.order_by(sort_col)
347 base_q = base_q.offset(start).limit(limit)
347 base_q = base_q.offset(start).limit(limit)
348
348
349 repos_list = base_q.all()
349 repos_list = base_q.all()
350
350
351 repos_data = RepoModel().get_repos_as_dict(
351 repos_data = RepoModel().get_repos_as_dict(
352 repo_list=repos_list, admin=False)
352 repo_list=repos_list, admin=False)
353
353
354 data = ({
354 data = ({
355 'draw': draw,
355 'draw': draw,
356 'data': repos_data,
356 'data': repos_data,
357 'recordsTotal': repos_data_total_count,
357 'recordsTotal': repos_data_total_count,
358 'recordsFiltered': repos_data_total_filtered_count,
358 'recordsFiltered': repos_data_total_filtered_count,
359 })
359 })
360 return data
360 return data
361
361
362 def _get_defaults(self, repo_name):
362 def _get_defaults(self, repo_name):
363 """
363 """
364 Gets information about a repository and returns a dict for
364 Gets information about a repository and returns a dict for
365 usage in forms.
365 usage in forms.
366
366
367 :param repo_name:
367 :param repo_name:
368 """
368 """
369
369
370 repo_info = Repository.get_by_repo_name(repo_name)
370 repo_info = Repository.get_by_repo_name(repo_name)
371
371
372 if repo_info is None:
372 if repo_info is None:
373 return None
373 return None
374
374
375 defaults = repo_info.get_dict()
375 defaults = repo_info.get_dict()
376 defaults['repo_name'] = repo_info.just_name
376 defaults['repo_name'] = repo_info.just_name
377
377
378 groups = repo_info.groups_with_parents
378 groups = repo_info.groups_with_parents
379 parent_group = groups[-1] if groups else None
379 parent_group = groups[-1] if groups else None
380
380
381 # we use -1 as this is how we mark an empty group in HTML
381 # we use -1 as this is how we mark an empty group in HTML
382 defaults['repo_group'] = getattr(parent_group, 'group_id', -1)
382 defaults['repo_group'] = getattr(parent_group, 'group_id', -1)
383
383
384 keys_to_process = (
384 keys_to_process = (
385 {'k': 'repo_type', 'strip': False},
385 {'k': 'repo_type', 'strip': False},
386 {'k': 'repo_enable_downloads', 'strip': True},
386 {'k': 'repo_enable_downloads', 'strip': True},
387 {'k': 'repo_description', 'strip': True},
387 {'k': 'repo_description', 'strip': True},
388 {'k': 'repo_enable_locking', 'strip': True},
388 {'k': 'repo_enable_locking', 'strip': True},
389 {'k': 'repo_landing_rev', 'strip': True},
389 {'k': 'repo_landing_rev', 'strip': True},
390 {'k': 'clone_uri', 'strip': False},
390 {'k': 'clone_uri', 'strip': False},
391 {'k': 'push_uri', 'strip': False},
391 {'k': 'push_uri', 'strip': False},
392 {'k': 'repo_private', 'strip': True},
392 {'k': 'repo_private', 'strip': True},
393 {'k': 'repo_enable_statistics', 'strip': True}
393 {'k': 'repo_enable_statistics', 'strip': True}
394 )
394 )
395
395
396 for item in keys_to_process:
396 for item in keys_to_process:
397 attr = item['k']
397 attr = item['k']
398 if item['strip']:
398 if item['strip']:
399 attr = remove_prefix(item['k'], 'repo_')
399 attr = remove_prefix(item['k'], 'repo_')
400
400
401 val = defaults[attr]
401 val = defaults[attr]
402 if item['k'] == 'repo_landing_rev':
402 if item['k'] == 'repo_landing_rev':
403 val = ':'.join(defaults[attr])
403 val = ':'.join(defaults[attr])
404 defaults[item['k']] = val
404 defaults[item['k']] = val
405 if item['k'] == 'clone_uri':
405 if item['k'] == 'clone_uri':
406 defaults['clone_uri_hidden'] = repo_info.clone_uri_hidden
406 defaults['clone_uri_hidden'] = repo_info.clone_uri_hidden
407 if item['k'] == 'push_uri':
407 if item['k'] == 'push_uri':
408 defaults['push_uri_hidden'] = repo_info.push_uri_hidden
408 defaults['push_uri_hidden'] = repo_info.push_uri_hidden
409
409
410 # fill owner
410 # fill owner
411 if repo_info.user:
411 if repo_info.user:
412 defaults.update({'user': repo_info.user.username})
412 defaults.update({'user': repo_info.user.username})
413 else:
413 else:
414 replacement_user = User.get_first_super_admin().username
414 replacement_user = User.get_first_super_admin().username
415 defaults.update({'user': replacement_user})
415 defaults.update({'user': replacement_user})
416
416
417 return defaults
417 return defaults
418
418
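# The strip flag in keys_to_process maps form keys onto model attribute
# names by dropping the 'repo_' prefix; a minimal equivalent of how
# remove_prefix is used here (an assumption about that helper's behavior):
def strip_repo_prefix(key, prefix='repo_'):
    return key[len(prefix):] if key.startswith(prefix) else key

assert strip_repo_prefix('repo_description') == 'description'
assert strip_repo_prefix('clone_uri') == 'clone_uri'  # strip=False keys pass through unchanged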
419 def update(self, repo, **kwargs):
419 def update(self, repo, **kwargs):
420 try:
420 try:
421 cur_repo = self._get_repo(repo)
421 cur_repo = self._get_repo(repo)
422 source_repo_name = cur_repo.repo_name
422 source_repo_name = cur_repo.repo_name
423
423
424 affected_user_ids = []
424 affected_user_ids = []
425 if 'user' in kwargs:
425 if 'user' in kwargs:
426 old_owner_id = cur_repo.user.user_id
426 old_owner_id = cur_repo.user.user_id
427 new_owner = User.get_by_username(kwargs['user'])
427 new_owner = User.get_by_username(kwargs['user'])
428 cur_repo.user = new_owner
428 cur_repo.user = new_owner
429
429
430 if old_owner_id != new_owner.user_id:
430 if old_owner_id != new_owner.user_id:
431 affected_user_ids = [new_owner.user_id, old_owner_id]
431 affected_user_ids = [new_owner.user_id, old_owner_id]
432
432
433 if 'repo_group' in kwargs:
433 if 'repo_group' in kwargs:
434 cur_repo.group = RepoGroup.get(kwargs['repo_group'])
434 cur_repo.group = RepoGroup.get(kwargs['repo_group'])
435 log.debug('Updating repo %s with params:%s', cur_repo, kwargs)
435 log.debug('Updating repo %s with params:%s', cur_repo, kwargs)
436
436
437 update_keys = [
437 update_keys = [
438 (1, 'repo_description'),
438 (1, 'repo_description'),
439 (1, 'repo_landing_rev'),
439 (1, 'repo_landing_rev'),
440 (1, 'repo_private'),
440 (1, 'repo_private'),
441 (1, 'repo_enable_downloads'),
441 (1, 'repo_enable_downloads'),
442 (1, 'repo_enable_locking'),
442 (1, 'repo_enable_locking'),
443 (1, 'repo_enable_statistics'),
443 (1, 'repo_enable_statistics'),
444 (0, 'clone_uri'),
444 (0, 'clone_uri'),
445 (0, 'push_uri'),
445 (0, 'push_uri'),
446 (0, 'fork_id')
446 (0, 'fork_id')
447 ]
447 ]
448 for strip, k in update_keys:
448 for strip, k in update_keys:
449 if k in kwargs:
449 if k in kwargs:
450 val = kwargs[k]
450 val = kwargs[k]
451 if strip:
451 if strip:
452 k = remove_prefix(k, 'repo_')
452 k = remove_prefix(k, 'repo_')
453
453
454 setattr(cur_repo, k, val)
454 setattr(cur_repo, k, val)
455
455
456 new_name = source_repo_name
456 new_name = source_repo_name
457 if 'repo_name' in kwargs:
457 if 'repo_name' in kwargs:
458 new_name = cur_repo.get_new_name(kwargs['repo_name'])
458 new_name = cur_repo.get_new_name(kwargs['repo_name'])
459 cur_repo.repo_name = new_name
459 cur_repo.repo_name = new_name
460
460
461 if 'repo_private' in kwargs:
461 if 'repo_private' in kwargs:
462 # if private flag is set to True, reset default permission to NONE
462 # if private flag is set to True, reset default permission to NONE
463 set_private_to = kwargs.get('repo_private')
463 set_private_to = kwargs.get('repo_private')
464 if set_private_to:
464 if set_private_to:
465 EMPTY_PERM = 'repository.none'
465 EMPTY_PERM = 'repository.none'
466 RepoModel().grant_user_permission(
466 RepoModel().grant_user_permission(
467 repo=cur_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM
467 repo=cur_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM
468 )
468 )
469 if set_private_to != cur_repo.private:
469 if set_private_to != cur_repo.private:
470 # NOTE(dan): when we change the repo private mode we need to notify all USERS;
470 # NOTE(dan): when we change the repo private mode we need to notify all USERS;
471 # this is detected by the value being set to a different value than it was before
471 # this is detected by the value being set to a different value than it was before
472 affected_user_ids = User.get_all_user_ids()
472 affected_user_ids = User.get_all_user_ids()
473
473
474 if kwargs.get('repo_landing_rev'):
474 if kwargs.get('repo_landing_rev'):
475 landing_rev_val = kwargs['repo_landing_rev']
475 landing_rev_val = kwargs['repo_landing_rev']
476 RepoModel().set_landing_rev(cur_repo, landing_rev_val)
476 RepoModel().set_landing_rev(cur_repo, landing_rev_val)
477
477
478 # handle extra fields
478 # handle extra fields
479 for field in filter(lambda k: k.startswith(RepositoryField.PREFIX), kwargs):
479 for field in filter(lambda k: k.startswith(RepositoryField.PREFIX), kwargs):
480 k = RepositoryField.un_prefix_key(field)
480 k = RepositoryField.un_prefix_key(field)
481 ex_field = RepositoryField.get_by_key_name(
481 ex_field = RepositoryField.get_by_key_name(
482 key=k, repo=cur_repo)
482 key=k, repo=cur_repo)
483 if ex_field:
483 if ex_field:
484 ex_field.field_value = kwargs[field]
484 ex_field.field_value = kwargs[field]
485 self.sa.add(ex_field)
485 self.sa.add(ex_field)
486
486
487 self.sa.add(cur_repo)
487 self.sa.add(cur_repo)
488
488
489 if source_repo_name != new_name:
489 if source_repo_name != new_name:
490 # rename repository
490 # rename repository
491 self._rename_filesystem_repo(
491 self._rename_filesystem_repo(
492 old=source_repo_name, new=new_name)
492 old=source_repo_name, new=new_name)
493
493
494 if affected_user_ids:
494 if affected_user_ids:
495 PermissionModel().trigger_permission_flush(affected_user_ids)
495 PermissionModel().trigger_permission_flush(affected_user_ids)
496
496
497 return cur_repo
497 return cur_repo
498 except Exception:
498 except Exception:
499 log.error(traceback.format_exc())
499 log.error(traceback.format_exc())
500 raise
500 raise
501
501
502 def _create_repo(self, repo_name, repo_type, description, owner,
502 def _create_repo(self, repo_name, repo_type, description, owner,
503 private=False, clone_uri=None, repo_group=None,
503 private=False, clone_uri=None, repo_group=None,
504 landing_rev=None, fork_of=None,
504 landing_rev=None, fork_of=None,
505 copy_fork_permissions=False, enable_statistics=False,
505 copy_fork_permissions=False, enable_statistics=False,
506 enable_locking=False, enable_downloads=False,
506 enable_locking=False, enable_downloads=False,
507 copy_group_permissions=False,
507 copy_group_permissions=False,
508 state=Repository.STATE_PENDING):
508 state=Repository.STATE_PENDING):
509 """
509 """
510 Create a repository inside the database with PENDING state; this should
510 Create a repository inside the database with PENDING state; this should
511 only be executed by create(), with the exception of importing existing
511 only be executed by create(), with the exception of importing existing
512 repos.
512 repos.
513 """
513 """
514 from rhodecode.model.scm import ScmModel
514 from rhodecode.model.scm import ScmModel
515
515
516 owner = self._get_user(owner)
516 owner = self._get_user(owner)
517 fork_of = self._get_repo(fork_of)
517 fork_of = self._get_repo(fork_of)
518 repo_group = self._get_repo_group(safe_int(repo_group))
518 repo_group = self._get_repo_group(safe_int(repo_group))
519 default_landing_ref, _lbl = ScmModel.backend_landing_ref(repo_type)
519 default_landing_ref, _lbl = ScmModel.backend_landing_ref(repo_type)
520 landing_rev = landing_rev or default_landing_ref
520 landing_rev = landing_rev or default_landing_ref
521
521
522 try:
522 try:
523 repo_name = safe_str(repo_name)
523 repo_name = safe_str(repo_name)
524 description = safe_str(description)
524 description = safe_str(description)
525 # repo name is just a name of repository
525 # repo name is just a name of repository
526 # while repo_name_full is a full qualified name that is combined
526 # while repo_name_full is a full qualified name that is combined
527 # with name and path of group
527 # with name and path of group
528 repo_name_full = repo_name
528 repo_name_full = repo_name
529 repo_name = repo_name.split(Repository.NAME_SEP)[-1]
529 repo_name = repo_name.split(Repository.NAME_SEP)[-1]
530
530
531 new_repo = Repository()
531 new_repo = Repository()
532 new_repo.repo_state = state
532 new_repo.repo_state = state
533 new_repo.enable_statistics = False
533 new_repo.enable_statistics = False
534 new_repo.repo_name = repo_name_full
534 new_repo.repo_name = repo_name_full
535 new_repo.repo_type = repo_type
535 new_repo.repo_type = repo_type
536 new_repo.user = owner
536 new_repo.user = owner
537 new_repo.group = repo_group
537 new_repo.group = repo_group
538 new_repo.description = description or repo_name
538 new_repo.description = description or repo_name
539 new_repo.private = private
539 new_repo.private = private
540 new_repo.archived = False
540 new_repo.archived = False
541 new_repo.clone_uri = clone_uri
541 new_repo.clone_uri = clone_uri
542 new_repo.landing_rev = landing_rev
542 new_repo.landing_rev = landing_rev
543
543
544 new_repo.enable_statistics = enable_statistics
544 new_repo.enable_statistics = enable_statistics
545 new_repo.enable_locking = enable_locking
545 new_repo.enable_locking = enable_locking
546 new_repo.enable_downloads = enable_downloads
546 new_repo.enable_downloads = enable_downloads
547
547
548 if repo_group:
548 if repo_group:
549 new_repo.enable_locking = repo_group.enable_locking
549 new_repo.enable_locking = repo_group.enable_locking
550
550
551 if fork_of:
551 if fork_of:
552 parent_repo = fork_of
552 parent_repo = fork_of
553 new_repo.fork = parent_repo
553 new_repo.fork = parent_repo
554
554
555 events.trigger(events.RepoPreCreateEvent(new_repo))
555 events.trigger(events.RepoPreCreateEvent(new_repo))
556
556
557 self.sa.add(new_repo)
557 self.sa.add(new_repo)
558
558
559 EMPTY_PERM = 'repository.none'
559 EMPTY_PERM = 'repository.none'
560 if fork_of and copy_fork_permissions:
560 if fork_of and copy_fork_permissions:
561 repo = fork_of
561 repo = fork_of
562 user_perms = UserRepoToPerm.query() \
562 user_perms = UserRepoToPerm.query() \
563 .filter(UserRepoToPerm.repository == repo).all()
563 .filter(UserRepoToPerm.repository == repo).all()
564 group_perms = UserGroupRepoToPerm.query() \
564 group_perms = UserGroupRepoToPerm.query() \
565 .filter(UserGroupRepoToPerm.repository == repo).all()
565 .filter(UserGroupRepoToPerm.repository == repo).all()
566
566
567 for perm in user_perms:
567 for perm in user_perms:
568 UserRepoToPerm.create(
568 UserRepoToPerm.create(
569 perm.user, new_repo, perm.permission)
569 perm.user, new_repo, perm.permission)
570
570
571 for perm in group_perms:
571 for perm in group_perms:
572 UserGroupRepoToPerm.create(
572 UserGroupRepoToPerm.create(
573 perm.users_group, new_repo, perm.permission)
573 perm.users_group, new_repo, perm.permission)
574 # in case we copy permissions and also set this repo to private,
574 # in case we copy permissions and also set this repo to private,
575 # override the default user permission to make it a private repo
575 # override the default user permission to make it a private repo
576 if private:
576 if private:
577 RepoModel(self.sa).grant_user_permission(
577 RepoModel(self.sa).grant_user_permission(
578 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
578 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
579
579
580 elif repo_group and copy_group_permissions:
580 elif repo_group and copy_group_permissions:
581 user_perms = UserRepoGroupToPerm.query() \
581 user_perms = UserRepoGroupToPerm.query() \
582 .filter(UserRepoGroupToPerm.group == repo_group).all()
582 .filter(UserRepoGroupToPerm.group == repo_group).all()
583
583
584 group_perms = UserGroupRepoGroupToPerm.query() \
584 group_perms = UserGroupRepoGroupToPerm.query() \
585 .filter(UserGroupRepoGroupToPerm.group == repo_group).all()
585 .filter(UserGroupRepoGroupToPerm.group == repo_group).all()
586
586
587 for perm in user_perms:
587 for perm in user_perms:
588 perm_name = perm.permission.permission_name.replace(
588 perm_name = perm.permission.permission_name.replace(
589 'group.', 'repository.')
589 'group.', 'repository.')
590 perm_obj = Permission.get_by_key(perm_name)
590 perm_obj = Permission.get_by_key(perm_name)
591 UserRepoToPerm.create(perm.user, new_repo, perm_obj)
591 UserRepoToPerm.create(perm.user, new_repo, perm_obj)
592
592
593 for perm in group_perms:
593 for perm in group_perms:
594 perm_name = perm.permission.permission_name.replace(
594 perm_name = perm.permission.permission_name.replace(
595 'group.', 'repository.')
595 'group.', 'repository.')
596 perm_obj = Permission.get_by_key(perm_name)
596 perm_obj = Permission.get_by_key(perm_name)
597 UserGroupRepoToPerm.create(perm.users_group, new_repo, perm_obj)
597 UserGroupRepoToPerm.create(perm.users_group, new_repo, perm_obj)
598
598
599 if private:
599 if private:
600 RepoModel(self.sa).grant_user_permission(
600 RepoModel(self.sa).grant_user_permission(
601 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
601 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
602
602
603 else:
603 else:
604 perm_obj = self._create_default_perms(new_repo, private)
604 perm_obj = self._create_default_perms(new_repo, private)
605 self.sa.add(perm_obj)
605 self.sa.add(perm_obj)
606
606
607 # now automatically start following this repository as owner
607 # now automatically start following this repository as owner
608 ScmModel(self.sa).toggle_following_repo(new_repo.repo_id, owner.user_id)
608 ScmModel(self.sa).toggle_following_repo(new_repo.repo_id, owner.user_id)
609
609
610 # we need to flush here in order to check if the database won't
610 # we need to flush here in order to check if the database won't
611 # throw any exceptions; filesystem dirs are created at the very end
611 # throw any exceptions; filesystem dirs are created at the very end
612 self.sa.flush()
612 self.sa.flush()
613 events.trigger(events.RepoCreateEvent(new_repo, actor=owner))
613 events.trigger(events.RepoCreateEvent(new_repo, actor=owner))
614 return new_repo
614 return new_repo
615
615
616 except Exception:
616 except Exception:
617 log.error(traceback.format_exc())
617 log.error(traceback.format_exc())
618 raise
618 raise
619
619
620 def create(self, form_data, cur_user):
620 def create(self, form_data, cur_user):
621 """
621 """
622 Create repository using celery tasks
622 Create repository using celery tasks
623
623
624 :param form_data:
624 :param form_data:
625 :param cur_user:
625 :param cur_user:
626 """
626 """
627 from rhodecode.lib.celerylib import tasks, run_task
627 from rhodecode.lib.celerylib import tasks, run_task
628 return run_task(tasks.create_repo, form_data, cur_user)
628 return run_task(tasks.create_repo, form_data, cur_user)
629
629
630 def update_permissions(self, repo, perm_additions=None, perm_updates=None,
630 def update_permissions(self, repo, perm_additions=None, perm_updates=None,
631 perm_deletions=None, check_perms=True,
631 perm_deletions=None, check_perms=True,
632 cur_user=None):
632 cur_user=None):
633 if not perm_additions:
633 if not perm_additions:
634 perm_additions = []
634 perm_additions = []
635 if not perm_updates:
635 if not perm_updates:
636 perm_updates = []
636 perm_updates = []
637 if not perm_deletions:
637 if not perm_deletions:
638 perm_deletions = []
638 perm_deletions = []
639
639
640 req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin')
640 req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin')
641
641
642 changes = {
642 changes = {
643 'added': [],
643 'added': [],
644 'updated': [],
644 'updated': [],
645 'deleted': [],
645 'deleted': [],
646 'default_user_changed': None
646 'default_user_changed': None
647 }
647 }
648
648
649 repo = self._get_repo(repo)
649 repo = self._get_repo(repo)
650
650
651 # update permissions
651 # update permissions
652 for member_id, perm, member_type in perm_updates:
652 for member_id, perm, member_type in perm_updates:
653 member_id = int(member_id)
653 member_id = int(member_id)
654 if member_type == 'user':
654 if member_type == 'user':
655 member_name = User.get(member_id).username
655 member_name = User.get(member_id).username
656 if member_name == User.DEFAULT_USER:
656 if member_name == User.DEFAULT_USER:
657 # NOTE(dan): detect if we changed permissions for default user
657 # NOTE(dan): detect if we changed permissions for default user
658 perm_obj = self.sa.query(UserRepoToPerm) \
658 perm_obj = self.sa.query(UserRepoToPerm) \
659 .filter(UserRepoToPerm.user_id == member_id) \
659 .filter(UserRepoToPerm.user_id == member_id) \
660 .filter(UserRepoToPerm.repository == repo) \
660 .filter(UserRepoToPerm.repository == repo) \
661 .scalar()
661 .scalar()
662 if perm_obj and perm_obj.permission.permission_name != perm:
662 if perm_obj and perm_obj.permission.permission_name != perm:
663 changes['default_user_changed'] = True
663 changes['default_user_changed'] = True
664
664
665 # this updates also current one if found
665 # this updates also current one if found
666 self.grant_user_permission(
666 self.grant_user_permission(
667 repo=repo, user=member_id, perm=perm)
667 repo=repo, user=member_id, perm=perm)
668 elif member_type == 'user_group':
668 elif member_type == 'user_group':
669 # check if we have permissions to alter this usergroup
669 # check if we have permissions to alter this usergroup
670 member_name = UserGroup.get(member_id).users_group_name
670 member_name = UserGroup.get(member_id).users_group_name
671 if not check_perms or HasUserGroupPermissionAny(
671 if not check_perms or HasUserGroupPermissionAny(
672 *req_perms)(member_name, user=cur_user):
672 *req_perms)(member_name, user=cur_user):
673 self.grant_user_group_permission(
673 self.grant_user_group_permission(
674 repo=repo, group_name=member_id, perm=perm)
674 repo=repo, group_name=member_id, perm=perm)
675 else:
675 else:
676 raise ValueError("member_type must be 'user' or 'user_group' "
676 raise ValueError("member_type must be 'user' or 'user_group' "
677 "got {} instead".format(member_type))
677 "got {} instead".format(member_type))
678 changes['updated'].append({'type': member_type, 'id': member_id,
678 changes['updated'].append({'type': member_type, 'id': member_id,
679 'name': member_name, 'new_perm': perm})
679 'name': member_name, 'new_perm': perm})
680
680
681 # set new permissions
681 # set new permissions
682 for member_id, perm, member_type in perm_additions:
682 for member_id, perm, member_type in perm_additions:
683 member_id = int(member_id)
683 member_id = int(member_id)
684 if member_type == 'user':
684 if member_type == 'user':
685 member_name = User.get(member_id).username
685 member_name = User.get(member_id).username
686 self.grant_user_permission(
686 self.grant_user_permission(
687 repo=repo, user=member_id, perm=perm)
687 repo=repo, user=member_id, perm=perm)
688 elif member_type == 'user_group':
688 elif member_type == 'user_group':
689 # check if we have permissions to alter this usergroup
689 # check if we have permissions to alter this usergroup
690 member_name = UserGroup.get(member_id).users_group_name
690 member_name = UserGroup.get(member_id).users_group_name
691 if not check_perms or HasUserGroupPermissionAny(
691 if not check_perms or HasUserGroupPermissionAny(
692 *req_perms)(member_name, user=cur_user):
692 *req_perms)(member_name, user=cur_user):
693 self.grant_user_group_permission(
693 self.grant_user_group_permission(
694 repo=repo, group_name=member_id, perm=perm)
694 repo=repo, group_name=member_id, perm=perm)
695 else:
695 else:
696 raise ValueError("member_type must be 'user' or 'user_group' "
696 raise ValueError("member_type must be 'user' or 'user_group' "
697 "got {} instead".format(member_type))
697 "got {} instead".format(member_type))
698
698
699 changes['added'].append({'type': member_type, 'id': member_id,
699 changes['added'].append({'type': member_type, 'id': member_id,
700 'name': member_name, 'new_perm': perm})
700 'name': member_name, 'new_perm': perm})
701 # delete permissions
701 # delete permissions
702 for member_id, perm, member_type in perm_deletions:
702 for member_id, perm, member_type in perm_deletions:
703 member_id = int(member_id)
703 member_id = int(member_id)
704 if member_type == 'user':
704 if member_type == 'user':
705 member_name = User.get(member_id).username
705 member_name = User.get(member_id).username
706 self.revoke_user_permission(repo=repo, user=member_id)
706 self.revoke_user_permission(repo=repo, user=member_id)
707 elif member_type == 'user_group':
707 elif member_type == 'user_group':
708 # check if we have permissions to alter this usergroup
708 # check if we have permissions to alter this usergroup
709 member_name = UserGroup.get(member_id).users_group_name
709 member_name = UserGroup.get(member_id).users_group_name
710 if not check_perms or HasUserGroupPermissionAny(
710 if not check_perms or HasUserGroupPermissionAny(
711 *req_perms)(member_name, user=cur_user):
711 *req_perms)(member_name, user=cur_user):
712 self.revoke_user_group_permission(
712 self.revoke_user_group_permission(
713 repo=repo, group_name=member_id)
713 repo=repo, group_name=member_id)
714 else:
714 else:
715 raise ValueError("member_type must be 'user' or 'user_group' "
715 raise ValueError("member_type must be 'user' or 'user_group' "
716 "got {} instead".format(member_type))
716 "got {} instead".format(member_type))
717
717
718 changes['deleted'].append({'type': member_type, 'id': member_id,
718 changes['deleted'].append({'type': member_type, 'id': member_id,
719 'name': member_name, 'new_perm': perm})
719 'name': member_name, 'new_perm': perm})
720 return changes
720 return changes
721
721
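    # Hedged usage sketch for `update_permissions` above (ids and names are
    # hypothetical): each entry of `perm_additions`, `perm_updates` and
    # `perm_deletions` is a `(member_id, perm, member_type)` triple:
    #
    #   changes = RepoModel().update_permissions(
    #       repo='my-repo',
    #       perm_additions=[('2', 'repository.write', 'user')],
    #       perm_deletions=[('5', 'repository.read', 'user_group')],
    #       cur_user=my_admin_user)
    #   assert changes['added'][0]['new_perm'] == 'repository.write'
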
    def create_fork(self, form_data, cur_user):
        """
        Simple wrapper that executes a celery task for fork creation

        :param form_data:
        :param cur_user:
        """
        from rhodecode.lib.celerylib import tasks, run_task
        return run_task(tasks.create_repo_fork, form_data, cur_user)

    def archive(self, repo):
        """
        Archive given repository. Set archive flag.

        :param repo:
        """
        repo = self._get_repo(repo)
        if repo:

            try:
                repo.archived = True
                self.sa.add(repo)
                self.sa.commit()
            except Exception:
                log.error(traceback.format_exc())
                raise

    def delete(self, repo, forks=None, pull_requests=None, artifacts=None, fs_remove=True, cur_user=None, call_events=True):
        """
        Delete given repository; the `forks` parameter defines what to do with
        attached forks. Throws AttachedForksError if the deleted repo has
        attached forks.

        :param repo:
        :param forks: str 'delete' or 'detach'
        :param pull_requests: str 'delete' or None
        :param artifacts: str 'delete' or None
        :param fs_remove: remove(archive) repo from filesystem
        """
        if not cur_user:
            cur_user = getattr(get_current_rhodecode_user(), 'username', None)
        repo = self._get_repo(repo)
        if not repo:
            return False

        if forks == 'detach':
            for r in repo.forks:
                r.fork = None
                self.sa.add(r)
        elif forks == 'delete':
            for r in repo.forks:
                self.delete(r, forks='delete')
        elif [f for f in repo.forks]:
            raise AttachedForksError()

        # check for pull requests
        pr_sources = repo.pull_requests_source
        pr_targets = repo.pull_requests_target
        if pull_requests != 'delete' and (pr_sources or pr_targets):
            raise AttachedPullRequestsError()

        artifacts_objs = repo.artifacts
        if artifacts == 'delete':
            for a in artifacts_objs:
                self.sa.delete(a)
        elif [a for a in artifacts_objs]:
            raise AttachedArtifactsError()

        old_repo_dict = repo.get_dict()
        if call_events:
            events.trigger(events.RepoPreDeleteEvent(repo))

        try:
            self.sa.delete(repo)
            if fs_remove:
                self._delete_filesystem_repo(repo)
            else:
                log.debug('skipping removal from filesystem')
            old_repo_dict.update({
                'deleted_by': cur_user,
                'deleted_on': time.time(),
            })
            if call_events:
                hooks_base.delete_repository(**old_repo_dict)
                events.trigger(events.RepoDeleteEvent(repo))
        except Exception:
            log.error(traceback.format_exc())
            raise

        return True

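    # Hedged sketch for `delete` above (repo name hypothetical): forks must be
    # detached or deleted explicitly, otherwise AttachedForksError is raised;
    # the same pattern applies to pull requests and artifacts:
    #
    #   RepoModel().delete('my-repo', forks='detach', pull_requests='delete')
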
    def grant_user_permission(self, repo, user, perm):
        """
        Grant permission for user on given repository, or update existing one
        if found

        :param repo: Instance of Repository, repository_id, or repository name
        :param user: Instance of User, user_id or username
        :param perm: Instance of Permission, or permission_name
        """
        user = self._get_user(user)
        repo = self._get_repo(repo)
        permission = self._get_perm(perm)

        # check if we have that permission already
        obj = self.sa.query(UserRepoToPerm) \
            .filter(UserRepoToPerm.user == user) \
            .filter(UserRepoToPerm.repository == repo) \
            .scalar()
        if obj is None:
            # create new !
            obj = UserRepoToPerm()
        obj.repository = repo
        obj.user = user
        obj.permission = permission
        self.sa.add(obj)
        log.debug('Granted perm %s to %s on %s', perm, user, repo)
        action_logger_generic(
            'granted permission: {} to user: {} on repo: {}'.format(
                perm, user, repo), namespace='security.repo')
        return obj

    def revoke_user_permission(self, repo, user):
        """
        Revoke permission for user on given repository

        :param repo: Instance of Repository, repository_id, or repository name
        :param user: Instance of User, user_id or username
        """

        user = self._get_user(user)
        repo = self._get_repo(repo)

        obj = self.sa.query(UserRepoToPerm) \
            .filter(UserRepoToPerm.repository == repo) \
            .filter(UserRepoToPerm.user == user) \
            .scalar()
        if obj:
            self.sa.delete(obj)
            log.debug('Revoked perm on %s on %s', repo, user)
            action_logger_generic(
                'revoked permission from user: {} on repo: {}'.format(
                    user, repo), namespace='security.repo')

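    # Hedged example for the grant/revoke pair above (names hypothetical);
    # all arguments accept either model instances, ids or names:
    #
    #   model = RepoModel()
    #   model.grant_user_permission(repo='my-repo', user='bob',
    #                               perm='repository.read')
    #   model.revoke_user_permission(repo='my-repo', user='bob')
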
    def grant_user_group_permission(self, repo, group_name, perm):
        """
        Grant permission for user group on given repository, or update
        existing one if found

        :param repo: Instance of Repository, repository_id, or repository name
        :param group_name: Instance of UserGroup, users_group_id,
            or user group name
        :param perm: Instance of Permission, or permission_name
        """
        repo = self._get_repo(repo)
        group_name = self._get_user_group(group_name)
        permission = self._get_perm(perm)

        # check if we have that permission already
        obj = self.sa.query(UserGroupRepoToPerm) \
            .filter(UserGroupRepoToPerm.users_group == group_name) \
            .filter(UserGroupRepoToPerm.repository == repo) \
            .scalar()

        if obj is None:
            # create new
            obj = UserGroupRepoToPerm()

        obj.repository = repo
        obj.users_group = group_name
        obj.permission = permission
        self.sa.add(obj)
        log.debug('Granted perm %s to %s on %s', perm, group_name, repo)
        action_logger_generic(
            'granted permission: {} to usergroup: {} on repo: {}'.format(
                perm, group_name, repo), namespace='security.repo')

        return obj

    def revoke_user_group_permission(self, repo, group_name):
        """
        Revoke permission for user group on given repository

        :param repo: Instance of Repository, repository_id, or repository name
        :param group_name: Instance of UserGroup, users_group_id,
            or user group name
        """
        repo = self._get_repo(repo)
        group_name = self._get_user_group(group_name)

        obj = self.sa.query(UserGroupRepoToPerm) \
            .filter(UserGroupRepoToPerm.repository == repo) \
            .filter(UserGroupRepoToPerm.users_group == group_name) \
            .scalar()
        if obj:
            self.sa.delete(obj)
            log.debug('Revoked perm to %s on %s', repo, group_name)
            action_logger_generic(
                'revoked permission from usergroup: {} on repo: {}'.format(
                    group_name, repo), namespace='security.repo')

    def delete_stats(self, repo_name):
        """
        removes stats for given repo

        :param repo_name:
        """
        repo = self._get_repo(repo_name)
        try:
            obj = self.sa.query(Statistics) \
                .filter(Statistics.repository == repo).scalar()
            if obj:
                self.sa.delete(obj)
        except Exception:
            log.error(traceback.format_exc())
            raise

    def add_repo_field(self, repo_name, field_key, field_label, field_value='',
                       field_type='str', field_desc=''):

        repo = self._get_repo(repo_name)

        new_field = RepositoryField()
        new_field.repository = repo
        new_field.field_key = field_key
        new_field.field_type = field_type  # python type
        new_field.field_value = field_value
        new_field.field_desc = field_desc
        new_field.field_label = field_label
        self.sa.add(new_field)
        return new_field

    def delete_repo_field(self, repo_name, field_key):
        repo = self._get_repo(repo_name)
        field = RepositoryField.get_by_key_name(field_key, repo)
        if field:
            self.sa.delete(field)

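    # Hedged sketch for the repo-field helpers above (key and label are
    # hypothetical): extra fields are typed key/value records on a repo:
    #
    #   RepoModel().add_repo_field(
    #       'my-repo', field_key='ticket_system', field_label='Ticket system',
    #       field_value='JIRA', field_type='str')
    #   RepoModel().delete_repo_field('my-repo', field_key='ticket_system')
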
    def set_landing_rev(self, repo, landing_rev_name):
        if landing_rev_name.startswith('branch:'):
            landing_rev_name = landing_rev_name.split('branch:')[-1]
        scm_instance = repo.scm_instance()
        if scm_instance:
            return scm_instance._remote.set_head_ref(landing_rev_name)

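    # Hedged example for `set_landing_rev` above: a 'branch:' prefixed ref is
    # reduced to the plain branch name first, so these two calls are
    # equivalent (branch name hypothetical):
    #
    #   RepoModel().set_landing_rev(repo, 'branch:develop')
    #   RepoModel().set_landing_rev(repo, 'develop')
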
    def _create_filesystem_repo(self, repo_name, repo_type, repo_group,
                                clone_uri=None, repo_store_location=None,
                                use_global_config=False, install_hooks=True):
        """
        Makes a repository on the filesystem. It's group aware, meaning it'll
        create a repository within a group, and alter the paths accordingly
        to the group location.

        :param repo_name:
        :param repo_type:
        :param repo_group:
        :param clone_uri:
        :param repo_store_location:
        """
        from rhodecode.lib.utils import is_valid_repo, is_valid_repo_group
        from rhodecode.model.scm import ScmModel

        if Repository.NAME_SEP in repo_name:
            raise ValueError(
                'repo_name must not contain groups, got `%s`' % repo_name)

        if isinstance(repo_group, RepoGroup):
            new_parent_path = os.sep.join(repo_group.full_path_splitted)
        else:
            new_parent_path = repo_group or ''

        if repo_store_location:
            _paths = [repo_store_location]
        else:
            _paths = [self.repos_path, new_parent_path, repo_name]
        # we need to make it str for mercurial
        repo_path = os.path.join(*map(lambda x: safe_str(x), _paths))

        # check if this path is not a repository
        if is_valid_repo(repo_path, self.repos_path):
            raise Exception(f'This path {repo_path} is a valid repository')

        # check if this path is a group
        if is_valid_repo_group(repo_path, self.repos_path):
            raise Exception(f'This path {repo_path} is a valid group')

        log.info('creating repo %s in %s from url: `%s`',
                 repo_name, safe_str(repo_path),
                 obfuscate_url_pw(clone_uri))

        backend = get_backend(repo_type)

        config_repo = None if use_global_config else repo_name
        if config_repo and new_parent_path:
            config_repo = Repository.NAME_SEP.join(
                (new_parent_path, config_repo))
        config = make_db_config(clear_session=False, repo=config_repo)
        config.set('extensions', 'largefiles', '')

        # patch and reset hooks section of UI config to not run any
        # hooks on creating remote repo
        config.clear_section('hooks')

        # TODO: johbo: Unify this, hardcoded "bare=True" does not look nice
        if repo_type == 'git':
            repo = backend(
                repo_path, config=config, create=True, src_url=clone_uri, bare=True,
                with_wire={"cache": False})
        else:
            repo = backend(
                repo_path, config=config, create=True, src_url=clone_uri,
                with_wire={"cache": False})

        if install_hooks:
            repo.install_hooks()

        log.debug('Created repo %s with %s backend',
                  safe_str(repo_name), safe_str(repo_type))
        return repo

    def _rename_filesystem_repo(self, old, new):
        """
        renames repository on filesystem

        :param old: old name
        :param new: new name
        """
        log.info('renaming repo from %s to %s', old, new)

        old_path = os.path.join(self.repos_path, old)
        new_path = os.path.join(self.repos_path, new)
        if os.path.isdir(new_path):
            raise Exception(
                'Was trying to rename to already existing dir %s' % new_path
            )
        shutil.move(old_path, new_path)

    def _delete_filesystem_repo(self, repo):
        """
        Removes the repo from the filesystem. The removal is actually done by
        adding a rm__ prefix to the dir and renaming the internal .hg/.git
        dirs, so this repository is no longer valid for rhodecode; it can be
        undeleted later on by reverting the renames on this repository.

        :param repo: repo object
        """
        rm_path = os.path.join(self.repos_path, repo.repo_name)
        repo_group = repo.group
        log.info("delete_filesystem_repo: removing repository %s", rm_path)
        # disable hg/git internals so it doesn't get detected as a repo
        alias = repo.repo_type

        config = make_db_config(clear_session=False)
        config.set('extensions', 'largefiles', '')
        bare = getattr(repo.scm_instance(config=config), 'bare', False)

        # skip this for bare git repos
        if not bare:
            # disable VCS repo
            vcs_path = os.path.join(rm_path, '.%s' % alias)
            if os.path.exists(vcs_path):
                shutil.move(vcs_path, os.path.join(rm_path, 'rm__.%s' % alias))

        _now = datetime.datetime.now()
        _ms = str(_now.microsecond).rjust(6, '0')
        _d = 'rm__{}__{}'.format(_now.strftime('%Y%m%d_%H%M%S_' + _ms),
                                 repo.just_name)
        if repo_group:
            # if repository is in a group, prefix the removal path with the group
            args = repo_group.full_path_splitted + [_d]
            _d = os.path.join(*args)

        if os.path.isdir(rm_path):
            shutil.move(rm_path, os.path.join(self.repos_path, _d))

        # finally cleanup diff-cache if it exists
        cached_diffs_dir = repo.cached_diffs_dir
        if os.path.isdir(cached_diffs_dir):
            shutil.rmtree(cached_diffs_dir)


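# Hedged sketch for `_delete_filesystem_repo` above (timestamp hypothetical):
# a deleted repo 'docs' inside group 'team' is moved aside rather than
# removed, so it can be restored by reverting the rename:
#
#   team/docs  ->  team/rm__20240101_120000_000123__docs
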
class ReadmeFinder:
    """
    Utility which knows how to find a readme for a specific commit.

    The main idea is that this is a configurable algorithm. When creating an
    instance you can define parameters, currently only the `default_renderer`.
    Based on this configuration the method :meth:`search` behaves slightly
    differently.
    """

    readme_re = re.compile(br'^readme(\.[^.]+)?$', re.IGNORECASE)
    path_re = re.compile(br'^docs?', re.IGNORECASE)

    default_priorities = {
        None: 0,
        b'.rst': 1,
        b'.md': 1,
        b'.rest': 2,
        b'.mkdn': 2,
        b'.text': 2,
        b'.txt': 3,
        b'.mdown': 3,
        b'.markdown': 4,
    }

    path_priority = {
        b'doc': 0,
        b'docs': 1,
    }

    FALLBACK_PRIORITY = 99

    RENDERER_TO_EXTENSION = {
        'rst': [b'.rst', b'.rest'],
        'markdown': [b'.md', b'.mkdn', b'.mdown', b'.markdown'],
    }

    def __init__(self, default_renderer=None):
        self._default_renderer = default_renderer
        self._renderer_extensions = self.RENDERER_TO_EXTENSION.get(default_renderer, [])

-    def search(self, commit, path=b'/'):
+    def search(self, commit, path=b'/', nodes=None):
        """
        Find a readme in the given `commit`.
        """
        # firstly, check the PATH type if it is actually a DIR
        bytes_path = safe_bytes(path)
        if commit.get_node(bytes_path).kind != NodeKind.DIR:
            return None

-        nodes = commit.get_nodes(bytes_path)
+        if not nodes:
+            nodes = commit.get_nodes(bytes_path)
+
        matches = self._match_readmes(nodes)
        matches = self._sort_according_to_priority(matches)
        if matches:
            return matches[0].node

        paths = self._match_paths(nodes)
        paths = self._sort_paths_according_to_priority(paths)
        for bytes_path in paths:
            match = self.search(commit, path=bytes_path)
            if match:
                return match

        return None

    def _match_readmes(self, nodes):
        for node in nodes:
            if not node.is_file():
                continue
            path = node.bytes_path.rsplit(b'/', 1)[-1]
            match = self.readme_re.match(path)
            if match:
                extension = match.group(1)
                yield ReadmeMatch(node, match, self._priority(extension))

    def _match_paths(self, nodes):
        for node in nodes:
            if not node.is_dir():
                continue
            match = self.path_re.match(node.bytes_path)
            if match:
                yield node.bytes_path

    def _priority(self, extension):
        renderer_priority = 0 if extension in self._renderer_extensions else 1
        extension_priority = self.default_priorities.get(extension, self.FALLBACK_PRIORITY)
        return renderer_priority, extension_priority

    def _sort_according_to_priority(self, matches):

        def priority_and_path(match):
            return match.priority, match.path

        return sorted(matches, key=priority_and_path)

    def _sort_paths_according_to_priority(self, paths):

        def priority_and_path(path):
            return self.path_priority.get(path, self.FALLBACK_PRIORITY), path

        return sorted(paths, key=priority_and_path)


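# Hedged usage sketch for ReadmeFinder (the `commit` object is assumed to be
# a vcs commit instance; the renderer must be a RENDERER_TO_EXTENSION key):
#
#   finder = ReadmeFinder(default_renderer='markdown')
#   readme = finder.search(commit)  # FileNode of e.g. README.md, or None
#   if readme:
#       print(readme.path)
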
class ReadmeMatch:

    def __init__(self, node, match, priority):
        self.node = node
        self._match = match
        self.priority = priority

    @property
    def path(self):
        return self.node.path

    def __repr__(self):
        return f'<ReadmeMatch {self.path} priority={self.priority}>'
@@ -1,116 +1,118
<%namespace name="base" file="/base/base.mako"/>

<%doc>
Please note the content of this file is cached, so changes here might not be reflected when editing.
Add ?no-cache=true to the file url to disable caches.

e.g.
http://docker-dev:10020/ipython/files/master/IPython/frontend/html/notebook/static?no-cache=1

</%doc>
<%
at_ref = request.GET.get('at')
if at_ref:
    query={'at': at_ref}
    default_landing_ref = at_ref or c.rhodecode_db_repo.landing_ref_name
else:
    query=None
    default_landing_ref = c.commit.raw_id
%>
<div id="file-tree-wrapper" class="browser-body ${('full-load' if c.full_load else '')}">
    <table class="code-browser rctable table-bordered">
        <thead>
            <tr>
                <th>${_('Name')}</th>
                <th>${_('Size')}</th>
                <th>${_('Modified')}</th>
                <th>${_('Last Commit')}</th>
                <th>${_('Author')}</th>
            </tr>
        </thead>

        <tbody id="tbody">
            <tr>
                <td colspan="5">
                    ${h.files_breadcrumbs(c.repo_name, c.rhodecode_db_repo.repo_type, c.commit.raw_id, c.file.path, c.rhodecode_db_repo.landing_ref_name, request.GET.get('at'), limit_items=True)}
                </td>
            </tr>

            <% has_files = False %>
            % if not c.file.is_submodule():
-           % for cnt, node in enumerate(c.file):
+           % for cnt, node in enumerate(c.file_nodes):
                <% has_files = True %>
                <tr class="parity${(cnt % 2)}">
                    <td class="td-componentname">
                        % if node.is_submodule():
                            <span class="submodule-dir">
                                % if node.url.startswith('http://') or node.url.startswith('https://'):
                                    <a href="${node.url}">
                                        <i class="icon-directory browser-dir"></i><span class="tooltip-hovercard" data-hovercard-alt="${node.url}" data-hovercard-url="javascript:renderTemplate('submoduleHovercard', {'submodule_url':'${node.url}'})">${node.name}</span>
                                    </a>
                                % else:
                                    <i class="icon-directory browser-dir"></i><span class="tooltip-hovercard" data-hovercard-alt="${node.url}" data-hovercard-url="javascript:renderTemplate('submoduleHovercard', {'submodule_url':'${node.url}'})">${node.name}</span>
                                % endif
                            </span>
                        % else:
                            <a href="${h.repo_files_by_ref_url(c.repo_name, c.rhodecode_db_repo.repo_type, f_path=h.safe_str(node.path), ref_name=default_landing_ref, commit_id=c.commit.raw_id, query=query)}">
                                <i class="${('icon-file-text browser-file' if node.is_file() else 'icon-directory browser-dir')}"></i>${node.name}
                            </a>
                        % endif
                    </td>
                    %if node.is_file():
                        <td class="td-size" data-attr-name="size">
                            % if c.full_load:
                                <span data-size="${node.size}">${h.format_byte_size_binary(node.size)}</span>
                            % else:
                                ${_('Loading ...')}
                            % endif
                        </td>
                        <td class="td-time" data-attr-name="modified_at">
                            % if c.full_load:
                                <span data-date="${node.last_commit.date}">${h.age_component(node.last_commit.date)}</span>
                            % endif
                        </td>
                        <td class="td-hash" data-attr-name="commit_id">
                            % if c.full_load:
                                <div class="tooltip-hovercard" data-hovercard-alt="${node.last_commit.message}" data-hovercard-url="${h.route_path('hovercard_repo_commit', repo_name=c.repo_name, commit_id=node.last_commit.raw_id)}">
                                    <pre data-commit-id="${node.last_commit.raw_id}">r${node.last_commit.idx}:${node.last_commit.short_id}</pre>
                                </div>
                            % endif
                        </td>
                        <td class="td-user" data-attr-name="author">
                            % if c.full_load:
                                <span data-author="${node.last_commit.author}">${h.gravatar_with_user(request, node.last_commit.author, tooltip=True)|n}</span>
                            % endif
                        </td>
                    %else:
                        <td></td>
                        <td></td>
                        <td></td>
                        <td></td>
                    %endif
                </tr>
            % endfor
            % endif

            % if not has_files:
                <tr>
                    <td colspan="5">
                        ## empty-dir, mostly SVN

                        ## submodule if we somehow end up here
                        % if c.file.is_submodule():
                            <span class="submodule-dir">
                                <strong>Submodule Node</strong><br/>
                                ${h.escape(c.file.name)}
                                <pre>${c.file.url}</pre>
                            </span>
+                       % else:
+                           <div>${_('Empty directory')}</div>
                        %endif
                    </td>
                </tr>
            % endif

        </tbody>
        <tbody id="tbody_filtered"></tbody>
    </table>
</div>
@@ -1,1249 +1,1251
# Copyright (C) 2010-2024 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

import datetime
import mock
import os
import sys
import shutil

import pytest

from rhodecode.lib.utils import make_db_config
from rhodecode.lib.vcs.backends.base import Reference
from rhodecode.lib.vcs.backends.git import GitRepository, GitCommit, discover_git_version
from rhodecode.lib.vcs.exceptions import RepositoryError, VCSError, NodeDoesNotExistError
from rhodecode.lib.vcs.nodes import NodeKind, FileNode, DirNode, NodeState, SubModuleNode, RootNode
from rhodecode.tests import TEST_GIT_REPO, TEST_GIT_REPO_CLONE, get_new_dir
from rhodecode.tests.vcs.conftest import BackendTestMixin


pytestmark = pytest.mark.backends("git")


DIFF_FROM_REMOTE = rb"""diff --git a/foobar b/foobar
new file mode 100644
index 0000000..f6ea049
--- /dev/null
+++ b/foobar
@@ -0,0 +1 @@
+foobar
\ No newline at end of file
diff --git a/foobar2 b/foobar2
new file mode 100644
index 0000000..e8c9d6b
--- /dev/null
+++ b/foobar2
@@ -0,0 +1 @@
+foobar2
\ No newline at end of file
"""


def callable_get_diff(*args, **kwargs):
    return DIFF_FROM_REMOTE


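# Hedged sketch: in tests, `callable_get_diff` can stand in for the real diff
# call so no remote is needed (the patch target here is illustrative only):
#
#   with mock.patch.object(GitRepository, 'get_diff', callable_get_diff):
#       assert repo.get_diff('c1', 'c2') == DIFF_FROM_REMOTE
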
62 class TestGitRepository(object):
62 class TestGitRepository(object):
63 @pytest.fixture(autouse=True)
63 @pytest.fixture(autouse=True)
64 def prepare(self, request, baseapp):
64 def prepare(self, request, baseapp):
65 self.repo = GitRepository(TEST_GIT_REPO, bare=True)
65 self.repo = GitRepository(TEST_GIT_REPO, bare=True)
66 self.repo.count()
66 self.repo.count()
67
67
68 def get_clone_repo(self, tmpdir):
68 def get_clone_repo(self, tmpdir):
69 """
69 """
70 Return a non bare clone of the base repo.
70 Return a non bare clone of the base repo.
71 """
71 """
72 clone_path = str(tmpdir.join("clone-repo"))
72 clone_path = str(tmpdir.join("clone-repo"))
73 repo_clone = GitRepository(clone_path, create=True, src_url=self.repo.path, bare=False)
73 repo_clone = GitRepository(clone_path, create=True, src_url=self.repo.path, bare=False)
74
74
75 return repo_clone
75 return repo_clone
76
76
77 def get_empty_repo(self, tmpdir, bare=False):
77 def get_empty_repo(self, tmpdir, bare=False):
78 """
78 """
79 Return a non bare empty repo.
79 Return a non bare empty repo.
80 """
80 """
81 clone_path = str(tmpdir.join("empty-repo"))
81 clone_path = str(tmpdir.join("empty-repo"))
82 return GitRepository(clone_path, create=True, bare=bare)
82 return GitRepository(clone_path, create=True, bare=bare)
83
83
84 def test_wrong_repo_path(self):
84 def test_wrong_repo_path(self):
85 wrong_repo_path = "/tmp/errorrepo_git"
85 wrong_repo_path = "/tmp/errorrepo_git"
86 with pytest.raises(RepositoryError):
86 with pytest.raises(RepositoryError):
87 GitRepository(wrong_repo_path)
87 GitRepository(wrong_repo_path)
88
88
89 def test_repo_clone(self, tmp_path_factory):
89 def test_repo_clone(self, tmp_path_factory):
90 repo = GitRepository(TEST_GIT_REPO)
90 repo = GitRepository(TEST_GIT_REPO)
91 clone_path = f"{tmp_path_factory.mktemp('_')}_{TEST_GIT_REPO_CLONE}"
91 clone_path = f"{tmp_path_factory.mktemp('_')}_{TEST_GIT_REPO_CLONE}"
92 repo_clone = GitRepository(clone_path, src_url=TEST_GIT_REPO, create=True, do_workspace_checkout=True)
92 repo_clone = GitRepository(clone_path, src_url=TEST_GIT_REPO, create=True, do_workspace_checkout=True)
93
93
94 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
94 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
95 # Checking hashes of commits should be enough
95 # Checking hashes of commits should be enough
96 for commit in repo.get_commits():
96 for commit in repo.get_commits():
97 raw_id = commit.raw_id
97 raw_id = commit.raw_id
98 assert raw_id == repo_clone.get_commit(raw_id).raw_id
98 assert raw_id == repo_clone.get_commit(raw_id).raw_id
99
99
100 def test_repo_clone_without_create(self):
100 def test_repo_clone_without_create(self):
101 with pytest.raises(RepositoryError):
101 with pytest.raises(RepositoryError):
102 GitRepository(TEST_GIT_REPO_CLONE + "_wo_create", src_url=TEST_GIT_REPO)
102 GitRepository(TEST_GIT_REPO_CLONE + "_wo_create", src_url=TEST_GIT_REPO)
103
103
104 def test_repo_clone_with_update(self, tmp_path_factory):
104 def test_repo_clone_with_update(self, tmp_path_factory):
105 repo = GitRepository(TEST_GIT_REPO)
105 repo = GitRepository(TEST_GIT_REPO)
106 clone_path = "{}_{}_update".format(tmp_path_factory.mktemp("_"), TEST_GIT_REPO_CLONE)
106 clone_path = "{}_{}_update".format(tmp_path_factory.mktemp("_"), TEST_GIT_REPO_CLONE)
107
107
108 repo_clone = GitRepository(clone_path, create=True, src_url=TEST_GIT_REPO, do_workspace_checkout=True)
108 repo_clone = GitRepository(clone_path, create=True, src_url=TEST_GIT_REPO, do_workspace_checkout=True)
109 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
109 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
110
110
111 # check if current workdir was updated
111 # check if current workdir was updated
112 fpath = os.path.join(clone_path, "MANIFEST.in")
112 fpath = os.path.join(clone_path, "MANIFEST.in")
113 assert os.path.isfile(fpath)
113 assert os.path.isfile(fpath)
114
114
115 def test_repo_clone_without_update(self, tmp_path_factory):
115 def test_repo_clone_without_update(self, tmp_path_factory):
116 repo = GitRepository(TEST_GIT_REPO)
116 repo = GitRepository(TEST_GIT_REPO)
117 clone_path = "{}_{}_without_update".format(tmp_path_factory.mktemp("_"), TEST_GIT_REPO_CLONE)
117 clone_path = "{}_{}_without_update".format(tmp_path_factory.mktemp("_"), TEST_GIT_REPO_CLONE)
118 repo_clone = GitRepository(clone_path, create=True, src_url=TEST_GIT_REPO, do_workspace_checkout=False)
118 repo_clone = GitRepository(clone_path, create=True, src_url=TEST_GIT_REPO, do_workspace_checkout=False)
119 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
119 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
120 # check if current workdir was *NOT* updated
120 # check if current workdir was *NOT* updated
121 fpath = os.path.join(clone_path, "MANIFEST.in")
121 fpath = os.path.join(clone_path, "MANIFEST.in")
122 # Make sure it's not bare repo
122 # Make sure it's not bare repo
123 assert not repo_clone.bare
123 assert not repo_clone.bare
124 assert not os.path.isfile(fpath)
124 assert not os.path.isfile(fpath)
125
125
    def test_repo_clone_into_bare_repo(self, tmp_path_factory):
        repo = GitRepository(TEST_GIT_REPO)
        clone_path = "{}_{}_bare.git".format(tmp_path_factory.mktemp("_"), TEST_GIT_REPO_CLONE)
        repo_clone = GitRepository(clone_path, create=True, src_url=repo.path, bare=True)
        assert repo_clone.bare

    def test_create_repo_is_not_bare_by_default(self):
        repo = GitRepository(get_new_dir("not-bare-by-default"), create=True)
        assert not repo.bare

    def test_create_bare_repo(self):
        repo = GitRepository(get_new_dir("bare-repo"), create=True, bare=True)
        assert repo.bare

    def test_update_server_info(self):
        self.repo._update_server_info()

    def test_fetch(self, vcsbackend_git):
        # Note: This is a git-specific part of the API; it's only implemented
        # by the git backend.
        source_repo = vcsbackend_git.repo
        target_repo = vcsbackend_git.create_repo(bare=True)
        target_repo.fetch(source_repo.path)
        # Note: Get a fresh instance, avoids caching trouble
        target_repo = vcsbackend_git.backend(target_repo.path)
        assert len(source_repo.commit_ids) == len(target_repo.commit_ids)

    def test_commit_ids(self):
        # there are 112 commits (at the time of writing), so we can assume
        # they will remain available from now on
        subset = {
            "c1214f7e79e02fc37156ff215cd71275450cffc3",
            "38b5fe81f109cb111f549bfe9bb6b267e10bc557",
            "fa6600f6848800641328adbf7811fd2372c02ab2",
            "102607b09cdd60e2793929c4f90478be29f85a17",
            "49d3fd156b6f7db46313fac355dca1a0b94a0017",
            "2d1028c054665b962fa3d307adfc923ddd528038",
            "d7e0d30fbcae12c90680eb095a4f5f02505ce501",
            "ff7ca51e58c505fec0dd2491de52c622bb7a806b",
            "dd80b0f6cf5052f17cc738c2951c4f2070200d7f",
            "8430a588b43b5d6da365400117c89400326e7992",
            "d955cd312c17b02143c04fa1099a352b04368118",
            "f67b87e5c629c2ee0ba58f85197e423ff28d735b",
            "add63e382e4aabc9e1afdc4bdc24506c269b7618",
            "f298fe1189f1b69779a4423f40b48edf92a703fc",
            "bd9b619eb41994cac43d67cf4ccc8399c1125808",
            "6e125e7c890379446e98980d8ed60fba87d0f6d1",
            "d4a54db9f745dfeba6933bf5b1e79e15d0af20bd",
            "0b05e4ed56c802098dfc813cbe779b2f49e92500",
            "191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e",
            "45223f8f114c64bf4d6f853e3c35a369a6305520",
            "ca1eb7957a54bce53b12d1a51b13452f95bc7c7e",
            "f5ea29fc42ef67a2a5a7aecff10e1566699acd68",
            "27d48942240f5b91dfda77accd2caac94708cc7d",
            "622f0eb0bafd619d2560c26f80f09e3b0b0d78af",
            "e686b958768ee96af8029fe19c6050b1a8dd3b2b",
        }
        assert subset.issubset(set(self.repo.commit_ids))

    def test_slicing(self):
        # expected slice sizes: 4, 1, 5, 10, 95
        for sfrom, sto, size in [(0, 4, 4), (1, 2, 1), (10, 15, 5), (10, 20, 10), (5, 100, 95)]:
            commit_ids = list(self.repo[sfrom:sto])
            assert len(commit_ids) == size
            assert commit_ids[0] == self.repo.get_commit(commit_idx=sfrom)
            assert commit_ids[-1] == self.repo.get_commit(commit_idx=sto - 1)

    def test_branches(self):
        # TODO: Need more tests here
        # Removed (those are 'remotes' branches for cloned repo)
        # assert 'master' in self.repo.branches
        # assert 'gittree' in self.repo.branches
        # assert 'web-branch' in self.repo.branches
        for __, commit_id in self.repo.branches.items():
            assert isinstance(self.repo.get_commit(commit_id), GitCommit)

    def test_tags(self):
        # TODO: Need more tests here
        assert "v0.1.1" in self.repo.tags
        assert "v0.1.2" in self.repo.tags
        for __, commit_id in self.repo.tags.items():
            assert isinstance(self.repo.get_commit(commit_id), GitCommit)

    def _test_single_commit_cache(self, commit_id):
        commit = self.repo.get_commit(commit_id)
        assert commit_id in self.repo.commits
        assert commit is self.repo.commits[commit_id]

    def test_initial_commit(self):
        commit_id = self.repo.commit_ids[0]
        init_commit = self.repo.get_commit(commit_id)
        init_author = init_commit.author

        assert init_commit.message == "initial import\n"
        assert init_author == "Marcin Kuzminski <marcin@python-blog.com>"
        assert init_author == init_commit.committer
        assert sorted(init_commit.added_paths) == sorted(
            [
                b"vcs/__init__.py",
                b"vcs/backends/BaseRepository.py",
                b"vcs/backends/__init__.py",
            ]
        )
        assert sorted(init_commit.affected_files) == sorted(
            [
                b"vcs/__init__.py",
                b"vcs/backends/BaseRepository.py",
                b"vcs/backends/__init__.py",
            ]
        )

        for path in (b"vcs/__init__.py", b"vcs/backends/BaseRepository.py", b"vcs/backends/__init__.py"):
            assert isinstance(init_commit.get_node(path), FileNode)
        for path in (b"", b"vcs", b"vcs/backends"):
            assert isinstance(init_commit.get_node(path), DirNode)

        with pytest.raises(NodeDoesNotExistError):
            init_commit.get_node(path=b"foobar")

        node = init_commit.get_node(b"vcs/")
        assert hasattr(node, "kind")
        assert node.kind == NodeKind.DIR

        node = init_commit.get_node(b"vcs")
        assert hasattr(node, "kind")
        assert node.kind == NodeKind.DIR

        node = init_commit.get_node(b"vcs/__init__.py")
        assert hasattr(node, "kind")
        assert node.kind == NodeKind.FILE

    def test_not_existing_commit(self):
        with pytest.raises(RepositoryError):
            self.repo.get_commit("f" * 40)

    def test_commit10(self):
        commit10 = self.repo.get_commit(self.repo.commit_ids[9])
        README = """===
VCS
===

Various Version Control System management abstraction layer for Python.

Introduction
------------

TODO: To be written...

"""
        node = commit10.get_node(b"README.rst")
        assert node.kind == NodeKind.FILE
        assert node.str_content == README

    def test_head(self):
        assert self.repo.head == self.repo.get_commit().raw_id

    def test_checkout_with_create(self, tmpdir):
        repo_clone = self.get_clone_repo(tmpdir)

        new_branch = "new_branch"
        assert repo_clone._current_branch() == "master"
        assert set(repo_clone.branches) == {"master"}
        repo_clone._checkout(new_branch, create=True)

        # Branches is a lazy property, so we need to recreate the Repo object.
        repo_clone = GitRepository(repo_clone.path)
        assert set(repo_clone.branches) == {"master", new_branch}
        assert repo_clone._current_branch() == new_branch

    def test_checkout(self, tmpdir):
        repo_clone = self.get_clone_repo(tmpdir)

        repo_clone._checkout("new_branch", create=True)
        repo_clone._checkout("master")

        assert repo_clone._current_branch() == "master"

    def test_checkout_same_branch(self, tmpdir):
        repo_clone = self.get_clone_repo(tmpdir)

        repo_clone._checkout("master")
        assert repo_clone._current_branch() == "master"

    def test_checkout_branch_already_exists(self, tmpdir):
        repo_clone = self.get_clone_repo(tmpdir)

        with pytest.raises(RepositoryError):
            repo_clone._checkout("master", create=True)

    def test_checkout_bare_repo(self):
        with pytest.raises(RepositoryError):
            self.repo._checkout("master")

    def test_current_branch_bare_repo(self):
        with pytest.raises(RepositoryError):
            self.repo._current_branch()

    def test_current_branch_empty_repo(self, tmpdir):
        repo = self.get_empty_repo(tmpdir)
        assert repo._current_branch() is None

    def test_local_clone(self, tmp_path_factory):
        clone_path = str(tmp_path_factory.mktemp("test-local-clone"))
        self.repo._local_clone(clone_path, "master")
        repo_clone = GitRepository(clone_path)

        assert self.repo.commit_ids == repo_clone.commit_ids

    def test_local_clone_with_specific_branch(self, tmpdir):
        source_repo = self.get_clone_repo(tmpdir)

        # Create a new branch in source repo
        new_branch_commit = source_repo.commit_ids[-3]
        source_repo._checkout(new_branch_commit)
        source_repo._checkout("new_branch", create=True)

        clone_path = str(tmpdir.join("git-clone-path-1"))
        source_repo._local_clone(clone_path, "new_branch")
        repo_clone = GitRepository(clone_path)

        assert source_repo.commit_ids[: -3 + 1] == repo_clone.commit_ids

        clone_path = str(tmpdir.join("git-clone-path-2"))
        source_repo._local_clone(clone_path, "master")
        repo_clone = GitRepository(clone_path)

        assert source_repo.commit_ids == repo_clone.commit_ids

    def test_local_clone_fails_if_target_exists(self):
        with pytest.raises(RepositoryError):
            self.repo._local_clone(self.repo.path, "master")

    def test_local_fetch(self, tmpdir):
        target_repo = self.get_empty_repo(tmpdir)
        source_repo = self.get_clone_repo(tmpdir)

        # Create a new branch in source repo
        master_commit = source_repo.commit_ids[-1]
        new_branch_commit = source_repo.commit_ids[-3]
        source_repo._checkout(new_branch_commit)
        source_repo._checkout("new_branch", create=True)

        target_repo._local_fetch(source_repo.path, "new_branch")
        assert target_repo._last_fetch_heads() == [new_branch_commit]

        target_repo._local_fetch(source_repo.path, "master")
        assert target_repo._last_fetch_heads() == [master_commit]

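    # A rough sketch of what the pair of helpers used above relies on: git
    # records the tip(s) of the most recent fetch in ``.git/FETCH_HEAD``, so
    # something like
    #
    #   git fetch <source_path> new_branch
    #   cat .git/FETCH_HEAD   # -> "<commit_id>\t\tbranch 'new_branch' of ..."
    #
    # is effectively what ``_local_fetch`` followed by ``_last_fetch_heads()``
    # wraps (assuming the backend reads FETCH_HEAD; the exact parsing is
    # internal to the git backend).
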
    def test_local_fetch_from_bare_repo(self, tmpdir):
        target_repo = self.get_empty_repo(tmpdir)
        target_repo._local_fetch(self.repo.path, "master")

        master_commit = self.repo.commit_ids[-1]
        assert target_repo._last_fetch_heads() == [master_commit]

    def test_local_fetch_from_same_repo(self):
        with pytest.raises(ValueError):
            self.repo._local_fetch(self.repo.path, "master")

    def test_local_fetch_branch_does_not_exist(self, tmpdir):
        target_repo = self.get_empty_repo(tmpdir)

        with pytest.raises(RepositoryError):
            target_repo._local_fetch(self.repo.path, "new_branch")

    def test_local_pull(self, tmpdir):
        target_repo = self.get_empty_repo(tmpdir)
        source_repo = self.get_clone_repo(tmpdir)

        # Create a new branch in source repo
        master_commit = source_repo.commit_ids[-1]
        new_branch_commit = source_repo.commit_ids[-3]
        source_repo._checkout(new_branch_commit)
        source_repo._checkout("new_branch", create=True)

        target_repo._local_pull(source_repo.path, "new_branch")
        target_repo = GitRepository(target_repo.path)
        assert target_repo.head == new_branch_commit

        target_repo._local_pull(source_repo.path, "master")
        target_repo = GitRepository(target_repo.path)
        assert target_repo.head == master_commit

    def test_local_pull_in_bare_repo(self):
        with pytest.raises(RepositoryError):
            self.repo._local_pull(self.repo.path, "master")

    def test_local_merge(self, tmpdir):
        target_repo = self.get_empty_repo(tmpdir)
        source_repo = self.get_clone_repo(tmpdir)

        # Create a new branch in source repo
        master_commit = source_repo.commit_ids[-1]
        new_branch_commit = source_repo.commit_ids[-3]
        source_repo._checkout(new_branch_commit)
        source_repo._checkout("new_branch", create=True)

        # This is required, as one cannot do a --ff-only merge in an empty repo.
        target_repo._local_pull(source_repo.path, "new_branch")

        target_repo._local_fetch(source_repo.path, "master")
        merge_message = "Merge message\n\nDescription:..."
        user_name = "Albert Einstein"
        user_email = "albert@einstein.com"
        target_repo._local_merge(merge_message, user_name, user_email, target_repo._last_fetch_heads())

        target_repo = GitRepository(target_repo.path)
        assert target_repo.commit_ids[-2] == master_commit
        last_commit = target_repo.get_commit(target_repo.head)
        assert last_commit.message.strip() == merge_message
        assert last_commit.author == "%s <%s>" % (user_name, user_email)

        assert not os.path.exists(os.path.join(target_repo.path, ".git", "MERGE_HEAD"))

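    # The MERGE_HEAD check above works because git keeps the commit(s) being
    # merged in ``.git/MERGE_HEAD`` for the duration of an in-progress merge
    # and removes the file once the merge is committed or aborted; the file's
    # absence is therefore a cheap "no half-finished merge left behind"
    # invariant, reused by the conflict test below.
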
    def test_local_merge_raises_exception_on_conflict(self, vcsbackend_git):
        target_repo = vcsbackend_git.create_repo(number_of_commits=1)
        vcsbackend_git.ensure_file(b"README", b"I will conflict with you!!!")

        target_repo._local_fetch(self.repo.path, "master")
        with pytest.raises(RepositoryError):
            target_repo._local_merge("merge_message", "user name", "user@name.com", target_repo._last_fetch_heads())

        # Check that we are not left in an intermediate merge state
        assert not os.path.exists(os.path.join(target_repo.path, ".git", "MERGE_HEAD"))

    def test_local_merge_into_empty_repo(self, tmpdir):
        target_repo = self.get_empty_repo(tmpdir)

        # This is required, as one cannot do a --ff-only merge in an empty repo.
        target_repo._local_fetch(self.repo.path, "master")
        with pytest.raises(RepositoryError):
            target_repo._local_merge("merge_message", "user name", "user@name.com", target_repo._last_fetch_heads())

    def test_local_merge_in_bare_repo(self):
        with pytest.raises(RepositoryError):
            self.repo._local_merge("merge_message", "user name", "user@name.com", None)

    def test_local_push_non_bare(self, tmpdir):
        target_repo = self.get_empty_repo(tmpdir)

        pushed_branch = "pushed_branch"
        self.repo._local_push("master", target_repo.path, pushed_branch)
        # Fix the HEAD of the target repo, otherwise GitRepository won't
        # report any branches.
        with open(os.path.join(target_repo.path, ".git", "HEAD"), "w") as f:
            f.write("ref: refs/heads/%s" % pushed_branch)

        target_repo = GitRepository(target_repo.path)

        assert target_repo.branches[pushed_branch] == self.repo.branches["master"]

    def test_local_push_bare(self, tmpdir):
        target_repo = self.get_empty_repo(tmpdir, bare=True)

        pushed_branch = "pushed_branch"
        self.repo._local_push("master", target_repo.path, pushed_branch)
        # Fix the HEAD of the target repo, otherwise GitRepository won't
        # report any branches.
        with open(os.path.join(target_repo.path, "HEAD"), "w") as f:
            f.write("ref: refs/heads/%s" % pushed_branch)

        target_repo = GitRepository(target_repo.path)

        assert target_repo.branches[pushed_branch] == self.repo.branches["master"]

    def test_local_push_non_bare_target_branch_is_checked_out(self, tmpdir):
        target_repo = self.get_clone_repo(tmpdir)

        pushed_branch = "pushed_branch"
        # Create a new branch in source repo
        new_branch_commit = target_repo.commit_ids[-3]
        target_repo._checkout(new_branch_commit)
        target_repo._checkout(pushed_branch, create=True)

        self.repo._local_push("master", target_repo.path, pushed_branch)

        target_repo = GitRepository(target_repo.path)

        assert target_repo.branches[pushed_branch] == self.repo.branches["master"]

    def test_local_push_raises_exception_on_conflict(self, vcsbackend_git):
        target_repo = vcsbackend_git.create_repo(number_of_commits=1)
        with pytest.raises(RepositoryError):
            self.repo._local_push("master", target_repo.path, "master")

    def test_hooks_can_be_enabled_via_env_variable_for_local_push(self, tmpdir):
        target_repo = self.get_empty_repo(tmpdir, bare=True)

        with mock.patch.object(self.repo, "run_git_command") as run_mock:
            self.repo._local_push("master", target_repo.path, "master", enable_hooks=True)
        env = run_mock.call_args[1]["extra_env"]
        assert "RC_SKIP_HOOKS" not in env

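    # The assertion above is the interesting part: with ``enable_hooks=True``
    # the backend must *not* export RC_SKIP_HOOKS, which is the environment
    # flag the generated hook scripts (see ``_add_failing_hook`` below) check
    # in order to turn themselves into no-ops.
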
    def _add_failing_hook(self, repo_path, hook_name, bare=False):
        path_components = ["hooks", hook_name] if bare else [".git", "hooks", hook_name]
        hook_path = os.path.join(repo_path, *path_components)
        with open(hook_path, "w") as f:
            script_lines = [
                "#!%s" % sys.executable,
                "import os",
                "import sys",
                'if os.environ.get("RC_SKIP_HOOKS"):',
                " sys.exit(0)",
                "sys.exit(1)",
            ]
            f.write("\n".join(script_lines))
        os.chmod(hook_path, 0o755)

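    # For reference, the helper above writes a hook script equivalent to:
    #
    #   #!<sys.executable>
    #   import os
    #   import sys
    #   if os.environ.get("RC_SKIP_HOOKS"):
    #       sys.exit(0)
    #   sys.exit(1)
    #
    # i.e. the hook always fails unless RC_SKIP_HOOKS is set in its
    # environment, which is what the next two tests exercise.
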
    def test_local_push_does_not_execute_hook(self, tmpdir):
        target_repo = self.get_empty_repo(tmpdir)

        pushed_branch = "pushed_branch"
        self._add_failing_hook(target_repo.path, "pre-receive")
        self.repo._local_push("master", target_repo.path, pushed_branch)
        # Fix the HEAD of the target repo, otherwise GitRepository won't
        # report any branches.
        with open(os.path.join(target_repo.path, ".git", "HEAD"), "w") as f:
            f.write("ref: refs/heads/%s" % pushed_branch)

        target_repo = GitRepository(target_repo.path)

        assert target_repo.branches[pushed_branch] == self.repo.branches["master"]

    def test_local_push_executes_hook(self, tmpdir):
        target_repo = self.get_empty_repo(tmpdir, bare=True)
        self._add_failing_hook(target_repo.path, "pre-receive", bare=True)
        with pytest.raises(RepositoryError):
            self.repo._local_push("master", target_repo.path, "master", enable_hooks=True)

    def test_maybe_prepare_merge_workspace(self):
        workspace = self.repo._maybe_prepare_merge_workspace(
            2, "pr2", Reference("branch", "master", "unused"), Reference("branch", "master", "unused")
        )

        assert os.path.isdir(workspace)
        workspace_repo = GitRepository(workspace)
        assert workspace_repo.branches == self.repo.branches

        # Calling it a second time should also succeed
        workspace = self.repo._maybe_prepare_merge_workspace(
            2, "pr2", Reference("branch", "master", "unused"), Reference("branch", "master", "unused")
        )
        assert os.path.isdir(workspace)

    def test_maybe_prepare_merge_workspace_different_refs(self):
        workspace = self.repo._maybe_prepare_merge_workspace(
            2, "pr2", Reference("branch", "master", "unused"), Reference("branch", "develop", "unused")
        )

        assert os.path.isdir(workspace)
        workspace_repo = GitRepository(workspace)
        assert workspace_repo.branches == self.repo.branches

        # Calling it a second time should also succeed
        workspace = self.repo._maybe_prepare_merge_workspace(
            2, "pr2", Reference("branch", "master", "unused"), Reference("branch", "develop", "unused")
        )
        assert os.path.isdir(workspace)

    def test_cleanup_merge_workspace(self):
        workspace = self.repo._maybe_prepare_merge_workspace(
            2, "pr3", Reference("branch", "master", "unused"), Reference("branch", "master", "unused")
        )
        self.repo.cleanup_merge_workspace(2, "pr3")

        assert not os.path.exists(workspace)

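    # ``_maybe_prepare_merge_workspace`` ("maybe", because it is a no-op when
    # the workspace already exists, as the repeated calls above verify)
    # materializes a scratch clone per (repo_id, workspace_id) pair in which
    # server-side merges can run; ``cleanup_merge_workspace`` removes it and,
    # as the next test shows, tolerates ids that were never prepared.
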
    def test_cleanup_merge_workspace_invalid_workspace_id(self):
        # No assert needed: even for a nonexistent workspace this function
        # should still succeed.
        self.repo.cleanup_merge_workspace(1, "pr4")

    def test_set_refs(self):
        test_ref = "refs/test-refs/abcde"
        test_commit_id = "ecb86e1f424f2608262b130db174a7dfd25a6623"

        self.repo.set_refs(test_ref, test_commit_id)
        stdout, _ = self.repo.run_git_command(["show-ref"])
        assert test_ref in stdout
        assert test_commit_id in stdout

    def test_remove_ref(self):
        test_ref = "refs/test-refs/abcde"
        test_commit_id = "ecb86e1f424f2608262b130db174a7dfd25a6623"
        self.repo.set_refs(test_ref, test_commit_id)
        stdout, _ = self.repo.run_git_command(["show-ref"])
        assert test_ref in stdout
        assert test_commit_id in stdout

        self.repo.remove_ref(test_ref)
        stdout, _ = self.repo.run_git_command(["show-ref"])
        assert test_ref not in stdout
        assert test_commit_id not in stdout
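
    # ``set_refs`` / ``remove_ref`` manipulate arbitrary git refs; verifying
    # the result through ``git show-ref`` -- which prints every ref as a
    # "<commit_id> <ref_name>" line -- keeps the two tests above independent
    # of the backend's own ref caching.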


class TestGitCommit(object):
    @pytest.fixture(autouse=True)
    def prepare(self):
        self.repo = GitRepository(TEST_GIT_REPO)

    def test_default_commit(self):
        tip = self.repo.get_commit()
        assert tip == self.repo.get_commit(None)
        assert tip == self.repo.get_commit("tip")

    def test_root_node(self):
        tip = self.repo.get_commit()
        assert tip.root is tip.get_node(b"")

    def test_lazy_fetch(self):
        """
        Test that a commit's nodes expand and are cached as we walk through
        the commit. This test is somewhat hard to write, as the order of the
        checks is key here. Written by running command after command in a shell.
        """
        commit_id = "2a13f185e4525f9d4b59882791a2d397b90d5ddc"
        assert commit_id in self.repo.commit_ids
        commit = self.repo.get_commit(commit_id)
        assert len(commit.nodes) == 0
        root = commit.root
        assert len(commit.nodes) == 1
        assert len(root.nodes) == 8
        # accessing root.nodes updates commit.nodes
        assert len(commit.nodes) == 9

        docs = commit.get_node(b"docs")
        # we haven't yet accessed anything new, as the docs dir was already cached
        assert len(commit.nodes) == 9
        assert len(docs.nodes) == 8
        # accessing docs.nodes updates commit.nodes
        assert len(commit.nodes) == 17

        assert docs is commit.get_node(b"docs")
        assert docs is root.nodes[0]
        assert docs is root.dirs[0]
        assert docs is commit.get_node(b"docs")

    def test_nodes_with_commit(self):
        commit_id = "2a13f185e4525f9d4b59882791a2d397b90d5ddc"
        commit = self.repo.get_commit(commit_id)
        root = commit.root
        assert isinstance(root, RootNode)
        docs = commit.get_node(b"docs")
        assert docs is commit.get_node(b"docs")
        api = commit.get_node(b"docs/api")
        assert api is commit.get_node(b"docs/api")
        index = commit.get_node(b"docs/api/index.rst")
        assert index is commit.get_node(b"docs/api/index.rst")

    def test_branch_and_tags(self):
        """
        rev0 = self.repo.commit_ids[0]
        commit0 = self.repo.get_commit(rev0)
        assert commit0.branch == 'master'
        assert commit0.tags == []

        rev10 = self.repo.commit_ids[10]
        commit10 = self.repo.get_commit(rev10)
        assert commit10.branch == 'master'
        assert commit10.tags == []

        rev44 = self.repo.commit_ids[44]
        commit44 = self.repo.get_commit(rev44)
        assert commit44.branch == 'web-branch'

        tip = self.repo.get_commit('tip')
        assert 'tip' in tip.tags
        """
        # Those tests would fail - branches are now going to be changed
        # in the main API in order to support the git backend
        pass

    def test_file_size(self):
        to_check = (
            ("c1214f7e79e02fc37156ff215cd71275450cffc3", b"vcs/backends/BaseRepository.py", 502),
            ("d7e0d30fbcae12c90680eb095a4f5f02505ce501", b"vcs/backends/hg.py", 854),
            ("6e125e7c890379446e98980d8ed60fba87d0f6d1", b"setup.py", 1068),
            ("d955cd312c17b02143c04fa1099a352b04368118", b"vcs/backends/base.py", 2921),
            ("ca1eb7957a54bce53b12d1a51b13452f95bc7c7e", b"vcs/backends/base.py", 3936),
            ("f50f42baeed5af6518ef4b0cb2f1423f3851a941", b"vcs/backends/base.py", 6189),
        )
        for commit_id, path, size in to_check:
            node = self.repo.get_commit(commit_id).get_node(path)
            assert node.is_file()
            assert node.size == size

    def test_file_history_from_commits(self):
        node = self.repo[10].get_node(b"setup.py")
        commit_ids = [commit.raw_id for commit in node.history]
        assert ["ff7ca51e58c505fec0dd2491de52c622bb7a806b"] == commit_ids

        node = self.repo[20].get_node(b"setup.py")
        node_ids = [commit.raw_id for commit in node.history]
        assert ["191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e", "ff7ca51e58c505fec0dd2491de52c622bb7a806b"] == node_ids

        # special case: we check history from a commit that changed this
        # particular file, which means we check that it is included as well
        node = self.repo.get_commit("191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e").get_node(b"setup.py")
        node_ids = [commit.raw_id for commit in node.history]
        assert ["191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e", "ff7ca51e58c505fec0dd2491de52c622bb7a806b"] == node_ids

    def test_file_history(self):
        # we can only check that those commits are present in the history,
        # as we cannot update this test every time the file is changed
        files = {
            b"setup.py": [
                "54386793436c938cff89326944d4c2702340037d",
                "51d254f0ecf5df2ce50c0b115741f4cf13985dab",
                "998ed409c795fec2012b1c0ca054d99888b22090",
                "5e0eb4c47f56564395f76333f319d26c79e2fb09",
                "0115510b70c7229dbc5dc49036b32e7d91d23acd",
                "7cb3fd1b6d8c20ba89e2264f1c8baebc8a52d36e",
                "2a13f185e4525f9d4b59882791a2d397b90d5ddc",
                "191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e",
                "ff7ca51e58c505fec0dd2491de52c622bb7a806b",
            ],
            b"vcs/nodes.py": [
                "33fa3223355104431402a888fa77a4e9956feb3e",
                "fa014c12c26d10ba682fadb78f2a11c24c8118e1",
                "e686b958768ee96af8029fe19c6050b1a8dd3b2b",
                "ab5721ca0a081f26bf43d9051e615af2cc99952f",
                "c877b68d18e792a66b7f4c529ea02c8f80801542",
                "4313566d2e417cb382948f8d9d7c765330356054",
                "6c2303a793671e807d1cfc70134c9ca0767d98c2",
                "54386793436c938cff89326944d4c2702340037d",
                "54000345d2e78b03a99d561399e8e548de3f3203",
                "1c6b3677b37ea064cb4b51714d8f7498f93f4b2b",
                "2d03ca750a44440fb5ea8b751176d1f36f8e8f46",
                "2a08b128c206db48c2f0b8f70df060e6db0ae4f8",
                "30c26513ff1eb8e5ce0e1c6b477ee5dc50e2f34b",
                "ac71e9503c2ca95542839af0ce7b64011b72ea7c",
                "12669288fd13adba2a9b7dd5b870cc23ffab92d2",
                "5a0c84f3e6fe3473e4c8427199d5a6fc71a9b382",
                "12f2f5e2b38e6ff3fbdb5d722efed9aa72ecb0d5",
                "5eab1222a7cd4bfcbabc218ca6d04276d4e27378",
                "f50f42baeed5af6518ef4b0cb2f1423f3851a941",
                "d7e390a45f6aa96f04f5e7f583ad4f867431aa25",
                "f15c21f97864b4f071cddfbf2750ec2e23859414",
                "e906ef056cf539a4e4e5fc8003eaf7cf14dd8ade",
                "ea2b108b48aa8f8c9c4a941f66c1a03315ca1c3b",
                "84dec09632a4458f79f50ddbbd155506c460b4f9",
                "0115510b70c7229dbc5dc49036b32e7d91d23acd",
                "2a13f185e4525f9d4b59882791a2d397b90d5ddc",
                "3bf1c5868e570e39569d094f922d33ced2fa3b2b",
                "b8d04012574729d2c29886e53b1a43ef16dd00a1",
                "6970b057cffe4aab0a792aa634c89f4bebf01441",
                "dd80b0f6cf5052f17cc738c2951c4f2070200d7f",
                "ff7ca51e58c505fec0dd2491de52c622bb7a806b",
            ],
            b"vcs/backends/git.py": [
                "4cf116ad5a457530381135e2f4c453e68a1b0105",
                "9a751d84d8e9408e736329767387f41b36935153",
                "cb681fb539c3faaedbcdf5ca71ca413425c18f01",
                "428f81bb652bcba8d631bce926e8834ff49bdcc6",
                "180ab15aebf26f98f714d8c68715e0f05fa6e1c7",
                "2b8e07312a2e89e92b90426ab97f349f4bce2a3a",
                "50e08c506174d8645a4bb517dd122ac946a0f3bf",
                "54000345d2e78b03a99d561399e8e548de3f3203",
            ],
        }
        for path, commit_ids in files.items():
            node = self.repo.get_commit(commit_ids[0]).get_node(path)
            node_ids = [commit.raw_id for commit in node.history]
            assert set(commit_ids).issubset(set(node_ids)), (
                "We assumed that %s is a subset of commit_ids for which file %s "
                "has been changed, and history of that node returned: %s" % (commit_ids, path, node_ids)
            )

794 def test_file_annotate(self):
794 def test_file_annotate(self):
795 files = {
795 files = {
796 b"vcs/backends/__init__.py": {
796 b"vcs/backends/__init__.py": {
797 "c1214f7e79e02fc37156ff215cd71275450cffc3": {
797 "c1214f7e79e02fc37156ff215cd71275450cffc3": {
798 "lines_no": 1,
798 "lines_no": 1,
799 "commits": [
799 "commits": [
800 "c1214f7e79e02fc37156ff215cd71275450cffc3",
800 "c1214f7e79e02fc37156ff215cd71275450cffc3",
801 ],
801 ],
802 },
802 },
803 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647": {
803 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647": {
804 "lines_no": 21,
804 "lines_no": 21,
805 "commits": [
805 "commits": [
806 "49d3fd156b6f7db46313fac355dca1a0b94a0017",
806 "49d3fd156b6f7db46313fac355dca1a0b94a0017",
807 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
807 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
808 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
808 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
809 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
809 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
810 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
810 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
811 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
811 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
812 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
812 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
813 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
813 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
814 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
814 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
815 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
815 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
816 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
816 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
817 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
817 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
818 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
818 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
819 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
819 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
820 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
820 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
821 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
821 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
822 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
822 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
823 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
823 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
824 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
824 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
825 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
825 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
826 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
826 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
827 ],
827 ],
828 },
828 },
829 "e29b67bd158580fc90fc5e9111240b90e6e86064": {
829 "e29b67bd158580fc90fc5e9111240b90e6e86064": {
830 "lines_no": 32,
830 "lines_no": 32,
831 "commits": [
831 "commits": [
832 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
832 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
833 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
833 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
834 "5eab1222a7cd4bfcbabc218ca6d04276d4e27378",
834 "5eab1222a7cd4bfcbabc218ca6d04276d4e27378",
835 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
835 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
836 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
836 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
837 "992f38217b979d0b0987d0bae3cc26dac85d9b19",
837 "992f38217b979d0b0987d0bae3cc26dac85d9b19",
838 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
838 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
839 "54000345d2e78b03a99d561399e8e548de3f3203",
839 "54000345d2e78b03a99d561399e8e548de3f3203",
840 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
840 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
841 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
841 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
842 "78c3f0c23b7ee935ec276acb8b8212444c33c396",
842 "78c3f0c23b7ee935ec276acb8b8212444c33c396",
843 "992f38217b979d0b0987d0bae3cc26dac85d9b19",
843 "992f38217b979d0b0987d0bae3cc26dac85d9b19",
844 "992f38217b979d0b0987d0bae3cc26dac85d9b19",
844 "992f38217b979d0b0987d0bae3cc26dac85d9b19",
845 "992f38217b979d0b0987d0bae3cc26dac85d9b19",
845 "992f38217b979d0b0987d0bae3cc26dac85d9b19",
846 "992f38217b979d0b0987d0bae3cc26dac85d9b19",
846 "992f38217b979d0b0987d0bae3cc26dac85d9b19",
847 "2a13f185e4525f9d4b59882791a2d397b90d5ddc",
847 "2a13f185e4525f9d4b59882791a2d397b90d5ddc",
848 "992f38217b979d0b0987d0bae3cc26dac85d9b19",
848 "992f38217b979d0b0987d0bae3cc26dac85d9b19",
849 "78c3f0c23b7ee935ec276acb8b8212444c33c396",
849 "78c3f0c23b7ee935ec276acb8b8212444c33c396",
850 "992f38217b979d0b0987d0bae3cc26dac85d9b19",
850 "992f38217b979d0b0987d0bae3cc26dac85d9b19",
851 "992f38217b979d0b0987d0bae3cc26dac85d9b19",
851 "992f38217b979d0b0987d0bae3cc26dac85d9b19",
852 "992f38217b979d0b0987d0bae3cc26dac85d9b19",
852 "992f38217b979d0b0987d0bae3cc26dac85d9b19",
853 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
853 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
854 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
854 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
855 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
855 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
856 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
856 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
857 "992f38217b979d0b0987d0bae3cc26dac85d9b19",
857 "992f38217b979d0b0987d0bae3cc26dac85d9b19",
858 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
858 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
859 "992f38217b979d0b0987d0bae3cc26dac85d9b19",
859 "992f38217b979d0b0987d0bae3cc26dac85d9b19",
860 "992f38217b979d0b0987d0bae3cc26dac85d9b19",
860 "992f38217b979d0b0987d0bae3cc26dac85d9b19",
861 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
861 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
862 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
862 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
863 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
863 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
864 ],
864 ],
865 },
865 },
866 },
866 },
867 }
867 }
868
868
        for fname, commit_dict in files.items():
            for commit_id, __ in commit_dict.items():
                commit = self.repo.get_commit(commit_id)

                l1_1 = [x[1] for x in commit.get_file_annotate(fname)]
                l1_2 = [x[2]().raw_id for x in commit.get_file_annotate(fname)]
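                # Hedged note: judging by the indexing above, each annotation
                # entry appears to be a tuple whose second element is the
                # commit id and whose third element is a zero-argument
                # callable lazily resolving to the commit object, so the two
                # projections must agree.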
                assert l1_1 == l1_2
                l1 = l1_1
                l2 = files[fname][commit_id]["commits"]
                assert l1 == l2, (
                    "The lists of commit_ids for %s@commit_id %s "
                    "from annotation list should match each other, "
                    "got \n%s \nvs \n%s " % (fname, commit_id, l1, l2)
                )

    def test_files_state(self):
        """
        Tests state of FileNodes.
        """
        commit = self.repo.get_commit("e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0")
        node = commit.get_node(b"vcs/utils/diffs.py")
        assert node.bytes_path in commit.added_paths

        commit = self.repo.get_commit("33fa3223355104431402a888fa77a4e9956feb3e")
        node = commit.get_node(b".hgignore")
        assert node.bytes_path in commit.changed_paths

        commit = self.repo.get_commit("e29b67bd158580fc90fc5e9111240b90e6e86064")
        node = commit.get_node(b"setup.py")
        assert node.bytes_path not in commit.affected_files

        # If a node was removed in this commit, trying to fetch it raises
        # NodeDoesNotExistError
        commit = self.repo.get_commit("fa6600f6848800641328adbf7811fd2372c02ab2")
        path = b"vcs/backends/BaseRepository.py"
        with pytest.raises(NodeDoesNotExistError):
            commit.get_node(path)

        # but the path is still listed in the commit's ``removed_paths``
        assert path in commit.removed_paths

        commit = self.repo.get_commit("54386793436c938cff89326944d4c2702340037d")
        changed = [b"setup.py", b"tests/test_nodes.py", b"vcs/backends/hg.py", b"vcs/nodes.py"]
        assert set(changed) == set(commit.changed_paths)

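    # Hedged note: in line with the bytestring changes this file exercises,
    # node paths (``bytes_path``) and the ``added_paths`` / ``changed_paths``
    # / ``removed_paths`` collections are compared as bytes, not str, which
    # is why every path literal above is a ``b"..."`` literal.
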
    def test_unicode_branch_refs(self):
        unicode_branches = {
            "refs/heads/unicode": "6c0ce52b229aa978889e91b38777f800e85f330b",
            "refs/heads/uniΓ§ΓΆβˆ‚e": "ΓΌrl",
        }
        with mock.patch("rhodecode.lib.vcs.backends.git.repository.GitRepository._refs", unicode_branches):
            branches = self.repo.branches

        assert "unicode" in branches
        assert "uniΓ§ΓΆβˆ‚e" in branches

    def test_unicode_tag_refs(self):
        unicode_tags = {
            "refs/tags/unicode": "6c0ce52b229aa978889e91b38777f800e85f330b",
            "refs/tags/uniΓ§ΓΆβˆ‚e": "6c0ce52b229aa978889e91b38777f800e85f330b",
        }
        with mock.patch("rhodecode.lib.vcs.backends.git.repository.GitRepository._refs", unicode_tags):
            tags = self.repo.tags

        assert "unicode" in tags
        assert "uniΓ§ΓΆβˆ‚e" in tags

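    # Hedged note: patching ``GitRepository._refs`` directly means the two
    # tests above exercise only the ref-name decoding logic; no real branches
    # or tags need to exist in the fixture repository.
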
    def test_commit_message_is_unicode(self):
        for commit in self.repo:
            assert type(commit.message) == str

    def test_commit_author_is_unicode(self):
        for commit in self.repo:
            assert type(commit.author) == str

    def test_repo_files_content_types(self):
        commit = self.repo.get_commit()
        for node in commit.get_node(b"/"):
            if node.is_file():
                assert type(node.content) == bytes
                assert type(node.str_content) == str

    def test_wrong_path(self):
        # 'setup.py' exists in the root dir, but not under this path:
        path = b"foo/bar/setup.py"
        tip = self.repo.get_commit()
        with pytest.raises(VCSError):
            tip.get_node(path)

    @pytest.mark.parametrize(
        "author_email, commit_id",
        [
            ("marcin@python-blog.com", "c1214f7e79e02fc37156ff215cd71275450cffc3"),
            ("lukasz.balcerzak@python-center.pl", "ff7ca51e58c505fec0dd2491de52c622bb7a806b"),
            ("none@none", "8430a588b43b5d6da365400117c89400326e7992"),
        ],
    )
    def test_author_email(self, author_email, commit_id):
        commit = self.repo.get_commit(commit_id)
        assert author_email == commit.author_email

    @pytest.mark.parametrize(
        "author, commit_id",
        [
            ("Marcin Kuzminski", "c1214f7e79e02fc37156ff215cd71275450cffc3"),
            ("Lukasz Balcerzak", "ff7ca51e58c505fec0dd2491de52c622bb7a806b"),
            ("marcink", "8430a588b43b5d6da365400117c89400326e7992"),
        ],
    )
    def test_author_username(self, author, commit_id):
        commit = self.repo.get_commit(commit_id)
        assert author == commit.author_name


class TestLargeFileRepo(object):
    def test_large_file(self, backend_git):
        conf = make_db_config()
        git_largefiles_store = conf.get("vcs_git_lfs", "store_location")

        repo = backend_git.create_test_repo("largefiles", conf)

        tip = repo.scm_instance().get_commit()
        node = tip.get_node(b"1MB.zip")

        # extract the stored LF node into the origin cache
        repo_lfs_store: str = os.path.join(repo.repo_path, repo.repo_name, "lfs_store")

        oid: str = "7b331c02e313c7599d5a90212e17e6d3cb729bd2e1c9b873c302a63c95a2f9bf"
        # where the OID actually lives INSIDE the repo...
        oid_path = os.path.join(repo_lfs_store, oid)

        # Note: the oid path depends on LFSOidStore.store_suffix; if that
        # changes, update the line below accordingly.
        oid_destination = os.path.join(git_largefiles_store, f"objects/{oid[:2]}/{oid[2:4]}/{oid}")
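        # For reference, a minimal sketch of the sharded layout assumed above
        # (first two hex chars, next two hex chars, then the full oid), as a
        # hypothetical helper:
        #
        #   def lfs_object_path(store: str, oid: str) -> str:
        #       return os.path.join(store, f"objects/{oid[:2]}/{oid[2:4]}/{oid}")
        #
        # For the oid above this resolves to .../objects/7b/33/7b331c02e3...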

        spec_path = os.path.dirname(oid_destination)
        os.makedirs(spec_path, exist_ok=True)
        shutil.copy(oid_path, oid_destination)

        lf_node = node.get_largefile_node()

        assert lf_node.is_largefile() is True
        assert lf_node.size == 1024000
        assert lf_node.name == b"1MB.zip"


@pytest.mark.usefixtures("vcs_repository_support")
class TestGitSpecificWithRepo(BackendTestMixin):
    @classmethod
    def _get_commits(cls):
        return [
            {
                "message": "Initial",
                "author": "Joe Doe <joe.doe@example.com>",
                "date": datetime.datetime(2010, 1, 1, 20),
                "added": [
                    FileNode(b"foobar/static/js/admin/base.js", content=b"base"),
                    FileNode(b"foobar/static/admin", content=b"admin", mode=0o120000),  # this is a link
                    FileNode(b"foo", content=b"foo"),
                ],
            },
            {
                "message": "Second",
                "author": "Joe Doe <joe.doe@example.com>",
                "date": datetime.datetime(2010, 1, 1, 22),
                "added": [
                    FileNode(b"foo2", content=b"foo2"),
                ],
            },
        ]

    def test_paths_slow_traversing(self):
        commit = self.repo.get_commit()
        assert commit.get_node(b"foobar/static/js/admin/base.js").content == b"base"

    def test_paths_fast_traversing(self):
        commit = self.repo.get_commit()
        assert commit.get_node(b"foobar/static/js/admin/base.js").content == b"base"

    def test_get_diff_runs_git_command_with_hashes(self):
        comm1 = self.repo[0]
        comm2 = self.repo[1]

        with mock.patch.object(self.repo, "_remote", return_value=mock.Mock()) as remote_mock:
            remote_mock.diff = mock.MagicMock(side_effect=callable_get_diff)
            self.repo.get_diff(comm1, comm2)

        remote_mock.diff.assert_called_once_with(
            comm1.raw_id, comm2.raw_id, file_filter=None, opt_ignorews=False, context=3
        )

    def test_get_diff_runs_git_command_with_str_hashes(self):
        comm2 = self.repo[1]

        with mock.patch.object(self.repo, "_remote", return_value=mock.Mock()) as remote_mock:
            remote_mock.diff = mock.MagicMock(side_effect=callable_get_diff)
            self.repo.get_diff(self.repo.EMPTY_COMMIT, comm2)

        remote_mock.diff.assert_called_once_with(
            self.repo.EMPTY_COMMIT.raw_id, comm2.raw_id, file_filter=None, opt_ignorews=False, context=3
        )

    def test_get_diff_runs_git_command_with_path_if_its_given(self):
        comm1 = self.repo[0]
        comm2 = self.repo[1]

        with mock.patch.object(self.repo, "_remote", return_value=mock.Mock()) as remote_mock:
            remote_mock.diff = mock.MagicMock(side_effect=callable_get_diff)
            self.repo.get_diff(comm1, comm2, "foo")

        remote_mock.diff.assert_called_once_with(
            self.repo._lookup_commit(0), comm2.raw_id, file_filter="foo", opt_ignorews=False, context=3
        )


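# Hedged note: ``callable_get_diff`` is referenced but not shown in this
# excerpt; it is presumably a module-level stub defined earlier in this test
# file, used as the mock's ``side_effect`` so that ``remote_mock.diff``
# records the exact arguments the backend passes through.
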
@pytest.mark.usefixtures("vcs_repository_support")
class TestGitRegression(BackendTestMixin):
    @classmethod
    def _get_commits(cls):
        return [
            {
                "message": "Initial",
                "author": "Joe Doe <joe.doe@example.com>",
                "date": datetime.datetime(2010, 1, 1, 20),
                "added": [
                    FileNode(b"bot/__init__.py", content=b"base"),
                    FileNode(b"bot/templates/404.html", content=b"base"),
                    FileNode(b"bot/templates/500.html", content=b"base"),
                ],
            },
            {
                "message": "Second",
                "author": "Joe Doe <joe.doe@example.com>",
                "date": datetime.datetime(2010, 1, 1, 22),
                "added": [
                    FileNode(b"bot/build/migrations/1.py", content=b"foo2"),
                    FileNode(b"bot/build/migrations/2.py", content=b"foo2"),
                    FileNode(b"bot/build/static/templates/f.html", content=b"foo2"),
                    FileNode(b"bot/build/static/templates/f1.html", content=b"foo2"),
                    FileNode(b"bot/build/templates/err.html", content=b"foo2"),
                    FileNode(b"bot/build/templates/err2.html", content=b"foo2"),
                ],
            },
        ]

    @pytest.mark.parametrize(
        "path, expected_paths",
        [
            (b"bot", ["bot/build", "bot/templates", "bot/__init__.py"]),
            (b"bot/build", ["bot/build/migrations", "bot/build/static", "bot/build/templates"]),
            (b"bot/build/static", ["bot/build/static/templates"]),
            (
                b"bot/build/static/templates",
                ["bot/build/static/templates/f.html", "bot/build/static/templates/f1.html"],
            ),
            (b"bot/build/templates", ["bot/build/templates/err.html", "bot/build/templates/err2.html"]),
            (b"bot/templates/", ["bot/templates/404.html", "bot/templates/500.html"]),
        ],
    )
    def test_similar_paths(self, path, expected_paths):
        commit = self.repo.get_commit()
        paths = [n.path for n in commit.get_nodes(path)]
        assert paths == expected_paths


class TestDiscoverGitVersion(object):
    def test_returns_git_version(self, baseapp):
        version = discover_git_version()
        assert version

    def test_returns_empty_string_without_vcsserver(self):
        mock_connection = mock.Mock()
        mock_connection.discover_git_version = mock.Mock(side_effect=Exception)
        with mock.patch("rhodecode.lib.vcs.connection.Git", mock_connection):
            version = discover_git_version()
        assert version == ""


class TestGetSubmoduleUrl(object):
    def test_submodules_file_found(self):
        commit = GitCommit(repository=mock.Mock(), raw_id="abcdef12", idx=1)
        node = mock.Mock()

        with mock.patch.object(commit, "get_node", return_value=node) as get_node_mock:
            node.str_content = (
                '[submodule "subrepo1"]\n'
                "\tpath = subrepo1\n"
                "\turl = https://code.rhodecode.com/dulwich\n"
            )
            result = commit._get_submodule_url(b"subrepo1")
            get_node_mock.assert_called_once_with(b".gitmodules")
            assert result == "https://code.rhodecode.com/dulwich"

    def test_complex_submodule_path(self):
        commit = GitCommit(repository=mock.Mock(), raw_id="abcdef12", idx=1)
        node = mock.Mock()

        with mock.patch.object(commit, "get_node", return_value=node) as get_node_mock:
            node.str_content = (
                '[submodule "complex/subrepo/path"]\n'
                "\tpath = complex/subrepo/path\n"
                "\turl = https://code.rhodecode.com/dulwich\n"
            )
            result = commit._get_submodule_url(b"complex/subrepo/path")
            get_node_mock.assert_called_once_with(b".gitmodules")
            assert result == "https://code.rhodecode.com/dulwich"

    def test_submodules_file_not_found(self):
        commit = GitCommit(repository=mock.Mock(), raw_id="abcdef12", idx=1)
        with mock.patch.object(commit, "get_node", side_effect=NodeDoesNotExistError):
            result = commit._get_submodule_url(b"complex/subrepo/path")
            assert result is None

    def test_path_not_found(self):
        commit = GitCommit(repository=mock.Mock(), raw_id="abcdef12", idx=1)
        node = mock.Mock()

        with mock.patch.object(commit, "get_node", return_value=node) as get_node_mock:
            node.str_content = (
                '[submodule "subrepo1"]\n'
                "\tpath = subrepo1\n"
                "\turl = https://code.rhodecode.com/dulwich\n"
            )
            result = commit._get_submodule_url(b"subrepo2")
            get_node_mock.assert_called_once_with(b".gitmodules")
            assert result is None

    def test_returns_cached_values(self):
        commit = GitCommit(repository=mock.Mock(), raw_id="abcdef12", idx=1)
        node = mock.Mock()

        with mock.patch.object(commit, "get_node", return_value=node) as get_node_mock:
            node.str_content = (
                '[submodule "subrepo1"]\n'
                "\tpath = subrepo1\n"
                "\turl = https://code.rhodecode.com/dulwich\n"
            )
            for _ in range(3):
                commit._get_submodule_url(b"subrepo1")
            get_node_mock.assert_called_once_with(b".gitmodules")

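    # Hedged note: the single ``get_node`` call asserted above indicates that
    # ``_get_submodule_url`` parses ``.gitmodules`` once and serves repeated
    # lookups from a per-commit cache.
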
    def test_get_node_returns_a_link(self):
        repository = mock.Mock()
        repository.alias = "git"
        commit = GitCommit(repository=repository, raw_id="abcdef12", idx=1)
        submodule_url = "https://code.rhodecode.com/dulwich"
        get_id_patch = mock.patch.object(commit, "_get_path_tree_id_and_type", return_value=(1, NodeKind.SUBMODULE))
        get_submodule_patch = mock.patch.object(commit, "_get_submodule_url", return_value=submodule_url)

        with get_id_patch, get_submodule_patch as submodule_mock:
            node = commit.get_node(b"/abcde")

        submodule_mock.assert_called_once_with(b"/abcde")
        assert type(node) == SubModuleNode
        assert node.url == submodule_url

    def test_get_nodes_returns_links(self):
        repository = mock.MagicMock()
        repository.alias = "git"
        # obj_name, stat_, tree_item_id, node_kind, pre_load_data
        repository._remote.get_nodes.return_value = [(b"subrepo", "stat", 1, NodeKind.SUBMODULE, [])]
        commit = GitCommit(repository=repository, raw_id="abcdef12", idx=1)
        submodule_url = "https://code.rhodecode.com/dulwich"

        get_id_patch = mock.patch.object(commit, "_get_path_tree_id_and_type", return_value=(1, NodeKind.DIR))
        get_submodule_patch = mock.patch.object(commit, "_get_submodule_url", return_value=submodule_url)

        with get_id_patch, get_submodule_patch as submodule_mock:
            nodes = commit.get_nodes(b"/abcde")

        assert len(nodes) == 1
        assert type(nodes[0]) == SubModuleNode
        assert nodes[0].url == submodule_url
        submodule_mock.assert_called_once_with(b"/abcde/subrepo")
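    # Hedged note: per this commit's fix, directory listings now go through
    # the remote's ``get_nodes`` (whose tuples carry an extra
    # ``pre_load_data`` element) rather than ``tree_items``, and submodule
    # paths are passed around as bytestrings.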


class TestGetShadowInstance(object):
    @pytest.fixture()
    def repo(self, vcsbackend_git):
        _git_repo = vcsbackend_git.repo

        mock.patch.object(_git_repo, "config", mock.Mock())
        connection_mock = mock.Mock(unsafe=True, name="connection.Hg")

        mock.patch("rhodecode.lib.vcs.connection.Git", connection_mock)
        return _git_repo

    def test_getting_shadow_instance_copies_config(self, repo):
        shadow = repo.get_shadow_instance(repo.path)
        assert shadow.config.serialize() == repo.config.serialize()