fix(file-caching): fix cases where an old cache entry, created before the switch to operating on bytestrings, was still being used
super-admin
r5651:bad147da default
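The changes below touch RhodeCode's repo-file caching: readme and file-tree values are cached keyed on function arguments, so entries computed by the older str-based code could still be served after the code switched to operating on bytestrings. As a minimal sketch of the mechanism, using plain dogpile.cache rather than RhodeCode's conditional_cache_on_arguments wrapper and a hypothetical CACHE_VER constant: an argument-keyed cache keeps returning old values until the key or namespace changes, which is why a version component in the namespace forces a clean slate.

    from dogpile.cache import make_region

    region = make_region().configure("dogpile.cache.memory")

    # Bumping this version changes the namespace, so entries written by
    # older code (e.g. str-based values) can never be read again.
    CACHE_VER = 2

    @region.cache_on_arguments(namespace=f"repo.{CACHE_VER}")
    def compute_file_tree(repo_id, commit_id, f_path):
        # the new code returns bytes; stale entries from before the
        # bytestring switch would have held str
        return b"rendered tree"

This is the same idea as the rc_cache.FILE_TREE_CACHE_VER component already embedded in the file-tree cache namespace further down.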
@@ -1,987 +1,985
1 1 # Copyright (C) 2016-2024 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 import time
20 20 import logging
21 21 import operator
22 22
23 23 from pyramid.httpexceptions import HTTPFound, HTTPForbidden, HTTPBadRequest
24 24
25 25 from rhodecode.lib import helpers as h, diffs, rc_cache
26 26 from rhodecode.lib.str_utils import safe_str
27 27 from rhodecode.lib.utils import repo_name_slug
28 28 from rhodecode.lib.utils2 import (
29 29 StrictAttributeDict,
30 30 str2bool,
31 31 safe_int,
32 32 datetime_to_time,
33 33 )
34 34 from rhodecode.lib.markup_renderer import MarkupRenderer, relative_links
35 35 from rhodecode.lib.vcs.backends.base import EmptyCommit
36 36 from rhodecode.lib.vcs.exceptions import RepositoryRequirementError
37 37 from rhodecode.model import repo
38 38 from rhodecode.model import repo_group
39 39 from rhodecode.model import user_group
40 40 from rhodecode.model import user
41 41 from rhodecode.model.db import User
42 42 from rhodecode.model.scm import ScmModel
43 43 from rhodecode.model.settings import VcsSettingsModel, IssueTrackerSettingsModel
44 44 from rhodecode.model.repo import ReadmeFinder
45 45
46 46 log = logging.getLogger(__name__)
47 47
48 48
49 49 ADMIN_PREFIX: str = "/_admin"
50 50 STATIC_FILE_PREFIX: str = "/_static"
51 51
52 52 URL_NAME_REQUIREMENTS = {
53 53 # group names can have a slash in them, but they must not end with a slash
54 54 "group_name": r".*?[^/]",
55 55 "repo_group_name": r".*?[^/]",
56 56 # repo names can have a slash in them, but they must not end with a slash
57 57 "repo_name": r".*?[^/]",
58 58 # file path eats up everything at the end
59 59 "f_path": r".*",
60 60 # reference types
61 61 "source_ref_type": r"(branch|book|tag|rev|\%\(source_ref_type\)s)",
62 62 "target_ref_type": r"(branch|book|tag|rev|\%\(target_ref_type\)s)",
63 63 }
64 64
65 65
66 66 def add_route_with_slash(config, name, pattern, **kw):
67 67 config.add_route(name, pattern, **kw)
68 68 if not pattern.endswith("/"):
69 69 config.add_route(name + "_slash", pattern + "/", **kw)
70 70
71 71
72 72 def add_route_requirements(route_path, requirements=None):
73 73 """
74 74 Adds regex requirements to pyramid routes using a mapping dict
75 75 e.g::
76 76 add_route_requirements('{repo_name}/settings')
77 77 """
78 78 requirements = requirements or URL_NAME_REQUIREMENTS
79 79 for key, regex in list(requirements.items()):
80 80 route_path = route_path.replace("{%s}" % key, "{%s:%s}" % (key, regex))
81 81 return route_path
82 82
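For illustration, a hedged usage sketch (the route name and pattern are hypothetical): combined with URL_NAME_REQUIREMENTS above, the helper turns plain placeholders into regex-constrained pyramid placeholders.

    from pyramid.config import Configurator

    config = Configurator()
    # "/{repo_name}/settings" expands to "/{repo_name:.*?[^/]}/settings",
    # so repo names may contain slashes but must not end with one.
    config.add_route("repo_settings", add_route_requirements("/{repo_name}/settings"))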
83 83
84 84 def get_format_ref_id(repo):
85 85 """Returns a `repo` specific reference formatter function"""
86 86 if h.is_svn(repo):
87 87 return _format_ref_id_svn
88 88 else:
89 89 return _format_ref_id
90 90
91 91
92 92 def _format_ref_id(name, raw_id):
93 93 """Default formatting of a given reference `name`"""
94 94 return name
95 95
96 96
97 97 def _format_ref_id_svn(name, raw_id):
98 98 """Special way of formatting a reference for Subversion including path"""
99 99 return f"{name}@{raw_id}"
100 100
101 101
102 102 class TemplateArgs(StrictAttributeDict):
103 103 pass
104 104
105 105
106 106 class BaseAppView(object):
107 107 DONT_CHECKOUT_VIEWS = ["channelstream_connect", "ops_ping"]
108 108 EXTRA_VIEWS_TO_IGNORE = ['login', 'register', 'logout']
109 109 SETUP_2FA_VIEW = 'setup_2fa'
110 110 VERIFY_2FA_VIEW = 'check_2fa'
111 111
112 112 def __init__(self, context, request):
113 113 self.request = request
114 114 self.context = context
115 115 self.session = request.session
116 116 if not hasattr(request, "user"):
117 117 # NOTE(marcink): edge case, we ended up in matched route
118 118 # but probably outside of web-app context, e.g. API CALL/VCS CALL
119 119 if hasattr(request, "vcs_call") or hasattr(request, "rpc_method"):
120 120 log.warning("Unable to process request `%s` in this scope", request)
121 121 raise HTTPBadRequest()
122 122
123 123 self._rhodecode_user = request.user # auth user
124 124 self._rhodecode_db_user = self._rhodecode_user.get_instance()
125 125 self.user_data = self._rhodecode_db_user.user_data if self._rhodecode_db_user else {}
126 126 self._maybe_needs_password_change(
127 127 request.matched_route.name, self._rhodecode_db_user
128 128 )
129 129 self._maybe_needs_2fa_configuration(
130 130 request.matched_route.name, self._rhodecode_db_user
131 131 )
132 132 self._maybe_needs_2fa_check(
133 133 request.matched_route.name, self._rhodecode_db_user
134 134 )
135 135
136 136 def _maybe_needs_password_change(self, view_name, user_obj):
137 137 if view_name in self.DONT_CHECKOUT_VIEWS:
138 138 return
139 139
140 140 log.debug(
141 141 "Checking if user %s needs password change on view %s", user_obj, view_name
142 142 )
143 143
144 144 skip_user_views = [
145 145 "logout",
146 146 "login",
147 147 "check_2fa",
148 148 "my_account_password",
149 149 "my_account_password_update",
150 150 ]
151 151
152 152 if not user_obj:
153 153 return
154 154
155 155 if user_obj.username == User.DEFAULT_USER:
156 156 return
157 157
158 158 now = time.time()
159 159 should_change = self.user_data.get("force_password_change")
160 160 change_after = safe_int(should_change) or 0
161 161 if should_change and now > change_after:
162 162 log.debug("User %s requires password change", user_obj)
163 163 h.flash(
164 164 "You are required to change your password",
165 165 "warning",
166 166 ignore_duplicate=True,
167 167 )
168 168
169 169 if view_name not in skip_user_views:
170 170 raise HTTPFound(self.request.route_path("my_account_password"))
171 171
172 172 def _maybe_needs_2fa_configuration(self, view_name, user_obj):
173 173 if view_name in self.DONT_CHECKOUT_VIEWS + self.EXTRA_VIEWS_TO_IGNORE:
174 174 return
175 175
176 176 if not user_obj:
177 177 return
178 178
179 179 if user_obj.needs_2fa_configure and view_name != self.SETUP_2FA_VIEW:
180 180 h.flash(
181 181 "You are required to configure 2FA",
182 182 "warning",
183 183 ignore_duplicate=False,
184 184 )
185 185 # Special case for users created "on the fly" (ldap case for new user)
186 186 user_obj.check_2fa_required = False
187 187 raise HTTPFound(self.request.route_path(self.SETUP_2FA_VIEW))
188 188
189 189 def _maybe_needs_2fa_check(self, view_name, user_obj):
190 190 if view_name in self.DONT_CHECKOUT_VIEWS + self.EXTRA_VIEWS_TO_IGNORE:
191 191 return
192 192
193 193 if not user_obj:
194 194 return
195 195
196 196 if user_obj.check_2fa_required and view_name != self.VERIFY_2FA_VIEW:
197 197 raise HTTPFound(self.request.route_path(self.VERIFY_2FA_VIEW))
198 198
199 199 def _log_creation_exception(self, e, repo_name):
200 200 _ = self.request.translate
201 201 reason = None
202 202 if len(e.args) == 2:
203 203 reason = e.args[1]
204 204
205 205 if reason == "INVALID_CERTIFICATE":
206 206 log.exception("Exception creating a repository: invalid certificate")
207 207 msg = _("Error creating repository %s: invalid certificate") % repo_name
208 208 else:
209 209 log.exception("Exception creating a repository")
210 210 msg = _("Error creating repository %s") % repo_name
211 211 return msg
212 212
213 213 def _get_local_tmpl_context(self, include_app_defaults=True):
214 214 c = TemplateArgs()
215 215 c.auth_user = self.request.user
216 216 # TODO(marcink): migrate the usage of c.rhodecode_user to c.auth_user
217 217 c.rhodecode_user = self.request.user
218 218
219 219 if include_app_defaults:
220 220 from rhodecode.lib.base import attach_context_attributes
221 221
222 222 attach_context_attributes(c, self.request, self.request.user.user_id)
223 223
224 224 c.is_super_admin = c.auth_user.is_admin
225 225
226 226 c.can_create_repo = c.is_super_admin
227 227 c.can_create_repo_group = c.is_super_admin
228 228 c.can_create_user_group = c.is_super_admin
229 229
230 230 c.is_delegated_admin = False
231 231
232 232 if not c.auth_user.is_default and not c.is_super_admin:
233 233 c.can_create_repo = h.HasPermissionAny("hg.create.repository")(
234 234 user=self.request.user
235 235 )
236 236 repositories = c.auth_user.repositories_admin or c.can_create_repo
237 237
238 238 c.can_create_repo_group = h.HasPermissionAny("hg.repogroup.create.true")(
239 239 user=self.request.user
240 240 )
241 241 repository_groups = (
242 242 c.auth_user.repository_groups_admin or c.can_create_repo_group
243 243 )
244 244
245 245 c.can_create_user_group = h.HasPermissionAny("hg.usergroup.create.true")(
246 246 user=self.request.user
247 247 )
248 248 user_groups = c.auth_user.user_groups_admin or c.can_create_user_group
249 249 # delegated admin can create, or manage some objects
250 250 c.is_delegated_admin = repositories or repository_groups or user_groups
251 251 return c
252 252
253 253 def _get_template_context(self, tmpl_args, **kwargs):
254 254 local_tmpl_args = {"defaults": {}, "errors": {}, "c": tmpl_args}
255 255 local_tmpl_args.update(kwargs)
256 256 return local_tmpl_args
257 257
258 258 def load_default_context(self):
259 259 """
260 260 example:
261 261
262 262 def load_default_context(self):
263 263 c = self._get_local_tmpl_context()
264 264 c.custom_var = 'foobar'
265 265
266 266 return c
267 267 """
268 268 raise NotImplementedError("Needs implementation in view class")
269 269
270 270
271 271 class RepoAppView(BaseAppView):
272 272 def __init__(self, context, request):
273 273 super().__init__(context, request)
274 274 self.db_repo = request.db_repo
275 275 self.db_repo_name = self.db_repo.repo_name
276 276 self.db_repo_pull_requests = ScmModel().get_pull_requests(self.db_repo)
277 277 self.db_repo_artifacts = ScmModel().get_artifacts(self.db_repo)
278 278 self.db_repo_patterns = IssueTrackerSettingsModel(repo=self.db_repo)
279 279
280 280 def _handle_missing_requirements(self, error):
281 281 log.error(
282 282 "Requirements are missing for repository %s: %s",
283 283 self.db_repo_name,
284 284 safe_str(error),
285 285 )
286 286
287 287 def _prepare_and_set_clone_url(self, c):
288 288 username = ""
289 289 if self._rhodecode_user.username != User.DEFAULT_USER:
290 290 username = self._rhodecode_user.username
291 291
292 292 _def_clone_uri = c.clone_uri_tmpl
293 293 _def_clone_uri_id = c.clone_uri_id_tmpl
294 294 _def_clone_uri_ssh = c.clone_uri_ssh_tmpl
295 295
296 296 c.clone_repo_url = self.db_repo.clone_url(
297 297 user=username, uri_tmpl=_def_clone_uri
298 298 )
299 299 c.clone_repo_url_id = self.db_repo.clone_url(
300 300 user=username, uri_tmpl=_def_clone_uri_id
301 301 )
302 302 c.clone_repo_url_ssh = self.db_repo.clone_url(
303 303 uri_tmpl=_def_clone_uri_ssh, ssh=True
304 304 )
305 305
306 306 def _get_local_tmpl_context(self, include_app_defaults=True):
307 307 _ = self.request.translate
308 308 c = super()._get_local_tmpl_context(include_app_defaults=include_app_defaults)
309 309
310 310 # register common vars for this type of view
311 311 c.rhodecode_db_repo = self.db_repo
312 312 c.repo_name = self.db_repo_name
313 313 c.repository_pull_requests = self.db_repo_pull_requests
314 314 c.repository_artifacts = self.db_repo_artifacts
315 315 c.repository_is_user_following = ScmModel().is_following_repo(
316 316 self.db_repo_name, self._rhodecode_user.user_id
317 317 )
318 318 self.path_filter = PathFilter(None)
319 319
320 320 c.repository_requirements_missing = {}
321 321 try:
322 322 self.rhodecode_vcs_repo = self.db_repo.scm_instance()
323 323 # NOTE(marcink):
324 324 # comparison to None since if it's an object __bool__ is expensive to
325 325 # calculate
326 326 if self.rhodecode_vcs_repo is not None:
327 327 path_perms = self.rhodecode_vcs_repo.get_path_permissions(
328 328 c.auth_user.username
329 329 )
330 330 self.path_filter = PathFilter(path_perms)
331 331 except RepositoryRequirementError as e:
332 332 c.repository_requirements_missing = {"error": str(e)}
333 333 self._handle_missing_requirements(e)
334 334 self.rhodecode_vcs_repo = None
335 335
336 336 c.path_filter = self.path_filter # used by atom_feed_entry.mako
337 337
338 338 if self.rhodecode_vcs_repo is None:
339 339 # unable to fetch this repo as vcs instance, report back to user
340 340 log.debug(
341 341 "Repository was not found on filesystem, check if it exists or is not damaged"
342 342 )
343 343 h.flash(
344 344 _(
345 345 "The repository `%(repo_name)s` cannot be loaded in filesystem. "
346 346 "Please check if it exist, or is not damaged."
347 347 )
348 348 % {"repo_name": c.repo_name},
349 349 category="error",
350 350 ignore_duplicate=True,
351 351 )
352 352 if c.repository_requirements_missing:
353 353 route = self.request.matched_route.name
354 354 if route.startswith(("edit_repo", "repo_summary")):
355 355 # allow summary and edit repo on missing requirements
356 356 return c
357 357
358 358 raise HTTPFound(
359 359 h.route_path("repo_summary", repo_name=self.db_repo_name)
360 360 )
361 361
362 362 else: # redirect if we don't show missing requirements
363 363 raise HTTPFound(h.route_path("home"))
364 364
365 365 c.has_origin_repo_read_perm = False
366 366 if self.db_repo.fork:
367 367 c.has_origin_repo_read_perm = h.HasRepoPermissionAny(
368 368 "repository.write", "repository.read", "repository.admin"
369 369 )(self.db_repo.fork.repo_name, "summary fork link")
370 370
371 371 return c
372 372
373 373 def _get_f_path_unchecked(self, matchdict, default=None):
374 374 """
375 375 Should only be used by redirects; everything else should call _get_f_path
376 376 """
377 377 f_path = matchdict.get("f_path")
378 378 if f_path:
379 379 # fix for multiple initial slashes that causes errors for GIT
380 380 return f_path.lstrip("/")
381 381
382 382 return default
383 383
384 384 def _get_f_path(self, matchdict, default=None):
385 385 f_path_match = self._get_f_path_unchecked(matchdict, default)
386 386 return self.path_filter.assert_path_permissions(f_path_match)
387 387
388 388 def _get_general_setting(self, target_repo, settings_key, default=False):
389 389 settings_model = VcsSettingsModel(repo=target_repo)
390 390 settings = settings_model.get_general_settings()
391 391 return settings.get(settings_key, default)
392 392
393 393 def _get_repo_setting(self, target_repo, settings_key, default=False):
394 394 settings_model = VcsSettingsModel(repo=target_repo)
395 395 settings = settings_model.get_repo_settings_inherited()
396 396 return settings.get(settings_key, default)
397 397
398 def _get_readme_data(self, db_repo, renderer_type, commit_id=None, path="/"):
398 def _get_readme_data(self, db_repo, renderer_type, commit_id=None, path="/", nodes=None):
399 399 log.debug("Looking for README file at path %s", path)
400 400 if commit_id:
401 401 landing_commit_id = commit_id
402 402 else:
403 403 landing_commit = db_repo.get_landing_commit()
404 404 if isinstance(landing_commit, EmptyCommit):
405 405 return None, None
406 406 landing_commit_id = landing_commit.raw_id
407 407
408 408 cache_namespace_uid = f"repo.{db_repo.repo_id}"
409 409 region = rc_cache.get_or_create_region(
410 410 "cache_repo", cache_namespace_uid, use_async_runner=False
411 411 )
412 412 start = time.time()
413 413
414 414 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid)
415 415 def generate_repo_readme(
416 repo_id, _commit_id, _repo_name, _readme_search_path, _renderer_type
416 _repo_id, _commit_id, _repo_name, _readme_search_path, _renderer_type
417 417 ):
418 readme_data = None
419 readme_filename = None
418 _readme_data = None
419 _readme_filename = None
420 420
421 421 commit = db_repo.get_commit(_commit_id)
422 422 log.debug("Searching for a README file at commit %s.", _commit_id)
423 readme_node = ReadmeFinder(_renderer_type).search(
424 commit, path=_readme_search_path
425 )
423 readme_node = ReadmeFinder(_renderer_type).search(commit, path=_readme_search_path, nodes=nodes)
426 424
427 425 if readme_node:
428 426 log.debug("Found README node: %s", readme_node)
429 427
430 428 relative_urls = {
431 429 "raw": h.route_path(
432 430 "repo_file_raw",
433 431 repo_name=_repo_name,
434 432 commit_id=commit.raw_id,
435 433 f_path=readme_node.path,
436 434 ),
437 435 "standard": h.route_path(
438 436 "repo_files",
439 437 repo_name=_repo_name,
440 438 commit_id=commit.raw_id,
441 439 f_path=readme_node.path,
442 440 ),
443 441 }
444 442
445 readme_data = self._render_readme_or_none(
443 _readme_data = self._render_readme_or_none(
446 444 commit, readme_node, relative_urls
447 445 )
448 readme_filename = readme_node.str_path
446 _readme_filename = readme_node.str_path
449 447
450 return readme_data, readme_filename
448 return _readme_data, _readme_filename
451 449
452 450 readme_data, readme_filename = generate_repo_readme(
453 451 db_repo.repo_id,
454 452 landing_commit_id,
455 453 db_repo.repo_name,
456 454 path,
457 renderer_type,
455 renderer_type
458 456 )
459 457
460 458 compute_time = time.time() - start
461 459 log.debug(
462 460 "Repo README for path %s generated and computed in %.4fs",
463 461 path,
464 462 compute_time,
465 463 )
466 464 return readme_data, readme_filename
467 465
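One detail worth noting about the caching in _get_readme_data (assuming the decorator derives cache keys from the inner function's call arguments, in the dogpile cache_on_arguments style): values reaching generate_repo_readme through the closure, such as db_repo and the new nodes argument, are not part of the cache key; only the explicit parameters are. A runnable sketch of the consequence, using plain dogpile.cache with made-up values:

    from dogpile.cache import make_region

    region = make_region().configure("dogpile.cache.memory")
    nodes = {"README.md": b"..."}  # closure state, NOT part of the key

    @region.cache_on_arguments(namespace="repo.1")
    def generate(_repo_id, _commit_id):
        # changing `nodes` later does not produce a new cache key
        return sorted(nodes)

    generate(7, "abc")             # computes and caches
    nodes["OTHER.md"] = b"..."
    generate(7, "abc")             # served from cache; new entry is invisible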
468 466 def _render_readme_or_none(self, commit, readme_node, relative_urls):
469 467 log.debug("Found README file `%s` rendering...", readme_node.path)
470 468 renderer = MarkupRenderer()
471 469 try:
472 470 html_source = renderer.render(
473 471 readme_node.str_content, filename=readme_node.path
474 472 )
475 473 if relative_urls:
476 474 return relative_links(html_source, relative_urls)
477 475 return html_source
478 476 except Exception:
479 477 log.exception("Exception while trying to render the README")
480 478
481 479 def get_recache_flag(self):
482 480 for flag_name in ["force_recache", "force-recache", "no-cache"]:
483 481 flag_val = self.request.GET.get(flag_name)
484 482 if str2bool(flag_val):
485 483 return True
486 484 return False
487 485
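str2bool is what makes the flags above forgiving about spelling; assuming the usual truthy-string semantics, absent flags evaluate to False and any recognized spelling forces a recache. A quick check (the URL is illustrative):

    # GET /myrepo/summary?force_recache=1 would bypass cached values
    from rhodecode.lib.utils2 import str2bool

    for flag in ("1", "true", "yes", "0", None):
        print(flag, str2bool(flag))  # truthy spellings -> True, None -> False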
488 486 def get_commit_preload_attrs(cls):
489 487 pre_load = [
490 488 "author",
491 489 "branch",
492 490 "date",
493 491 "message",
494 492 "parents",
495 493 "obsolete",
496 494 "phase",
497 495 "hidden",
498 496 ]
499 497 return pre_load
500 498
501 499
502 500 class PathFilter(object):
503 501 # Expects an instance of BasePathPermissionChecker or None
504 502 def __init__(self, permission_checker):
505 503 self.permission_checker = permission_checker
506 504
507 505 def assert_path_permissions(self, path):
508 506 if self.path_access_allowed(path):
509 507 return path
510 508 raise HTTPForbidden()
511 509
512 510 def path_access_allowed(self, path):
513 511 log.debug("Checking ACL permissions for PathFilter for `%s`", path)
514 512 if self.permission_checker:
515 513 has_access = path and self.permission_checker.has_access(path)
516 514 log.debug(
517 515 "ACL Permissions checker enabled, ACL Check has_access: %s", has_access
518 516 )
519 517 return has_access
520 518
521 519 log.debug("ACL permissions checker not enabled, skipping...")
522 520 return True
523 521
524 522 def filter_patchset(self, patchset):
525 523 if not self.permission_checker or not patchset:
526 524 return patchset, False
527 525 had_filtered = False
528 526 filtered_patchset = []
529 527 for patch in patchset:
530 528 filename = patch.get("filename", None)
531 529 if not filename or self.permission_checker.has_access(filename):
532 530 filtered_patchset.append(patch)
533 531 else:
534 532 had_filtered = True
535 533 if had_filtered:
536 534 if isinstance(patchset, diffs.LimitedDiffContainer):
537 535 filtered_patchset = diffs.LimitedDiffContainer(
538 536 patchset.diff_limit, patchset.cur_diff_size, filtered_patchset
539 537 )
540 538 return filtered_patchset, True
541 539 else:
542 540 return patchset, False
543 541
544 542 def render_patchset_filtered(
545 543 self, diffset, patchset, source_ref=None, target_ref=None
546 544 ):
547 545 filtered_patchset, has_hidden_changes = self.filter_patchset(patchset)
548 546 result = diffset.render_patchset(
549 547 filtered_patchset, source_ref=source_ref, target_ref=target_ref
550 548 )
551 549 result.has_hidden_changes = has_hidden_changes
552 550 return result
553 551
554 552 def get_raw_patch(self, diff_processor):
555 553 if self.permission_checker is None:
556 554 return diff_processor.as_raw()
557 555 elif self.permission_checker.has_full_access:
558 556 return diff_processor.as_raw()
559 557 else:
560 558 return "# Repository has user-specific filters, raw patch generation is disabled."
561 559
562 560 @property
563 561 def is_enabled(self):
564 562 return self.permission_checker is not None
565 563
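A minimal sketch of how PathFilter behaves with and without a checker (AllowDocsOnly is a stand-in for a BasePathPermissionChecker implementation, not RhodeCode's real class):

    class AllowDocsOnly:
        # stand-in permission checker exposing the interface PathFilter uses
        has_full_access = False

        def has_access(self, path):
            return path.startswith("docs/")

    unrestricted = PathFilter(None)
    restricted = PathFilter(AllowDocsOnly())

    unrestricted.path_access_allowed("src/secret.py")    # True, no checker set
    restricted.path_access_allowed("docs/index.rst")     # True
    restricted.assert_path_permissions("src/secret.py")  # raises HTTPForbidden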
566 564
567 565 class RepoGroupAppView(BaseAppView):
568 566 def __init__(self, context, request):
569 567 super().__init__(context, request)
570 568 self.db_repo_group = request.db_repo_group
571 569 self.db_repo_group_name = self.db_repo_group.group_name
572 570
573 571 def _get_local_tmpl_context(self, include_app_defaults=True):
574 572 _ = self.request.translate
575 573 c = super()._get_local_tmpl_context(include_app_defaults=include_app_defaults)
576 574 c.repo_group = self.db_repo_group
577 575 return c
578 576
579 577 def _revoke_perms_on_yourself(self, form_result):
580 578 _updates = [
581 579 u
582 580 for u in form_result["perm_updates"]
583 581 if self._rhodecode_user.user_id == int(u[0])
584 582 ]
585 583 _additions = [
586 584 u
587 585 for u in form_result["perm_additions"]
588 586 if self._rhodecode_user.user_id == int(u[0])
589 587 ]
590 588 _deletions = [
591 589 u
592 590 for u in form_result["perm_deletions"]
593 591 if self._rhodecode_user.user_id == int(u[0])
594 592 ]
595 593 admin_perm = "group.admin"
596 594 if (
597 595 _updates
598 596 and _updates[0][1] != admin_perm
599 597 or _additions
600 598 and _additions[0][1] != admin_perm
601 599 or _deletions
602 600 and _deletions[0][1] != admin_perm
603 601 ):
604 602 return True
605 603 return False
606 604
607 605
608 606 class UserGroupAppView(BaseAppView):
609 607 def __init__(self, context, request):
610 608 super().__init__(context, request)
611 609 self.db_user_group = request.db_user_group
612 610 self.db_user_group_name = self.db_user_group.users_group_name
613 611
614 612
615 613 class UserAppView(BaseAppView):
616 614 def __init__(self, context, request):
617 615 super().__init__(context, request)
618 616 self.db_user = request.db_user
619 617 self.db_user_id = self.db_user.user_id
620 618
621 619 _ = self.request.translate
622 620 if not request.db_user_supports_default:
623 621 if self.db_user.username == User.DEFAULT_USER:
624 622 h.flash(
625 623 _("Editing user `{}` is disabled.".format(User.DEFAULT_USER)),
626 624 category="warning",
627 625 )
628 626 raise HTTPFound(h.route_path("users"))
629 627
630 628
631 629 class DataGridAppView(object):
632 630 """
633 631 Common class providing re-usable grid rendering components
634 632 """
635 633
636 634 def _extract_ordering(self, request, column_map=None):
637 635 column_map = column_map or {}
638 636 column_index = safe_int(request.GET.get("order[0][column]"))
639 637 order_dir = request.GET.get("order[0][dir]", "desc")
640 638 order_by = request.GET.get("columns[%s][data][sort]" % column_index, "name_raw")
641 639
642 640 # translate datatable to DB columns
643 641 order_by = column_map.get(order_by) or order_by
644 642
645 643 search_q = request.GET.get("search[value]")
646 644 return search_q, order_by, order_dir
647 645
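_extract_ordering unpacks DataTables-style query parameters. A hedged example of the expected request shape (the column index and values are hypothetical):

    from types import SimpleNamespace

    request = SimpleNamespace(GET={
        "order[0][column]": "2",
        "order[0][dir]": "desc",
        "columns[2][data][sort]": "last_activity",
        "search[value]": "rhodecode",
    })
    # _extract_ordering(request) -> ("rhodecode", "last_activity", "desc"),
    # with "last_activity" first mapped through column_map when one is given.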
648 646 def _extract_chunk(self, request):
649 647 start = safe_int(request.GET.get("start"), 0)
650 648 length = safe_int(request.GET.get("length"), 25)
651 649 draw = safe_int(request.GET.get("draw"))
652 650 return draw, start, length
653 651
654 652 def _get_order_col(self, order_by, model):
655 653 if isinstance(order_by, str):
656 654 try:
657 655 return operator.attrgetter(order_by)(model)
658 656 except AttributeError:
659 657 return None
660 658 else:
661 659 return order_by
662 660
663 661
664 662 class BaseReferencesView(RepoAppView):
665 663 """
666 664 Base for reference view for branches, tags and bookmarks.
667 665 """
668 666
669 667 def load_default_context(self):
670 668 c = self._get_local_tmpl_context()
671 669 return c
672 670
673 671 def load_refs_context(self, ref_items, partials_template):
674 672 _render = self.request.get_partial_renderer(partials_template)
675 673 pre_load = ["author", "date", "message", "parents"]
676 674
677 675 is_svn = h.is_svn(self.rhodecode_vcs_repo)
678 676 is_hg = h.is_hg(self.rhodecode_vcs_repo)
679 677
680 678 format_ref_id = get_format_ref_id(self.rhodecode_vcs_repo)
681 679
682 680 closed_refs = {}
683 681 if is_hg:
684 682 closed_refs = self.rhodecode_vcs_repo.branches_closed
685 683
686 684 data = []
687 685 for ref_name, commit_id in ref_items:
688 686 commit = self.rhodecode_vcs_repo.get_commit(
689 687 commit_id=commit_id, pre_load=pre_load
690 688 )
691 689 closed = ref_name in closed_refs
692 690
693 691 # TODO: johbo: Unify generation of reference links
694 692 use_commit_id = "/" in ref_name or is_svn
695 693
696 694 if use_commit_id:
697 695 files_url = h.route_path(
698 696 "repo_files",
699 697 repo_name=self.db_repo_name,
700 698 f_path=ref_name if is_svn else "",
701 699 commit_id=commit_id,
702 700 _query=dict(at=ref_name),
703 701 )
704 702
705 703 else:
706 704 files_url = h.route_path(
707 705 "repo_files",
708 706 repo_name=self.db_repo_name,
709 707 f_path=ref_name if is_svn else "",
710 708 commit_id=ref_name,
711 709 _query=dict(at=ref_name),
712 710 )
713 711
714 712 data.append(
715 713 {
716 714 "name": _render("name", ref_name, files_url, closed),
717 715 "name_raw": ref_name,
718 716 "closed": closed,
719 717 "date": _render("date", commit.date),
720 718 "date_raw": datetime_to_time(commit.date),
721 719 "author": _render("author", commit.author),
722 720 "commit": _render(
723 721 "commit", commit.message, commit.raw_id, commit.idx
724 722 ),
725 723 "commit_raw": commit.idx,
726 724 "compare": _render(
727 725 "compare", format_ref_id(ref_name, commit.raw_id)
728 726 ),
729 727 }
730 728 )
731 729
732 730 return data
733 731
734 732
735 733 class RepoRoutePredicate(object):
736 734 def __init__(self, val, config):
737 735 self.val = val
738 736
739 737 def text(self):
740 738 return f"repo_route = {self.val}"
741 739
742 740 phash = text
743 741
744 742 def __call__(self, info, request):
745 743 if hasattr(request, "vcs_call"):
746 744 # skip vcs calls
747 745 return
748 746
749 747 repo_name = info["match"]["repo_name"]
750 748
751 749 repo_name_parts = repo_name.split("/")
752 750 repo_slugs = [x for x in (repo_name_slug(x) for x in repo_name_parts)]
753 751
754 752 if repo_name_parts != repo_slugs:
755 753 # short-skip if the repo-name doesn't follow slug rule
756 754 log.warning(
757 755 "repo_name: %s is different than slug %s", repo_name_parts, repo_slugs
758 756 )
759 757 return False
760 758
761 759 repo_model = repo.RepoModel()
762 760
763 761 by_name_match = repo_model.get_by_repo_name(repo_name, cache=False)
764 762
765 763 def redirect_if_creating(route_info, db_repo):
766 764 skip_views = ["edit_repo_advanced_delete"]
767 765 route = route_info["route"]
768 766 # we should skip delete view so we can actually "remove" repositories
769 767 # if they get stuck in creating state.
770 768 if route.name in skip_views:
771 769 return
772 770
773 771 if db_repo.repo_state in [repo.Repository.STATE_PENDING]:
774 772 repo_creating_url = request.route_path(
775 773 "repo_creating", repo_name=db_repo.repo_name
776 774 )
777 775 raise HTTPFound(repo_creating_url)
778 776
779 777 if by_name_match:
780 778 # register this as request object we can re-use later
781 779 request.db_repo = by_name_match
782 780 request.db_repo_name = request.db_repo.repo_name
783 781
784 782 redirect_if_creating(info, by_name_match)
785 783 return True
786 784
787 785 by_id_match = repo_model.get_repo_by_id(repo_name)
788 786 if by_id_match:
789 787 request.db_repo = by_id_match
790 788 request.db_repo_name = request.db_repo.repo_name
791 789 redirect_if_creating(info, by_id_match)
792 790 return True
793 791
794 792 return False
795 793
796 794
797 795 class RepoForbidArchivedRoutePredicate(object):
798 796 def __init__(self, val, config):
799 797 self.val = val
800 798
801 799 def text(self):
802 800 return f"repo_forbid_archived = {self.val}"
803 801
804 802 phash = text
805 803
806 804 def __call__(self, info, request):
807 805 _ = request.translate
808 806 rhodecode_db_repo = request.db_repo
809 807
810 808 log.debug(
811 809 "%s checking if archived flag for repo for %s",
812 810 self.__class__.__name__,
813 811 rhodecode_db_repo.repo_name,
814 812 )
815 813
816 814 if rhodecode_db_repo.archived:
817 815 log.warning(
818 816 "Current view is not supported for archived repo:%s",
819 817 rhodecode_db_repo.repo_name,
820 818 )
821 819
822 820 h.flash(
823 821 h.literal(_("Action not supported for archived repository.")),
824 822 category="warning",
825 823 )
826 824 summary_url = request.route_path(
827 825 "repo_summary", repo_name=rhodecode_db_repo.repo_name
828 826 )
829 827 raise HTTPFound(summary_url)
830 828 return True
831 829
832 830
833 831 class RepoTypeRoutePredicate(object):
834 832 def __init__(self, val, config):
835 833 self.val = val or ["hg", "git", "svn"]
836 834
837 835 def text(self):
838 836 return f"repo_accepted_type = {self.val}"
839 837
840 838 phash = text
841 839
842 840 def __call__(self, info, request):
843 841 if hasattr(request, "vcs_call"):
844 842 # skip vcs calls
845 843 return
846 844
847 845 rhodecode_db_repo = request.db_repo
848 846
849 847 log.debug(
850 848 "%s checking repo type for %s in %s",
851 849 self.__class__.__name__,
852 850 rhodecode_db_repo.repo_type,
853 851 self.val,
854 852 )
855 853
856 854 if rhodecode_db_repo.repo_type in self.val:
857 855 return True
858 856 else:
859 857 log.warning(
860 858 "Current view is not supported for repo type:%s",
861 859 rhodecode_db_repo.repo_type,
862 860 )
863 861 return False
864 862
865 863
866 864 class RepoGroupRoutePredicate(object):
867 865 def __init__(self, val, config):
868 866 self.val = val
869 867
870 868 def text(self):
871 869 return f"repo_group_route = {self.val}"
872 870
873 871 phash = text
874 872
875 873 def __call__(self, info, request):
876 874 if hasattr(request, "vcs_call"):
877 875 # skip vcs calls
878 876 return
879 877
880 878 repo_group_name = info["match"]["repo_group_name"]
881 879
882 880 repo_group_name_parts = repo_group_name.split("/")
883 881 repo_group_slugs = [
884 882 x for x in [repo_name_slug(x) for x in repo_group_name_parts]
885 883 ]
886 884 if repo_group_name_parts != repo_group_slugs:
887 885 # short-skip if the repo-name doesn't follow slug rule
888 886 log.warning(
889 887 "repo_group_name: %s is different than slug %s",
890 888 repo_group_name_parts,
891 889 repo_group_slugs,
892 890 )
893 891 return False
894 892
895 893 repo_group_model = repo_group.RepoGroupModel()
896 894 by_name_match = repo_group_model.get_by_group_name(repo_group_name, cache=False)
897 895
898 896 if by_name_match:
899 897 # register this as request object we can re-use later
900 898 request.db_repo_group = by_name_match
901 899 request.db_repo_group_name = request.db_repo_group.group_name
902 900 return True
903 901
904 902 return False
905 903
906 904
907 905 class UserGroupRoutePredicate(object):
908 906 def __init__(self, val, config):
909 907 self.val = val
910 908
911 909 def text(self):
912 910 return f"user_group_route = {self.val}"
913 911
914 912 phash = text
915 913
916 914 def __call__(self, info, request):
917 915 if hasattr(request, "vcs_call"):
918 916 # skip vcs calls
919 917 return
920 918
921 919 user_group_id = info["match"]["user_group_id"]
922 920 user_group_model = user_group.UserGroup()
923 921 by_id_match = user_group_model.get(user_group_id, cache=False)
924 922
925 923 if by_id_match:
926 924 # register this as request object we can re-use later
927 925 request.db_user_group = by_id_match
928 926 return True
929 927
930 928 return False
931 929
932 930
933 931 class UserRoutePredicateBase(object):
934 932 supports_default = None
935 933
936 934 def __init__(self, val, config):
937 935 self.val = val
938 936
939 937 def text(self):
940 938 raise NotImplementedError()
941 939
942 940 def __call__(self, info, request):
943 941 if hasattr(request, "vcs_call"):
944 942 # skip vcs calls
945 943 return
946 944
947 945 user_id = info["match"]["user_id"]
948 946 user_model = user.User()
949 947 by_id_match = user_model.get(user_id, cache=False)
950 948
951 949 if by_id_match:
952 950 # register this as request object we can re-use later
953 951 request.db_user = by_id_match
954 952 request.db_user_supports_default = self.supports_default
955 953 return True
956 954
957 955 return False
958 956
959 957
960 958 class UserRoutePredicate(UserRoutePredicateBase):
961 959 supports_default = False
962 960
963 961 def text(self):
964 962 return f"user_route = {self.val}"
965 963
966 964 phash = text
967 965
968 966
969 967 class UserRouteWithDefaultPredicate(UserRoutePredicateBase):
970 968 supports_default = True
971 969
972 970 def text(self):
973 971 return f"user_with_default_route = {self.val}"
974 972
975 973 phash = text
976 974
977 975
978 976 def includeme(config):
979 977 config.add_route_predicate("repo_route", RepoRoutePredicate)
980 978 config.add_route_predicate("repo_accepted_types", RepoTypeRoutePredicate)
981 979 config.add_route_predicate(
982 980 "repo_forbid_when_archived", RepoForbidArchivedRoutePredicate
983 981 )
984 982 config.add_route_predicate("repo_group_route", RepoGroupRoutePredicate)
985 983 config.add_route_predicate("user_group_route", UserGroupRoutePredicate)
986 984 config.add_route_predicate("user_route_with_default", UserRouteWithDefaultPredicate)
987 985 config.add_route_predicate("user_route", UserRoutePredicate)
@@ -1,1704 +1,1601
1 1 # Copyright (C) 2011-2024 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 import itertools
20 20 import logging
21 21 import os
22 22 import collections
23 23 import urllib.request
24 24 import urllib.parse
25 25 import urllib.error
26 26 import pathlib
27 27 import time
28 28 import random
29 29
30 30 from pyramid.httpexceptions import HTTPNotFound, HTTPBadRequest, HTTPFound
31 31
32 32 from pyramid.renderers import render
33 33 from pyramid.response import Response
34 34
35 35 import rhodecode
36 36 from rhodecode.apps._base import RepoAppView
37 37
38 38
39 39 from rhodecode.lib import diffs, helpers as h, rc_cache
40 40 from rhodecode.lib import audit_logger
41 41 from rhodecode.lib.hash_utils import sha1_safe
42 42 from rhodecode.lib.archive_cache import (
43 get_archival_cache_store, get_archival_config, ArchiveCacheGenerationLock, archive_iterator)
43 get_archival_cache_store,
44 get_archival_config,
45 ArchiveCacheGenerationLock,
46 archive_iterator,
47 )
44 48 from rhodecode.lib.str_utils import safe_bytes, convert_special_chars
45 49 from rhodecode.lib.view_utils import parse_path_ref
46 50 from rhodecode.lib.exceptions import NonRelativePathError
47 from rhodecode.lib.codeblocks import (
48 filenode_as_lines_tokens, filenode_as_annotated_lines_tokens)
51 from rhodecode.lib.codeblocks import filenode_as_lines_tokens, filenode_as_annotated_lines_tokens
49 52 from rhodecode.lib.utils2 import convert_line_endings, detect_mode
50 53 from rhodecode.lib.type_utils import str2bool
51 54 from rhodecode.lib.str_utils import safe_str, safe_int, header_safe_str
52 from rhodecode.lib.auth import (
53 LoginRequired, HasRepoPermissionAnyDecorator, CSRFRequired)
55 from rhodecode.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator, CSRFRequired
54 56 from rhodecode.lib.vcs import path as vcspath
55 57 from rhodecode.lib.vcs.backends.base import EmptyCommit
56 58 from rhodecode.lib.vcs.conf import settings
57 59 from rhodecode.lib.vcs.nodes import FileNode
58 60 from rhodecode.lib.vcs.exceptions import (
59 RepositoryError, CommitDoesNotExistError, EmptyRepositoryError,
60 ImproperArchiveTypeError, VCSError, NodeAlreadyExistsError,
61 NodeDoesNotExistError, CommitError, NodeError)
61 RepositoryError,
62 CommitDoesNotExistError,
63 EmptyRepositoryError,
64 ImproperArchiveTypeError,
65 VCSError,
66 NodeAlreadyExistsError,
67 NodeDoesNotExistError,
68 CommitError,
69 NodeError,
70 )
62 71
63 72 from rhodecode.model.scm import ScmModel
64 73 from rhodecode.model.db import Repository
65 74
66 75 log = logging.getLogger(__name__)
67 76
68 77
69 def get_archive_name(db_repo_id, db_repo_name, commit_sha, ext, subrepos=False, path_sha='', with_hash=True):
78 def get_archive_name(db_repo_id, db_repo_name, commit_sha, ext, subrepos=False, path_sha="", with_hash=True):
70 79 # original backward compat name of archive
71 clean_name = safe_str(convert_special_chars(db_repo_name).replace('/', '_'))
80 clean_name = safe_str(convert_special_chars(db_repo_name).replace("/", "_"))
72 81
73 82 # e.g vcsserver-id-abcd-sub-1-abcfdef-archive-all.zip
74 83 # vcsserver-id-abcd-sub-0-abcfdef-COMMIT_SHA-PATH_SHA.zip
75 84 id_sha = sha1_safe(str(db_repo_id))[:4]
76 sub_repo = 'sub-1' if subrepos else 'sub-0'
77 commit = commit_sha if with_hash else 'archive'
78 path_marker = (path_sha if with_hash else '') or 'all'
79 archive_name = f'{clean_name}-id-{id_sha}-{sub_repo}-{commit}-{path_marker}{ext}'
85 sub_repo = "sub-1" if subrepos else "sub-0"
86 commit = commit_sha if with_hash else "archive"
87 path_marker = (path_sha if with_hash else "") or "all"
88 archive_name = f"{clean_name}-id-{id_sha}-{sub_repo}-{commit}-{path_marker}{ext}"
80 89
81 90 return archive_name
82 91
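A worked example of the name format (repo id, commit sha and path sha are made up):

    # get_archive_name(7, "group/myrepo", commit_sha="abcdef12", ext=".zip",
    #                  subrepos=False, path_sha="11aa22bb", with_hash=True)
    # clean_name = "group_myrepo"        slash replaced with "_"
    # id_sha     = sha1_safe("7")[:4]    call it "beef" for illustration
    # result     = "group_myrepo-id-beef-sub-0-abcdef12-11aa22bb.zip"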
83 92
84 93 def get_path_sha(at_path):
85 94 return safe_str(sha1_safe(at_path)[:8])
86 95
87 96
88 97 def _get_archive_spec(fname):
89 log.debug('Detecting archive spec for: `%s`', fname)
98 log.debug("Detecting archive spec for: `%s`", fname)
90 99
91 100 fileformat = None
92 101 ext = None
93 102 content_type = None
94 103 for a_type, content_type, extension in settings.ARCHIVE_SPECS:
95
96 104 if fname.endswith(extension):
97 105 fileformat = a_type
98 log.debug('archive is of type: %s', fileformat)
106 log.debug("archive is of type: %s", fileformat)
99 107 ext = extension
100 108 break
101 109
102 110 if not fileformat:
103 111 raise ValueError()
104 112
105 113 # the leftover part of the whole fname is the commit
106 114 commit_id = fname[:-len(ext)]
107 115
108 116 return commit_id, ext, fileformat, content_type
109 117
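A worked example of the parse (the commit hash is made up, and it assumes settings.ARCHIVE_SPECS maps a tgz type to the ".tar.gz" extension):

    # _get_archive_spec("deadbeefcafe.tar.gz")
    #   ext       = ".tar.gz"
    #   commit_id = "deadbeefcafe"       fname[:-len(ext)]
    # unknown extensions raise ValueError, which repo_archivefile reports
    # back as "Unknown archive type".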
110 118
111 119 class RepoFilesView(RepoAppView):
112
113 120 @staticmethod
114 121 def adjust_file_path_for_svn(f_path, repo):
115 122 """
116 123 Computes the relative path of `f_path`.
117 124
118 125 This is mainly based on prefix matching of the recognized tags and
119 126 branches in the underlying repository.
120 127 """
121 tags_and_branches = itertools.chain(
122 repo.branches.keys(),
123 repo.tags.keys())
128 tags_and_branches = itertools.chain(repo.branches.keys(), repo.tags.keys())
124 129 tags_and_branches = sorted(tags_and_branches, key=len, reverse=True)
125 130
126 131 for name in tags_and_branches:
127 if f_path.startswith(f'{name}/'):
132 if f_path.startswith(f"{name}/"):
128 133 f_path = vcspath.relpath(f_path, name)
129 134 break
130 135 return f_path
131 136
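A worked example (the branch name is hypothetical): with "branch-1" among repo.branches, a Subversion-style path is made relative to its branch prefix.

    # adjust_file_path_for_svn("branch-1/docs/readme.rst", repo)
    #   -> "docs/readme.rst"
    # candidates are sorted longest-first, so "branch-1/x" matches
    # "branch-1" rather than a shorter prefix such as "branch".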
132 137 def load_default_context(self):
133 138 c = self._get_local_tmpl_context(include_app_defaults=True)
134 139 c.rhodecode_repo = self.rhodecode_vcs_repo
135 140 c.enable_downloads = self.db_repo.enable_downloads
136 141 return c
137 142
138 def _ensure_not_locked(self, commit_id='tip'):
143 def _ensure_not_locked(self, commit_id="tip"):
139 144 _ = self.request.translate
140 145
141 146 repo = self.db_repo
142 147 if repo.enable_locking and repo.locked[0]:
143 h.flash(_('This repository has been locked by %s on %s')
144 % (h.person_by_id(repo.locked[0]),
145 h.format_date(h.time_to_datetime(repo.locked[1]))),
146 'warning')
147 files_url = h.route_path(
148 'repo_files:default_path',
149 repo_name=self.db_repo_name, commit_id=commit_id)
148 h.flash(
149 _("This repository has been locked by %s on %s")
150 % (h.person_by_id(repo.locked[0]), h.format_date(h.time_to_datetime(repo.locked[1]))),
151 "warning",
152 )
153 files_url = h.route_path("repo_files:default_path", repo_name=self.db_repo_name, commit_id=commit_id)
150 154 raise HTTPFound(files_url)
151 155
152 def forbid_non_head(self, is_head, f_path, commit_id='tip', json_mode=False):
156 def forbid_non_head(self, is_head, f_path, commit_id="tip", json_mode=False):
153 157 _ = self.request.translate
154 158
155 159 if not is_head:
156 message = _('Cannot modify file. '
157 'Given commit `{}` is not head of a branch.').format(commit_id)
158 h.flash(message, category='warning')
160 message = _("Cannot modify file. " "Given commit `{}` is not head of a branch.").format(commit_id)
161 h.flash(message, category="warning")
159 162
160 163 if json_mode:
161 164 return message
162 165
163 files_url = h.route_path(
164 'repo_files', repo_name=self.db_repo_name, commit_id=commit_id,
165 f_path=f_path)
166 files_url = h.route_path("repo_files", repo_name=self.db_repo_name, commit_id=commit_id, f_path=f_path)
166 167 raise HTTPFound(files_url)
167 168
168 def check_branch_permission(self, branch_name, commit_id='tip', json_mode=False):
169 def check_branch_permission(self, branch_name, commit_id="tip", json_mode=False):
169 170 _ = self.request.translate
170 171
171 rule, branch_perm = self._rhodecode_user.get_rule_and_branch_permission(
172 self.db_repo_name, branch_name)
173 if branch_perm and branch_perm not in ['branch.push', 'branch.push_force']:
174 message = _('Branch `{}` changes forbidden by rule {}.').format(
175 h.escape(branch_name), h.escape(rule))
176 h.flash(message, 'warning')
172 rule, branch_perm = self._rhodecode_user.get_rule_and_branch_permission(self.db_repo_name, branch_name)
173 if branch_perm and branch_perm not in ["branch.push", "branch.push_force"]:
174 message = _("Branch `{}` changes forbidden by rule {}.").format(h.escape(branch_name), h.escape(rule))
175 h.flash(message, "warning")
177 176
178 177 if json_mode:
179 178 return message
180 179
181 files_url = h.route_path(
182 'repo_files:default_path', repo_name=self.db_repo_name, commit_id=commit_id)
180 files_url = h.route_path("repo_files:default_path", repo_name=self.db_repo_name, commit_id=commit_id)
183 181
184 182 raise HTTPFound(files_url)
185 183
186 184 def _get_commit_and_path(self):
187 185 default_commit_id = self.db_repo.landing_ref_name
188 default_f_path = '/'
186 default_f_path = "/"
189 187
190 commit_id = self.request.matchdict.get('commit_id', default_commit_id)
188 commit_id = self.request.matchdict.get("commit_id", default_commit_id)
191 189 f_path = self._get_f_path(self.request.matchdict, default_f_path)
192 190
193 191 bytes_path = safe_bytes(f_path)
194 192 return commit_id, f_path, bytes_path
195 193
196 194 @classmethod
197 195 def _get_default_encoding(cls, c):
198 enc_list = getattr(c, 'default_encodings', [])
199 return enc_list[0] if enc_list else 'UTF-8'
196 enc_list = getattr(c, "default_encodings", [])
197 return enc_list[0] if enc_list else "UTF-8"
200 198
201 199 def _get_commit_or_redirect(self, commit_id, redirect_after=True):
202 200 """
203 201 This is a safe way to get commit. If an error occurs it redirects to
204 202 tip with proper message
205 203
206 204 :param commit_id: id of commit to fetch
207 205 :param redirect_after: toggle redirection
208 206 """
209 207 _ = self.request.translate
210 208
211 209 try:
212 210 return self.rhodecode_vcs_repo.get_commit(commit_id)
213 211 except EmptyRepositoryError:
214 212 if not redirect_after:
215 213 return None
216 214
217 215 add_new = upload_new = ""
218 if h.HasRepoPermissionAny(
219 'repository.write', 'repository.admin')(self.db_repo_name):
220 _url = h.route_path(
221 'repo_files_add_file',
222 repo_name=self.db_repo_name, commit_id=0, f_path='')
223 add_new = h.link_to(
224 _('add a new file'), _url, class_="alert-link")
216 if h.HasRepoPermissionAny("repository.write", "repository.admin")(self.db_repo_name):
217 _url = h.route_path("repo_files_add_file", repo_name=self.db_repo_name, commit_id=0, f_path="")
218 add_new = h.link_to(_("add a new file"), _url, class_="alert-link")
225 219
226 _url_upld = h.route_path(
227 'repo_files_upload_file',
228 repo_name=self.db_repo_name, commit_id=0, f_path='')
229 upload_new = h.link_to(
230 _('upload a new file'), _url_upld, class_="alert-link")
220 _url_upld = h.route_path("repo_files_upload_file", repo_name=self.db_repo_name, commit_id=0, f_path="")
221 upload_new = h.link_to(_("upload a new file"), _url_upld, class_="alert-link")
231 222
232 h.flash(h.literal(
233 _('There are no files yet. Click here to %s or %s.') % (add_new, upload_new)), category='warning')
234 raise HTTPFound(
235 h.route_path('repo_summary', repo_name=self.db_repo_name))
223 h.flash(
224 h.literal(_("There are no files yet. Click here to %s or %s.") % (add_new, upload_new)),
225 category="warning",
226 )
227 raise HTTPFound(h.route_path("repo_summary", repo_name=self.db_repo_name))
236 228
237 229 except (CommitDoesNotExistError, LookupError) as e:
238 msg = _('No such commit exists for this repository. Commit: {}').format(commit_id)
239 h.flash(msg, category='error')
230 msg = _("No such commit exists for this repository. Commit: {}").format(commit_id)
231 h.flash(msg, category="error")
240 232 raise HTTPNotFound()
241 233 except RepositoryError as e:
242 h.flash(h.escape(safe_str(e)), category='error')
234 h.flash(h.escape(safe_str(e)), category="error")
243 235 raise HTTPNotFound()
244 236
245 237 def _get_filenode_or_redirect(self, commit_obj, path, pre_load=None):
246 238 """
247 239 Returns file_node, if error occurs or given path is directory,
248 240 it'll redirect to top level path
249 241 """
250 242 _ = self.request.translate
251 243
252 244 try:
253 245 file_node = commit_obj.get_node(path, pre_load=pre_load)
254 246 if file_node.is_dir():
255 raise RepositoryError('The given path is a directory')
247 raise RepositoryError("The given path is a directory")
256 248 except CommitDoesNotExistError:
257 log.exception('No such commit exists for this repository')
258 h.flash(_('No such commit exists for this repository'), category='error')
249 log.exception("No such commit exists for this repository")
250 h.flash(_("No such commit exists for this repository"), category="error")
259 251 raise HTTPNotFound()
260 252 except RepositoryError as e:
261 log.warning('Repository error while fetching filenode `%s`. Err:%s', path, e)
262 h.flash(h.escape(safe_str(e)), category='error')
253 log.warning("Repository error while fetching filenode `%s`. Err:%s", path, e)
254 h.flash(h.escape(safe_str(e)), category="error")
263 255 raise HTTPNotFound()
264 256
265 257 return file_node
266 258
267 259 def _is_valid_head(self, commit_id, repo, landing_ref):
268 branch_name = sha_commit_id = ''
260 branch_name = sha_commit_id = ""
269 261 is_head = False
270 log.debug('Checking if commit_id `%s` is a head for %s.', commit_id, repo)
262 log.debug("Checking if commit_id `%s` is a head for %s.", commit_id, repo)
271 263
272 264 for _branch_name, branch_commit_id in repo.branches.items():
273 265 # simple case we pass in branch name, it's a HEAD
274 266 if commit_id == _branch_name:
275 267 is_head = True
276 268 branch_name = _branch_name
277 269 sha_commit_id = branch_commit_id
278 270 break
279 271 # case when we pass in full sha commit_id, which is a head
280 272 elif commit_id == branch_commit_id:
281 273 is_head = True
282 274 branch_name = _branch_name
283 275 sha_commit_id = branch_commit_id
284 276 break
285 277
286 278 if h.is_svn(repo) and not repo.is_empty():
287 279 # Note: Subversion only has one head.
288 280 if commit_id == repo.get_commit(commit_idx=-1).raw_id:
289 281 is_head = True
290 282 return branch_name, sha_commit_id, is_head
291 283
292 284 # checked branches, means we only need to try to get the branch/commit_sha
293 285 if repo.is_empty():
294 286 is_head = True
295 287 branch_name = landing_ref
296 288 sha_commit_id = EmptyCommit().raw_id
297 289 else:
298 290 commit = repo.get_commit(commit_id=commit_id)
299 291 if commit:
300 292 branch_name = commit.branch
301 293 sha_commit_id = commit.raw_id
302 294
303 295 return branch_name, sha_commit_id, is_head
304 296
305 297 def _get_tree_at_commit(self, c, commit_id, f_path, full_load=False, at_rev=None):
306
307 298 repo_id = self.db_repo.repo_id
308 299 force_recache = self.get_recache_flag()
309 300
310 cache_seconds = safe_int(
311 rhodecode.CONFIG.get('rc_cache.cache_repo.expiration_time'))
301 cache_seconds = rhodecode.ConfigGet().get_int("rc_cache.cache_repo.expiration_time")
312 302 cache_on = not force_recache and cache_seconds > 0
303
313 304 log.debug(
314 'Computing FILE TREE for repo_id %s commit_id `%s` and path `%s`'
315 'with caching: %s[TTL: %ss]' % (
316 repo_id, commit_id, f_path, cache_on, cache_seconds or 0))
305 "Computing FILE TREE for repo_id %s commit_id `%s` and path `%s`"
306 "with caching: %s[TTL: %ss]" % (repo_id, commit_id, f_path, cache_on, cache_seconds or 0)
307 )
317 308
318 cache_namespace_uid = f'repo.{rc_cache.FILE_TREE_CACHE_VER}.{repo_id}'
319 region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid)
309 cache_namespace_uid = f"repo.{rc_cache.FILE_TREE_CACHE_VER}.{repo_id}"
310 region = rc_cache.get_or_create_region("cache_repo", cache_namespace_uid)
320 311
321 312 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=cache_on)
322 313 def compute_file_tree(_name_hash, _repo_id, _commit_id, _f_path, _full_load, _at_rev):
323 log.debug('Generating cached file tree at for repo_id: %s, %s, %s',
324 _repo_id, _commit_id, _f_path)
314 log.debug("Generating cached file tree at for repo_id: %s, %s, %s", _repo_id, _commit_id, _f_path)
325 315
326 316 c.full_load = _full_load
327 317 return render(
328 'rhodecode:templates/files/files_browser_tree.mako',
329 self._get_template_context(c), self.request, _at_rev)
318 "rhodecode:templates/files/files_browser_tree.mako",
319 self._get_template_context(c),
320 self.request,
321 _at_rev,
322 )
330 323
331 324 return compute_file_tree(
332 self.db_repo.repo_name_hash, self.db_repo.repo_id, commit_id, f_path, full_load, at_rev)
325 self.db_repo.repo_name_hash, self.db_repo.repo_id, commit_id, f_path, full_load, at_rev
326 )
333 327
334 328 def create_pure_path(self, *parts):
335 329 # Split paths and sanitize them, removing any ../ etc
336 sanitized_path = [
337 x for x in pathlib.PurePath(*parts).parts
338 if x not in ['.', '..']]
330 sanitized_path = [x for x in pathlib.PurePath(*parts).parts if x not in [".", ".."]]
339 331
340 332 pure_path = pathlib.PurePath(*sanitized_path)
341 333 return pure_path
342 334
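A worked example of the sanitization (illustrative values): traversal components are dropped before the parts are rejoined, so "../" sequences cannot escape the intended directory.

    # create_pure_path("docs", "../../etc", "passwd")
    #   PurePath parts  -> ("docs", "..", "..", "etc", "passwd")
    #   after filtering -> PurePath("docs/etc/passwd")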
343 335 def _is_lf_enabled(self, target_repo):
344 336 lf_enabled = False
345 337
346 lf_key_for_vcs_map = {
347 'hg': 'extensions_largefiles',
348 'git': 'vcs_git_lfs_enabled'
349 }
338 lf_key_for_vcs_map = {"hg": "extensions_largefiles", "git": "vcs_git_lfs_enabled"}
350 339
351 340 lf_key_for_vcs = lf_key_for_vcs_map.get(target_repo.repo_type)
352 341
353 342 if lf_key_for_vcs:
354 343 lf_enabled = self._get_repo_setting(target_repo, lf_key_for_vcs)
355 344
356 345 return lf_enabled
357 346
358 347 @LoginRequired()
359 @HasRepoPermissionAnyDecorator(
360 'repository.read', 'repository.write', 'repository.admin')
348 @HasRepoPermissionAnyDecorator("repository.read", "repository.write", "repository.admin")
361 349 def repo_archivefile(self):
362 350 # archive cache config
363 351 from rhodecode import CONFIG
352
364 353 _ = self.request.translate
365 354 self.load_default_context()
366 355
367 subrepos = self.request.GET.get('subrepos') == 'true'
368 with_hash = str2bool(self.request.GET.get('with_hash', '1'))
356 subrepos = self.request.GET.get("subrepos") == "true"
357 with_hash = str2bool(self.request.GET.get("with_hash", "1"))
369 358
370 default_at_path = '/'
371 fname = self.request.matchdict['fname']
372 at_path = self.request.GET.get('at_path') or default_at_path
359 default_at_path = "/"
360 fname = self.request.matchdict["fname"]
361 at_path = self.request.GET.get("at_path") or default_at_path
373 362
374 363 if not self.db_repo.enable_downloads:
375 return Response(_('Downloads disabled'))
364 return Response(_("Downloads disabled"))
376 365
377 366 try:
378 367 commit_id, ext, file_format, content_type = _get_archive_spec(fname)
379 368 except ValueError:
380 return Response(_('Unknown archive type for: `{}`').format(h.escape(fname)))
369 return Response(_("Unknown archive type for: `{}`").format(h.escape(fname)))
381 370
382 371 try:
383 372 commit = self.rhodecode_vcs_repo.get_commit(commit_id)
384 373 except CommitDoesNotExistError:
385 return Response(_('Unknown commit_id {}').format(
386 h.escape(commit_id)))
374 return Response(_("Unknown commit_id {}").format(h.escape(commit_id)))
387 375 except EmptyRepositoryError:
388 return Response(_('Empty repository'))
376 return Response(_("Empty repository"))
389 377
390 378 # we used a ref, or a shorter version, let's redirect the client to use the explicit hash
391 379 if commit_id != commit.raw_id:
392 fname=f'{commit.raw_id}{ext}'
380 fname = f"{commit.raw_id}{ext}"
393 381 raise HTTPFound(self.request.current_route_path(fname=fname))
394 382
395 383 try:
396 384 at_path = commit.get_node(safe_bytes(at_path)).path or default_at_path
397 385 except Exception:
398 return Response(_('No node at path {} for this repository').format(h.escape(at_path)))
386 return Response(_("No node at path {} for this repository").format(h.escape(at_path)))
399 387
400 388 path_sha = get_path_sha(at_path)
401 389
402 390 # used for cache etc, consistent unique archive name
403 391 archive_name_key = get_archive_name(
404 self.db_repo.repo_id, self.db_repo_name, commit_sha=commit.short_id, ext=ext, subrepos=subrepos,
405 path_sha=path_sha, with_hash=True)
392 self.db_repo.repo_id,
393 self.db_repo_name,
394 commit_sha=commit.short_id,
395 ext=ext,
396 subrepos=subrepos,
397 path_sha=path_sha,
398 with_hash=True,
399 )
406 400
407 401 if not with_hash:
408 path_sha = ''
402 path_sha = ""
409 403
410 404 # what end client gets served
411 405 response_archive_name = get_archive_name(
412 self.db_repo.repo_id, self.db_repo_name, commit_sha=commit.short_id, ext=ext, subrepos=subrepos,
413 path_sha=path_sha, with_hash=with_hash)
406 self.db_repo.repo_id,
407 self.db_repo_name,
408 commit_sha=commit.short_id,
409 ext=ext,
410 subrepos=subrepos,
411 path_sha=path_sha,
412 with_hash=with_hash,
413 )
414 414
415 415 # remove extension from our archive directory name
416 416 archive_dir_name = response_archive_name[:-len(ext)]
417 417
418 archive_cache_disable = self.request.GET.get('no_cache')
418 archive_cache_disable = self.request.GET.get("no_cache")
419 419
420 420 d_cache = get_archival_cache_store(config=CONFIG)
421 421
422 422 # NOTE: we get the config to pass to a call to lazy-init the SAME type of cache on vcsserver
423 423 d_cache_conf = get_archival_config(config=CONFIG)
424 424
425 425 # This is also a cache key, and lock key
426 reentrant_lock_key = archive_name_key + '.lock'
426 reentrant_lock_key = archive_name_key + ".lock"
427 427
428 428 use_cached_archive = False
429 429 if not archive_cache_disable and archive_name_key in d_cache:
430 430 reader, metadata = d_cache.fetch(archive_name_key)
431 431
432 432 use_cached_archive = True
433 log.debug('Found cached archive as key=%s tag=%s, serving archive from cache reader=%s',
434 archive_name_key, metadata, reader.name)
433 log.debug(
434 "Found cached archive as key=%s tag=%s, serving archive from cache reader=%s",
435 archive_name_key,
436 metadata,
437 reader.name,
438 )
435 439 else:
436 440 reader = None
437 log.debug('Archive with key=%s is not yet cached, creating one now...', archive_name_key)
441 log.debug("Archive with key=%s is not yet cached, creating one now...", archive_name_key)
438 442
439 443 if not reader:
440 444 # generate a new archive, as the previous one was not found in the cache
441 445 try:
442 446 with d_cache.get_lock(reentrant_lock_key):
443 447 try:
444 commit.archive_repo(archive_name_key, archive_dir_name=archive_dir_name,
445 kind=file_format, subrepos=subrepos,
446 archive_at_path=at_path, cache_config=d_cache_conf)
448 commit.archive_repo(
449 archive_name_key,
450 archive_dir_name=archive_dir_name,
451 kind=file_format,
452 subrepos=subrepos,
453 archive_at_path=at_path,
454 cache_config=d_cache_conf,
455 )
447 456 except ImproperArchiveTypeError:
448 return _('Unknown archive type')
457 return _("Unknown archive type")
449 458
450 459 except ArchiveCacheGenerationLock:
451 460 retry_after = round(random.uniform(0.3, 3.0), 1)
452 461 time.sleep(retry_after)
453 462
454 463 location = self.request.url
455 464 response = Response(
456 465 f"archive {archive_name_key} generation in progress, Retry-After={retry_after}, Location={location}"
457 466 )
458 467 response.headers["Retry-After"] = str(retry_after)
459 468 response.status_code = 307 # temporary redirect
460 469
461 470 response.location = location
462 471 return response
463 472
464 473 reader, metadata = d_cache.fetch(archive_name_key, retry=True, retry_attempts=30)
465 474
466 475 response = Response(app_iter=archive_iterator(reader))
467 response.content_disposition = f'attachment; filename={response_archive_name}'
476 response.content_disposition = f"attachment; filename={response_archive_name}"
468 477 response.content_type = str(content_type)
469 478
470 479 try:
471 480 return response
472 481 finally:
473 482 # store download action
474 483 audit_logger.store_web(
475 'repo.archive.download', action_data={
476 'user_agent': self.request.user_agent,
477 'archive_name': archive_name_key,
478 'archive_spec': fname,
479 'archive_cached': use_cached_archive},
484 "repo.archive.download",
485 action_data={
486 "user_agent": self.request.user_agent,
487 "archive_name": archive_name_key,
488 "archive_spec": fname,
489 "archive_cached": use_cached_archive,
490 },
480 491 user=self._rhodecode_user,
481 492 repo=self.db_repo,
482 commit=True
493 commit=True,
483 494 )
484 495
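    # The archive flow above uses a reentrant cache lock: the first request holds
    # the lock while generating, and concurrent requests get a 307 pointing back at
    # the same URL with a randomized Retry-After. A minimal client-side sketch of
    # honoring that protocol (the `requests` usage and URL handling here are an
    # illustrative assumption, not a documented RhodeCode client API):
    #
    #   import time
    #   import requests
    #
    #   def fetch_archive(url: str, max_tries: int = 30) -> bytes:
    #       for _ in range(max_tries):
    #           resp = requests.get(url, allow_redirects=False)
    #           if resp.status_code == 307:  # archive generation still in progress
    #               time.sleep(float(resp.headers.get("Retry-After", "1")))
    #               url = resp.headers.get("Location", url)
    #               continue
    #           resp.raise_for_status()
    #           return resp.content
    #       raise TimeoutError("archive was not generated in time")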
485 496 def _get_file_node(self, commit_id, f_path):
486 if commit_id not in ['', None, 'None', '0' * 12, '0' * 40]:
497 if commit_id not in ["", None, "None", "0" * 12, "0" * 40]:
487 498 commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
488 499 try:
489 500 node = commit.get_node(safe_bytes(f_path))
490 501 if node.is_dir():
491 raise NodeError(f'{node} path is a {type(node)}, not a file')
502 raise NodeError(f"{node} path is a {type(node)}, not a file")
492 503 except NodeDoesNotExistError:
493 504 commit = EmptyCommit(
494 505 commit_id=commit_id,
495 506 idx=commit.idx,
496 507 repo=commit.repository,
497 508 alias=commit.repository.alias,
498 509 message=commit.message,
499 510 author=commit.author,
500 date=commit.date)
501 node = FileNode(safe_bytes(f_path), b'', commit=commit)
511 date=commit.date,
512 )
513 node = FileNode(safe_bytes(f_path), b"", commit=commit)
502 514 else:
503 commit = EmptyCommit(
504 repo=self.rhodecode_vcs_repo,
505 alias=self.rhodecode_vcs_repo.alias)
506 node = FileNode(safe_bytes(f_path), b'', commit=commit)
515 commit = EmptyCommit(repo=self.rhodecode_vcs_repo, alias=self.rhodecode_vcs_repo.alias)
516 node = FileNode(safe_bytes(f_path), b"", commit=commit)
507 517 return node
508 518
509 519 @LoginRequired()
510 @HasRepoPermissionAnyDecorator(
511 'repository.read', 'repository.write', 'repository.admin')
520 @HasRepoPermissionAnyDecorator("repository.read", "repository.write", "repository.admin")
512 521 def repo_files_diff(self):
513 522 c = self.load_default_context()
514 523 f_path = self._get_f_path(self.request.matchdict)
515 diff1 = self.request.GET.get('diff1', '')
516 diff2 = self.request.GET.get('diff2', '')
524 diff1 = self.request.GET.get("diff1", "")
525 diff2 = self.request.GET.get("diff2", "")
517 526
518 527 path1, diff1 = parse_path_ref(diff1, default_path=f_path)
519 528
520 ignore_whitespace = str2bool(self.request.GET.get('ignorews'))
521 line_context = self.request.GET.get('context', 3)
529 ignore_whitespace = str2bool(self.request.GET.get("ignorews"))
530 line_context = self.request.GET.get("context", 3)
522 531
523 532 if not any((diff1, diff2)):
524 h.flash(
525 'Need query parameter "diff1" or "diff2" to generate a diff.',
526 category='error')
533 h.flash('Need query parameter "diff1" or "diff2" to generate a diff.', category="error")
527 534 raise HTTPBadRequest()
528 535
529 c.action = self.request.GET.get('diff')
530 if c.action not in ['download', 'raw']:
536 c.action = self.request.GET.get("diff")
537 if c.action not in ["download", "raw"]:
531 538 compare_url = h.route_path(
532 'repo_compare',
539 "repo_compare",
533 540 repo_name=self.db_repo_name,
534 source_ref_type='rev',
541 source_ref_type="rev",
535 542 source_ref=diff1,
536 543 target_repo=self.db_repo_name,
537 target_ref_type='rev',
544 target_ref_type="rev",
538 545 target_ref=diff2,
539 _query=dict(f_path=f_path))
546 _query=dict(f_path=f_path),
547 )
540 548 # redirect to new view if we render diff
541 549 raise HTTPFound(compare_url)
542 550
543 551 try:
544 552 node1 = self._get_file_node(diff1, path1)
545 553 node2 = self._get_file_node(diff2, f_path)
546 554 except (RepositoryError, NodeError):
547 555 log.exception("Exception while trying to get node from repository")
548 raise HTTPFound(
549 h.route_path('repo_files', repo_name=self.db_repo_name,
550 commit_id='tip', f_path=f_path))
556 raise HTTPFound(h.route_path("repo_files", repo_name=self.db_repo_name, commit_id="tip", f_path=f_path))
551 557
552 if all(isinstance(node.commit, EmptyCommit)
553 for node in (node1, node2)):
558 if all(isinstance(node.commit, EmptyCommit) for node in (node1, node2)):
554 559 raise HTTPNotFound()
555 560
556 561 c.commit_1 = node1.commit
557 562 c.commit_2 = node2.commit
558 563
559 if c.action == 'download':
560 _diff = diffs.get_gitdiff(node1, node2,
561 ignore_whitespace=ignore_whitespace,
562 context=line_context)
564 if c.action == "download":
565 _diff = diffs.get_gitdiff(node1, node2, ignore_whitespace=ignore_whitespace, context=line_context)
563 566 # NOTE: this was using diff_format='gitdiff'
564 diff = diffs.DiffProcessor(_diff, diff_format='newdiff')
567 diff = diffs.DiffProcessor(_diff, diff_format="newdiff")
565 568
566 569 response = Response(self.path_filter.get_raw_patch(diff))
567 response.content_type = 'text/plain'
568 response.content_disposition = (
569 f'attachment; filename={f_path}_{diff1}_vs_{diff2}.diff'
570 )
570 response.content_type = "text/plain"
571 response.content_disposition = f"attachment; filename={f_path}_{diff1}_vs_{diff2}.diff"
571 572 charset = self._get_default_encoding(c)
572 573 if charset:
573 574 response.charset = charset
574 575 return response
575 576
576 elif c.action == 'raw':
577 _diff = diffs.get_gitdiff(node1, node2,
578 ignore_whitespace=ignore_whitespace,
579 context=line_context)
577 elif c.action == "raw":
578 _diff = diffs.get_gitdiff(node1, node2, ignore_whitespace=ignore_whitespace, context=line_context)
580 579 # NOTE: this was using diff_format='gitdiff'
581 diff = diffs.DiffProcessor(_diff, diff_format='newdiff')
580 diff = diffs.DiffProcessor(_diff, diff_format="newdiff")
582 581
583 582 response = Response(self.path_filter.get_raw_patch(diff))
584 response.content_type = 'text/plain'
583 response.content_type = "text/plain"
585 584 charset = self._get_default_encoding(c)
586 585 if charset:
587 586 response.charset = charset
588 587 return response
589 588
590 589 # in case we ever end up here
591 590 raise HTTPNotFound()
592 591
593 592 @LoginRequired()
594 @HasRepoPermissionAnyDecorator(
595 'repository.read', 'repository.write', 'repository.admin')
593 @HasRepoPermissionAnyDecorator("repository.read", "repository.write", "repository.admin")
596 594 def repo_files_diff_2way_redirect(self):
597 595 """
598 596 Kept only to make OLD links work
599 597 """
600 598 f_path = self._get_f_path_unchecked(self.request.matchdict)
601 diff1 = self.request.GET.get('diff1', '')
602 diff2 = self.request.GET.get('diff2', '')
599 diff1 = self.request.GET.get("diff1", "")
600 diff2 = self.request.GET.get("diff2", "")
603 601
604 602 if not any((diff1, diff2)):
605 h.flash(
606 'Need query parameter "diff1" or "diff2" to generate a diff.',
607 category='error')
603 h.flash('Need query parameter "diff1" or "diff2" to generate a diff.', category="error")
608 604 raise HTTPBadRequest()
609 605
610 606 compare_url = h.route_path(
611 'repo_compare',
607 "repo_compare",
612 608 repo_name=self.db_repo_name,
613 source_ref_type='rev',
609 source_ref_type="rev",
614 610 source_ref=diff1,
615 target_ref_type='rev',
611 target_ref_type="rev",
616 612 target_ref=diff2,
617 _query=dict(f_path=f_path, diffmode='sideside',
618 target_repo=self.db_repo_name,))
613 _query=dict(
614 f_path=f_path,
615 diffmode="sideside",
616 target_repo=self.db_repo_name,
617 ),
618 )
619 619 raise HTTPFound(compare_url)
620 620
621 621 @LoginRequired()
622 622 def repo_files_default_commit_redirect(self):
623 623 """
624 624 Special page that redirects to the landing page of files based on the default
625 625 commit for the repository
626 626 """
627 627 c = self.load_default_context()
628 628 ref_name = c.rhodecode_db_repo.landing_ref_name
629 629 landing_url = h.repo_files_by_ref_url(
630 630 c.rhodecode_db_repo.repo_name,
631 631 c.rhodecode_db_repo.repo_type,
632 f_path='',
632 f_path="",
633 633 ref_name=ref_name,
634 commit_id='tip',
635 query=dict(at=ref_name)
634 commit_id="tip",
635 query=dict(at=ref_name),
636 636 )
637 637
638 638 raise HTTPFound(landing_url)
639 639
640 640 @LoginRequired()
641 @HasRepoPermissionAnyDecorator(
642 'repository.read', 'repository.write', 'repository.admin')
641 @HasRepoPermissionAnyDecorator("repository.read", "repository.write", "repository.admin")
643 642 def repo_files(self):
644 643 c = self.load_default_context()
645 644
646 view_name = getattr(self.request.matched_route, 'name', None)
645 view_name = getattr(self.request.matched_route, "name", None)
647 646
648 c.annotate = view_name == 'repo_files:annotated'
647 c.annotate = view_name == "repo_files:annotated"
649 648 # default is False, but .rst/.md files are auto-rendered later; we can
650 649 # override the auto rendering by setting this GET flag
651 c.renderer = view_name == 'repo_files:rendered' or not self.request.GET.get('no-render', False)
650 c.renderer = view_name == "repo_files:rendered" or not self.request.GET.get("no-render", False)
652 651
653 652 commit_id, f_path, bytes_path = self._get_commit_and_path()
654 653
655 654 c.commit = self._get_commit_or_redirect(commit_id)
656 c.branch = self.request.GET.get('branch', None)
655 c.branch = self.request.GET.get("branch", None)
657 656 c.f_path = f_path
658 at_rev = self.request.GET.get('at')
657 at_rev = self.request.GET.get("at")
659 658
660 659 # files or dirs
661 660 try:
662
663 c.file = c.commit.get_node(bytes_path, pre_load=['is_binary', 'size', 'data'])
661 c.file = c.commit.get_node(bytes_path, pre_load=["is_binary", "size", "data"])
664 662
665 663 c.file_author = True
666 c.file_tree = ''
664 c.file_tree = ""
667 665
668 666 # prev link
669 667 try:
670 668 prev_commit = c.commit.prev(c.branch)
671 669 c.prev_commit = prev_commit
672 c.url_prev = h.route_path('repo_files', repo_name=self.db_repo_name, commit_id=prev_commit.raw_id, f_path=f_path)
670 c.url_prev = h.route_path(
671 "repo_files", repo_name=self.db_repo_name, commit_id=prev_commit.raw_id, f_path=f_path
672 )
673 673 if c.branch:
674 c.url_prev += f'?branch={c.branch}'
674 c.url_prev += f"?branch={c.branch}"
675 675 except (CommitDoesNotExistError, VCSError):
676 c.url_prev = '#'
676 c.url_prev = "#"
677 677 c.prev_commit = EmptyCommit()
678 678
679 679 # next link
680 680 try:
681 681 next_commit = c.commit.next(c.branch)
682 682 c.next_commit = next_commit
683 c.url_next = h.route_path('repo_files', repo_name=self.db_repo_name, commit_id=next_commit.raw_id, f_path=f_path)
683 c.url_next = h.route_path(
684 "repo_files", repo_name=self.db_repo_name, commit_id=next_commit.raw_id, f_path=f_path
685 )
684 686 if c.branch:
685 c.url_next += f'?branch={c.branch}'
687 c.url_next += f"?branch={c.branch}"
686 688 except (CommitDoesNotExistError, VCSError):
687 c.url_next = '#'
689 c.url_next = "#"
688 690 c.next_commit = EmptyCommit()
689 691
690 692 # load file content
691 693 if c.file.is_file():
692
693 694 c.lf_node = {}
694 695
695 696 has_lf_enabled = self._is_lf_enabled(self.db_repo)
696 697 if has_lf_enabled:
697 698 c.lf_node = c.file.get_largefile_node()
698 699
699 c.file_source_page = 'true'
700 c.file_source_page = "true"
700 701 c.file_last_commit = c.file.last_commit
701 702
702 703 c.file_size_too_big = c.file.size > c.visual.cut_off_limit_file
703 704
704 705 if not (c.file_size_too_big or c.file.is_binary):
705 706 if c.annotate: # annotation has precedence over renderer
706 c.annotated_lines = filenode_as_annotated_lines_tokens(
707 c.file
708 )
707 c.annotated_lines = filenode_as_annotated_lines_tokens(c.file)
709 708 else:
710 c.renderer = (
711 c.renderer and h.renderer_from_filename(c.file.path)
712 )
709 c.renderer = c.renderer and h.renderer_from_filename(c.file.path)
713 710 if not c.renderer:
714 711 c.lines = filenode_as_lines_tokens(c.file)
715 712
716 _branch_name, _sha_commit_id, is_head = \
717 self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
718 landing_ref=self.db_repo.landing_ref_name)
713 _branch_name, _sha_commit_id, is_head = self._is_valid_head(
714 commit_id, self.rhodecode_vcs_repo, landing_ref=self.db_repo.landing_ref_name
715 )
719 716 c.on_branch_head = is_head
720 717
721 branch = c.commit.branch if (
722 c.commit.branch and '/' not in c.commit.branch) else None
718 branch = c.commit.branch if (c.commit.branch and "/" not in c.commit.branch) else None
723 719 c.branch_or_raw_id = branch or c.commit.raw_id
724 720 c.branch_name = c.commit.branch or h.short_id(c.commit.raw_id)
725 721
726 722 author = c.file_last_commit.author
727 c.authors = [[
728 h.email(author),
729 h.person(author, 'username_or_name_or_email'),
730 1
731 ]]
723 c.authors = [[h.email(author), h.person(author, "username_or_name_or_email"), 1]]
732 724
733 else: # load tree content at path
734 c.file_source_page = 'false'
725 else: # load tree content (dir content) at path
726 c.file_source_page = "false"
735 727 c.authors = []
728
729 dir_node = c.file
730 c.file_nodes = dir_node.commit.get_nodes(dir_node.bytes_path, pre_load=dir_node.default_pre_load)
736 731 # this loads a simple tree without metadata, to speed things up;
737 732 # later, via ajax, we call repo_nodetree_full and fetch the whole tree
738 733 c.file_tree = self._get_tree_at_commit(c, c.commit.raw_id, f_path, at_rev=at_rev)
739 734
740 c.readme_data, c.readme_file = \
741 self._get_readme_data(self.db_repo, c.visual.default_renderer, c.commit.raw_id, bytes_path)
735 c.readme_data, c.readme_file = self._get_readme_data(
736 self.db_repo, c.visual.default_renderer, c.commit.raw_id, bytes_path, nodes=c.file_nodes
737 )
742 738
743 739 except RepositoryError as e:
744 h.flash(h.escape(safe_str(e)), category='error')
740 h.flash(h.escape(safe_str(e)), category="error")
745 741 raise HTTPNotFound()
746 742
747 if self.request.environ.get('HTTP_X_PJAX'):
748 html = render('rhodecode:templates/files/files_pjax.mako',
749 self._get_template_context(c), self.request)
743 if self.request.environ.get("HTTP_X_PJAX"):
744 html = render("rhodecode:templates/files/files_pjax.mako", self._get_template_context(c), self.request)
750 745 else:
751 html = render('rhodecode:templates/files/files.mako',
752 self._get_template_context(c), self.request)
746 html = render("rhodecode:templates/files/files.mako", self._get_template_context(c), self.request)
753 747 return Response(html)
754 748
755 @HasRepoPermissionAnyDecorator(
756 'repository.read', 'repository.write', 'repository.admin')
749 @HasRepoPermissionAnyDecorator("repository.read", "repository.write", "repository.admin")
757 750 def repo_files_annotated_previous(self):
758 751 self.load_default_context()
759 752
760 753 commit_id, f_path, bytes_path = self._get_commit_and_path()
761 754 commit = self._get_commit_or_redirect(commit_id)
762 755 prev_commit_id = commit.raw_id
763 line_anchor = self.request.GET.get('line_anchor')
756 line_anchor = self.request.GET.get("line_anchor")
764 757 is_file = False
765 758 try:
766 759 _file = commit.get_node(bytes_path)
767 760 is_file = _file.is_file()
768 761 except (NodeDoesNotExistError, CommitDoesNotExistError, VCSError):
769 762 pass
770 763
771 764 if is_file:
772 765 history = commit.get_path_history(bytes_path)
773 prev_commit_id = history[1].raw_id \
774 if len(history) > 1 else prev_commit_id
766 prev_commit_id = history[1].raw_id if len(history) > 1 else prev_commit_id
775 767 prev_url = h.route_path(
776 'repo_files:annotated', repo_name=self.db_repo_name,
777 commit_id=prev_commit_id, f_path=f_path,
778 _anchor=f'L{line_anchor}')
768 "repo_files:annotated",
769 repo_name=self.db_repo_name,
770 commit_id=prev_commit_id,
771 f_path=f_path,
772 _anchor=f"L{line_anchor}",
773 )
779 774
780 775 raise HTTPFound(prev_url)
781 776
782 777 @LoginRequired()
783 @HasRepoPermissionAnyDecorator(
784 'repository.read', 'repository.write', 'repository.admin')
778 @HasRepoPermissionAnyDecorator("repository.read", "repository.write", "repository.admin")
785 779 def repo_nodetree_full(self):
786 780 """
787 781 Returns the rendered HTML of the file tree, including commit date,
788 782 author and commit_id, for the specified combination of
789 783 repo, commit_id and file path
790 784 """
791 785 c = self.load_default_context()
792 786
793 787 commit_id, f_path, bytes_path = self._get_commit_and_path()
794 788 commit = self._get_commit_or_redirect(commit_id)
795 789 try:
796 790 dir_node = commit.get_node(bytes_path)
797 791 except RepositoryError as e:
798 return Response(f'error: {h.escape(safe_str(e))}')
792 return Response(f"error: {h.escape(safe_str(e))}")
799 793
800 794 if dir_node.is_file():
801 return Response('')
795 return Response("")
802 796
803 797 c.file = dir_node
798 c.file_nodes = dir_node.commit.get_nodes(dir_node.bytes_path, pre_load=dir_node.default_pre_load)
804 799 c.commit = commit
805 at_rev = self.request.GET.get('at')
800 at_rev = self.request.GET.get("at")
806 801
807 html = self._get_tree_at_commit(
808 c, commit.raw_id, dir_node.path, full_load=True, at_rev=at_rev)
802 html = self._get_tree_at_commit(c, commit.raw_id, dir_node.path, full_load=True, at_rev=at_rev)
809 803
810 804 return Response(html)
811 805
812 806 def _get_attachement_headers(self, f_path):
813 807 f_name = safe_str(f_path.split(Repository.NAME_SEP)[-1])
814 808 safe_path = f_name.replace('"', '\\"')
815 809 encoded_path = urllib.parse.quote(f_name)
816 810
817 headers = f"attachment; " \
818 f"filename=\"{safe_path}\"; " \
819 f"filename*=UTF-8\'\'{encoded_path}"
811 headers = f"attachment; " f'filename="{safe_path}"; ' f"filename*=UTF-8''{encoded_path}"
820 812
821 813 return header_safe_str(headers)
822 814
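    # The Content-Disposition built above advertises the filename twice, following
    # RFC 6266: a plain `filename="..."` fallback for older clients and an RFC 5987
    # `filename*=UTF-8''...` form for non-ASCII names. A standalone sketch of the
    # same idea (the helper name is hypothetical, not a RhodeCode API):
    #
    #   import urllib.parse
    #
    #   def attachment_header(name: str) -> str:
    #       fallback = name.replace('"', '\\"')  # escape quotes for the fallback form
    #       encoded = urllib.parse.quote(name)   # percent-encode the UTF-8 bytes
    #       return f"attachment; filename=\"{fallback}\"; filename*=UTF-8''{encoded}"
    #
    #   attachment_header("zażółć.txt")
    #   # -> attachment; filename="zażółć.txt"; filename*=UTF-8''za%C5%BC%C3%B3%C5%82%C4%87.txt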
823 815 @LoginRequired()
824 @HasRepoPermissionAnyDecorator(
825 'repository.read', 'repository.write', 'repository.admin')
816 @HasRepoPermissionAnyDecorator("repository.read", "repository.write", "repository.admin")
826 817 def repo_file_raw(self):
827 818 """
828 819 Action for "show as raw"; some mimetypes are rendered
829 820 inline, e.g. images and icons.
830 821 """
831 822 c = self.load_default_context()
832 823
833 824 commit_id, f_path, bytes_path = self._get_commit_and_path()
834 825 commit = self._get_commit_or_redirect(commit_id)
835 826 file_node = self._get_filenode_or_redirect(commit, bytes_path)
836 827
837 828 raw_mimetype_mapping = {
838 829 # map original mimetype to a mimetype used for "show as raw"
839 830 # you can also provide a content-disposition to override the
840 831 # default "attachment" disposition.
841 832 # orig_type: (new_type, new_dispo)
842
843 833 # show images inline:
844 834 # Do not re-add SVG: it is unsafe and permits XSS attacks. One can
845 835 # for example render an SVG with javascript inside or even render
846 836 # HTML.
847 'image/x-icon': ('image/x-icon', 'inline'),
848 'image/png': ('image/png', 'inline'),
849 'image/gif': ('image/gif', 'inline'),
850 'image/jpeg': ('image/jpeg', 'inline'),
851 'application/pdf': ('application/pdf', 'inline'),
837 "image/x-icon": ("image/x-icon", "inline"),
838 "image/png": ("image/png", "inline"),
839 "image/gif": ("image/gif", "inline"),
840 "image/jpeg": ("image/jpeg", "inline"),
841 "application/pdf": ("application/pdf", "inline"),
852 842 }
853 843
854 844 mimetype = file_node.mimetype
855 845 try:
856 846 mimetype, disposition = raw_mimetype_mapping[mimetype]
857 847 except KeyError:
858 848 # we don't know anything special about this, handle it safely
859 849 if file_node.is_binary:
860 850 # do same as download raw for binary files
861 mimetype, disposition = 'application/octet-stream', 'attachment'
851 mimetype, disposition = "application/octet-stream", "attachment"
862 852 else:
863 853 # do not just use the original mimetype, but force text/plain,
864 854 # otherwise it would serve text/html and that might be unsafe.
865 855 # Note: underlying vcs library fakes text/plain mimetype if the
866 856 # mimetype cannot be determined and it thinks it is not
867 857 # binary. This might lead to erroneous text display in some
868 858 # cases, but helps in other cases, like with text files
869 859 # without extension.
870 mimetype, disposition = 'text/plain', 'inline'
860 mimetype, disposition = "text/plain", "inline"
871 861
872 if disposition == 'attachment':
862 if disposition == "attachment":
873 863 disposition = self._get_attachement_headers(f_path)
874 864
875 865 stream_content = file_node.stream_bytes()
876 866
877 867 response = Response(app_iter=stream_content)
878 868 response.content_disposition = disposition
879 869 response.content_type = mimetype
880 870
881 871 charset = self._get_default_encoding(c)
882 872 if charset:
883 873 response.charset = charset
884 874
885 875 return response
886 876
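    # A condensed sketch of the disposition rules applied above (illustrative only;
    # the real mapping and binary detection live in this view and the vcs layer):
    #
    #   INLINE_SAFE = {"image/x-icon", "image/png", "image/gif", "image/jpeg",
    #                  "application/pdf"}
    #
    #   def resolve_raw(mimetype: str, is_binary: bool) -> tuple[str, str]:
    #       if mimetype in INLINE_SAFE:
    #           return mimetype, "inline"                        # safe to render in-browser
    #       if is_binary:
    #           return "application/octet-stream", "attachment"  # force a download
    #       return "text/plain", "inline"                        # never serve e.g. text/html raw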
887 877 @LoginRequired()
888 @HasRepoPermissionAnyDecorator(
889 'repository.read', 'repository.write', 'repository.admin')
878 @HasRepoPermissionAnyDecorator("repository.read", "repository.write", "repository.admin")
890 879 def repo_file_download(self):
891 880 c = self.load_default_context()
892 881
893 882 commit_id, f_path, bytes_path = self._get_commit_and_path()
894 883 commit = self._get_commit_or_redirect(commit_id)
895 884 file_node = self._get_filenode_or_redirect(commit, bytes_path)
896 885
897 if self.request.GET.get('lf'):
886 if self.request.GET.get("lf"):
898 887 # only if the `lf` GET flag is passed do we download this file
899 888 # as an LFS/Largefile
900 889 lf_node = file_node.get_largefile_node()
901 890 if lf_node:
902 891 # overwrite our pointer with the REAL large-file
903 892 file_node = lf_node
904 893
905 894 disposition = self._get_attachement_headers(f_path)
906 895
907 896 stream_content = file_node.stream_bytes()
908 897
909 898 response = Response(app_iter=stream_content)
910 899 response.content_disposition = disposition
911 900 response.content_type = file_node.mimetype
912 901
913 902 charset = self._get_default_encoding(c)
914 903 if charset:
915 904 response.charset = charset
916 905
917 906 return response
918 907
919 908 def _get_nodelist_at_commit(self, repo_name, repo_id, commit_id, f_path):
920
921 cache_seconds = rhodecode.ConfigGet().get_int('rc_cache.cache_repo.expiration_time')
909 cache_seconds = rhodecode.ConfigGet().get_int("rc_cache.cache_repo.expiration_time")
922 910 cache_on = cache_seconds > 0
923 911 log.debug(
924 'Computing FILE SEARCH for repo_id %s commit_id `%s` and path `%s` '
925 'with caching: %s [TTL: %ss]' % (
926 repo_id, commit_id, f_path, cache_on, cache_seconds or 0))
912 "Computing FILE SEARCH for repo_id %s commit_id `%s` and path `%s` "
913 "with caching: %s [TTL: %ss]" % (repo_id, commit_id, f_path, cache_on, cache_seconds or 0)
914 )
927 915
928 cache_namespace_uid = f'repo.{repo_id}'
929 region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid)
916 cache_namespace_uid = f"repo.{repo_id}"
917 region = rc_cache.get_or_create_region("cache_repo", cache_namespace_uid)
930 918
931 919 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=cache_on)
932 920 def compute_file_search(_name_hash, _repo_id, _commit_id, _f_path):
933 log.debug('Generating cached nodelist for repo_id:%s, %s, %s', _repo_id, commit_id, f_path)
921 log.debug("Generating cached nodelist for repo_id:%s, %s, %s", _repo_id, commit_id, f_path)
934 922 try:
935 923 _d, _f = ScmModel().get_quick_filter_nodes(repo_name, _commit_id, _f_path)
936 924 except (RepositoryError, CommitDoesNotExistError, Exception) as e:
937 925 log.exception(safe_str(e))
938 h.flash(h.escape(safe_str(e)), category='error')
939 raise HTTPFound(h.route_path('repo_files', repo_name=self.db_repo_name, commit_id='tip', f_path='/'))
926 h.flash(h.escape(safe_str(e)), category="error")
927 raise HTTPFound(h.route_path("repo_files", repo_name=self.db_repo_name, commit_id="tip", f_path="/"))
940 928
941 929 return _d + _f
942 930
943 931 result = compute_file_search(self.db_repo.repo_name_hash, self.db_repo.repo_id, commit_id, f_path)
944 return filter(lambda n: self.path_filter.path_access_allowed(n['name']), result)
932 return filter(lambda n: self.path_filter.path_access_allowed(n["name"]), result)
945 933
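    # The caching above is the dogpile.cache pattern exposed through rc_cache: the
    # decorated function is memoized per argument tuple inside the repo's namespace,
    # and `condition=cache_on` disables caching entirely when the configured TTL is 0.
    # A minimal stand-alone sketch with plain dogpile.cache (region setup values are
    # illustrative, not RhodeCode's configuration):
    #
    #   from dogpile.cache import make_region
    #
    #   region = make_region().configure("dogpile.cache.memory", expiration_time=60)
    #
    #   @region.cache_on_arguments()
    #   def expensive_nodelist(repo_id: int, commit_id: str) -> list:
    #       # stands in for the real VCS walk done by get_quick_filter_nodes
    #       return [f"{repo_id}:{commit_id}:file.txt"]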
946 934 @LoginRequired()
947 @HasRepoPermissionAnyDecorator(
948 'repository.read', 'repository.write', 'repository.admin')
935 @HasRepoPermissionAnyDecorator("repository.read", "repository.write", "repository.admin")
949 936 def repo_nodelist(self):
950 937 self.load_default_context()
951 938
952 939 commit_id, f_path, bytes_path = self._get_commit_and_path()
953 940 commit = self._get_commit_or_redirect(commit_id)
954 941
955 metadata = self._get_nodelist_at_commit(
956 self.db_repo_name, self.db_repo.repo_id, commit.raw_id, f_path)
957 return {'nodes': [x for x in metadata]}
942 metadata = self._get_nodelist_at_commit(self.db_repo_name, self.db_repo.repo_id, commit.raw_id, f_path)
943 return {"nodes": [x for x in metadata]}
958 944
959 945 def _create_references(self, branches_or_tags, symbolic_reference, f_path, ref_type):
960 946 items = []
961 947 for name, commit_id in branches_or_tags.items():
962 948 sym_ref = symbolic_reference(commit_id, name, f_path, ref_type)
963 949 items.append((sym_ref, name, ref_type))
964 950 return items
965 951
966 952 def _symbolic_reference(self, commit_id, name, f_path, ref_type):
967 953 return commit_id
968 954
969 955 def _symbolic_reference_svn(self, commit_id, name, f_path, ref_type):
970 956 return commit_id
971 957
972 958 # NOTE(dan): unreachable old code, kept for reference; we used it in "diff" mode compare
973 959 new_f_path = vcspath.join(name, f_path)
974 return f'{new_f_path}@{commit_id}'
960 return f"{new_f_path}@{commit_id}"
975 961
976 962 def _get_node_history(self, commit_obj, f_path, commits=None):
977 963 """
978 964 get commit history for given node
979 965
980 966 :param commit_obj: commit to calculate history
981 967 :param f_path: path for node to calculate history for
982 968 :param commits: if passed, don't calculate history; take the
983 969 commits defined in this list instead
984 970 """
985 971 _ = self.request.translate
986 972
987 973 # calculate history based on tip
988 974 tip = self.rhodecode_vcs_repo.get_commit()
989 975 if commits is None:
990 976 pre_load = ["author", "branch"]
991 977 try:
992 978 commits = tip.get_path_history(safe_bytes(f_path), pre_load=pre_load)
993 979 except (NodeDoesNotExistError, CommitError):
994 980 # this node is not present at tip!
995 981 commits = commit_obj.get_path_history(safe_bytes(f_path), pre_load=pre_load)
996 982
997 983 history = []
998 984 commits_group = ([], _("Changesets"))
999 985 for commit in commits:
1000 branch = ' (%s)' % commit.branch if commit.branch else ''
1001 n_desc = f'r{commit.idx}:{commit.short_id}{branch}'
1002 commits_group[0].append((commit.raw_id, n_desc, 'sha'))
986 branch = " (%s)" % commit.branch if commit.branch else ""
987 n_desc = f"r{commit.idx}:{commit.short_id}{branch}"
988 commits_group[0].append((commit.raw_id, n_desc, "sha"))
1003 989 history.append(commits_group)
1004 990
1005 991 symbolic_reference = self._symbolic_reference
1006 992
1007 if self.rhodecode_vcs_repo.alias == 'svn':
1008 adjusted_f_path = RepoFilesView.adjust_file_path_for_svn(
1009 f_path, self.rhodecode_vcs_repo)
993 if self.rhodecode_vcs_repo.alias == "svn":
994 adjusted_f_path = RepoFilesView.adjust_file_path_for_svn(f_path, self.rhodecode_vcs_repo)
1010 995 if adjusted_f_path != f_path:
1011 996 log.debug(
1012 'Recognized svn tag or branch in file "%s", using svn '
1013 'specific symbolic references', f_path)
997 'Recognized svn tag or branch in file "%s", using svn specific symbolic references', f_path
998 )
1014 999 f_path = adjusted_f_path
1015 1000 symbolic_reference = self._symbolic_reference_svn
1016 1001
1017 branches = self._create_references(
1018 self.rhodecode_vcs_repo.branches, symbolic_reference, f_path, 'branch')
1002 branches = self._create_references(self.rhodecode_vcs_repo.branches, symbolic_reference, f_path, "branch")
1019 1003 branches_group = (branches, _("Branches"))
1020 1004
1021 tags = self._create_references(
1022 self.rhodecode_vcs_repo.tags, symbolic_reference, f_path, 'tag')
1005 tags = self._create_references(self.rhodecode_vcs_repo.tags, symbolic_reference, f_path, "tag")
1023 1006 tags_group = (tags, _("Tags"))
1024 1007
1025 1008 history.append(branches_group)
1026 1009 history.append(tags_group)
1027 1010
1028 1011 return history, commits
1029 1012
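    # The value returned above is a list of (items, label) groups that the UI turns
    # into a grouped selector; roughly this shape (shas and names are illustrative):
    #
    #   [
    #       ([("<raw_id>", "r12:abcdef012345 (default)", "sha"), ...], "Changesets"),
    #       ([("<raw_id>", "default", "branch"), ...], "Branches"),
    #       ([("<raw_id>", "v1.0", "tag"), ...], "Tags"),
    #   ]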
1030 1013 @LoginRequired()
1031 @HasRepoPermissionAnyDecorator(
1032 'repository.read', 'repository.write', 'repository.admin')
1014 @HasRepoPermissionAnyDecorator("repository.read", "repository.write", "repository.admin")
1033 1015 def repo_file_history(self):
1034 1016 self.load_default_context()
1035 1017
1036 1018 commit_id, f_path, bytes_path = self._get_commit_and_path()
1037 1019 commit = self._get_commit_or_redirect(commit_id)
1038 1020 file_node = self._get_filenode_or_redirect(commit, bytes_path)
1039 1021
1040 1022 if file_node.is_file():
1041 1023 file_history, _hist = self._get_node_history(commit, f_path)
1042 1024
1043 1025 res = []
1044 1026 for section_items, section in file_history:
1045 1027 items = []
1046 1028 for obj_id, obj_text, obj_type in section_items:
1047 at_rev = ''
1048 if obj_type in ['branch', 'bookmark', 'tag']:
1029 at_rev = ""
1030 if obj_type in ["branch", "bookmark", "tag"]:
1049 1031 at_rev = obj_text
1050 entry = {
1051 'id': obj_id,
1052 'text': obj_text,
1053 'type': obj_type,
1054 'at_rev': at_rev
1055 }
1032 entry = {"id": obj_id, "text": obj_text, "type": obj_type, "at_rev": at_rev}
1056 1033
1057 1034 items.append(entry)
1058 1035
1059 res.append({
1060 'text': section,
1061 'children': items
1062 })
1036 res.append({"text": section, "children": items})
1063 1037
1064 data = {
1065 'more': False,
1066 'results': res
1067 }
1038 data = {"more": False, "results": res}
1068 1039 return data
1069 1040
1070 log.warning('Cannot fetch history for directory')
1041 log.warning("Cannot fetch history for directory")
1071 1042 raise HTTPBadRequest()
1072 1043
1073 1044 @LoginRequired()
1074 @HasRepoPermissionAnyDecorator(
1075 'repository.read', 'repository.write', 'repository.admin')
1045 @HasRepoPermissionAnyDecorator("repository.read", "repository.write", "repository.admin")
1076 1046 def repo_file_authors(self):
1077 1047 c = self.load_default_context()
1078 1048
1079 1049 commit_id, f_path, bytes_path = self._get_commit_and_path()
1080 1050 commit = self._get_commit_or_redirect(commit_id)
1081 1051 file_node = self._get_filenode_or_redirect(commit, bytes_path)
1082 1052
1083 1053 if not file_node.is_file():
1084 1054 raise HTTPBadRequest()
1085 1055
1086 1056 c.file_last_commit = file_node.last_commit
1087 if self.request.GET.get('annotate') == '1':
1057 if self.request.GET.get("annotate") == "1":
1088 1058 # use _hist from annotation if annotation mode is on
1089 1059 commit_ids = {x[1] for x in file_node.annotate}
1090 _hist = (
1091 self.rhodecode_vcs_repo.get_commit(commit_id)
1092 for commit_id in commit_ids)
1060 _hist = (self.rhodecode_vcs_repo.get_commit(commit_id) for commit_id in commit_ids)
1093 1061 else:
1094 1062 _f_history, _hist = self._get_node_history(commit, f_path)
1095 1063 c.file_author = False
1096 1064
1097 1065 unique = collections.OrderedDict()
1098 1066 for commit in _hist:
1099 1067 author = commit.author
1100 1068 if author not in unique:
1101 1069 unique[commit.author] = [
1102 1070 h.email(author),
1103 h.person(author, 'username_or_name_or_email'),
1104 1 # counter
1071 h.person(author, "username_or_name_or_email"),
1072 1, # counter
1105 1073 ]
1106 1074
1107 1075 else:
1108 1076 # increase counter
1109 1077 unique[commit.author][2] += 1
1110 1078
1111 1079 c.authors = [val for val in unique.values()]
1112 1080
1113 1081 return self._get_template_context(c)
1114 1082
1115 1083 @LoginRequired()
1116 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1084 @HasRepoPermissionAnyDecorator("repository.write", "repository.admin")
1117 1085 def repo_files_check_head(self):
1118 1086 self.load_default_context()
1119 1087
1120 1088 commit_id, f_path, bytes_path = self._get_commit_and_path()
1121 _branch_name, _sha_commit_id, is_head = \
1122 self._is_valid_head(commit_id, self.rhodecode_vcs_repo, landing_ref=self.db_repo.landing_ref_name)
1089 _branch_name, _sha_commit_id, is_head = self._is_valid_head(
1090 commit_id, self.rhodecode_vcs_repo, landing_ref=self.db_repo.landing_ref_name
1091 )
1123 1092
1124 new_path = self.request.POST.get('path')
1125 operation = self.request.POST.get('operation')
1126 path_exist = ''
1093 new_path = self.request.POST.get("path")
1094 operation = self.request.POST.get("operation")
1095 path_exist = ""
1127 1096
1128 if new_path and operation in ['create', 'upload']:
1129 new_f_path = os.path.join(f_path.lstrip('/'), new_path)
1097 if new_path and operation in ["create", "upload"]:
1098 new_f_path = os.path.join(f_path.lstrip("/"), new_path)
1130 1099 try:
1131 1100 commit_obj = self.rhodecode_vcs_repo.get_commit(commit_id)
1132 1101 # NOTE(dan): construct whole path without leading /
1133 1102 file_node = commit_obj.get_node(safe_bytes(new_f_path))
1134 1103 if file_node:
1135 1104 path_exist = new_f_path
1136 1105 except (EmptyRepositoryError, NodeDoesNotExistError):
1137 1106 pass
1138 1107
1139 return {
1140 'branch': _branch_name,
1141 'sha': _sha_commit_id,
1142 'is_head': is_head,
1143 'path_exists': path_exist
1144 }
1108 return {"branch": _branch_name, "sha": _sha_commit_id, "is_head": is_head, "path_exists": path_exist}
1145 1109
1146 1110 @LoginRequired()
1147 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1111 @HasRepoPermissionAnyDecorator("repository.write", "repository.admin")
1148 1112 def repo_files_remove_file(self):
1149 1113 _ = self.request.translate
1150 1114 c = self.load_default_context()
1151 1115 commit_id, f_path, bytes_path = self._get_commit_and_path()
1152 1116
1153 1117 self._ensure_not_locked()
1154 _branch_name, _sha_commit_id, is_head = \
1155 self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
1156 landing_ref=self.db_repo.landing_ref_name)
1118 _branch_name, _sha_commit_id, is_head = self._is_valid_head(
1119 commit_id, self.rhodecode_vcs_repo, landing_ref=self.db_repo.landing_ref_name
1120 )
1157 1121
1158 1122 self.forbid_non_head(is_head, f_path)
1159 1123 self.check_branch_permission(_branch_name)
1160 1124
1161 1125 c.commit = self._get_commit_or_redirect(commit_id)
1162 1126 c.file = self._get_filenode_or_redirect(c.commit, bytes_path)
1163 1127
1164 c.default_message = _(
1165 'Deleted file {} via RhodeCode Enterprise').format(f_path)
1128 c.default_message = _("Deleted file {} via RhodeCode Enterprise").format(f_path)
1166 1129 c.f_path = f_path
1167 1130
1168 1131 return self._get_template_context(c)
1169 1132
1170 1133 @LoginRequired()
1171 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1134 @HasRepoPermissionAnyDecorator("repository.write", "repository.admin")
1172 1135 @CSRFRequired()
1173 1136 def repo_files_delete_file(self):
1174 1137 _ = self.request.translate
1175 1138
1176 1139 c = self.load_default_context()
1177 1140 commit_id, f_path, bytes_path = self._get_commit_and_path()
1178 1141
1179 1142 self._ensure_not_locked()
1180 _branch_name, _sha_commit_id, is_head = \
1181 self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
1182 landing_ref=self.db_repo.landing_ref_name)
1143 _branch_name, _sha_commit_id, is_head = self._is_valid_head(
1144 commit_id, self.rhodecode_vcs_repo, landing_ref=self.db_repo.landing_ref_name
1145 )
1183 1146
1184 1147 self.forbid_non_head(is_head, f_path)
1185 1148 self.check_branch_permission(_branch_name)
1186 1149
1187 1150 c.commit = self._get_commit_or_redirect(commit_id)
1188 1151 c.file = self._get_filenode_or_redirect(c.commit, bytes_path)
1189 1152
1190 c.default_message = _('Deleted file {} via RhodeCode Enterprise').format(f_path)
1153 c.default_message = _("Deleted file {} via RhodeCode Enterprise").format(f_path)
1191 1154 c.f_path = f_path
1192 1155 node_path = f_path
1193 1156 author = self._rhodecode_db_user.full_contact
1194 message = self.request.POST.get('message') or c.default_message
1157 message = self.request.POST.get("message") or c.default_message
1195 1158 try:
1196 nodes = {
1197 safe_bytes(node_path): {
1198 'content': b''
1199 }
1200 }
1159 nodes = {safe_bytes(node_path): {"content": b""}}
1201 1160 ScmModel().delete_nodes(
1202 user=self._rhodecode_db_user.user_id, repo=self.db_repo,
1161 user=self._rhodecode_db_user.user_id,
1162 repo=self.db_repo,
1203 1163 message=message,
1204 1164 nodes=nodes,
1205 1165 parent_commit=c.commit,
1206 1166 author=author,
1207 1167 )
1208 1168
1209 h.flash(
1210 _('Successfully deleted file `{}`').format(
1211 h.escape(f_path)), category='success')
1169 h.flash(_("Successfully deleted file `{}`").format(h.escape(f_path)), category="success")
1212 1170 except Exception:
1213 log.exception('Error during commit operation')
1214 h.flash(_('Error occurred during commit'), category='error')
1215 raise HTTPFound(
1216 h.route_path('repo_commit', repo_name=self.db_repo_name,
1217 commit_id='tip'))
1171 log.exception("Error during commit operation")
1172 h.flash(_("Error occurred during commit"), category="error")
1173 raise HTTPFound(h.route_path("repo_commit", repo_name=self.db_repo_name, commit_id="tip"))
1218 1174
1219 1175 @LoginRequired()
1220 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1176 @HasRepoPermissionAnyDecorator("repository.write", "repository.admin")
1221 1177 def repo_files_edit_file(self):
1222 1178 _ = self.request.translate
1223 1179 c = self.load_default_context()
1224 1180 commit_id, f_path, bytes_path = self._get_commit_and_path()
1225 1181
1226 1182 self._ensure_not_locked()
1227 _branch_name, _sha_commit_id, is_head = \
1228 self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
1229 landing_ref=self.db_repo.landing_ref_name)
1183 _branch_name, _sha_commit_id, is_head = self._is_valid_head(
1184 commit_id, self.rhodecode_vcs_repo, landing_ref=self.db_repo.landing_ref_name
1185 )
1230 1186
1231 1187 self.forbid_non_head(is_head, f_path, commit_id=commit_id)
1232 1188 self.check_branch_permission(_branch_name, commit_id=commit_id)
1233 1189
1234 1190 c.commit = self._get_commit_or_redirect(commit_id)
1235 1191 c.file = self._get_filenode_or_redirect(c.commit, bytes_path)
1236 1192
1237 1193 if c.file.is_binary:
1238 1194 files_url = h.route_path(
1239 'repo_files',
1240 repo_name=self.db_repo_name,
1241 commit_id=c.commit.raw_id, f_path=f_path)
1195 "repo_files", repo_name=self.db_repo_name, commit_id=c.commit.raw_id, f_path=f_path
1196 )
1242 1197 raise HTTPFound(files_url)
1243 1198
1244 c.default_message = _('Edited file {} via RhodeCode Enterprise').format(f_path)
1199 c.default_message = _("Edited file {} via RhodeCode Enterprise").format(f_path)
1245 1200 c.f_path = f_path
1246 1201
1247 1202 return self._get_template_context(c)
1248 1203
1249 1204 @LoginRequired()
1250 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1205 @HasRepoPermissionAnyDecorator("repository.write", "repository.admin")
1251 1206 @CSRFRequired()
1252 1207 def repo_files_update_file(self):
1253 1208 _ = self.request.translate
1254 1209 c = self.load_default_context()
1255 1210 commit_id, f_path, bytes_path = self._get_commit_and_path()
1256 1211
1257 1212 self._ensure_not_locked()
1258 1213
1259 1214 c.commit = self._get_commit_or_redirect(commit_id)
1260 1215 c.file = self._get_filenode_or_redirect(c.commit, bytes_path)
1261 1216
1262 1217 if c.file.is_binary:
1263 raise HTTPFound(h.route_path('repo_files', repo_name=self.db_repo_name,
1264 commit_id=c.commit.raw_id, f_path=f_path))
1218 raise HTTPFound(
1219 h.route_path("repo_files", repo_name=self.db_repo_name, commit_id=c.commit.raw_id, f_path=f_path)
1220 )
1265 1221
1266 _branch_name, _sha_commit_id, is_head = \
1267 self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
1268 landing_ref=self.db_repo.landing_ref_name)
1222 _branch_name, _sha_commit_id, is_head = self._is_valid_head(
1223 commit_id, self.rhodecode_vcs_repo, landing_ref=self.db_repo.landing_ref_name
1224 )
1269 1225
1270 1226 self.forbid_non_head(is_head, f_path, commit_id=commit_id)
1271 1227 self.check_branch_permission(_branch_name, commit_id=commit_id)
1272 1228
1273 c.default_message = _('Edited file {} via RhodeCode Enterprise').format(f_path)
1229 c.default_message = _("Edited file {} via RhodeCode Enterprise").format(f_path)
1274 1230 c.f_path = f_path
1275 1231
1276 1232 old_content = c.file.str_content
1277 1233 sl = old_content.splitlines(1)
1278 first_line = sl[0] if sl else ''
1234 first_line = sl[0] if sl else ""
1279 1235
1280 1236 r_post = self.request.POST
1281 1237 # line endings: 0 - Unix, 1 - Mac, 2 - DOS
1282 1238 line_ending_mode = detect_mode(first_line, 0)
1283 content = convert_line_endings(r_post.get('content', ''), line_ending_mode)
1239 content = convert_line_endings(r_post.get("content", ""), line_ending_mode)
1284 1240
1285 message = r_post.get('message') or c.default_message
1241 message = r_post.get("message") or c.default_message
1286 1242
1287 1243 org_node_path = c.file.str_path
1288 filename = r_post['filename']
1244 filename = r_post["filename"]
1289 1245
1290 1246 root_path = c.file.dir_path
1291 1247 pure_path = self.create_pure_path(root_path, filename)
1292 1248 node_path = pure_path.as_posix()
1293 1249
1294 default_redirect_url = h.route_path('repo_commit', repo_name=self.db_repo_name,
1295 commit_id=commit_id)
1250 default_redirect_url = h.route_path("repo_commit", repo_name=self.db_repo_name, commit_id=commit_id)
1296 1251 if content == old_content and node_path == org_node_path:
1297 h.flash(_('No changes detected on {}').format(h.escape(org_node_path)),
1298 category='warning')
1252 h.flash(_("No changes detected on {}").format(h.escape(org_node_path)), category="warning")
1299 1253 raise HTTPFound(default_redirect_url)
1300 1254
1301 1255 try:
1302 1256 mapping = {
1303 1257 c.file.bytes_path: {
1304 'org_filename': org_node_path,
1305 'filename': safe_bytes(node_path),
1306 'content': safe_bytes(content),
1307 'lexer': '',
1308 'op': 'mod',
1309 'mode': c.file.mode
1258 "org_filename": org_node_path,
1259 "filename": safe_bytes(node_path),
1260 "content": safe_bytes(content),
1261 "lexer": "",
1262 "op": "mod",
1263 "mode": c.file.mode,
1310 1264 }
1311 1265 }
1312 1266
1313 1267 commit = ScmModel().update_nodes(
1314 1268 user=self._rhodecode_db_user.user_id,
1315 1269 repo=self.db_repo,
1316 1270 message=message,
1317 1271 nodes=mapping,
1318 1272 parent_commit=c.commit,
1319 1273 )
1320 1274
1321 h.flash(_('Successfully committed changes to file `{}`').format(
1322 h.escape(f_path)), category='success')
1323 default_redirect_url = h.route_path(
1324 'repo_commit', repo_name=self.db_repo_name, commit_id=commit.raw_id)
1275 h.flash(_("Successfully committed changes to file `{}`").format(h.escape(f_path)), category="success")
1276 default_redirect_url = h.route_path("repo_commit", repo_name=self.db_repo_name, commit_id=commit.raw_id)
1325 1277
1326 1278 except Exception:
1327 log.exception('Error occurred during commit')
1328 h.flash(_('Error occurred during commit'), category='error')
1279 log.exception("Error occurred during commit")
1280 h.flash(_("Error occurred during commit"), category="error")
1329 1281
1330 1282 raise HTTPFound(default_redirect_url)
1331 1283
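    # `detect_mode`/`convert_line_endings` above normalize the submitted content to
    # the line endings the file already uses (mode 0=Unix \n, 1=Mac \r, 2=DOS \r\n),
    # so a web edit doesn't silently rewrite every line of a DOS-encoded file. A
    # rough equivalent (hypothetical helper, not the rhodecode.lib implementation):
    #
    #   def normalize_line_endings(text: str, first_line: str) -> str:
    #       if first_line.endswith("\r\n"):
    #           eol = "\r\n"   # DOS
    #       elif first_line.endswith("\r"):
    #           eol = "\r"     # classic Mac
    #       else:
    #           eol = "\n"     # Unix
    #       return eol.join(text.splitlines())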
1332 1284 @LoginRequired()
1333 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1285 @HasRepoPermissionAnyDecorator("repository.write", "repository.admin")
1334 1286 def repo_files_add_file(self):
1335 1287 _ = self.request.translate
1336 1288 c = self.load_default_context()
1337 1289 commit_id, f_path, bytes_path = self._get_commit_and_path()
1338 1290
1339 1291 self._ensure_not_locked()
1340 1292
1341 1293 # Check if we need to use this page to upload a binary file
1342 upload_binary = str2bool(self.request.params.get('upload_binary', False))
1294 upload_binary = str2bool(self.request.params.get("upload_binary", False))
1343 1295
1344 1296 c.commit = self._get_commit_or_redirect(commit_id, redirect_after=False)
1345 1297 if c.commit is None:
1346 1298 c.commit = EmptyCommit(alias=self.rhodecode_vcs_repo.alias)
1347 1299
1348 1300 if self.rhodecode_vcs_repo.is_empty():
1349 1301 # for an empty repository we cannot check the current branch; we rely on
1350 1302 # c.commit.branch instead
1351 _branch_name, _sha_commit_id, is_head = c.commit.branch, '', True
1303 _branch_name, _sha_commit_id, is_head = c.commit.branch, "", True
1352 1304 else:
1353 _branch_name, _sha_commit_id, is_head = \
1354 self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
1355 landing_ref=self.db_repo.landing_ref_name)
1305 _branch_name, _sha_commit_id, is_head = self._is_valid_head(
1306 commit_id, self.rhodecode_vcs_repo, landing_ref=self.db_repo.landing_ref_name
1307 )
1356 1308
1357 1309 self.forbid_non_head(is_head, f_path, commit_id=commit_id)
1358 1310 self.check_branch_permission(_branch_name, commit_id=commit_id)
1359 1311
1360 c.default_message = (_('Added file via RhodeCode Enterprise')) \
1361 if not upload_binary else (_('Edited file {} via RhodeCode Enterprise').format(f_path))
1362 c.f_path = f_path.lstrip('/') # ensure not relative path
1312 c.default_message = (
1313 (_("Added file via RhodeCode Enterprise"))
1314 if not upload_binary
1315 else (_("Edited file {} via RhodeCode Enterprise").format(f_path))
1316 )
1317 c.f_path = f_path.lstrip("/") # ensure not relative path
1363 1318 c.replace_binary = upload_binary
1364 1319
1365 1320 return self._get_template_context(c)
1366 1321
1367 1322 @LoginRequired()
1368 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1323 @HasRepoPermissionAnyDecorator("repository.write", "repository.admin")
1369 1324 @CSRFRequired()
1370 1325 def repo_files_create_file(self):
1371 1326 _ = self.request.translate
1372 1327 c = self.load_default_context()
1373 1328 commit_id, f_path, bytes_path = self._get_commit_and_path()
1374 1329
1375 1330 self._ensure_not_locked()
1376 1331
1377 1332 c.commit = self._get_commit_or_redirect(commit_id, redirect_after=False)
1378 1333 if c.commit is None:
1379 1334 c.commit = EmptyCommit(alias=self.rhodecode_vcs_repo.alias)
1380 1335
1381 1336 # calculate redirect URL
1382 1337 if self.rhodecode_vcs_repo.is_empty():
1383 default_redirect_url = h.route_path(
1384 'repo_summary', repo_name=self.db_repo_name)
1338 default_redirect_url = h.route_path("repo_summary", repo_name=self.db_repo_name)
1385 1339 else:
1386 default_redirect_url = h.route_path(
1387 'repo_commit', repo_name=self.db_repo_name, commit_id='tip')
1340 default_redirect_url = h.route_path("repo_commit", repo_name=self.db_repo_name, commit_id="tip")
1388 1341
1389 1342 if self.rhodecode_vcs_repo.is_empty():
1390 1343 # for an empty repository we cannot check the current branch; we rely on
1391 1344 # c.commit.branch instead
1392 _branch_name, _sha_commit_id, is_head = c.commit.branch, '', True
1345 _branch_name, _sha_commit_id, is_head = c.commit.branch, "", True
1393 1346 else:
1394 _branch_name, _sha_commit_id, is_head = \
1395 self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
1396 landing_ref=self.db_repo.landing_ref_name)
1347 _branch_name, _sha_commit_id, is_head = self._is_valid_head(
1348 commit_id, self.rhodecode_vcs_repo, landing_ref=self.db_repo.landing_ref_name
1349 )
1397 1350
1398 1351 self.forbid_non_head(is_head, f_path, commit_id=commit_id)
1399 1352 self.check_branch_permission(_branch_name, commit_id=commit_id)
1400 1353
1401 c.default_message = (_('Added file via RhodeCode Enterprise'))
1354 c.default_message = _("Added file via RhodeCode Enterprise")
1402 1355 c.f_path = f_path
1403 1356
1404 1357 r_post = self.request.POST
1405 message = r_post.get('message') or c.default_message
1406 filename = r_post.get('filename')
1358 message = r_post.get("message") or c.default_message
1359 filename = r_post.get("filename")
1407 1360 unix_mode = 0
1408 1361
1409 1362 if not filename:
1410 1363 # If there's no commit, redirect to repo summary
1411 1364 if type(c.commit) is EmptyCommit:
1412 redirect_url = h.route_path(
1413 'repo_summary', repo_name=self.db_repo_name)
1365 redirect_url = h.route_path("repo_summary", repo_name=self.db_repo_name)
1414 1366 else:
1415 1367 redirect_url = default_redirect_url
1416 h.flash(_('No filename specified'), category='warning')
1368 h.flash(_("No filename specified"), category="warning")
1417 1369 raise HTTPFound(redirect_url)
1418 1370
1419 1371 root_path = f_path
1420 1372 pure_path = self.create_pure_path(root_path, filename)
1421 node_path = pure_path.as_posix().lstrip('/')
1373 node_path = pure_path.as_posix().lstrip("/")
1422 1374
1423 1375 author = self._rhodecode_db_user.full_contact
1424 content = convert_line_endings(r_post.get('content', ''), unix_mode)
1425 nodes = {
1426 safe_bytes(node_path): {
1427 'content': safe_bytes(content)
1428 }
1429 }
1376 content = convert_line_endings(r_post.get("content", ""), unix_mode)
1377 nodes = {safe_bytes(node_path): {"content": safe_bytes(content)}}
1430 1378
1431 1379 try:
1432
1433 1380 commit = ScmModel().create_nodes(
1434 1381 user=self._rhodecode_db_user.user_id,
1435 1382 repo=self.db_repo,
1436 1383 message=message,
1437 1384 nodes=nodes,
1438 1385 parent_commit=c.commit,
1439 1386 author=author,
1440 1387 )
1441 1388
1442 h.flash(_('Successfully committed new file `{}`').format(h.escape(node_path)), category='success')
1389 h.flash(_("Successfully committed new file `{}`").format(h.escape(node_path)), category="success")
1443 1390
1444 default_redirect_url = h.route_path(
1445 'repo_commit', repo_name=self.db_repo_name, commit_id=commit.raw_id)
1391 default_redirect_url = h.route_path("repo_commit", repo_name=self.db_repo_name, commit_id=commit.raw_id)
1446 1392
1447 1393 except NonRelativePathError:
1448 log.exception('Non Relative path found')
1449 h.flash(_('The location specified must be a relative path and must not '
1450 'contain .. in the path'), category='warning')
1394 log.exception("Non Relative path found")
1395 h.flash(
1396 _("The location specified must be a relative path and must not " "contain .. in the path"),
1397 category="warning",
1398 )
1451 1399 raise HTTPFound(default_redirect_url)
1452 1400 except (NodeError, NodeAlreadyExistsError) as e:
1453 h.flash(h.escape(safe_str(e)), category='error')
1401 h.flash(h.escape(safe_str(e)), category="error")
1454 1402 except Exception:
1455 log.exception('Error occurred during commit')
1456 h.flash(_('Error occurred during commit'), category='error')
1403 log.exception("Error occurred during commit")
1404 h.flash(_("Error occurred during commit"), category="error")
1457 1405
1458 1406 raise HTTPFound(default_redirect_url)
1459 1407
1460 1408 @LoginRequired()
1461 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1409 @HasRepoPermissionAnyDecorator("repository.write", "repository.admin")
1462 1410 @CSRFRequired()
1463 1411 def repo_files_upload_file(self):
1464 1412 _ = self.request.translate
1465 1413 c = self.load_default_context()
1466 1414 commit_id, f_path, bytes_path = self._get_commit_and_path()
1467 1415
1468 1416 self._ensure_not_locked()
1469 1417
1470 1418 c.commit = self._get_commit_or_redirect(commit_id, redirect_after=False)
1471 1419 if c.commit is None:
1472 1420 c.commit = EmptyCommit(alias=self.rhodecode_vcs_repo.alias)
1473 1421
1474 1422 # calculate redirect URL
1475 1423 if self.rhodecode_vcs_repo.is_empty():
1476 default_redirect_url = h.route_path(
1477 'repo_summary', repo_name=self.db_repo_name)
1424 default_redirect_url = h.route_path("repo_summary", repo_name=self.db_repo_name)
1478 1425 else:
1479 default_redirect_url = h.route_path(
1480 'repo_commit', repo_name=self.db_repo_name, commit_id='tip')
1426 default_redirect_url = h.route_path("repo_commit", repo_name=self.db_repo_name, commit_id="tip")
1481 1427
1482 1428 if self.rhodecode_vcs_repo.is_empty():
1483 1429 # for an empty repository we cannot check the current branch; we rely on
1484 1430 # c.commit.branch instead
1485 _branch_name, _sha_commit_id, is_head = c.commit.branch, '', True
1431 _branch_name, _sha_commit_id, is_head = c.commit.branch, "", True
1486 1432 else:
1487 _branch_name, _sha_commit_id, is_head = \
1488 self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
1489 landing_ref=self.db_repo.landing_ref_name)
1433 _branch_name, _sha_commit_id, is_head = self._is_valid_head(
1434 commit_id, self.rhodecode_vcs_repo, landing_ref=self.db_repo.landing_ref_name
1435 )
1490 1436
1491 1437 error = self.forbid_non_head(is_head, f_path, json_mode=True)
1492 1438 if error:
1493 return {
1494 'error': error,
1495 'redirect_url': default_redirect_url
1496 }
1439 return {"error": error, "redirect_url": default_redirect_url}
1497 1440 error = self.check_branch_permission(_branch_name, json_mode=True)
1498 1441 if error:
1499 return {
1500 'error': error,
1501 'redirect_url': default_redirect_url
1502 }
1442 return {"error": error, "redirect_url": default_redirect_url}
1503 1443
1504 c.default_message = (_('Added file via RhodeCode Enterprise'))
1444 c.default_message = _("Added file via RhodeCode Enterprise")
1505 1445 c.f_path = f_path
1506 1446
1507 1447 r_post = self.request.POST
1508 1448
1509 1449 message = c.default_message
1510 user_message = r_post.getall('message')
1450 user_message = r_post.getall("message")
1511 1451 if isinstance(user_message, list) and user_message:
1512 1452 # we take the first from duplicated results if it's not empty
1513 1453 message = user_message[0] if user_message[0] else message
1514 1454
1515 1455 nodes = {}
1516 1456
1517 for file_obj in r_post.getall('files_upload') or []:
1457 for file_obj in r_post.getall("files_upload") or []:
1518 1458 content = file_obj.file
1519 1459 filename = file_obj.filename
1520 1460
1521 1461 root_path = f_path
1522 1462 pure_path = self.create_pure_path(root_path, filename)
1523 node_path = pure_path.as_posix().lstrip('/')
1463 node_path = pure_path.as_posix().lstrip("/")
1524 1464
1525 nodes[safe_bytes(node_path)] = {
1526 'content': content
1527 }
1465 nodes[safe_bytes(node_path)] = {"content": content}
1528 1466
1529 1467 if not nodes:
1530 error = 'missing files'
1531 return {
1532 'error': error,
1533 'redirect_url': default_redirect_url
1534 }
1468 error = "missing files"
1469 return {"error": error, "redirect_url": default_redirect_url}
1535 1470
1536 1471 author = self._rhodecode_db_user.full_contact
1537 1472
1538 1473 try:
1539 1474 commit = ScmModel().create_nodes(
1540 1475 user=self._rhodecode_db_user.user_id,
1541 1476 repo=self.db_repo,
1542 1477 message=message,
1543 1478 nodes=nodes,
1544 1479 parent_commit=c.commit,
1545 1480 author=author,
1546 1481 )
1547 1482 if len(nodes) == 1:
1548 flash_message = _('Successfully committed {} new files').format(len(nodes))
1483 flash_message = _("Successfully committed 1 new file")
1549 1484 else:
1550 flash_message = _('Successfully committed 1 new file')
1485 flash_message = _("Successfully committed {} new files").format(len(nodes))
1551 1486
1552 h.flash(flash_message, category='success')
1487 h.flash(flash_message, category="success")
1553 1488
1554 default_redirect_url = h.route_path(
1555 'repo_commit', repo_name=self.db_repo_name, commit_id=commit.raw_id)
1489 default_redirect_url = h.route_path("repo_commit", repo_name=self.db_repo_name, commit_id=commit.raw_id)
1556 1490
1557 1491 except NonRelativePathError:
1558 log.exception('Non Relative path found')
1559 error = _('The location specified must be a relative path and must not '
1560 'contain .. in the path')
1561 h.flash(error, category='warning')
1492 log.exception("Non Relative path found")
1493 error = _("The location specified must be a relative path and must not " "contain .. in the path")
1494 h.flash(error, category="warning")
1562 1495
1563 return {
1564 'error': error,
1565 'redirect_url': default_redirect_url
1566 }
1496 return {"error": error, "redirect_url": default_redirect_url}
1567 1497 except (NodeError, NodeAlreadyExistsError) as e:
1568 1498 error = h.escape(e)
1569 h.flash(error, category='error')
1499 h.flash(error, category="error")
1570 1500
1571 return {
1572 'error': error,
1573 'redirect_url': default_redirect_url
1574 }
1501 return {"error": error, "redirect_url": default_redirect_url}
1575 1502 except Exception:
1576 log.exception('Error occurred during commit')
1577 error = _('Error occurred during commit')
1578 h.flash(error, category='error')
1579 return {
1580 'error': error,
1581 'redirect_url': default_redirect_url
1582 }
1503 log.exception("Error occurred during commit")
1504 error = _("Error occurred during commit")
1505 h.flash(error, category="error")
1506 return {"error": error, "redirect_url": default_redirect_url}
1583 1507
1584 return {
1585 'error': None,
1586 'redirect_url': default_redirect_url
1587 }
1508 return {"error": None, "redirect_url": default_redirect_url}
1588 1509
1589 1510 @LoginRequired()
1590 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1511 @HasRepoPermissionAnyDecorator("repository.write", "repository.admin")
1591 1512 @CSRFRequired()
1592 1513 def repo_files_replace_file(self):
1593 1514 _ = self.request.translate
1594 1515 c = self.load_default_context()
1595 1516 commit_id, f_path, bytes_path = self._get_commit_and_path()
1596 1517
1597 1518 self._ensure_not_locked()
1598 1519
1599 1520 c.commit = self._get_commit_or_redirect(commit_id, redirect_after=False)
1600 1521 if c.commit is None:
1601 1522 c.commit = EmptyCommit(alias=self.rhodecode_vcs_repo.alias)
1602 1523
1603 1524 if self.rhodecode_vcs_repo.is_empty():
1604 default_redirect_url = h.route_path(
1605 'repo_summary', repo_name=self.db_repo_name)
1525 default_redirect_url = h.route_path("repo_summary", repo_name=self.db_repo_name)
1606 1526 else:
1607 default_redirect_url = h.route_path(
1608 'repo_commit', repo_name=self.db_repo_name, commit_id='tip')
1527 default_redirect_url = h.route_path("repo_commit", repo_name=self.db_repo_name, commit_id="tip")
1609 1528
1610 1529 if self.rhodecode_vcs_repo.is_empty():
1611 1530 # for empty repository we cannot check for current branch, we rely on
1612 1531 # c.commit.branch instead
1613 _branch_name, _sha_commit_id, is_head = c.commit.branch, '', True
1532 _branch_name, _sha_commit_id, is_head = c.commit.branch, "", True
1614 1533 else:
1615 _branch_name, _sha_commit_id, is_head = \
1616 self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
1617 landing_ref=self.db_repo.landing_ref_name)
1534 _branch_name, _sha_commit_id, is_head = self._is_valid_head(
1535 commit_id, self.rhodecode_vcs_repo, landing_ref=self.db_repo.landing_ref_name
1536 )
1618 1537
1619 1538 error = self.forbid_non_head(is_head, f_path, json_mode=True)
1620 1539 if error:
1621 return {
1622 'error': error,
1623 'redirect_url': default_redirect_url
1624 }
1540 return {"error": error, "redirect_url": default_redirect_url}
1625 1541 error = self.check_branch_permission(_branch_name, json_mode=True)
1626 1542 if error:
1627 return {
1628 'error': error,
1629 'redirect_url': default_redirect_url
1630 }
1543 return {"error": error, "redirect_url": default_redirect_url}
1631 1544
1632 c.default_message = (_('Edited file {} via RhodeCode Enterprise').format(f_path))
1545 c.default_message = _("Edited file {} via RhodeCode Enterprise").format(f_path)
1633 1546 c.f_path = f_path
1634 1547
1635 1548 r_post = self.request.POST
1636 1549
1637 1550 message = c.default_message
1638 user_message = r_post.getall('message')
1551 user_message = r_post.getall("message")
1639 1552 if isinstance(user_message, list) and user_message:
1640 1553 # we take the first from duplicated results if it's not empty
1641 1554 message = user_message[0] if user_message[0] else message
1642 1555
1643 data_for_replacement = r_post.getall('files_upload') or []
1556 data_for_replacement = r_post.getall("files_upload") or []
1644 1557 if (objects_count := len(data_for_replacement)) > 1:
1645 return {
1646 'error': 'too many files for replacement',
1647 'redirect_url': default_redirect_url
1648 }
1558 return {"error": "too many files for replacement", "redirect_url": default_redirect_url}
1649 1559 elif not objects_count:
1650 return {
1651 'error': 'missing files',
1652 'redirect_url': default_redirect_url
1653 }
1560 return {"error": "missing files", "redirect_url": default_redirect_url}
1654 1561
1655 1562 content = data_for_replacement[0].file
1656 1563 retrieved_filename = data_for_replacement[0].filename
1657 1564
1658 if retrieved_filename.split('.')[-1] != f_path.split('.')[-1]:
1565 if retrieved_filename.split(".")[-1] != f_path.split(".")[-1]:
1659 1566 return {
1660 'error': 'file extension of uploaded file doesn\'t match an original file\'s extension',
1661 'redirect_url': default_redirect_url
1567 "error": "file extension of uploaded file doesn't match an original file's extension",
1568 "redirect_url": default_redirect_url,
1662 1569 }
1663 1570
1664 1571 author = self._rhodecode_db_user.full_contact
1665 1572
1666 1573 try:
1667 1574 commit = ScmModel().update_binary_node(
1668 1575 user=self._rhodecode_db_user.user_id,
1669 1576 repo=self.db_repo,
1670 1577 message=message,
1671 1578 node={
1672 'content': content,
1673 'file_path': f_path.encode(),
1579 "content": content,
1580 "file_path": f_path.encode(),
1674 1581 },
1675 1582 parent_commit=c.commit,
1676 1583 author=author,
1677 1584 )
1678 1585
1679 h.flash(_('Successfully committed 1 new file'), category='success')
1586 h.flash(_("Successfully committed 1 new file"), category="success")
1680 1587
1681 default_redirect_url = h.route_path(
1682 'repo_commit', repo_name=self.db_repo_name, commit_id=commit.raw_id)
1588 default_redirect_url = h.route_path("repo_commit", repo_name=self.db_repo_name, commit_id=commit.raw_id)
1683 1589
1684 1590 except (NodeError, NodeAlreadyExistsError) as e:
1685 1591 error = h.escape(e)
1686 h.flash(error, category='error')
1592 h.flash(error, category="error")
1687 1593
1688 return {
1689 'error': error,
1690 'redirect_url': default_redirect_url
1691 }
1594 return {"error": error, "redirect_url": default_redirect_url}
1692 1595 except Exception:
1693 log.exception('Error occurred during commit')
1694 error = _('Error occurred during commit')
1695 h.flash(error, category='error')
1696 return {
1697 'error': error,
1698 'redirect_url': default_redirect_url
1699 }
1596 log.exception("Error occurred during commit")
1597 error = _("Error occurred during commit")
1598 h.flash(error, category="error")
1599 return {"error": error, "redirect_url": default_redirect_url}
1700 1600
1701 return {
1702 'error': None,
1703 'redirect_url': default_redirect_url
1704 }
1601 return {"error": None, "redirect_url": default_redirect_url}
@@ -1,119 +1,119
1 1 # Copyright (C) 2015-2024 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 import logging
20 20 import threading
21 21
22 22 from dogpile.cache import register_backend
23 23
24 24 from . import region_meta
25 25 from .utils import (
26 26 ActiveRegionCache,
27 27 InvalidationContext,
28 28 backend_key_generator,
29 29 clear_cache_namespace,
30 30 get_default_cache_settings,
31 31 get_or_create_region,
32 32 make_region,
33 33 str2bool,
34 34 )
35 35
36 36 module_name = 'rhodecode'
37 37
38 38 register_backend(
39 39 "dogpile.cache.rc.memory_lru", f"{module_name}.lib.rc_cache.backends",
40 40 "LRUMemoryBackend")
41 41
42 42 register_backend(
43 43 "dogpile.cache.rc.file_namespace", f"{module_name}.lib.rc_cache.backends",
44 44 "FileNamespaceBackend")
45 45
46 46 register_backend(
47 47 "dogpile.cache.rc.redis", f"{module_name}.lib.rc_cache.backends",
48 48 "RedisPickleBackend")
49 49
50 50 register_backend(
51 51 "dogpile.cache.rc.redis_msgpack", f"{module_name}.lib.rc_cache.backends",
52 52 "RedisMsgPackBackend")
53 53
54 54
55 55 log = logging.getLogger(__name__)
56 56
57 57
58 FILE_TREE_CACHE_VER = 'v5'
58 FILE_TREE_CACHE_VER = 'v6'
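# bumping this version string invalidates previously cached file-tree entries,
# since the version is expected to take part in the derived cache keys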
59 59 LICENSE_CACHE_VER = 'v3'
60 60 PERMISSIONS_CACHE_VER = 'v2'
61 61
62 62 CLEAR_DELETE = 'delete'
63 63 CLEAR_INVALIDATE = 'invalidate'
64 64
65 65
66 66 def async_creation_runner(cache, cache_key, creator, mutex):
67 67
68 68 def runner():
69 69 try:
70 70 value = creator()
71 71 cache.set(cache_key, value)
72 72 finally:
73 73 mutex.release()
74 74
75 75 thread = threading.Thread(target=runner)
76 76 thread.start()
77 77
78 78
79 79 def configure_dogpile_cache(settings):
80 80 cache_dir = settings.get('cache_dir')
81 81 if cache_dir:
82 82 region_meta.dogpile_config_defaults['cache_dir'] = cache_dir
83 83
84 84 rc_cache_data = get_default_cache_settings(settings, prefixes=['rc_cache.'])
85 85
86 86 # inspect available namespaces
87 87 avail_regions = set()
88 88 for key in rc_cache_data.keys():
89 89 namespace_name = key.split('.', 1)[0]
90 90 if namespace_name in avail_regions:
91 91 continue
92 92
93 93 avail_regions.add(namespace_name)
94 94 log.debug('dogpile: found cache region: %s', namespace_name)
95 95
96 96 new_region = make_region(
97 97 name=namespace_name,
98 98 function_key_generator=None,
99 99 async_creation_runner=None
100 100 )
101 101
102 102 new_region.configure_from_config(settings, f'rc_cache.{namespace_name}.')
103 103 new_region.function_key_generator = backend_key_generator(new_region.actual_backend)
104 104
105 105 async_creator = str2bool(settings.pop(f'rc_cache.{namespace_name}.async_creator', 'false'))
106 106 if async_creator:
107 107 log.debug('configuring region %s with async creator', new_region)
108 108 new_region.async_creation_runner = async_creation_runner
109 109
110 110 if log.isEnabledFor(logging.DEBUG):
111 111 region_args = dict(backend=new_region.actual_backend,
112 112 region_invalidator=new_region.region_invalidator.__class__)
113 113 log.debug('dogpile: registering a new region key=`%s` args=%s', namespace_name, region_args)
114 114
115 115 region_meta.dogpile_cache_regions[namespace_name] = new_region
116 116
117 117
118 118 def includeme(config):
119 119 configure_dogpile_cache(config.registry.settings)
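# A minimal sketch of the settings shape configure_dogpile_cache() above
# consumes. The region name `cache_repo` and all values are illustrative
# assumptions; `backend` and `expiration_time` are standard dogpile
# configure_from_config() keys, and `async_creator` is the extra flag the
# function itself pops:
#
#   settings = {
#       'cache_dir': '/tmp/rc_cache',
#       'rc_cache.cache_repo.backend': 'dogpile.cache.rc.file_namespace',
#       'rc_cache.cache_repo.expiration_time': '2592000',
#       'rc_cache.cache_repo.async_creator': 'false',
#   }
#   configure_dogpile_cache(settings)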
@@ -1,987 +1,988
1 1 # Copyright (C) 2011-2024 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19
20 20 """
21 21 Some simple helper functions
22 22 """
23 23
24 24 import collections
25 25 import datetime
26 26 import dateutil.relativedelta
27 27 import logging
28 28 import re
29 29 import sys
30 30 import time
31 31 import urllib.request
32 32 import urllib.parse
33 33 import urllib.error
34 34 import urlobject
35 35 import uuid
36 36 import getpass
37 37 import socket
38 38 import errno
39 39 import random
40 40 import functools
41 41 from contextlib import closing
42 42
43 43 import pygments.lexers
44 44 import sqlalchemy
45 45 import sqlalchemy.event
46 46 import sqlalchemy.engine.url
47 47 import sqlalchemy.exc
48 48 import sqlalchemy.sql
49 49 import webob
50 50 from pyramid.settings import asbool
51 51
52 52 import rhodecode
53 53 from rhodecode.translation import _, _pluralize
54 54 from rhodecode.lib.str_utils import safe_str, safe_int, safe_bytes
55 55 from rhodecode.lib.hash_utils import md5, md5_safe, sha1, sha1_safe
56 56 from rhodecode.lib.type_utils import aslist, str2bool, StrictAttributeDict, AttributeDict
57 57
58 58
59 59 def __get_lem(extra_mapping=None):
60 60 """
61 61 Get language extension map based on what's inside pygments lexers
62 62 """
63 63 d = collections.defaultdict(lambda: [])
64 64
65 65 def __clean(s):
66 66 s = s.lstrip('*')
67 67 s = s.lstrip('.')
68 68
69 69 if s.find('[') != -1:
70 70 exts = []
71 71 start, stop = s.find('['), s.find(']')
72 72
73 73 for suffix in s[start + 1:stop]:
74 74 exts.append(s[:s.find('[')] + suffix)
75 75 return [e.lower() for e in exts]
76 76 else:
77 77 return [s.lower()]
78 78
79 79 for lx, t in sorted(pygments.lexers.LEXERS.items()):
80 80 m = list(map(__clean, t[-2]))
81 81 if m:
82 82 m = functools.reduce(lambda x, y: x + y, m)
83 83 for ext in m:
84 84 desc = lx.replace('Lexer', '')
85 85 d[ext].append(desc)
86 86
87 87 data = dict(d)
88 88
89 89 extra_mapping = extra_mapping or {}
90 90 if extra_mapping:
91 91 for k, v in list(extra_mapping.items()):
92 92 if k not in data:
93 93 # register new mapping2lexer
94 94 data[k] = [v]
95 95
96 96 return data
97 97
98 98
99 99 def convert_line_endings(line: str, mode) -> str:
100 100 """
101 101 Converts the line ending of a given line according to the given mode
102 102
103 103 Available modes are::
104 104 0 - Unix
105 105 1 - Mac
106 106 2 - DOS
107 107
108 108 :param line: given line to convert
109 109 :param mode: mode to convert to
110 110 :return: converted line according to mode
111 111 """
112 112 if mode == 0:
113 113 line = line.replace('\r\n', '\n')
114 114 line = line.replace('\r', '\n')
115 115 elif mode == 1:
116 116 line = line.replace('\r\n', '\r')
117 117 line = line.replace('\n', '\r')
118 118 elif mode == 2:
119 119 line = re.sub('\r(?!\n)|(?<!\r)\n', '\r\n', line)
120 120 return line
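# A quick doctest-style sketch of the conversion above:
#   >>> convert_line_endings('a\r\nb\r', 0)
#   'a\nb\n'
#   >>> convert_line_endings('a\nb\n', 2)
#   'a\r\nb\r\n'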
121 121
122 122
123 123 def detect_mode(line: str, default) -> int:
124 124 """
125 125 Detects the line break for a given line; if the line break can't be
126 126 detected, the given default value is returned
127 127
128 128 :param line: str line
129 129 :param default: default
130 130 :return: value of line end, one of: 0 - Unix, 1 - Mac, 2 - DOS
131 131 """
132 132 if line.endswith('\r\n'):
133 133 return 2
134 134 elif line.endswith('\n'):
135 135 return 0
136 136 elif line.endswith('\r'):
137 137 return 1
138 138 else:
139 139 return default
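# Doctest-style sketch of the detection above:
#   >>> detect_mode('foo\r\n', 0)
#   2
#   >>> detect_mode('foo\r', 0)
#   1
#   >>> detect_mode('foo', 0)
#   0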
140 140
141 141
142 142 def remove_suffix(s, suffix):
143 143 if s.endswith(suffix):
144 144 s = s[:-1 * len(suffix)]
145 145 return s
146 146
147 147
148 148 def remove_prefix(s, prefix):
149 149 if s.startswith(prefix):
150 150 s = s[len(prefix):]
151 151 return s
152 152
153 153
154 154 def find_calling_context(ignore_modules=None, depth=4, output_writer=None, indent=True):
155 155 """
156 How to find calling context:
156 157 Look through the calling stack and return the frame which called
157 158 this function and is part of the core module (i.e. rhodecode.*)
158 159
159 160 :param ignore_modules: list of modules to ignore eg. ['rhodecode.lib']
160 161 :param depth:
161 162 :param output_writer:
162 163 :param indent:
163 164
164 165 usage::
165 166
166 167 from rhodecode.lib.utils2 import find_calling_context
167 168
168 169 calling_context = find_calling_context(ignore_modules=[
169 170 'rhodecode.lib.caching_query',
170 171 'rhodecode.model.settings',
171 172 ])
172 173
173 174 """
174 175 import inspect
175 176 if not output_writer:
176 177 try:
177 178 from rich import print as pprint
178 179 except ImportError:
179 180 pprint = print
180 181 output_writer = pprint
181 182
182 183 frame = inspect.currentframe()
183 184 cc = []
184 185 try:
185 186 for i in range(depth): # current frame + (depth - 1) callers
186 187 frame = frame.f_back
187 188 if not frame:
188 189 break
189 190
190 191 info = inspect.getframeinfo(frame)
191 192 name = frame.f_globals.get('__name__')
192 193 if name not in ignore_modules:
193 194 cc.insert(0, f'CALL_CONTEXT:{i}: file {info.filename}:{info.lineno} -> {info.function}')
194 195 finally:
195 196 # Avoids a reference cycle
196 197 del frame
197 198
198 199 output_writer('* INFO: This code was called from: *')
199 200 for cnt, frm_info in enumerate(cc):
200 201 if not indent:
201 202 cnt = 1
202 203 output_writer(' ' * cnt + frm_info)
203 204
204 205
205 206 def ping_connection(connection, branch):
206 207 if branch:
207 208 # "branch" refers to a sub-connection of a connection,
208 209 # we don't want to bother pinging on these.
209 210 return
210 211
211 212 # turn off "close with result". This flag is only used with
212 213 # "connectionless" execution, otherwise will be False in any case
213 214 save_should_close_with_result = connection.should_close_with_result
214 215 connection.should_close_with_result = False
215 216
216 217 try:
217 218 # run a SELECT 1. use a core select() so that
218 219 # the SELECT of a scalar value without a table is
219 220 # appropriately formatted for the backend
220 221 connection.scalar(sqlalchemy.sql.select([1]))
221 222 except sqlalchemy.exc.DBAPIError as err:
222 223 # catch SQLAlchemy's DBAPIError, which is a wrapper
223 224 # for the DBAPI's exception. It includes a .connection_invalidated
224 225 # attribute which specifies if this connection is a "disconnect"
225 226 # condition, which is based on inspection of the original exception
226 227 # by the dialect in use.
227 228 if err.connection_invalidated:
228 229 # run the same SELECT again - the connection will re-validate
229 230 # itself and establish a new connection. The disconnect detection
230 231 # here also causes the whole connection pool to be invalidated
231 232 # so that all stale connections are discarded.
232 233 connection.scalar(sqlalchemy.sql.select([1]))
233 234 else:
234 235 raise
235 236 finally:
236 237 # restore "close with result"
237 238 connection.should_close_with_result = save_should_close_with_result
238 239
239 240
240 241 def engine_from_config(configuration, prefix='sqlalchemy.', **kwargs):
241 242 """Custom engine_from_config functions."""
242 243 log = logging.getLogger('sqlalchemy.engine')
243 244 use_ping_connection = asbool(configuration.pop('sqlalchemy.db1.ping_connection', None))
244 245 debug = asbool(configuration.pop('sqlalchemy.db1.debug_query', None))
245 246
246 247 engine = sqlalchemy.engine_from_config(configuration, prefix, **kwargs)
247 248
248 249 def color_sql(sql):
249 250 color_seq = '\033[1;33m' # This is yellow: code 33
250 251 normal = '\x1b[0m'
251 252 return ''.join([color_seq, sql, normal])
252 253
253 254 if use_ping_connection:
254 255 log.debug('Adding ping_connection on the engine config.')
255 256 sqlalchemy.event.listen(engine, "engine_connect", ping_connection)
256 257
257 258 if debug:
258 259 # attach events only for debug configuration
259 260 def before_cursor_execute(conn, cursor, statement,
260 261 parameters, context, executemany):
261 262 setattr(conn, 'query_start_time', time.time())
262 263 log.info(color_sql(">>>>> STARTING QUERY >>>>>"))
263 264 find_calling_context(ignore_modules=[
264 265 'rhodecode.lib.caching_query',
265 266 'rhodecode.model.settings',
266 267 ], output_writer=log.info)
267 268
268 269 def after_cursor_execute(conn, cursor, statement,
269 270 parameters, context, executemany):
270 271 delattr(conn, 'query_start_time')
271 272
272 273 sqlalchemy.event.listen(engine, "before_cursor_execute", before_cursor_execute)
273 274 sqlalchemy.event.listen(engine, "after_cursor_execute", after_cursor_execute)
274 275
275 276 return engine
276 277
277 278
278 279 def get_encryption_key(config) -> bytes:
279 280 secret = config.get('rhodecode.encrypted_values.secret')
280 281 default = config['beaker.session.secret']
281 282 enc_key = secret or default
282 283
283 284 return safe_bytes(enc_key)
284 285
285 286
286 287 def age(prevdate, now=None, show_short_version=False, show_suffix=True, short_format=False):
287 288 """
288 289 Turns a datetime into an age string.
289 290 If show_short_version is True, this generates a shorter string with
290 291 an approximate age; ex. '1 day ago', rather than '1 day and 23 hours ago'.
291 292
292 293 *IMPORTANT*
293 294 The code of this function is written in a special way so it's easier to
294 295 backport it to javascript. If you mean to update it, please also update
295 296 the `jquery.timeago-extension.js` file
296 297
297 298 :param prevdate: datetime object
298 299 :param now: current time; if not defined we use
299 300 `datetime.datetime.now()`
300 301 :param show_short_version: if it should approximate the date and
301 302 return a shorter string
302 303 :param show_suffix:
303 304 :param short_format: show short format, eg 2D instead of 2 days
304 305 :rtype: unicode
305 306 :returns: unicode words describing age
306 307 """
307 308
308 309 def _get_relative_delta(now, prevdate):
309 310 base = dateutil.relativedelta.relativedelta(now, prevdate)
310 311 return {
311 312 'year': base.years,
312 313 'month': base.months,
313 314 'day': base.days,
314 315 'hour': base.hours,
315 316 'minute': base.minutes,
316 317 'second': base.seconds,
317 318 }
318 319
319 320 def _is_leap_year(year):
320 321 return year % 4 == 0 and (year % 100 != 0 or year % 400 == 0)
321 322
322 323 def get_month(prevdate):
323 324 return prevdate.month
324 325
325 326 def get_year(prevdate):
326 327 return prevdate.year
327 328
328 329 now = now or datetime.datetime.now()
329 330 order = ['year', 'month', 'day', 'hour', 'minute', 'second']
330 331 deltas = {}
331 332 future = False
332 333
333 334 if prevdate > now:
334 335 now_old = now
335 336 now = prevdate
336 337 prevdate = now_old
337 338 future = True
338 339 if future:
339 340 prevdate = prevdate.replace(microsecond=0)
340 341 # Get date parts deltas
341 342 for part in order:
342 343 rel_delta = _get_relative_delta(now, prevdate)
343 344 deltas[part] = rel_delta[part]
344 345
345 346 # Fix negative offsets (there is 1 second between 10:59:59 and 11:00:00,
346 347 # not 1 hour, -59 minutes and -59 seconds)
347 348 offsets = [[5, 60], [4, 60], [3, 24]]
348 349 for element in offsets: # seconds, minutes, hours
349 350 num = element[0]
350 351 length = element[1]
351 352
352 353 part = order[num]
353 354 carry_part = order[num - 1]
354 355
355 356 if deltas[part] < 0:
356 357 deltas[part] += length
357 358 deltas[carry_part] -= 1
358 359
359 360 # Same thing for days except that the increment depends on the (variable)
360 361 # number of days in the month
361 362 month_lengths = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
362 363 if deltas['day'] < 0:
363 364 if get_month(prevdate) == 2 and _is_leap_year(get_year(prevdate)):
364 365 deltas['day'] += 29
365 366 else:
366 367 deltas['day'] += month_lengths[get_month(prevdate) - 1]
367 368
368 369 deltas['month'] -= 1
369 370
370 371 if deltas['month'] < 0:
371 372 deltas['month'] += 12
372 373 deltas['year'] -= 1
373 374
374 375 # Format the result
375 376 if short_format:
376 377 fmt_funcs = {
377 378 'year': lambda d: '%dy' % d,
378 379 'month': lambda d: '%dm' % d,
379 380 'day': lambda d: '%dd' % d,
380 381 'hour': lambda d: '%dh' % d,
381 382 'minute': lambda d: '%dmin' % d,
382 383 'second': lambda d: '%dsec' % d,
383 384 }
384 385 else:
385 386 fmt_funcs = {
386 387 'year': lambda d: _pluralize('${num} year', '${num} years', d, mapping={'num': d}).interpolate(),
387 388 'month': lambda d: _pluralize('${num} month', '${num} months', d, mapping={'num': d}).interpolate(),
388 389 'day': lambda d: _pluralize('${num} day', '${num} days', d, mapping={'num': d}).interpolate(),
389 390 'hour': lambda d: _pluralize('${num} hour', '${num} hours', d, mapping={'num': d}).interpolate(),
390 391 'minute': lambda d: _pluralize('${num} minute', '${num} minutes', d, mapping={'num': d}).interpolate(),
391 392 'second': lambda d: _pluralize('${num} second', '${num} seconds', d, mapping={'num': d}).interpolate(),
392 393 }
393 394
394 395 i = 0
395 396 for part in order:
396 397 value = deltas[part]
397 398 if value != 0:
398 399
399 400 if i < 5:
400 401 sub_part = order[i + 1]
401 402 sub_value = deltas[sub_part]
402 403 else:
403 404 sub_value = 0
404 405
405 406 if sub_value == 0 or show_short_version:
406 407 _val = fmt_funcs[part](value)
407 408 if future:
408 409 if show_suffix:
409 410 return _('in ${ago}', mapping={'ago': _val})
410 411 else:
411 412 return _(_val)
412 413
413 414 else:
414 415 if show_suffix:
415 416 return _('${ago} ago', mapping={'ago': _val})
416 417 else:
417 418 return _(_val)
418 419
419 420 val = fmt_funcs[part](value)
420 421 val_detail = fmt_funcs[sub_part](sub_value)
421 422 mapping = {'val': val, 'detail': val_detail}
422 423
423 424 if short_format:
424 425 datetime_tmpl = _('${val}, ${detail}', mapping=mapping)
425 426 if show_suffix:
426 427 datetime_tmpl = _('${val}, ${detail} ago', mapping=mapping)
427 428 if future:
428 429 datetime_tmpl = _('in ${val}, ${detail}', mapping=mapping)
429 430 else:
430 431 datetime_tmpl = _('${val} and ${detail}', mapping=mapping)
431 432 if show_suffix:
432 433 datetime_tmpl = _('${val} and ${detail} ago', mapping=mapping)
433 434 if future:
434 435 datetime_tmpl = _('in ${val} and ${detail}', mapping=mapping)
435 436
436 437 return datetime_tmpl
437 438 i += 1
438 439 return _('just now')
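# Usage sketch (exact wording depends on the active translations):
#   >>> import datetime
#   >>> _now = datetime.datetime(2024, 1, 2, 12, 0)
#   >>> age(datetime.datetime(2024, 1, 1, 12, 0), now=_now, show_suffix=False)
#   '1 day'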
439 440
440 441
441 442 def age_from_seconds(seconds):
442 443 seconds = safe_int(seconds) or 0
443 444 prevdate = time_to_datetime(time.time() + seconds)
444 445 return age(prevdate, show_suffix=False, show_short_version=True)
445 446
446 447
447 448 def cleaned_uri(uri):
448 449 """
449 450 Quotes '[' and ']' from uri if there is only one of them, since
450 451 according to RFC3986 we cannot use such chars in a uri.
451 452 :param uri:
452 453 :return: uri without these chars
453 454 """
454 455 return urllib.parse.quote(uri, safe='@$:/')
455 456
456 457
457 458 def credentials_filter(uri):
458 459 """
459 460 Returns a url with removed credentials
460 461
461 462 :param uri:
462 463 """
463 464 import urlobject
464 465 if isinstance(uri, rhodecode.lib.encrypt.InvalidDecryptedValue):
465 466 return 'InvalidDecryptionKey'
466 467
467 468 url_obj = urlobject.URLObject(cleaned_uri(uri))
468 469 url_obj = url_obj.without_password().without_username()
469 470
470 471 return url_obj
471 472
472 473
473 474 def get_host_info(request):
474 475 """
475 476 Generate host info; to obtain the full url, e.g. https://server.com,
476 477 use this:
477 478 `{scheme}://{netloc}`
478 479 """
479 480 if not request:
480 481 return {}
481 482
482 483 qualified_home_url = request.route_url('home')
483 484 parsed_url = urlobject.URLObject(qualified_home_url)
484 485 decoded_path = safe_str(urllib.parse.unquote(parsed_url.path.rstrip('/')))
485 486
486 487 return {
487 488 'scheme': parsed_url.scheme,
488 489 'netloc': parsed_url.netloc+decoded_path,
489 490 'hostname': parsed_url.hostname,
490 491 }
491 492
492 493
493 494 def get_clone_url(request, uri_tmpl, repo_name, repo_id, repo_type, **override):
494 495 qualified_home_url = request.route_url('home')
495 496 parsed_url = urlobject.URLObject(qualified_home_url)
496 497 decoded_path = safe_str(urllib.parse.unquote(parsed_url.path.rstrip('/')))
497 498
498 499 args = {
499 500 'scheme': parsed_url.scheme,
500 501 'user': '',
501 502 'sys_user': getpass.getuser(),
502 503 # path if we use proxy-prefix
503 504 'netloc': parsed_url.netloc+decoded_path,
504 505 'hostname': parsed_url.hostname,
505 506 'prefix': decoded_path,
506 507 'repo': repo_name,
507 508 'repoid': str(repo_id),
508 509 'repo_type': repo_type
509 510 }
510 511 args.update(override)
511 512 args['user'] = urllib.parse.quote(safe_str(args['user']))
512 513
513 514 for k, v in list(args.items()):
514 515 tmpl_key = '{%s}' % k
515 516 uri_tmpl = uri_tmpl.replace(tmpl_key, v)
516 517
517 518 # special case for SVN clone url
518 519 if repo_type == 'svn':
519 520 uri_tmpl = uri_tmpl.replace('ssh://', 'svn+ssh://')
520 521
521 522 # remove leading @ sign if it's present. Case of empty user
522 523 url_obj = urlobject.URLObject(uri_tmpl)
523 524 url = url_obj.with_netloc(url_obj.netloc.lstrip('@'))
524 525
525 526 return safe_str(url)
526 527
527 528
528 529 def get_commit_safe(repo, commit_id=None, commit_idx=None, pre_load=None,
529 530 maybe_unreachable=False, reference_obj=None):
530 531 """
531 532 Safe version of get_commit; if this commit doesn't exist for a
532 533 repository it returns a Dummy one instead
533 534
534 535 :param repo: repository instance
535 536 :param commit_id: commit id as str
536 537 :param commit_idx: numeric commit index
537 538 :param pre_load: optional list of commit attributes to load
538 539 :param maybe_unreachable: translate unreachable commits on git repos
539 540 :param reference_obj: explicitly search via a reference obj in git. E.g "branch:123" would mean branch "123"
540 541 """
541 542 # TODO(skreft): remove these circular imports
542 543 from rhodecode.lib.vcs.backends.base import BaseRepository, EmptyCommit
543 544 from rhodecode.lib.vcs.exceptions import RepositoryError
544 545 if not isinstance(repo, BaseRepository):
545 546 raise Exception('You must pass a Repository '
546 547 f'object as first argument, got {type(repo)}')
547 548
548 549 try:
549 550 commit = repo.get_commit(
550 551 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load,
551 552 maybe_unreachable=maybe_unreachable, reference_obj=reference_obj)
552 553 except (RepositoryError, LookupError):
553 554 commit = EmptyCommit()
554 555 return commit
555 556
556 557
557 558 def datetime_to_time(dt):
558 559 if dt:
559 560 return time.mktime(dt.timetuple())
560 561
561 562
562 563 def time_to_datetime(tm):
563 564 if tm:
564 565 if isinstance(tm, str):
565 566 try:
566 567 tm = float(tm)
567 568 except ValueError:
568 569 return
569 570 return datetime.datetime.fromtimestamp(tm)
570 571
571 572
572 573 def time_to_utcdatetime(tm):
573 574 if tm:
574 575 if isinstance(tm, str):
575 576 try:
576 577 tm = float(tm)
577 578 except ValueError:
578 579 return
579 580 return datetime.datetime.utcfromtimestamp(tm)
580 581
581 582
582 583 MENTIONS_REGEX = re.compile(
583 584 # ^@ or @ without any special chars in front
584 585 r'(?:^@|[^a-zA-Z0-9\-\_\.]@)'
585 586 # main body starts with letter, then can be . - _
586 587 r'([a-zA-Z0-9]{1}[a-zA-Z0-9\-\_\.]+)',
587 588 re.VERBOSE | re.MULTILINE)
588 589
589 590
590 591 def extract_mentioned_users(s):
591 592 """
592 593 Returns unique usernames from given string s that have @mention
593 594
594 595 :param s: string to get mentions
595 596 """
596 597 usrs = set()
597 598 for username in MENTIONS_REGEX.findall(s):
598 599 usrs.add(username)
599 600
600 601 return sorted(list(usrs), key=lambda k: k.lower())
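# Doctest-style sketch (results are sorted case-insensitively):
#   >>> extract_mentioned_users('thanks @marcin, please review @Vlad')
#   ['marcin', 'Vlad']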
601 602
602 603
603 604 def fix_PATH(os_=None):
604 605 """
605 606 Get the currently active python path, and prepend it to the PATH variable to fix
606 607 issues with subprocess calls and different python versions
607 608 """
608 609 if os_ is None:
609 610 import os
610 611 else:
611 612 os = os_
612 613
613 614 cur_path = os.path.split(sys.executable)[0]
614 615 os_path = os.environ['PATH']
615 616 if not os.environ['PATH'].startswith(cur_path):
616 617 os.environ['PATH'] = f'{cur_path}:{os_path}'
617 618
618 619
619 620 def obfuscate_url_pw(engine):
620 621 _url = engine or ''
621 622 try:
622 623 _url = sqlalchemy.engine.url.make_url(engine)
623 624 except Exception:
624 625 pass
625 626 return repr(_url)
626 627
627 628
628 629 def get_server_url(environ):
629 630 req = webob.Request(environ)
630 631 return req.host_url + req.script_name
631 632
632 633
633 634 def unique_id(hexlen=32):
634 635 alphabet = "23456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghjklmnpqrstuvwxyz"
635 636 return suuid(truncate_to=hexlen, alphabet=alphabet)
636 637
637 638
638 639 def suuid(url=None, truncate_to=22, alphabet=None):
639 640 """
640 641 Generate and return a short URL safe UUID.
641 642
642 643 If the url parameter is provided, set the namespace to the provided
643 644 URL and generate a UUID.
644 645
645 646 :param url: url to get the uuid for
646 647 :param truncate_to: truncate the basic 22 char UUID to a shorter version
647 648
648 649 The IDs won't be universally unique any longer, but the probability of
649 650 a collision will still be very low.
650 651 """
651 652 # Define our alphabet.
652 653 _ALPHABET = alphabet or "23456789ABCDEFGHJKLMNPQRSTUVWXYZ"
653 654
654 655 # If no URL is given, generate a random UUID.
655 656 if url is None:
656 657 unique_id = uuid.uuid4().int
657 658 else:
658 659 unique_id = uuid.uuid3(uuid.NAMESPACE_URL, url).int
659 660
660 661 alphabet_length = len(_ALPHABET)
661 662 output = []
662 663 while unique_id > 0:
663 664 digit = unique_id % alphabet_length
664 665 output.append(_ALPHABET[digit])
665 666 unique_id = int(unique_id / alphabet_length)
666 667 return "".join(output)[:truncate_to]
667 668
668 669
669 670 def get_current_rhodecode_user(request=None):
670 671 """
671 672 Gets rhodecode user from request
672 673 """
673 674 import pyramid.threadlocal
674 675 pyramid_request = request or pyramid.threadlocal.get_current_request()
675 676
676 677 # web case
677 678 if pyramid_request and hasattr(pyramid_request, 'user'):
678 679 return pyramid_request.user
679 680
680 681 # api case
681 682 if pyramid_request and hasattr(pyramid_request, 'rpc_user'):
682 683 return pyramid_request.rpc_user
683 684
684 685 return None
685 686
686 687
687 688 def action_logger_generic(action, namespace=''):
688 689 """
689 690 A generic logger for actions useful to the system overview; tries to find
690 691 an acting user for the context of the call, otherwise reports an unknown user
691 692
692 693 :param action: logging message eg 'comment 5 deleted'
693 694 :type action: string
694 695
695 696 :param namespace: namespace of the logging message eg. 'repo.comments'
696 697 :type namespace: string
697 698
698 699 """
699 700
700 701 logger_name = 'rhodecode.actions'
701 702
702 703 if namespace:
703 704 logger_name += '.' + namespace
704 705
705 706 log = logging.getLogger(logger_name)
706 707
707 708 # get a user if we can
708 709 user = get_current_rhodecode_user()
709 710
710 711 logfunc = log.info
711 712
712 713 if not user:
713 714 user = '<unknown user>'
714 715 logfunc = log.warning
715 716
716 717 logfunc(f'Logging action by {user}: {action}')
717 718
718 719
719 720 def escape_split(text, sep=',', maxsplit=-1):
720 721 r"""
721 722 Allows for escaping of the separator: e.g. arg='foo\, bar'
722 723
723 724 It should be noted that the way bash et al. do command line parsing, those
724 725 single quotes are required.
725 726 """
726 727 escaped_sep = r'\%s' % sep
727 728
728 729 if escaped_sep not in text:
729 730 return text.split(sep, maxsplit)
730 731
731 732 before, _mid, after = text.partition(escaped_sep)
732 733 startlist = before.split(sep, maxsplit) # a regular split is fine here
733 734 unfinished = startlist[-1]
734 735 startlist = startlist[:-1]
735 736
736 737 # recurse because there may be more escaped separators
737 738 endlist = escape_split(after, sep, maxsplit)
738 739
739 740 # finish building the escaped value. we use endlist[0] because the first
740 741 # part of the string sent in recursion is the rest of the escaped value.
741 742 unfinished += sep + endlist[0]
742 743
743 744 return startlist + [unfinished] + endlist[1:] # put together all the parts
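# Doctest-style sketch of the escaping behaviour:
#   >>> escape_split(r'foo\, bar, baz')
#   ['foo, bar', ' baz']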
744 745
745 746
746 747 class OptionalAttr(object):
747 748 """
748 749 Special Optional Option that defines other attribute. Example::
749 750
750 751 def test(apiuser, userid=Optional(OAttr('apiuser')):
751 752 user = Optional.extract(userid)
752 753 # calls
753 754
754 755 """
755 756
756 757 def __init__(self, attr_name):
757 758 self.attr_name = attr_name
758 759
759 760 def __repr__(self):
760 761 return '<OptionalAttr:%s>' % self.attr_name
761 762
762 763 def __call__(self):
763 764 return self
764 765
765 766
766 767 # alias
767 768 OAttr = OptionalAttr
768 769
769 770
770 771 class Optional(object):
771 772 """
772 773 Defines an optional parameter::
773 774
774 775 param = param.getval() if isinstance(param, Optional) else param
775 776 param = param() if isinstance(param, Optional) else param
776 777
777 778 is equivalent of::
778 779
779 780 param = Optional.extract(param)
780 781
781 782 """
782 783
783 784 def __init__(self, type_):
784 785 self.type_ = type_
785 786
786 787 def __repr__(self):
787 788 return '<Optional:%s>' % self.type_.__repr__()
788 789
789 790 def __call__(self):
790 791 return self.getval()
791 792
792 793 def getval(self):
793 794 """
794 795 returns value from this Optional instance
795 796 """
796 797 if isinstance(self.type_, OAttr):
797 798 # use params name
798 799 return self.type_.attr_name
799 800 return self.type_
800 801
801 802 @classmethod
802 803 def extract(cls, val):
803 804 """
804 805 Extracts value from Optional() instance
805 806
806 807 :param val:
807 808 :return: original value if it's not Optional instance else
808 809 value of instance
809 810 """
810 811 if isinstance(val, cls):
811 812 return val.getval()
812 813 return val
813 814
814 815
815 816 def glob2re(pat):
816 817 import fnmatch
817 818 return fnmatch.translate(pat)
818 819
819 820
820 821 def parse_byte_string(size_str):
821 822 match = re.match(r'(\d+)(MB|KB)', size_str, re.IGNORECASE)
822 823 if not match:
823 824 raise ValueError(f'Given size: {size_str} is invalid, please make sure '
824 825 f'to use format of <num>(MB|KB)')
825 826
826 827 _parts = match.groups()
827 828 num, type_ = _parts
828 829 return int(num) * {'mb': 1024*1024, 'kb': 1024}[type_.lower()]
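# Doctest-style sketch:
#   >>> parse_byte_string('10MB')
#   10485760
#   >>> parse_byte_string('512kb')
#   524288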
829 830
830 831
831 832 class CachedProperty(object):
832 833 """
833 834 Lazy Attributes. With option to invalidate the cache by running a method
834 835
835 836 >>> class Foo(object):
836 837 ...
837 838 ... @CachedProperty
838 839 ... def heavy_func(self):
839 840 ... return 'super-calculation'
840 841 ...
841 842 ... foo = Foo()
842 843 ... foo.heavy_func() # first computation
843 844 ... foo.heavy_func() # fetch from cache
844 845 ... foo._invalidate_prop_cache('heavy_func')
845 846
846 847 # at this point calling foo.heavy_func() will be re-computed
847 848 """
848 849
849 850 def __init__(self, func, func_name=None):
850 851
851 852 if func_name is None:
852 853 func_name = func.__name__
853 854 self.data = (func, func_name)
854 855 functools.update_wrapper(self, func)
855 856
856 857 def __get__(self, inst, class_):
857 858 if inst is None:
858 859 return self
859 860
860 861 func, func_name = self.data
861 862 value = func(inst)
862 863 inst.__dict__[func_name] = value
863 864 if '_invalidate_prop_cache' not in inst.__dict__:
864 865 inst.__dict__['_invalidate_prop_cache'] = functools.partial(
865 866 self._invalidate_prop_cache, inst)
866 867 return value
867 868
868 869 def _invalidate_prop_cache(self, inst, name):
869 870 inst.__dict__.pop(name, None)
870 871
871 872
872 873 def retry(func=None, exception=Exception, n_tries=5, delay=5, backoff=1, logger=True):
873 874 """
874 875 Retry decorator with exponential backoff.
875 876
876 877 Parameters
877 878 ----------
878 879 func : typing.Callable, optional
879 880 Callable on which the decorator is applied, by default None
880 881 exception : Exception or tuple of Exceptions, optional
881 882 Exception(s) that invoke retry, by default Exception
882 883 n_tries : int, optional
883 884 Number of tries before giving up, by default 5
884 885 delay : int, optional
885 886 Initial delay between retries in seconds, by default 5
886 887 backoff : int, optional
887 888 Backoff multiplier e.g. value of 2 will double the delay, by default 1
888 889 logger : bool, optional
889 890 Option to log or print, by default True
890 891
891 892 Returns
892 893 -------
893 894 typing.Callable
894 895 Decorated callable that calls itself when exception(s) occur.
895 896
896 897 Examples
897 898 --------
898 899 >>> import random
899 900 >>> @retry(exception=Exception, n_tries=3)
900 901 ... def test_random(text):
901 902 ... x = random.random()
902 903 ... if x < 0.5:
903 904 ... raise Exception("Fail")
904 905 ... else:
905 906 ... print("Success: ", text)
906 907 >>> test_random("It works!")
907 908 """
908 909
909 910 if func is None:
910 911 return functools.partial(
911 912 retry,
912 913 exception=exception,
913 914 n_tries=n_tries,
914 915 delay=delay,
915 916 backoff=backoff,
916 917 logger=logger,
917 918 )
918 919
919 920 @functools.wraps(func)
920 921 def wrapper(*args, **kwargs):
921 922 _n_tries, n_delay = n_tries, delay
922 923 log = logging.getLogger('rhodecode.retry')
923 924
924 925 while _n_tries > 1:
925 926 try:
926 927 return func(*args, **kwargs)
927 928 except exception as e:
928 929 e_details = repr(e)
929 930 msg = "Exception on calling func {func}: {e}, " \
930 931 "Retrying in {n_delay} seconds..."\
931 932 .format(func=func, e=e_details, n_delay=n_delay)
932 933 if logger:
933 934 log.warning(msg)
934 935 else:
935 936 print(msg)
936 937 time.sleep(n_delay)
937 938 _n_tries -= 1
938 939 n_delay *= backoff
939 940
940 941 return func(*args, **kwargs)
941 942
942 943 return wrapper
943 944
944 945
945 946 def user_agent_normalizer(user_agent_raw, safe=True):
946 947 log = logging.getLogger('rhodecode.user_agent_normalizer')
947 948 ua = (user_agent_raw or '').strip().lower()
948 949 ua = ua.replace('"', '')
949 950
950 951 try:
951 952 if 'mercurial/proto-1.0' in ua:
952 953 ua = ua.replace('mercurial/proto-1.0', '')
953 954 ua = ua.replace('(', '').replace(')', '').strip()
954 955 ua = ua.replace('mercurial ', 'mercurial/')
955 956 elif ua.startswith('git'):
956 957 parts = ua.split(' ')
957 958 if parts:
958 959 ua = parts[0]
959 960 ua = re.sub(r'\.windows\.\d', '', ua).strip()
960 961
961 962 return ua
962 963 except Exception:
963 964 log.exception('Failed to parse scm user-agent')
964 965 if not safe:
965 966 raise
966 967
967 968 return ua
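# Sketch of typical normalizations:
#   >>> user_agent_normalizer('git/2.43.0.windows.1')
#   'git/2.43.0'
#   >>> user_agent_normalizer('mercurial/proto-1.0 (Mercurial 6.2)')
#   'mercurial/6.2'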
968 969
969 970
970 971 def get_available_port(min_port=40000, max_port=55555, use_range=False):
971 972 hostname = ''
972 973 for _check_port in range(min_port, max_port):
973 974 pick_port = 0
974 975 if use_range:
975 976 pick_port = random.randint(min_port, max_port)
976 977
977 978 with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as s:
978 979 try:
979 980 s.bind((hostname, pick_port))
980 981 s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
981 982 return s.getsockname()[1]
982 983 except socket.error as e:
983 984 if e.args[0] in [errno.EADDRINUSE, errno.ECONNREFUSED]:
984 985 continue
985 986 raise
986 987 except OSError:
987 988 continue
@@ -1,453 +1,450
1 1 # Copyright (C) 2014-2024 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 """
20 20 GIT commit module
21 21 """
22 22
23 23 import io
24 24 import configparser
25 25 import logging
26 26 from itertools import chain
27 27
28 28 from zope.cachedescriptors.property import Lazy as LazyProperty
29 29
30 30 from rhodecode.lib.datelib import utcdate_fromtimestamp
31 31 from rhodecode.lib.str_utils import safe_bytes, safe_str
32 32 from rhodecode.lib.vcs.backends import base
33 33 from rhodecode.lib.vcs.exceptions import CommitError, NodeDoesNotExistError
34 34 from rhodecode.lib.vcs.nodes import (
35 35 FileNode,
36 36 DirNode,
37 37 NodeKind,
38 38 RootNode,
39 39 SubModuleNode,
40 40 LargeFileNode,
41 41 )
42 42 from rhodecode.lib.vcs_common import FILEMODE_LINK
43 43
44 44 log = logging.getLogger(__name__)
45 45
46 46
47 47 class GitCommit(base.BaseCommit):
48 48 """
49 49 Represents state of the repository at single commit id.
50 50 """
51 51
52 52 _filter_pre_load = [
53 53 # done through a more complex tree walk on parents
54 54 "affected_files",
55 55 # done through subprocess not remote call
56 56 "children",
57 57 # done through a more complex tree walk on parents
58 58 "status",
59 59 # mercurial specific property not supported here
60 60 "obsolete",
61 61 # mercurial specific property not supported here
62 62 "phase",
63 63 # mercurial specific property not supported here
64 64 "hidden",
65 65 ]
66 66
67 67 def __init__(self, repository, raw_id, idx, pre_load=None):
68 68 self.repository = repository
69 69 self._remote = repository._remote
70 70 # TODO: johbo: Tweak of raw_id should not be necessary
71 71 self.raw_id = safe_str(raw_id)
72 72 self.idx = idx
73 73
74 74 self._set_bulk_properties(pre_load)
75 75
76 76 # caches
77 77 self.nodes = {}
78 78 self._path_mode_cache = {} # path stats cache, e.g filemode etc
79 79 self._path_type_cache = {} # path type dir/file/link etc cache
80 80
81 81 self._submodules = None
82 82
83 83 def _set_bulk_properties(self, pre_load):
84 84 if not pre_load:
85 85 return
86 86 pre_load = [entry for entry in pre_load if entry not in self._filter_pre_load]
87 87 if not pre_load:
88 88 return
89 89
90 90 result = self._remote.bulk_request(self.raw_id, pre_load)
91 91 for attr, value in result.items():
92 92 if attr in ["author", "message"]:
93 93 if value:
94 94 value = safe_str(value)
95 95 elif attr == "date":
96 96 value = utcdate_fromtimestamp(*value)
97 97 elif attr == "parents":
98 98 value = self._make_commits(value)
99 99 elif attr == "branch":
100 100 value = self._set_branch(value)
101 101 self.__dict__[attr] = value
102 102
103 103 @LazyProperty
104 104 def _commit(self):
105 105 return self._remote[self.raw_id]
106 106
107 107 @LazyProperty
108 108 def _tree_id(self):
109 109 return self._remote[self._commit["tree"]]["id"]
110 110
111 111 @LazyProperty
112 112 def id(self):
113 113 return self.raw_id
114 114
115 115 @LazyProperty
116 116 def short_id(self):
117 117 return self.raw_id[:12]
118 118
119 119 @LazyProperty
120 120 def message(self):
121 121 return safe_str(self._remote.message(self.id))
122 122
123 123 @LazyProperty
124 124 def committer(self):
125 125 return safe_str(self._remote.author(self.id))
126 126
127 127 @LazyProperty
128 128 def author(self):
129 129 return safe_str(self._remote.author(self.id))
130 130
131 131 @LazyProperty
132 132 def date(self):
133 133 unix_ts, tz = self._remote.date(self.raw_id)
134 134 return utcdate_fromtimestamp(unix_ts, tz)
135 135
136 136 @LazyProperty
137 137 def status(self):
138 138 """
139 139 Returns modified, added, removed, deleted files for current commit
140 140 """
141 141 added, modified, deleted = self._changes_cache
142 142 return list(modified), list(modified), list(deleted)
143 143
144 144 @LazyProperty
145 145 def tags(self):
146 146 tags = [safe_str(name) for name, commit_id in self.repository.tags.items() if commit_id == self.raw_id]
147 147 return tags
148 148
149 149 @LazyProperty
150 150 def commit_branches(self):
151 151 branches = []
152 152 for name, commit_id in self.repository.branches.items():
153 153 if commit_id == self.raw_id:
154 154 branches.append(name)
155 155 return branches
156 156
157 157 def _set_branch(self, branches):
158 158 if branches:
159 159 # actually commit can have multiple branches in git
160 160 return safe_str(branches[0])
161 161
162 162 @LazyProperty
163 163 def branch(self):
164 164 branches = self._remote.branch(self.raw_id)
165 165 return self._set_branch(branches)
166 166
167 167 def _get_path_tree_id_and_type(self, path: bytes):
168 168
169 169 if path in self._path_type_cache:
170 170 return self._path_type_cache[path]
171 171
172 172 if path == b"":
173 173 self._path_type_cache[b""] = [self._tree_id, NodeKind.DIR]
174 174 return self._path_type_cache[path]
175 175
176 176 tree_id, tree_type, tree_mode = self._remote.tree_and_type_for_path(self.raw_id, path)
177 177 if tree_id is None:
178 178 raise self.no_node_at_path(path)
179 179
180 180 self._path_type_cache[path] = [tree_id, tree_type]
181 181 self._path_mode_cache[path] = tree_mode
182 182
183 183 return self._path_type_cache[path]
184 184
185 185 def _get_kind(self, path):
186 186 path = self._fix_path(path)
187 187 _, path_type = self._get_path_tree_id_and_type(path)
188 188 return path_type
189 189
190 190 def _assert_is_path(self, path):
191 191 path = self._fix_path(path)
192 192 if self._get_kind(path) != NodeKind.FILE:
193 193 raise CommitError(f"File at path={path} does not exist for commit {self.raw_id}")
194 194 return path
195 195
196 196 def _get_file_nodes(self):
197 197 return chain(*(t[2] for t in self.walk()))
198 198
199 199 @LazyProperty
200 200 def parents(self):
201 201 """
202 202 Returns list of parent commits.
203 203 """
204 204 parent_ids = self._remote.parents(self.id)
205 205 return self._make_commits(parent_ids)
206 206
207 207 @LazyProperty
208 208 def children(self):
209 209 """
210 210 Returns list of child commits.
211 211 """
212 212
213 213 children = self._remote.children(self.raw_id)
214 214 return self._make_commits(children)
215 215
216 216 def _make_commits(self, commit_ids):
217 217 def commit_maker(_commit_id):
218 218 return self.repository.get_commit(commit_id=_commit_id)
219 219
220 220 return [commit_maker(commit_id) for commit_id in commit_ids]
221 221
222 222 def get_file_mode(self, path: bytes):
223 223 """
224 224 Returns stat mode of the file at the given `path`.
225 225 """
226 226 path = self._assert_is_path(path)
227 227
228 228 # ensure path is traversed
229 229 self._get_path_tree_id_and_type(path)
230 230
231 231 return self._path_mode_cache[path]
232 232
233 233 def is_link(self, path: bytes):
234 234 path = self._assert_is_path(path)
235 235 if path not in self._path_mode_cache:
236 236 self._path_mode_cache[path] = self._remote.fctx_flags(self.raw_id, path)
237 237
238 238 return self._path_mode_cache[path] == FILEMODE_LINK
239 239
240 240 def is_node_binary(self, path):
241 241 tree_id, _ = self._get_path_tree_id_and_type(path)
242 242 return self._remote.is_binary(tree_id)
243 243
244 244 def node_md5_hash(self, path):
245 245 path = self._assert_is_path(path)
246 246 return self._remote.md5_hash(self.raw_id, path)
247 247
248 248 def get_file_content(self, path):
249 249 """
250 250 Returns content of the file at given `path`.
251 251 """
252 252 tree_id, _ = self._get_path_tree_id_and_type(path)
253 253 return self._remote.blob_as_pretty_string(tree_id)
254 254
255 255 def get_file_content_streamed(self, path):
256 256 tree_id, _ = self._get_path_tree_id_and_type(path)
257 257 stream_method = getattr(self._remote, "stream:blob_as_pretty_string")
258 258 return stream_method(tree_id)
259 259
260 260 def get_file_size(self, path):
261 261 """
262 262 Returns size of the file at given `path`.
263 263 """
264 264 tree_id, _ = self._get_path_tree_id_and_type(path)
265 265 return self._remote.blob_raw_length(tree_id)
266 266
267 267 def get_path_history(self, path, limit=None, pre_load=None):
268 268 """
269 269 Returns history of file as reversed list of `GitCommit` objects for
270 270 which file at given `path` has been modified.
271 271 """
272 272 path = self._assert_is_path(path)
273 273 history = self._remote.node_history(self.raw_id, path, limit)
274 274 return [self.repository.get_commit(commit_id=commit_id, pre_load=pre_load) for commit_id in history]
275 275
276 276 def get_file_annotate(self, path, pre_load=None):
277 277 """
278 278 Returns a generator of four element tuples with
279 279 lineno, commit_id, commit lazy loader and line
280 280 """
281 281
282 282 result = self._remote.node_annotate(self.raw_id, path)
283 283
284 284 for ln_no, commit_id, content in result:
285 285 yield (
286 286 ln_no,
287 287 commit_id,
288 288 lambda: self.repository.get_commit(commit_id=commit_id, pre_load=pre_load),
289 289 content,
290 290 )
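# Consumption sketch (assuming `commit` is a GitCommit instance): the third
# tuple element is a lazy loader, so each commit is only fetched when called:
#   >>> for line_no, sha, loader, line in commit.get_file_annotate(b'README.rst'):
#   ...     author = loader().author  # resolved on demand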
291 291
292 292 def get_nodes(self, path: bytes, pre_load=None):
293 293
294 294 if self._get_kind(path) != NodeKind.DIR:
295 295 raise CommitError(f"Directory does not exist for commit {self.raw_id} at '{path}'")
296 296 path = self._fix_path(path)
297 297
298 # call and check tree_id for this path
299 tree_id, _ = self._get_path_tree_id_and_type(path)
300
301 298 path_nodes = []
302 299
303 for bytes_name, stat_, tree_item_id, node_kind in self._remote.tree_items(tree_id):
300 for obj_name, stat_, tree_item_id, node_kind, pre_load_data in self._remote.get_nodes(self.raw_id, path, pre_load):
304 301 if node_kind is None:
305 302 raise CommitError(f"Requested object type={node_kind} cannot be determined")
306 303
307 if path != b"":
308 obj_path = b"/".join((path, bytes_name))
304 if path == b"":
305 obj_path = obj_name
309 306 else:
310 obj_path = bytes_name
307 obj_path = b"/".join((path, obj_name))
311 308
312 309 # cache file mode for git, since we have it already
313 310 if obj_path not in self._path_mode_cache:
314 311 self._path_mode_cache[obj_path] = stat_
315 312
316 313 # cache type
317 314 if obj_path not in self._path_type_cache:
318 315 self._path_type_cache[obj_path] = [tree_item_id, node_kind]
319 316
320 317 entry = None
321 318 if obj_path in self.nodes:
322 319 entry = self.nodes[obj_path]
323 320 else:
324 321 if node_kind == NodeKind.SUBMODULE:
325 url = self._get_submodule_url(b"/".join((path, bytes_name)))
326 entry= SubModuleNode(bytes_name, url=url, commit=tree_item_id, alias=self.repository.alias)
322 url = self._get_submodule_url(obj_path)
323 entry = SubModuleNode(obj_name, url=url, commit=tree_item_id, alias=self.repository.alias)
327 324 elif node_kind == NodeKind.DIR:
328 325 entry = DirNode(safe_bytes(obj_path), commit=self)
329 326 elif node_kind == NodeKind.FILE:
330 entry = FileNode(safe_bytes(obj_path), commit=self, mode=stat_, pre_load=pre_load)
327 entry = FileNode(safe_bytes(obj_path), commit=self, mode=stat_, pre_load_data=pre_load_data)
331 328
332 329 if entry:
333 330 self.nodes[obj_path] = entry
334 331 path_nodes.append(entry)
335 332
336 333 path_nodes.sort()
337 334 return path_nodes
338 335
339 336 def get_node(self, path: bytes, pre_load=None):
340 337 path = self._fix_path(path)
341 338
342 339 # use cached, if we have one
343 340 if path in self.nodes:
344 341 return self.nodes[path]
345 342
346 343 try:
347 344 tree_id, path_type = self._get_path_tree_id_and_type(path)
348 345 except CommitError:
349 346 raise NodeDoesNotExistError(f"Cannot find one of parents' directories for a given path: {path}")
350 347
351 348 if path == b"":
352 349 node = RootNode(commit=self)
353 350 else:
354 351 if path_type == NodeKind.SUBMODULE:
355 352 url = self._get_submodule_url(path)
356 353 node = SubModuleNode(path, url=url, commit=tree_id, alias=self.repository.alias)
357 354 elif path_type == NodeKind.DIR:
358 355 node = DirNode(safe_bytes(path), commit=self)
359 356 elif path_type == NodeKind.FILE:
360 357 node = FileNode(safe_bytes(path), commit=self, pre_load=pre_load)
361 358 self._path_mode_cache[path] = node.mode
362 359 else:
363 360 raise self.no_node_at_path(path)
364 361
365 362 # cache node
366 363 self.nodes[path] = node
367 364 return self.nodes[path]
368 365
369 366 def get_largefile_node(self, path: bytes):
370 367 tree_id, _ = self._get_path_tree_id_and_type(path)
371 368 pointer_spec = self._remote.is_large_file(tree_id)
372 369
373 370 if pointer_spec:
374 371 # the content of that file's regular FileNode is the hash of the largefile
375 372 file_id = pointer_spec.get("oid_hash")
376 373 if not self._remote.in_largefiles_store(file_id):
377 374 log.warning(f'Largefile oid={file_id} not found in store')
378 375 return None
379 376
380 377 lf_path = self._remote.store_path(file_id)
381 378 return LargeFileNode(safe_bytes(lf_path), commit=self, org_path=path)
382 379
383 380 @LazyProperty
384 381 def affected_files(self) -> list[bytes]:
385 382 """
386 383 Gets fast-accessible file changes for the given commit
387 384 """
388 385 added, modified, deleted = self._changes_cache
389 386 return list(added.union(modified).union(deleted))
390 387
391 388 @LazyProperty
392 389 def _changes_cache(self) -> tuple[set, set, set]:
393 390 added = set()
394 391 modified = set()
395 392 deleted = set()
396 393
397 394 parents = self.parents
398 395 if not self.parents:
399 396 parents = [base.EmptyCommit()]
400 397 for parent in parents:
401 398 if isinstance(parent, base.EmptyCommit):
402 399 oid = None
403 400 else:
404 401 oid = parent.raw_id
405 402 _added, _modified, _deleted = self._remote.tree_changes(oid, self.raw_id)
406 403 added = added | set(_added)
407 404 modified = modified | set(_modified)
408 405 deleted = deleted | set(_deleted)
409 406
410 407 return added, modified, deleted
411 408
412 409 def _get_paths_for_status(self, status):
413 410 """
414 411 Returns sorted list of paths for given ``status``.
415 412
416 413 :param status: one of: *added*, *modified* or *deleted*
417 414 """
418 415 added, modified, deleted = self._changes_cache
419 416 return sorted({"added": list(added), "modified": list(modified), "deleted": list(deleted)}[status])
420 417
421 418 @LazyProperty
422 419 def added_paths(self):
423 420 return [n for n in self._get_paths_for_status("added")]
424 421
425 422 @LazyProperty
426 423 def changed_paths(self):
427 424 return [n for n in self._get_paths_for_status("modified")]
428 425
429 426 @LazyProperty
430 427 def removed_paths(self):
431 428 return [n for n in self._get_paths_for_status("deleted")]
432 429
433 430 def _get_submodule_url(self, submodule_path: bytes):
434 431 git_modules_path = b".gitmodules"
435 432
436 433 if self._submodules is None:
437 434 self._submodules = {}
438 435
439 436 try:
440 437 submodules_node = self.get_node(git_modules_path)
441 438 except NodeDoesNotExistError:
442 439 return None
443 440
444 441 parser = configparser.RawConfigParser()
445 442 parser.read_file(io.StringIO(submodules_node.str_content))
446 443
447 444 for section in parser.sections():
448 445 path = parser.get(section, "path")
449 446 url = parser.get(section, "url")
450 447 if path and url:
451 448 self._submodules[safe_bytes(path).strip(b"/")] = url
452 449
453 450 return self._submodules.get(submodule_path.strip(b"/"))
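
Note on the change above: `get_nodes()` now issues a single `get_nodes` call that returns, for every directory entry, its name, stat mode, tree id, node kind and the requested `pre_load` attributes, so each `FileNode` can be built from `pre_load_data` instead of firing its own `bulk_file_request()` later. A minimal sketch of that flow; `FakeRemote` and its payload shapes are invented for illustration, not the real vcsserver API:

    # Sketch only: FakeRemote and its payload are illustrative assumptions.
    class FakeRemote:
        def get_nodes(self, commit_id, path, pre_load):
            # one round-trip yields metadata plus the pre-loaded
            # attributes for every entry in the directory
            yield b"README.rst", 0o100644, "tree-oid-1", "file", [("size", 42), ("is_binary", False)]
            yield b"docs", 0o040000, "tree-oid-2", "dir", None

    for name, stat_, tree_id, kind, pre_load_data in FakeRemote().get_nodes("abc123", b"", ["size", "is_binary"]):
        print(name, kind, dict(pre_load_data) if pre_load_data else {})
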
@@ -1,397 +1,397
1 1 # Copyright (C) 2014-2024 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 """
20 20 HG commit module
21 21 """
22 22 import os
23 23 import logging
24 24
25 25 from zope.cachedescriptors.property import Lazy as LazyProperty
26 26
27 27 from rhodecode.lib.datelib import utcdate_fromtimestamp
28 28 from rhodecode.lib.str_utils import safe_bytes, safe_str
29 29 from rhodecode.lib.vcs.backends import base
30 30 from rhodecode.lib.vcs.exceptions import CommitError
31 31 from rhodecode.lib.vcs.nodes import (
32 32 DirNode,
33 33 FileNode,
34 34 NodeKind,
35 35 RootNode,
36 36 SubModuleNode,
37 37 LargeFileNode,
38 38 )
39 39 from rhodecode.lib.vcs_common import FILEMODE_LINK
40 40
41 41 log = logging.getLogger(__name__)
42 42
43 43
44 44 class MercurialCommit(base.BaseCommit):
45 45 """
46 46 Represents state of the repository at the single commit.
47 47 """
48 48
49 49 _filter_pre_load = [
50 50 # git specific property not supported here
51 51 "_commit",
52 52 ]
53 53
54 54 def __init__(self, repository, raw_id, idx, pre_load=None):
55 55 raw_id = safe_str(raw_id)
56 56
57 57 self.repository = repository
58 58 self._remote = repository._remote
59 59
60 60 self.raw_id = raw_id
61 61 self.idx = idx
62 62
63 63 self._set_bulk_properties(pre_load)
64 64
65 65 # caches
66 66 self.nodes = {}
67 67 self._path_mode_cache = {} # path stats cache, e.g. filemode etc
68 68 self._path_type_cache = {} # path type dir/file/link etc cache
69 69
70 70 def _set_bulk_properties(self, pre_load):
71 71 if not pre_load:
72 72 return
73 73 pre_load = [entry for entry in pre_load if entry not in self._filter_pre_load]
74 74 if not pre_load:
75 75 return
76 76
77 77 result = self._remote.bulk_request(self.raw_id, pre_load)
78 78
79 79 for attr, value in result.items():
80 80 if attr in ["author", "branch", "message"]:
81 81 value = safe_str(value)
82 82 elif attr == "affected_files":
83 83 value = list(map(safe_str, value))
84 84 elif attr == "date":
85 85 value = utcdate_fromtimestamp(*value)
86 86 elif attr in ["children", "parents"]:
87 87 value = self._make_commits(value)
88 88 elif attr in ["phase"]:
89 89 value = self._get_phase_text(value)
90 90 self.__dict__[attr] = value
91 91
92 92 @LazyProperty
93 93 def tags(self):
94 94 tags = [name for name, commit_id in self.repository.tags.items() if commit_id == self.raw_id]
95 95 return tags
96 96
97 97 @LazyProperty
98 98 def branch(self):
99 99 return safe_str(self._remote.ctx_branch(self.raw_id))
100 100
101 101 @LazyProperty
102 102 def bookmarks(self):
103 103 bookmarks = [name for name, commit_id in self.repository.bookmarks.items() if commit_id == self.raw_id]
104 104 return bookmarks
105 105
106 106 @LazyProperty
107 107 def message(self):
108 108 return safe_str(self._remote.ctx_description(self.raw_id))
109 109
110 110 @LazyProperty
111 111 def committer(self):
112 112 return safe_str(self.author)
113 113
114 114 @LazyProperty
115 115 def author(self):
116 116 return safe_str(self._remote.ctx_user(self.raw_id))
117 117
118 118 @LazyProperty
119 119 def date(self):
120 120 return utcdate_fromtimestamp(*self._remote.ctx_date(self.raw_id))
121 121
122 122 @LazyProperty
123 123 def status(self):
124 124 """
125 125 Returns modified, added and deleted files for the current commit
126 126 """
127 127 modified, added, deleted, *_ = self._remote.ctx_status(self.raw_id)
128 128 return modified, added, deleted
129 129
130 130 @LazyProperty
131 131 def id(self):
132 132 if self.last:
133 133 return "tip"
134 134 return self.short_id
135 135
136 136 @LazyProperty
137 137 def short_id(self):
138 138 return self.raw_id[:12]
139 139
140 140 def _make_commits(self, commit_ids, pre_load=None):
141 141 return [self.repository.get_commit(commit_id=commit_id, pre_load=pre_load) for commit_id in commit_ids]
142 142
143 143 @LazyProperty
144 144 def parents(self):
145 145 """
146 146 Returns list of parent commits.
147 147 """
148 148 parents = self._remote.ctx_parents(self.raw_id)
149 149 return self._make_commits(parents)
150 150
151 151 def _get_phase_text(self, phase_id):
152 152 return {
153 153 0: "public",
154 154 1: "draft",
155 155 2: "secret",
156 156 }.get(phase_id) or ""
157 157
158 158 @LazyProperty
159 159 def phase(self):
160 160 phase_id = self._remote.ctx_phase(self.raw_id)
161 161 phase_text = self._get_phase_text(phase_id)
162 162
163 163 return safe_str(phase_text)
164 164
165 165 @LazyProperty
166 166 def obsolete(self):
167 167 obsolete = self._remote.ctx_obsolete(self.raw_id)
168 168 return obsolete
169 169
170 170 @LazyProperty
171 171 def hidden(self):
172 172 hidden = self._remote.ctx_hidden(self.raw_id)
173 173 return hidden
174 174
175 175 @LazyProperty
176 176 def children(self):
177 177 """
178 178 Returns list of child commits.
179 179 """
180 180 children = self._remote.ctx_children(self.raw_id)
181 181 return self._make_commits(children)
182 182
183 183 def _get_kind(self, path):
184 184 path = self._fix_path(path)
185 185 path_type = self._get_path_type(path)
186 186 return path_type
187 187
188 188 def _assert_is_path(self, path) -> str | bytes:
189 189 path = self._fix_path(path)
190 190
191 191 if self._get_kind(path) != NodeKind.FILE:
192 192 raise CommitError(f"File at path={path} does not exist for commit {self.raw_id}")
193 193
194 194 return path
195 195
196 196 def get_file_mode(self, path: bytes):
197 197 """
198 198 Returns stat mode of the file at the given ``path``.
199 199 """
200 200 path = self._assert_is_path(path)
201 201 if path not in self._path_mode_cache:
202 202 self._path_mode_cache[path] = self._remote.fctx_flags(self.raw_id, path)
203 203
204 204 return self._path_mode_cache[path]
205 205
206 206 def is_link(self, path: bytes):
207 207 path = self._assert_is_path(path)
208 208 if path not in self._path_mode_cache:
209 209 self._path_mode_cache[path] = self._remote.fctx_flags(self.raw_id, path)
210 210
211 211 return self._path_mode_cache[path] == FILEMODE_LINK
212 212
213 213 def is_node_binary(self, path):
214 214 path = self._assert_is_path(path)
215 215 return self._remote.is_binary(self.raw_id, path)
216 216
217 217 def node_md5_hash(self, path):
218 218 path = self._assert_is_path(path)
219 219 return self._remote.md5_hash(self.raw_id, path)
220 220
221 221 def get_file_content(self, path):
222 222 """
223 223 Returns content of the file at given ``path``.
224 224 """
225 225 path = self._assert_is_path(path)
226 226 return self._remote.fctx_node_data(self.raw_id, path)
227 227
228 228 def get_file_content_streamed(self, path):
229 229 path = self._assert_is_path(path)
230 230 stream_method = getattr(self._remote, "stream:fctx_node_data")
231 231 return stream_method(self.raw_id, path)
232 232
233 233 def get_file_size(self, path):
234 234 """
235 235 Returns size of the file at given ``path``.
236 236 """
237 237 path = self._assert_is_path(path)
238 238 return self._remote.fctx_size(self.raw_id, path)
239 239
240 240 def get_path_history(self, path, limit=None, pre_load=None):
241 241 """
242 242 Returns history of the file as a reversed list of `MercurialCommit`
243 243 objects for which the file at the given ``path`` has been modified.
244 244 """
245 245 path = self._assert_is_path(path)
246 246 history = self._remote.node_history(self.raw_id, path, limit)
247 247 return [self.repository.get_commit(commit_id=commit_id, pre_load=pre_load) for commit_id in history]
248 248
249 249 def get_file_annotate(self, path, pre_load=None):
250 250 """
251 251 Returns a generator of four-element tuples with
252 252 lineno, commit_id, commit lazy loader and line
253 253 """
254 254 result = self._remote.fctx_annotate(self.raw_id, path)
255 255
256 256 for ln_no, commit_id, content in result:
257 257 yield (
258 258 ln_no,
259 259 commit_id,
260 260 lambda commit_id=commit_id: self.repository.get_commit(commit_id=commit_id, pre_load=pre_load),
261 261 content,
262 262 )
263 263
264 264 def get_nodes(self, path: bytes, pre_load=None):
265 265 """
266 266 Returns combined ``DirNode`` and ``FileNode`` objects list representing
267 267 state of commit at the given ``path``. If node at the given ``path``
268 268 is not an instance of ``DirNode``, a CommitError is raised.
269 269 """
270 270
271 271 if self._get_kind(path) != NodeKind.DIR:
272 272 raise CommitError(f"Directory does not exist for idx {self.raw_id} at '{path}'")
273 273 path = self._fix_path(path)
274 274
275 275 path_nodes = []
276 276
277 for obj_path, (node_kind, flags) in self._remote.dir_items(self.raw_id, path):
277 for obj_path, node_kind, flags, pre_load_data in self._remote.get_nodes(self.raw_id, path, pre_load):
278 278
279 279 if node_kind is None:
280 280 raise CommitError(f"Requested object type={node_kind} cannot be mapped to a proper type")
281 281
282 282 stat_ = flags
283 283 # cache file mode
284 284 if obj_path not in self._path_mode_cache:
285 285 self._path_mode_cache[obj_path] = stat_
286 286
287 287 # cache type
288 288 if obj_path not in self._path_type_cache:
289 289 self._path_type_cache[obj_path] = node_kind
290 290
291 291 entry = None
292 292 if obj_path in self.nodes:
293 293 entry = self.nodes[obj_path]
294 294 else:
295 295 if node_kind == NodeKind.DIR:
296 296 entry = DirNode(safe_bytes(obj_path), commit=self)
297 297 elif node_kind == NodeKind.FILE:
298 entry = FileNode(safe_bytes(obj_path), commit=self, mode=stat_, pre_load=pre_load)
298 entry = FileNode(safe_bytes(obj_path), commit=self, mode=stat_, pre_load=pre_load, pre_load_data=pre_load_data)
299 299 if entry:
300 300 self.nodes[obj_path] = entry
301 301 path_nodes.append(entry)
302 302
303 303 for obj_path, (location, commit, scm_type) in self._submodules.items():
304 304
305 305 if os.path.dirname(obj_path) == path:
306 306 entry = SubModuleNode(obj_path, url=location, commit=commit, alias=scm_type)
307 307 self.nodes[obj_path] = entry
308 308 path_nodes.append(entry)
309 309
310 310 path_nodes.sort()
311 311 return path_nodes
312 312
313 313 def get_node(self, path: bytes, pre_load=None):
314 314 """
315 315 Returns `Node` object from the given `path`. If there is no node at
316 316 the given `path`, a `NodeDoesNotExistError` is raised.
317 317 """
318 318 path = self._fix_path(path)
319 319
320 320 # use cached, if we have one
321 321 if path in self.nodes:
322 322 return self.nodes[path]
323 323
324 324 path_type = self._get_path_type(path)
325 325 if path == b"":
326 326 node = RootNode(commit=self)
327 327 else:
328 328 if path_type == NodeKind.DIR:
329 329 node = DirNode(safe_bytes(path), commit=self)
330 330 elif path_type == NodeKind.FILE:
331 331 node = FileNode(safe_bytes(path), commit=self, pre_load=pre_load)
332 332 self._path_mode_cache[path] = node.mode
333 333 else:
334 334 raise self.no_node_at_path(path)
335 335 # cache node
336 336 self.nodes[path] = node
337 337 return self.nodes[path]
338 338
339 339 def _get_path_type(self, path: bytes):
340 340 if path in self._path_type_cache:
341 341 return self._path_type_cache[path]
342 342
343 343 if path == b"":
344 344 self._path_type_cache[b""] = NodeKind.DIR
345 345 return NodeKind.DIR
346 346
347 347 path_type, flags = self._remote.get_path_type(self.raw_id, path)
348 348
349 349 if not path_type:
350 350 raise self.no_node_at_path(path)
351 351
352 352 self._path_type_cache[path] = path_type
353 353 self._path_mode_cache[path] = flags
354 354
355 355 return self._path_type_cache[path]
356 356
357 357 def get_largefile_node(self, path: bytes):
358 358 pointer_spec = self._remote.is_large_file(self.raw_id, path)
359 359 if pointer_spec:
360 360 # the content of that file's regular FileNode is the hash of the largefile
361 361 file_id = self.get_file_content(path).strip()
362 362
363 363 if self._remote.in_largefiles_store(file_id):
364 364 lf_path = self._remote.store_path(file_id)
365 365 return LargeFileNode(safe_bytes(lf_path), commit=self, org_path=path)
366 366 elif self._remote.in_user_cache(file_id):
367 367 lf_path = self._remote.store_path(file_id)
368 368 self._remote.link(file_id, path)
369 369 return LargeFileNode(safe_bytes(lf_path), commit=self, org_path=path)
370 370
371 371 @LazyProperty
372 372 def _submodules(self):
373 373 """
374 374 Returns a dictionary with submodule information from the substate
375 375 file of the hg repository.
376 376 """
377 377 return self._remote.ctx_substate(self.raw_id)
378 378
379 379 @LazyProperty
380 380 def affected_files(self) -> list[bytes]:
381 381 """
382 382 Gets fast-accessible file changes for the given commit
383 383 """
384 384 return self._remote.ctx_files(self.raw_id)
385 385
386 386 @LazyProperty
387 387 def added_paths(self):
388 388 return [n for n in self.status[1]]
389 389
390 390 @LazyProperty
391 391 def changed_paths(self):
392 392 return [n for n in self.status[0]]
393 393
394 394
395 395 @LazyProperty
396 396 def removed_paths(self):
397 397 return [n for n in self.status[2]]
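
For context on `_set_bulk_properties()` above: values returned by `bulk_request()` are coerced before being cached on the commit object. A rough standalone sketch, with an invented `result` payload and plain `bytes.decode()`/`datetime` calls standing in for `safe_str()` and `utcdate_fromtimestamp()`:

    import datetime

    # invented sample of what bulk_request() might return for pre_load
    result = {
        "author": b"dev <dev@example.com>",
        "date": (1700000000.0, 0),  # (unix timestamp, tz offset) pair
    }
    coerced = {}
    for attr, value in result.items():
        if attr in ("author", "branch", "message"):
            value = value.decode("utf-8")  # stand-in for safe_str()
        elif attr == "date":
            # stand-in for utcdate_fromtimestamp(timestamp, offset)
            value = datetime.datetime.fromtimestamp(value[0], datetime.timezone.utc)
        coerced[attr] = value
    print(coerced["author"], coerced["date"].isoformat())
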
@@ -1,278 +1,278
1 1 # Copyright (C) 2014-2024 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 """
20 20 SVN commit module
21 21 """
22 22 import logging
23 23 import dateutil.parser
24 24 from zope.cachedescriptors.property import Lazy as LazyProperty
25 25
26 26 from rhodecode.lib.str_utils import safe_bytes, safe_str
27 27 from rhodecode.lib.vcs import nodes, path as vcspath
28 28 from rhodecode.lib.vcs.backends import base
29 29 from rhodecode.lib.vcs.exceptions import CommitError
30 30 from rhodecode.lib.vcs_common import NodeKind, FILEMODE_EXECUTABLE, FILEMODE_DEFAULT, FILEMODE_LINK
31 31 _SVN_PROP_TRUE = "*"
32 32
33 33 log = logging.getLogger(__name__)
34 34
35 35
36 36 class SubversionCommit(base.BaseCommit):
37 37 """
38 38 Subversion specific implementation of commits
39 39
40 40 .. attribute:: branch
41 41
42 42 The Subversion backend does not support assigning branches to
43 43 specific commits. This attribute always has the value `None`.
44 44
45 45 """
46 46
47 47 def __init__(self, repository, commit_id):
48 48 self.repository = repository
49 49 self.idx = self.repository._get_commit_idx(commit_id)
50 50 self._svn_rev = self.idx + 1
51 51 self._remote = repository._remote
52 52 # TODO: handling of raw_id should be a method on repository itself,
53 53 # which knows how to translate commit index and commit id
54 54 self.raw_id = commit_id
55 55 self.short_id = commit_id
56 56 self.id = f"r{commit_id}"
57 57
58 58 self.nodes = {}
59 59 self._path_mode_cache = {} # path stats cache, e.g. filemode etc
60 60 self._path_type_cache = {} # path type dir/file/link etc cache
61 61 self.tags = []
62 62
63 63 @property
64 64 def author(self):
65 65 return safe_str(self._properties.get("svn:author"))
66 66
67 67 @property
68 68 def date(self):
69 69 return _date_from_svn_properties(self._properties)
70 70
71 71 @property
72 72 def message(self):
73 73 return safe_str(self._properties.get("svn:log"))
74 74
75 75 @LazyProperty
76 76 def _properties(self):
77 77 return self._remote.revision_properties(self._svn_rev)
78 78
79 79 @LazyProperty
80 80 def parents(self):
81 81 parent_idx = self.idx - 1
82 82 if parent_idx >= 0:
83 83 parent = self.repository.get_commit(commit_idx=parent_idx)
84 84 return [parent]
85 85 return []
86 86
87 87 @LazyProperty
88 88 def children(self):
89 89 child_idx = self.idx + 1
90 90 if child_idx < len(self.repository.commit_ids):
91 91 child = self.repository.get_commit(commit_idx=child_idx)
92 92 return [child]
93 93 return []
94 94
95 95 def _calculate_file_mode(self, path: bytes):
96 96 # Note: Subversion flags files which are executable with a special
97 97 # property `svn:executable` which is set to the value ``"*"``.
98 98 if self._get_file_property(path, "svn:executable") == _SVN_PROP_TRUE:
99 99 return FILEMODE_EXECUTABLE
100 100 else:
101 101 return FILEMODE_DEFAULT
102 102
103 103 def get_file_mode(self, path: bytes):
104 104 path = self._fix_path(path)
105 105
106 106 if path not in self._path_mode_cache:
107 107 self._path_mode_cache[path] = self._calculate_file_mode(path)
108 108
109 109 return self._path_mode_cache[path]
110 110
111 111 def _get_path_type(self, path: bytes):
112 112 if path in self._path_type_cache:
113 113 return self._path_type_cache[path]
114 114
115 115 if path == b"":
116 116 self._path_type_cache[b""] = NodeKind.DIR
117 117 return NodeKind.DIR
118 118
119 119 path_type = self._remote.get_node_type(self._svn_rev, path)
120 120
121 121 if not path_type:
122 122 raise self.no_node_at_path(path)
123 123
124 124 #flags = None
125 125 self._path_type_cache[path] = path_type
126 126 #self._path_mode_cache[path] = flags
127 127
128 128 return self._path_type_cache[path]
129 129
130 130 def is_link(self, path: bytes):
131 131 # Note: Subversion has a flag for special files; the content of the
132 132 # file contains the type of that file.
133 133 if self._get_file_property(path, "svn:special") == _SVN_PROP_TRUE:
134 134 return self.get_file_content(path).startswith(b"link")
135 135 return False
136 136
137 137 def is_node_binary(self, path):
138 138 path = self._fix_path(path)
139 139 return self._remote.is_binary(self._svn_rev, safe_str(path))
140 140
141 141 def node_md5_hash(self, path):
142 142 path = self._fix_path(path)
143 143 return self._remote.md5_hash(self._svn_rev, safe_str(path))
144 144
145 145 def _get_file_property(self, path, name):
146 146 file_properties = self._remote.node_properties(safe_str(path), self._svn_rev)
147 147 return file_properties.get(name)
148 148
149 149 def get_file_content(self, path):
150 150 path = self._fix_path(path)
151 151 return self._remote.get_file_content(self._svn_rev, safe_str(path))
152 152
153 153 def get_file_content_streamed(self, path):
154 154 path = self._fix_path(path)
155 155
156 156 stream_method = getattr(self._remote, "stream:get_file_content")
157 157 return stream_method(self._svn_rev, safe_str(path))
158 158
159 159 def get_file_size(self, path):
160 160 path = self._fix_path(path)
161 161 return self._remote.get_file_size(self._svn_rev, safe_str(path))
162 162
163 163 def get_path_history(self, path, limit=None, pre_load=None):
164 164 path = self._fix_path(path)
165 165 history = self._remote.node_history(self._svn_rev, safe_str(path), limit)
166 166 return [self.repository.get_commit(commit_id=str(svn_rev)) for svn_rev in history]
167 167
168 168 def get_file_annotate(self, path, pre_load=None):
169 169 result = self._remote.file_annotate(safe_str(path), self._svn_rev)
170 170
171 171 for zero_based_line_no, svn_rev, content in result:
172 172 commit_id = str(svn_rev)
173 173 line_no = zero_based_line_no + 1
174 174 yield line_no, commit_id, lambda commit_id=commit_id: self.repository.get_commit(commit_id=commit_id), content
175 175
176 176 def get_node(self, path: bytes, pre_load=None):
177 177 path = self._fix_path(path)
178 178
179 179 # use cached, if we have one
180 180 if path in self.nodes:
181 181 return self.nodes[path]
182 182
183 183 path_type = self._get_path_type(path)
184 184 if path == b"":
185 185 node = nodes.RootNode(commit=self)
186 186 else:
187 187 if path_type == NodeKind.DIR:
188 188 node = nodes.DirNode(safe_bytes(path), commit=self)
189 189 elif path_type == NodeKind.FILE:
190 190 node = nodes.FileNode(safe_bytes(path), commit=self, pre_load=pre_load)
191 191 self._path_mode_cache[path] = node.mode
192 192 else:
193 193 raise self.no_node_at_path(path)
194 194
195 195 self.nodes[path] = node
196 196 return self.nodes[path]
197 197
198 198 def get_nodes(self, path: bytes, pre_load=None):
199 199 if self._get_kind(path) != nodes.NodeKind.DIR:
200 200 raise CommitError(f"Directory does not exist for commit {self.raw_id} at '{path}'")
201 201 path = self._fix_path(path)
202 202
203 203 path_nodes = []
204 for name, node_kind in self._remote.get_nodes(self._svn_rev, path):
205 obj_path = vcspath.join(path, name)
204
205 for obj_path, node_kind, pre_load_data in self._remote.get_nodes(self._svn_rev, path, pre_load):
206 206
207 207 if node_kind is None:
208 208 raise CommitError(f"Requested object type={node_kind} cannot be determined")
209 209
210 210 # TODO: implement it ??
211 211 stat_ = None
212 212 # # cache file mode
213 213 # if obj_path not in self._path_mode_cache:
214 214 # self._path_mode_cache[obj_path] = stat_
215 215
216 216 # cache type
217 217 if obj_path not in self._path_type_cache:
218 218 self._path_type_cache[obj_path] = node_kind
219 219
220 220 entry = None
221 221 if obj_path in self.nodes:
222 222 entry = self.nodes[obj_path]
223 223 else:
224 224 if node_kind == NodeKind.DIR:
225 225 entry = nodes.DirNode(safe_bytes(obj_path), commit=self)
226 226 elif node_kind == NodeKind.FILE:
227 entry = nodes.FileNode(safe_bytes(obj_path), commit=self, mode=stat_, pre_load=pre_load)
227 entry = nodes.FileNode(safe_bytes(obj_path), commit=self, mode=stat_, pre_load=pre_load, pre_load_data=pre_load_data)
228 228 if entry:
229 229 self.nodes[obj_path] = entry
230 230 path_nodes.append(entry)
231 231
232 232 path_nodes.sort()
233 233 return path_nodes
234 234
235 235 def _get_kind(self, path):
236 236 path = self._fix_path(path)
237 237 path_type = self._get_path_type(path)
238 238 return path_type
239 239
240 240 @LazyProperty
241 241 def _changes_cache(self):
242 242 return self._remote.revision_changes(self._svn_rev)
243 243
244 244 @LazyProperty
245 245 def affected_files(self) -> list[bytes]:
246 246 changed_files = set()
247 247 for files in self._changes_cache.values():
248 248 changed_files.update(files)
249 249 return list(changed_files)
250 250
251 251 @LazyProperty
252 252 def id(self):
253 253 return self.raw_id
254 254
255 255 @LazyProperty
256 256 def added_paths(self):
257 257 return [n for n in self._changes_cache["added"]]
258 258
259 259 @LazyProperty
260 260 def changed_paths(self):
261 261 return [n for n in self._changes_cache["changed"]]
262 262
263 263 @LazyProperty
264 264 def removed_paths(self):
265 265 return [n for n in self._changes_cache["removed"]]
266 266
267 267
268 268 def _date_from_svn_properties(properties):
269 269 """
270 270 Parses the date out of given svn properties.
271 271
272 272 :return: :class:`datetime.datetime` instance. The object is naive.
273 273 """
274 274
275 275 aware_date = dateutil.parser.parse(properties.get("svn:date"))
276 276 # final_date = aware_date.astimezone(dateutil.tz.tzlocal())
277 277 final_date = aware_date
278 278 return final_date.replace(tzinfo=None)
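
A quick self-contained check of the date handling in `_date_from_svn_properties()` above; the property dict is a made-up example of what `revision_properties()` might return:

    import dateutil.parser

    props = {"svn:date": "2024-01-02T03:04:05.678Z"}  # invented sample value
    aware_date = dateutil.parser.parse(props["svn:date"])
    naive_date = aware_date.replace(tzinfo=None)  # naive, as documented
    print(naive_date.isoformat())  # 2024-01-02T03:04:05.678000
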
@@ -1,783 +1,788
1 1 # Copyright (C) 2014-2024 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 """
20 20 Module holding everything related to vcs nodes, with vcs2 architecture.
21 21 """
22 22
23 23 import functools
24 24 import os
25 25 import stat
26 26
27 27 from zope.cachedescriptors.property import Lazy as LazyProperty
28 28
29 29 from rhodecode.config.conf import LANGUAGES_EXTENSIONS_MAP
30 30 from rhodecode.lib.str_utils import safe_str, safe_bytes
31 31 from rhodecode.lib.hash_utils import md5
32 32 from rhodecode.lib.vcs import path as vcspath
33 33 from rhodecode.lib.vcs.backends.base import EmptyCommit
34 34 from rhodecode.lib.vcs.conf.mtypes import get_mimetypes_db
35 35 from rhodecode.lib.vcs.exceptions import NodeError
36 36 from rhodecode.lib.vcs_common import NodeKind, FILEMODE_DEFAULT
37 37
38 38 LARGEFILE_PREFIX = ".hglf"
39 39
40 40
41 41 class NodeState:
42 42 ADDED = "added"
43 43 CHANGED = "changed"
44 44 NOT_CHANGED = "not changed"
45 45 REMOVED = "removed"
46 46
47 47
48 48 # TODO: not sure if that should be bytes or str ?
49 49 # most probably bytes because content should be bytes and we check it
50 50 BIN_BYTE_MARKER = b"\0"
51 51
52 52
53 53
54 54 @functools.total_ordering
55 55 class Node(object):
56 56 """
57 57 Simplest class representing file or directory on repository. SCM backends
58 58 should use ``FileNode`` and ``DirNode`` subclasses rather than ``Node``
59 59 directly.
60 60
61 61 Node's ``path`` cannot start with slash as we operate on *relative* paths
62 62 only. Moreover, every single node is identified by the ``path`` attribute,
63 63 so it cannot end with a slash either. Otherwise, the path could lead to mistakes.
64 64 """
65 65
66 66 # RTLO marker allows swapping text, and certain
67 67 # security attacks could be used with this
68 68 RTLO_MARKER = "\u202e"
69 69
70 70 commit = None
71 71
72 72 def __init__(self, path: bytes, kind):
73 73 self._validate_path(path) # can throw exception if path is invalid
74 74
75 75 self.bytes_path: bytes = path.rstrip(b"/") # store for mixed encoding, and raw version
76 76 self.str_path: str = safe_str(self.bytes_path) # we store paths as str
77 77 self.path: str = self.str_path
78 78
79 79 if self.bytes_path == b"" and kind != NodeKind.DIR:
80 80 raise NodeError("Only DirNode and its subclasses may be initialized with empty path")
81 81 self.kind = kind
82 82
83 83 if self.is_root() and not self.is_dir():
84 84 raise NodeError("Root node cannot be FILE kind")
85 85
86 86 def __eq__(self, other):
87 87 if type(self) is not type(other):
88 88 return False
89 89 for attr in ["name", "path", "kind"]:
90 90 if getattr(self, attr) != getattr(other, attr):
91 91 return False
92 92 if self.is_file():
93 93 # FileNode compare, we need to fall back to content compare
94 94 return None
95 95 else:
96 96 # For DirNodes, check without entering each dir
97 97 self_nodes_paths = list(sorted(n.path for n in self.nodes))
98 98 other_nodes_paths = list(sorted(n.path for n in other.nodes))
99 99 if self_nodes_paths != other_nodes_paths:
100 100 return False
101 101 return True
102 102
103 103 def __lt__(self, other):
104 104 if self.kind < other.kind:
105 105 return True
106 106 if self.kind > other.kind:
107 107 return False
108 108 if self.path < other.path:
109 109 return True
110 110 if self.path > other.path:
111 111 return False
112 112
113 113 def __repr__(self):
114 114 maybe_path = getattr(self, "path", "UNKNOWN_PATH")
115 115 return f"<{self.__class__.__name__} {maybe_path!r}>"
116 116
117 117 def __str__(self):
118 118 return self.name
119 119
120 120 def _validate_path(self, path: bytes):
121 121 self._assert_bytes(path)
122 122
123 123 if path.startswith(b"/"):
124 124 raise NodeError(
125 125 f"Cannot initialize Node objects with slash at "
126 126 f"the beginning as only relative paths are supported. "
127 127 f"Got {path}"
128 128 )
129 129
130 130 @classmethod
131 131 def _assert_bytes(cls, value):
132 132 if not isinstance(value, bytes):
133 133 raise TypeError(f"Bytes required as input, got {type(value)} of {value}.")
134 134
135 135 @LazyProperty
136 136 def parent(self):
137 137 parent_path: bytes = self.get_parent_path()
138 138 if parent_path:
139 139 if self.commit:
140 140 return self.commit.get_node(parent_path)
141 141 return DirNode(parent_path)
142 142 return None
143 143
144 144 @LazyProperty
145 145 def has_rtlo(self):
146 146 """Detects if a path has right-to-left-override marker"""
147 147 return self.RTLO_MARKER in self.str_path
148 148
149 149 @LazyProperty
150 150 def dir_path(self):
151 151 """
152 152 Returns the name of the directory from the full path of this vcs node.
153 153 An empty string is returned if there's no directory in the path.
154 154 """
155 155 _parts = self.path.rstrip("/").rsplit("/", 1)
156 156 if len(_parts) == 2:
157 157 return _parts[0]
158 158 return ""
159 159
160 160 @LazyProperty
161 161 def name(self):
162 162 """
163 163 Returns the name of the node; if it is a path,
164 164 only the last part is returned.
165 165 """
166 166 return self.str_path.rstrip("/").split("/")[-1]
167 167
168 168 @property
169 169 def kind(self):
170 170 return self._kind
171 171
172 172 @kind.setter
173 173 def kind(self, kind):
174 174 if hasattr(self, "_kind"):
175 175 raise NodeError("Cannot change node's kind")
176 176 else:
177 177 self._kind = kind
178 178 # Post setter check (path's trailing slash)
179 179 if self.str_path.endswith("/"):
180 180 raise NodeError("Node's path cannot end with slash")
181 181
182 182 def get_parent_path(self) -> bytes:
183 183 """
184 184 Returns node's parent path or empty string if node is root.
185 185 """
186 186 if self.is_root():
187 187 return b""
188 188 str_path = vcspath.dirname(self.bytes_path.rstrip(b"/")) + b"/"
189 189
190 190 return safe_bytes(str_path)
191 191
192 192 def is_file(self):
193 193 """
194 194 Returns ``True`` if node's kind is ``NodeKind.FILE``, ``False``
195 195 otherwise.
196 196 """
197 197 return self.kind == NodeKind.FILE
198 198
199 199 def is_dir(self):
200 200 """
201 201 Returns ``True`` if node's kind is ``NodeKind.DIR``, ``False``
202 202 otherwise.
203 203 """
204 204 return self.kind == NodeKind.DIR
205 205
206 206 def is_root(self):
207 207 """
208 208 Returns ``True`` if node is a root node and ``False`` otherwise.
209 209 """
210 210 return self.kind == NodeKind.DIR and self.path == ""
211 211
212 212 def is_submodule(self):
213 213 """
214 214 Returns ``True`` if node's kind is ``NodeKind.SUBMODULE``, ``False``
215 215 otherwise.
216 216 """
217 217 return self.kind == NodeKind.SUBMODULE
218 218
219 219 def is_largefile(self):
220 220 """
221 221 Returns ``True`` if node's kind is ``NodeKind.LARGEFILE``, ``False``
222 222 otherwise
223 223 """
224 224 return self.kind == NodeKind.LARGE_FILE
225 225
226 226 def is_link(self):
227 227 if self.commit:
228 228 return self.commit.is_link(self.bytes_path)
229 229 return False
230 230
231 231
232 232 class FileNode(Node):
233 233 """
234 234 Class representing file nodes.
235 235
236 236 :attribute: path: path to the node, relative to repository's root
237 237 :attribute: content: if given arbitrary sets content of the file
238 238 :attribute: commit: if given, first time content is accessed, callback
239 239 :attribute: mode: stat mode for a node. Default is `FILEMODE_DEFAULT`.
240 240 """
241 241
242 242 _filter_pre_load = []
243 243
244 def __init__(self, path: bytes, content: bytes | None = None, commit=None, mode=None, pre_load=None):
244 def __init__(self, path: bytes, content: bytes | None = None, commit=None, mode=None, pre_load=None, pre_load_data=None):
245 245 """
246 246 Only one of ``content`` and ``commit`` may be given. Passing both
247 247 would raise a ``NodeError`` exception.
248 248
249 249 :param path: relative path to the node
250 250 :param content: content may be passed to constructor
251 251 :param commit: if given, will use it to lazily fetch content
252 252 :param mode: ST_MODE (i.e. 0100644)
253 253 """
254 254 if content and commit:
255 255 raise NodeError("Cannot use both content and commit")
256 256
257 257 super().__init__(path, kind=NodeKind.FILE)
258 258
259 259 self.commit = commit
260 260 if content and not isinstance(content, bytes):
261 261 # File content inherently must be bytes;
262 262 # we also support passing str, and convert the content here
263 263 content = safe_bytes(content)
264 264 self._content = content
265 265 self._mode = mode or FILEMODE_DEFAULT
266
266 if pre_load_data:
267 self._store_pre_load(pre_load_data)
268 else:
267 269 self._set_bulk_properties(pre_load)
268 270
269 271 def __eq__(self, other):
270 272 eq = super().__eq__(other)
271 273 if eq is not None:
272 274 return eq
273 275 return self.content == other.content
274 276
275 277 def __hash__(self):
276 278 raw_id = getattr(self.commit, "raw_id", "")
277 279 return hash((self.path, raw_id))
278 280
279 281 def __lt__(self, other):
280 282 lt = super().__lt__(other)
281 283 if lt is not None:
282 284 return lt
283 285 return self.content < other.content
284 286
285 287 def __repr__(self):
286 288 short_id = getattr(self.commit, "short_id", "")
287 289 return f"<{self.__class__.__name__} path={self.str_path!r}, short_id={short_id}>"
288 290
289 291 def _set_bulk_properties(self, pre_load):
290 292 if not pre_load:
291 293 return
292 294 pre_load = [entry for entry in pre_load if entry not in self._filter_pre_load]
293 295 if not pre_load:
294 296 return
295 297
296 298 remote = self.commit.get_remote()
297 299 result = remote.bulk_file_request(self.commit.raw_id, self.bytes_path, pre_load)
298 300
299 for attr, value in result.items():
301 self._store_pre_load(result.items())
302
303 def _store_pre_load(self, pre_load_data):
304 for attr, value in pre_load_data:
300 305 if attr == "flags":
301 306 self.__dict__["mode"] = safe_str(value)
302 307 elif attr == "size":
303 308 self.__dict__["size"] = value
304 309 elif attr == "data":
305 310 self.__dict__["_content"] = value
306 311 elif attr == "is_binary":
307 312 self.__dict__["is_binary"] = value
308 313 elif attr == "md5":
309 314 self.__dict__["md5"] = value
310 315 else:
311 316 raise ValueError(f"Unsupported attr in bulk_property: {attr}")
312 317
313 318 @LazyProperty
314 319 def mode(self):
315 320 """
316 321 Returns lazily the mode of the FileNode. If `commit` is not set, it
317 322 uses the value given at initialization or `FILEMODE_DEFAULT` (default).
318 323 """
319 324 if self.commit:
320 325 mode = self.commit.get_file_mode(self.bytes_path)
321 326 else:
322 327 mode = self._mode
323 328 return mode
324 329
325 330 @LazyProperty
326 331 def raw_bytes(self) -> bytes:
327 332 """
328 333 Returns lazily the raw bytes of the FileNode.
329 334 """
330 335 if self.commit:
331 336 if self._content is None:
332 337 self._content = self.commit.get_file_content(self.bytes_path)
333 338 content = self._content
334 339 else:
335 340 content = self._content
336 341 return content
337 342
338 343 def content_uncached(self):
339 344 """
340 345 Returns content of the FileNode without using the cache.
341 346 """
342 347 if self.commit:
343 348 content = self.commit.get_file_content(self.bytes_path)
344 349 else:
345 350 content = self._content
346 351 return content
347 352
348 353 def stream_bytes(self):
349 354 """
350 355 Returns an iterator that will stream the content of the file directly from
351 356 vcsserver without loading it to memory.
352 357 """
353 358 if self.commit:
354 359 return self.commit.get_file_content_streamed(self.bytes_path)
355 360 raise NodeError("Cannot retrieve stream_bytes without related commit attribute")
356 361
357 362 def metadata_uncached(self):
358 363 """
359 364 Returns binary flag, md5, size and content of the file node, without any cache usage.
360 365 """
361 366
362 367 content = self.content_uncached()
363 368
364 369 is_binary = bool(content and BIN_BYTE_MARKER in content)
365 370 size = 0
366 371 if content:
367 372 size = len(content)
368 373
369 374 return is_binary, md5(content), size, content
370 375
371 376 @LazyProperty
372 377 def content(self) -> bytes:
373 378 """
374 379 Returns lazily content of the FileNode.
375 380 """
376 381 content = self.raw_bytes
377 382 if content and not isinstance(content, bytes):
378 383 raise ValueError(f"Content is of type {type(content)} instead of bytes")
379 384 return content
380 385
381 386 @LazyProperty
382 387 def str_content(self) -> str:
383 388 return safe_str(self.raw_bytes)
384 389
385 390 @LazyProperty
386 391 def size(self):
387 392 if self.commit:
388 393 return self.commit.get_file_size(self.bytes_path)
389 394 raise NodeError("Cannot retrieve size of the file without related commit attribute")
390 395
391 396 @LazyProperty
392 397 def message(self):
393 398 if self.commit:
394 399 return self.last_commit.message
395 400 raise NodeError("Cannot retrieve message of the file without related " "commit attribute")
396 401
397 402 @LazyProperty
398 403 def last_commit(self):
399 404 if self.commit:
400 405 pre_load = ["author", "date", "message", "parents"]
401 406 return self.commit.get_path_commit(self.bytes_path, pre_load=pre_load)
402 407 raise NodeError("Cannot retrieve last commit of the file without related commit attribute")
403 408
404 409 def get_mimetype(self):
405 410 """
406 411 Mimetype is calculated based on the file's content. If ``_mimetype``
407 412 attribute is available, it will be returned (backends which store
408 413 mimetypes or can easily recognize them, should set this private
409 414 attribute to indicate that type should *NOT* be calculated).
410 415 """
411 416
412 417 if hasattr(self, "_mimetype"):
413 418 if isinstance(self._mimetype, (tuple, list)) and len(self._mimetype) == 2:
414 419 return self._mimetype
415 420 else:
416 421 raise NodeError("given _mimetype attribute must be an 2 element list or tuple")
417 422
418 423 db = get_mimetypes_db()
419 424 mtype, encoding = db.guess_type(self.name)
420 425
421 426 if mtype is None:
422 427 if not self.is_largefile() and self.is_binary:
423 428 mtype = "application/octet-stream"
424 429 encoding = None
425 430 else:
426 431 mtype = "text/plain"
427 432 encoding = None
428 433
429 434 # try with pygments
430 435 try:
431 436 from pygments.lexers import get_lexer_for_filename
432 437
433 438 mt = get_lexer_for_filename(self.name).mimetypes
434 439 except Exception:
435 440 mt = None
436 441
437 442 if mt:
438 443 mtype = mt[0]
439 444
440 445 return mtype, encoding
441 446
442 447 @LazyProperty
443 448 def mimetype(self):
444 449 """
445 450 Wrapper around full mimetype info. It returns only type of fetched
446 451 mimetype without the encoding part. use get_mimetype function to fetch
447 452 full set of (type,encoding)
448 453 """
449 454 return self.get_mimetype()[0]
450 455
451 456 @LazyProperty
452 457 def mimetype_main(self):
453 458 return self.mimetype.split("/")[0]
454 459
455 460 @classmethod
456 461 def get_lexer(cls, filename, content=None):
457 462 from pygments import lexers
458 463
459 464 extension = filename.split(".")[-1]
460 465 lexer = None
461 466
462 467 try:
463 468 lexer = lexers.guess_lexer_for_filename(filename, content, stripnl=False)
464 469 except lexers.ClassNotFound:
465 470 pass
466 471
467 472 # try our EXTENSION_MAP
468 473 if not lexer:
469 474 try:
470 475 lexer_class = LANGUAGES_EXTENSIONS_MAP.get(extension)
471 476 if lexer_class:
472 477 lexer = lexers.get_lexer_by_name(lexer_class[0])
473 478 except lexers.ClassNotFound:
474 479 pass
475 480
476 481 if not lexer:
477 482 lexer = lexers.TextLexer(stripnl=False)
478 483
479 484 return lexer
480 485
481 486 @LazyProperty
482 487 def lexer(self):
483 488 """
484 489 Returns pygment's lexer class. Would try to guess lexer taking file's
485 490 content, name and mimetype.
486 491 """
487 492 # TODO: this is more proper, but super heavy on investigating the type based on the content
488 493 # self.get_lexer(self.name, self.content)
489 494
490 495 return self.get_lexer(self.name)
491 496
492 497 @LazyProperty
493 498 def lexer_alias(self):
494 499 """
495 500 Returns first alias of the lexer guessed for this file.
496 501 """
497 502 return self.lexer.aliases[0]
498 503
499 504 @LazyProperty
500 505 def history(self):
501 506 """
502 507 Returns a list of commits for this file in which the file was changed
503 508 """
504 509 if self.commit is None:
505 510 raise NodeError("Unable to get commit for this FileNode")
506 511 return self.commit.get_path_history(self.bytes_path)
507 512
508 513 @LazyProperty
509 514 def annotate(self):
510 515 """
511 516 Returns a generator of four-element tuples with lineno, commit_id, commit lazy loader and line
512 517 """
513 518 if self.commit is None:
514 519 raise NodeError("Unable to get commit for this FileNode")
515 520 pre_load = ["author", "date", "message", "parents"]
516 521 return self.commit.get_file_annotate(self.bytes_path, pre_load=pre_load)
517 522
518 523 @LazyProperty
519 524 def is_binary(self):
520 525 """
521 526 Returns True if file has binary content.
522 527 """
523 528 if self.commit:
524 529 return self.commit.is_node_binary(self.bytes_path)
525 530 else:
526 531 raw_bytes = self._content
527 532 return bool(raw_bytes and BIN_BYTE_MARKER in raw_bytes)
528 533
529 534 @LazyProperty
530 535 def md5(self):
531 536 """
532 537 Returns md5 of the file node.
533 538 """
534 539
535 540 if self.commit:
536 541 return self.commit.node_md5_hash(self.bytes_path)
537 542 else:
538 543 raw_bytes = self._content
539 544 # TODO: this sucks, we're computing md5 on potentially super big stream data...
540 545 return md5(raw_bytes)
541 546
542 547 @LazyProperty
543 548 def extension(self):
544 549 """Returns filenode extension"""
545 550 return self.name.split(".")[-1]
546 551
547 552 @property
548 553 def is_executable(self):
549 554 """
550 555 Returns ``True`` if file has executable flag turned on.
551 556 """
552 557 return bool(self.mode & stat.S_IXUSR)
553 558
554 559 def get_largefile_node(self):
555 560 """
556 561 Try to return a largefile FileNode from this node. It does internal
557 562 checks inside the largefile store; if the file exists there, it will
558 563 create a special instance of LargeFileNode which can get content from
559 564 the LF store.
560 565 """
561 566 if self.commit:
562 567 return self.commit.get_largefile_node(self.bytes_path)
563 568
564 569 def count_lines(self, content: str | bytes, count_empty=False):
565 570 if isinstance(content, str):
566 571 newline_marker = "\n"
567 572 elif isinstance(content, bytes):
568 573 newline_marker = b"\n"
569 574 else:
570 575 raise ValueError(f"content must be bytes or str, got {type(content)} instead")
571 576
572 577 if count_empty:
573 578 all_lines = 0
574 579 empty_lines = 0
575 580 for line in content.splitlines(True):
576 581 if line == newline_marker:
577 582 empty_lines += 1
578 583 all_lines += 1
579 584
580 585 return all_lines, all_lines - empty_lines
581 586 else:
582 587 # fast method
583 588 empty_lines = all_lines = content.count(newline_marker)
584 589 if all_lines == 0 and content:
585 590 # one-line without a newline
586 591 empty_lines = all_lines = 1
587 592
588 593 return all_lines, empty_lines
589 594
590 595 def lines(self, count_empty=False):
591 596 all_lines, empty_lines = 0, 0
592 597
593 598 if not self.is_binary:
594 599 content = self.content
595 600 all_lines, empty_lines = self.count_lines(content, count_empty=count_empty)
596 601 return all_lines, empty_lines
597 602
598 603
599 604 class DirNode(Node):
600 605 """
601 606 DirNode stores a list of files and directories within this node.
602 607 Nodes may be used standalone, but within a repository context they
603 608 lazily fetch data from the same repository's commit.
604 609 """
605 610
606 611 def __init__(self, path, nodes=(), commit=None, default_pre_load=None):
607 612 """
608 613 Only one of ``nodes`` and ``commit`` may be given. Passing both
609 614 would raise a ``NodeError`` exception.
610 615
611 616 :param path: relative path to the node
612 617 :param nodes: content may be passed to constructor
613 618 :param commit: if given, will use it to lazily fetch content
614 619 """
615 620 if nodes and commit:
616 621 raise NodeError("Cannot use both nodes and commit")
617 622 super().__init__(path, NodeKind.DIR)
618 623 self.commit = commit
619 624 self._nodes = nodes
620 625 self.default_pre_load = default_pre_load or ["is_binary", "size"]
621 626
622 627 def __iter__(self):
623 628 yield from self.nodes
624 629
625 630 def __eq__(self, other):
626 631 eq = super().__eq__(other)
627 632 if eq is not None:
628 633 return eq
629 634 # check without entering each dir
630 635 self_nodes_paths = list(sorted(n.path for n in self.nodes))
631 636 other_nodes_paths = list(sorted(n.path for n in other.nodes))
632 637 return self_nodes_paths == other_nodes_paths
633 638
634 639 def __lt__(self, other):
635 640 lt = super().__lt__(other)
636 641 if lt is not None:
637 642 return lt
638 643 # check without entering each dir
639 644 self_nodes_paths = list(sorted(n.path for n in self.nodes))
640 645 other_nodes_paths = list(sorted(n.path for n in other.nodes))
641 646 return self_nodes_paths < other_nodes_paths
642 647
643 648 @LazyProperty
644 649 def content(self):
645 650 raise NodeError(f"{self} represents a dir and has no `content` attribute")
646 651
647 652 @LazyProperty
648 653 def nodes(self):
649 654 if self.commit:
650 655 nodes = self.commit.get_nodes(self.bytes_path, pre_load=self.default_pre_load)
651 656 else:
652 657 nodes = self._nodes
653 658 return sorted(nodes)
654 659
655 660 @LazyProperty
656 661 def files(self):
657 662 return sorted(node for node in self.nodes if node.is_file())
658 663
659 664 @LazyProperty
660 665 def dirs(self):
661 666 return sorted(node for node in self.nodes if node.is_dir())
662 667
663 668 @LazyProperty
664 669 def state(self):
665 670 raise NodeError("Cannot access state of DirNode")
666 671
667 672 @LazyProperty
668 673 def size(self):
669 674 size = 0
670 675 for root, dirs, files in self.commit.walk(self.bytes_path):
671 676 for f in files:
672 677 size += f.size
673 678
674 679 return size
675 680
676 681 @LazyProperty
677 682 def last_commit(self):
678 683 if self.commit:
679 684 pre_load = ["author", "date", "message", "parents"]
680 685 return self.commit.get_path_commit(self.bytes_path, pre_load=pre_load)
681 686 raise NodeError("Cannot retrieve last commit of the file without related commit attribute")
682 687
683 688 def __repr__(self):
684 689 short_id = getattr(self.commit, "short_id", "")
685 690 return f"<{self.__class__.__name__} path={self.str_path!r}, short_id={short_id}>"
686 691
687 692
688 693 class RootNode(DirNode):
689 694 """
690 695 DirNode being the root node of the repository.
691 696 """
692 697
693 698 def __init__(self, nodes=(), commit=None):
694 699 super().__init__(path=b"", nodes=nodes, commit=commit)
695 700
696 701 def __repr__(self):
697 702 short_id = getattr(self.commit, "short_id", "")
698 703 return f"<{self.__class__.__name__} path={self.str_path!r}, short_id={short_id}>"
699 704
700 705
701 706 class SubModuleNode(Node):
702 707 """
703 708 Represents a SubModule of Git or a SubRepo of Mercurial.
704 709 """
705 710
706 711 is_binary = False
707 712 size = 0
708 713
709 714 def __init__(self, name, url=None, commit=None, alias=None):
710 715 self.path: bytes = name
711 716 self.str_path: str = safe_str(self.path) # we store paths as str
712 717 self.kind = NodeKind.SUBMODULE
713 718 self.alias = alias
714 719
715 720 # we have to use EmptyCommit here since this can point to svn/git/hg
716 721 # submodules we cannot get from repository
717 722 self.commit = EmptyCommit(safe_str(commit), alias=alias)
718 723 self.url = safe_str(url) or self._extract_submodule_url()
719 724
720 725 def __repr__(self):
721 726 short_id = getattr(self.commit, "short_id", "")
722 727 return f"<{self.__class__.__name__} {self.str_path!r} @ {short_id}>"
723 728
724 729 def _extract_submodule_url(self):
725 730 # TODO: find a way to parse gits submodule file and extract the linking URL
726 731 return safe_str(self.path)
727 732
728 733 @LazyProperty
729 734 def name(self):
730 735 """
731 736 Returns the name of the node; if it is a path,
732 737 only the last part is returned.
733 738 """
734 739 org = self.str_path.rstrip("/").split("/")[-1]
735 740 return f"{org} @ {self.commit.short_id}"
736 741
737 742
738 743 class LargeFileNode(FileNode):
739 744 def __init__(self, path, url=None, commit=None, alias=None, org_path=None):
740 745 self._validate_path(path) # can throw exception if path is invalid
741 746 self.org_path = org_path # as stored in VCS as LF pointer
742 747
743 748 self.bytes_path = path.rstrip(b"/") # store for __repr__
744 749 self.str_path = safe_str(self.bytes_path)
745 750 self.path = self.str_path
746 751
747 752 self.kind = NodeKind.LARGE_FILE
748 753 self.alias = alias
749 754 self._content = b""
750 755
751 756 def _validate_path(self, path: bytes):
752 757 """
753 758 we override the check since the LargeFileNode path is system-absolute, and we only check that it is bytes
754 759 """
755 760 self._assert_bytes(path)
756 761
757 762 def __repr__(self):
758 763 return f"<{self.__class__.__name__} {self.org_path} -> {self.str_path!r}>"
759 764
760 765 @LazyProperty
761 766 def size(self):
762 767 return os.stat(self.path).st_size
763 768
764 769 @LazyProperty
765 770 def raw_bytes(self):
766 771 with open(self.path, "rb") as f:
767 772 content = f.read()
768 773 return content
769 774
770 775 @LazyProperty
771 776 def name(self):
772 777 """
773 778 Overrides name to be the original largefile path
774 779 """
775 780 return self.org_path
776 781
777 782 def stream_bytes(self):
778 783 with open(self.path, "rb") as stream:
779 784 while True:
780 785 data = stream.read(16 * 1024)
781 786 if not data:
782 787 break
783 788 yield data
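
To illustrate the two code paths in `FileNode.count_lines()` above, a standalone sketch operating on inline byte content (no commit attached); the sample content is arbitrary:

    content = b"first\n\nthird\n"  # 3 lines, one of them empty
    newline_marker = b"\n"

    # exact path (count_empty=True): walk the lines and count empties
    all_lines = empty_lines = 0
    for line in content.splitlines(True):
        if line == newline_marker:
            empty_lines += 1
        all_lines += 1
    print(all_lines, all_lines - empty_lines)  # 3 total, 2 non-empty

    # fast path (count_empty=False): just count newline markers
    print(content.count(newline_marker))  # 3
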
@@ -1,1218 +1,1220
1 1 # Copyright (C) 2010-2024 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 import os
20 20 import re
21 21 import shutil
22 22 import time
23 23 import logging
24 24 import traceback
25 25 import datetime
26 26
27 27 from pyramid.threadlocal import get_current_request
28 28 from sqlalchemy.orm import aliased
29 29 from zope.cachedescriptors.property import Lazy as LazyProperty
30 30
31 31 from rhodecode import events
32 32 from rhodecode.lib.auth import HasUserGroupPermissionAny
33 33 from rhodecode.lib.caching_query import FromCache
34 34 from rhodecode.lib.exceptions import AttachedForksError, AttachedPullRequestsError, AttachedArtifactsError
35 35 from rhodecode.lib import hooks_base
36 36 from rhodecode.lib.str_utils import safe_bytes
37 37 from rhodecode.lib.user_log_filter import user_log_filter
38 38 from rhodecode.lib.utils import make_db_config
39 39 from rhodecode.lib.utils2 import (
40 40 safe_str, remove_prefix, obfuscate_url_pw,
41 41 get_current_rhodecode_user, safe_int, action_logger_generic)
42 42 from rhodecode.lib.vcs.backends import get_backend
43 43 from rhodecode.lib.vcs.nodes import NodeKind
44 44 from rhodecode.model import BaseModel
45 45 from rhodecode.model.db import (
46 46 _hash_key, func, case, joinedload, or_, in_filter_generator,
47 47 Session, Repository, UserRepoToPerm, UserGroupRepoToPerm,
48 48 UserRepoGroupToPerm, UserGroupRepoGroupToPerm, User, Permission,
49 49 Statistics, UserGroup, RepoGroup, RepositoryField, UserLog)
50 50 from rhodecode.model.permission import PermissionModel
51 51 from rhodecode.model.settings import VcsSettingsModel
52 52
53 53 log = logging.getLogger(__name__)
54 54
55 55
56 56 class RepoModel(BaseModel):
57 57
58 58 cls = Repository
59 59
60 60 def _get_user_group(self, users_group):
61 61 return self._get_instance(UserGroup, users_group,
62 62 callback=UserGroup.get_by_group_name)
63 63
64 64 def _get_repo_group(self, repo_group):
65 65 return self._get_instance(RepoGroup, repo_group,
66 66 callback=RepoGroup.get_by_group_name)
67 67
68 68 def _create_default_perms(self, repository, private):
69 69 # create default permission
70 70 default = 'repository.read'
71 71 def_user = User.get_default_user()
72 72 for p in def_user.user_perms:
73 73 if p.permission.permission_name.startswith('repository.'):
74 74 default = p.permission.permission_name
75 75 break
76 76
77 77 default_perm = 'repository.none' if private else default
78 78
79 79 repo_to_perm = UserRepoToPerm()
80 80 repo_to_perm.permission = Permission.get_by_key(default_perm)
81 81
82 82 repo_to_perm.repository = repository
83 83 repo_to_perm.user = def_user
84 84
85 85 return repo_to_perm
86 86
87 87 def get(self, repo_id):
88 88 repo = self.sa.query(Repository) \
89 89 .filter(Repository.repo_id == repo_id)
90 90
91 91 return repo.scalar()
92 92
93 93 def get_repo(self, repository):
94 94 return self._get_repo(repository)
95 95
96 96 def get_by_repo_name(self, repo_name, cache=False):
97 97 repo = self.sa.query(Repository) \
98 98 .filter(Repository.repo_name == repo_name)
99 99
100 100 if cache:
101 101 name_key = _hash_key(repo_name)
102 102 repo = repo.options(
103 103 FromCache("sql_cache_short", f"get_repo_{name_key}"))
104 104 return repo.scalar()
105 105
106 106 def _extract_id_from_repo_name(self, repo_name):
107 107 if repo_name.startswith('/'):
108 108 repo_name = repo_name.lstrip('/')
109 109 by_id_match = re.match(r'^_(\d+)', repo_name)
110 110 if by_id_match:
111 111 return by_id_match.groups()[0]
112 112
113 113 def get_repo_by_id(self, repo_name):
114 114 """
114 114 Resolves a repository by its id from special urls.
116 116 Example url is _11/repo_name
117 117
118 118 :param repo_name:
119 119 :return: repo object if matched else None
120 120 """
121 121 _repo_id = None
122 122 try:
123 123 _repo_id = self._extract_id_from_repo_name(repo_name)
124 124 if _repo_id:
125 125 return self.get(_repo_id)
126 126 except Exception:
127 127 log.exception('Failed to extract repo_name from URL')
128 128 if _repo_id:
129 129 Session().rollback()
130 130
131 131 return None
132 132
133 133 def get_repos_for_root(self, root, traverse=False):
134 134 if traverse:
135 135 like_expression = u'{}%'.format(safe_str(root))
136 136 repos = Repository.query().filter(
137 137 Repository.repo_name.like(like_expression)).all()
138 138 else:
139 139 if root and not isinstance(root, RepoGroup):
140 140 raise ValueError(
141 141 'Root must be an instance '
142 142 'of RepoGroup, got:{} instead'.format(type(root)))
143 143 repos = Repository.query().filter(Repository.group == root).all()
144 144 return repos
145 145
146 146 def get_url(self, repo, request=None, permalink=False):
147 147 if not request:
148 148 request = get_current_request()
149 149
150 150 if not request:
151 151 return
152 152
153 153 if permalink:
154 154 return request.route_url(
155 155 'repo_summary', repo_name='_{}'.format(safe_str(repo.repo_id)))
156 156 else:
157 157 return request.route_url(
158 158 'repo_summary', repo_name=safe_str(repo.repo_name))
159 159
160 160 def get_commit_url(self, repo, commit_id, request=None, permalink=False):
161 161 if not request:
162 162 request = get_current_request()
163 163
164 164 if not request:
165 165 return
166 166
167 167 if permalink:
168 168 return request.route_url(
169 169 'repo_commit', repo_name=safe_str(repo.repo_id),
170 170 commit_id=commit_id)
171 171
172 172 else:
173 173 return request.route_url(
174 174 'repo_commit', repo_name=safe_str(repo.repo_name),
175 175 commit_id=commit_id)
176 176
177 177 def get_repo_log(self, repo, filter_term):
178 178 repo_log = UserLog.query()\
179 179 .filter(or_(UserLog.repository_id == repo.repo_id,
180 180 UserLog.repository_name == repo.repo_name))\
181 181 .options(joinedload(UserLog.user))\
182 182 .options(joinedload(UserLog.repository))\
183 183 .order_by(UserLog.action_date.desc())
184 184
185 185 repo_log = user_log_filter(repo_log, filter_term)
186 186 return repo_log
187 187
188 188 @classmethod
189 189 def update_commit_cache(cls, repositories=None):
190 190 if not repositories:
191 191 repositories = Repository.getAll()
192 192 for repo in repositories:
193 193 repo.update_commit_cache()
194 194
195 195 def get_repos_as_dict(self, repo_list=None, admin=False,
196 196 super_user_actions=False, short_name=None):
197 197
198 198 _render = get_current_request().get_partial_renderer(
199 199 'rhodecode:templates/data_table/_dt_elements.mako')
200 200 c = _render.get_call_context()
201 201 h = _render.get_helpers()
202 202
203 203 def quick_menu(repo_name):
204 204 return _render('quick_menu', repo_name)
205 205
206 206 def repo_lnk(name, rtype, rstate, private, archived, fork_repo_name):
207 207 if short_name is not None:
208 208 short_name_var = short_name
209 209 else:
210 210 short_name_var = not admin
211 211 return _render('repo_name', name, rtype, rstate, private, archived, fork_repo_name,
212 212 short_name=short_name_var, admin=False)
213 213
214 214 def last_change(last_change):
215 215 if admin and isinstance(last_change, datetime.datetime) and not last_change.tzinfo:
216 216 ts = time.time()
217 217 utc_offset = (datetime.datetime.fromtimestamp(ts)
218 218 - datetime.datetime.utcfromtimestamp(ts)).total_seconds()
219 219 last_change = last_change + datetime.timedelta(seconds=utc_offset)
220 220
221 221 return _render("last_change", last_change)
222 222
223 223 def rss_lnk(repo_name):
224 224 return _render("rss", repo_name)
225 225
226 226 def atom_lnk(repo_name):
227 227 return _render("atom", repo_name)
228 228
229 229 def last_rev(repo_name, cs_cache):
230 230 return _render('revision', repo_name, cs_cache.get('revision'),
231 231 cs_cache.get('raw_id'), cs_cache.get('author'),
232 232 cs_cache.get('message'), cs_cache.get('date'))
233 233
234 234 def desc(desc):
235 235 return _render('repo_desc', desc, c.visual.stylify_metatags)
236 236
237 237 def state(repo_state):
238 238 return _render("repo_state", repo_state)
239 239
240 240 def repo_actions(repo_name):
241 241 return _render('repo_actions', repo_name, super_user_actions)
242 242
243 243 def user_profile(username):
244 244 return _render('user_profile', username)
245 245
246 246 repos_data = []
247 247 for repo in repo_list:
248 248 # NOTE(marcink): because we use only the raw column, we need to load it like this
249 249 changeset_cache = Repository._load_changeset_cache(
250 250 repo.repo_id, repo._changeset_cache)
251 251
252 252 row = {
253 253 "menu": quick_menu(repo.repo_name),
254 254
255 255 "name": repo_lnk(repo.repo_name, repo.repo_type, repo.repo_state,
256 256 repo.private, repo.archived, repo.fork_repo_name),
257 257
258 258 "desc": desc(h.escape(repo.description)),
259 259
260 260 "last_change": last_change(repo.updated_on),
261 261
262 262 "last_changeset": last_rev(repo.repo_name, changeset_cache),
263 263 "last_changeset_raw": changeset_cache.get('revision'),
264 264
265 265 "owner": user_profile(repo.owner_username),
266 266
267 267 "state": state(repo.repo_state),
268 268 "rss": rss_lnk(repo.repo_name),
269 269 "atom": atom_lnk(repo.repo_name),
270 270 }
271 271 if admin:
272 272 row.update({
273 273 "action": repo_actions(repo.repo_name),
274 274 })
275 275 repos_data.append(row)
276 276
277 277 return repos_data
278 278
279 279 def get_repos_data_table(
280 280 self, draw, start, limit,
281 281 search_q, order_by, order_dir,
282 282 auth_user, repo_group_id):
283 283 from rhodecode.model.scm import RepoList
284 284
285 285 _perms = ['repository.read', 'repository.write', 'repository.admin']
286 286
287 287 repos = Repository.query() \
288 288 .filter(Repository.group_id == repo_group_id) \
289 289 .all()
290 290 auth_repo_list = RepoList(
291 291 repos, perm_set=_perms,
292 292 extra_kwargs=dict(user=auth_user))
293 293
294 294 allowed_ids = [-1]
295 295 for repo in auth_repo_list:
296 296 allowed_ids.append(repo.repo_id)
297 297
298 298 repos_data_total_count = Repository.query() \
299 299 .filter(Repository.group_id == repo_group_id) \
300 300 .filter(or_(
301 301 # generate multiple IN clauses to work around parameter limit problems
302 302 *in_filter_generator(Repository.repo_id, allowed_ids))
303 303 ) \
304 304 .count()
305 305
306 306 RepoFork = aliased(Repository)
307 307 OwnerUser = aliased(User)
308 308 base_q = Session.query(
309 309 Repository.repo_id,
310 310 Repository.repo_name,
311 311 Repository.description,
312 312 Repository.repo_type,
313 313 Repository.repo_state,
314 314 Repository.private,
315 315 Repository.archived,
316 316 Repository.updated_on,
317 317 Repository._changeset_cache,
318 318 RepoFork.repo_name.label('fork_repo_name'),
319 319 OwnerUser.username.label('owner_username'),
320 320 ) \
321 321 .filter(Repository.group_id == repo_group_id) \
322 322 .filter(or_(
323 323 # generate multiple IN clauses to work around parameter limit problems
324 324 *in_filter_generator(Repository.repo_id, allowed_ids))
325 325 ) \
326 326 .outerjoin(RepoFork, Repository.fork_id == RepoFork.repo_id) \
327 327 .join(OwnerUser, Repository.user_id == OwnerUser.user_id)
328 328
329 329 repos_data_total_filtered_count = base_q.count()
330 330
331 331 sort_defined = False
332 332 if order_by == 'repo_name':
333 333 sort_col = func.lower(Repository.repo_name)
334 334 sort_defined = True
335 335 elif order_by == 'user_username':
336 336 sort_col = User.username
337 337 else:
338 338 sort_col = getattr(Repository, order_by, None)
339 339
340 340 if sort_defined or sort_col:
341 341 if order_dir == 'asc':
342 342 sort_col = sort_col.asc()
343 343 else:
344 344 sort_col = sort_col.desc()
345 345
346 346 base_q = base_q.order_by(sort_col)
347 347 base_q = base_q.offset(start).limit(limit)
348 348
349 349 repos_list = base_q.all()
350 350
351 351 repos_data = RepoModel().get_repos_as_dict(
352 352 repo_list=repos_list, admin=False)
353 353
354 354 data = ({
355 355 'draw': draw,
356 356 'data': repos_data,
357 357 'recordsTotal': repos_data_total_count,
358 358 'recordsFiltered': repos_data_total_filtered_count,
359 359 })
360 360 return data
361 361
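The dict returned above follows the DataTables server-side protocol; a sketch of the payload shape, with illustrative values:

    {
        'draw': 3,                # echo of the client's draw counter
        'data': [...],            # rows rendered by get_repos_as_dict()
        'recordsTotal': 120,      # all repos readable by the user in this group
        'recordsFiltered': 42,    # rows left after the permission-filtered query
    }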
362 362 def _get_defaults(self, repo_name):
363 363 """
364 364 Gets information about a repository and returns a dict for
365 365 usage in forms
366 366
367 367 :param repo_name:
368 368 """
369 369
370 370 repo_info = Repository.get_by_repo_name(repo_name)
371 371
372 372 if repo_info is None:
373 373 return None
374 374
375 375 defaults = repo_info.get_dict()
376 376 defaults['repo_name'] = repo_info.just_name
377 377
378 378 groups = repo_info.groups_with_parents
379 379 parent_group = groups[-1] if groups else None
380 380
381 381 # we use -1 because this is how we mark an empty group in the HTML form
382 382 defaults['repo_group'] = getattr(parent_group, 'group_id', -1)
383 383
384 384 keys_to_process = (
385 385 {'k': 'repo_type', 'strip': False},
386 386 {'k': 'repo_enable_downloads', 'strip': True},
387 387 {'k': 'repo_description', 'strip': True},
388 388 {'k': 'repo_enable_locking', 'strip': True},
389 389 {'k': 'repo_landing_rev', 'strip': True},
390 390 {'k': 'clone_uri', 'strip': False},
391 391 {'k': 'push_uri', 'strip': False},
392 392 {'k': 'repo_private', 'strip': True},
393 393 {'k': 'repo_enable_statistics', 'strip': True}
394 394 )
395 395
396 396 for item in keys_to_process:
397 397 attr = item['k']
398 398 if item['strip']:
399 399 attr = remove_prefix(item['k'], 'repo_')
400 400
401 401 val = defaults[attr]
402 402 if item['k'] == 'repo_landing_rev':
403 403 val = ':'.join(defaults[attr])
404 404 defaults[item['k']] = val
405 405 if item['k'] == 'clone_uri':
406 406 defaults['clone_uri_hidden'] = repo_info.clone_uri_hidden
407 407 if item['k'] == 'push_uri':
408 408 defaults['push_uri_hidden'] = repo_info.push_uri_hidden
409 409
410 410 # fill owner
411 411 if repo_info.user:
412 412 defaults.update({'user': repo_info.user.username})
413 413 else:
414 414 replacement_user = User.get_first_super_admin().username
415 415 defaults.update({'user': replacement_user})
416 416
417 417 return defaults
418 418
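The strip flag above decides whether a form key is read from the model attribute without its repo_ prefix; a minimal sketch of the assumed remove_prefix behaviour (the real helper lives in rhodecode.lib.utils2):

    def remove_prefix(s, prefix):  # assumed semantics, for illustration only
        return s[len(prefix):] if s.startswith(prefix) else s

    remove_prefix('repo_description', 'repo_')  # -> 'description'
    # so defaults['repo_description'] is filled from the model's `description`,
    # while non-stripped keys such as 'clone_uri' are read under their own name.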
419 419 def update(self, repo, **kwargs):
420 420 try:
421 421 cur_repo = self._get_repo(repo)
422 422 source_repo_name = cur_repo.repo_name
423 423
424 424 affected_user_ids = []
425 425 if 'user' in kwargs:
426 426 old_owner_id = cur_repo.user.user_id
427 427 new_owner = User.get_by_username(kwargs['user'])
428 428 cur_repo.user = new_owner
429 429
430 430 if old_owner_id != new_owner.user_id:
431 431 affected_user_ids = [new_owner.user_id, old_owner_id]
432 432
433 433 if 'repo_group' in kwargs:
434 434 cur_repo.group = RepoGroup.get(kwargs['repo_group'])
435 435 log.debug('Updating repo %s with params:%s', cur_repo, kwargs)
436 436
437 437 update_keys = [
438 438 (1, 'repo_description'),
439 439 (1, 'repo_landing_rev'),
440 440 (1, 'repo_private'),
441 441 (1, 'repo_enable_downloads'),
442 442 (1, 'repo_enable_locking'),
443 443 (1, 'repo_enable_statistics'),
444 444 (0, 'clone_uri'),
445 445 (0, 'push_uri'),
446 446 (0, 'fork_id')
447 447 ]
448 448 for strip, k in update_keys:
449 449 if k in kwargs:
450 450 val = kwargs[k]
451 451 if strip:
452 452 k = remove_prefix(k, 'repo_')
453 453
454 454 setattr(cur_repo, k, val)
455 455
456 456 new_name = source_repo_name
457 457 if 'repo_name' in kwargs:
458 458 new_name = cur_repo.get_new_name(kwargs['repo_name'])
459 459 cur_repo.repo_name = new_name
460 460
461 461 if 'repo_private' in kwargs:
462 462 # if private flag is set to True, reset default permission to NONE
463 463 set_private_to = kwargs.get('repo_private')
464 464 if set_private_to:
465 465 EMPTY_PERM = 'repository.none'
466 466 RepoModel().grant_user_permission(
467 467 repo=cur_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM
468 468 )
469 469 if set_private_to != cur_repo.private:
470 470 # NOTE(dan): we change repo private mode we need to notify all USERS
471 471 # this is just by having this value set to a different value then it was before
472 472 affected_user_ids = User.get_all_user_ids()
473 473
474 474 if kwargs.get('repo_landing_rev'):
475 475 landing_rev_val = kwargs['repo_landing_rev']
476 476 RepoModel().set_landing_rev(cur_repo, landing_rev_val)
477 477
478 478 # handle extra fields
479 479 for field in filter(lambda k: k.startswith(RepositoryField.PREFIX), kwargs):
480 480 k = RepositoryField.un_prefix_key(field)
481 481 ex_field = RepositoryField.get_by_key_name(
482 482 key=k, repo=cur_repo)
483 483 if ex_field:
484 484 ex_field.field_value = kwargs[field]
485 485 self.sa.add(ex_field)
486 486
487 487 self.sa.add(cur_repo)
488 488
489 489 if source_repo_name != new_name:
490 490 # rename repository
491 491 self._rename_filesystem_repo(
492 492 old=source_repo_name, new=new_name)
493 493
494 494 if affected_user_ids:
495 495 PermissionModel().trigger_permission_flush(affected_user_ids)
496 496
497 497 return cur_repo
498 498 except Exception:
499 499 log.error(traceback.format_exc())
500 500 raise
501 501
502 502 def _create_repo(self, repo_name, repo_type, description, owner,
503 503 private=False, clone_uri=None, repo_group=None,
504 504 landing_rev=None, fork_of=None,
505 505 copy_fork_permissions=False, enable_statistics=False,
506 506 enable_locking=False, enable_downloads=False,
507 507 copy_group_permissions=False,
508 508 state=Repository.STATE_PENDING):
509 509 """
510 510 Create repository inside database with PENDING state; this should only
511 511 be executed by create(), with the exception of importing existing
512 512 repos
513 513 """
514 514 from rhodecode.model.scm import ScmModel
515 515
516 516 owner = self._get_user(owner)
517 517 fork_of = self._get_repo(fork_of)
518 518 repo_group = self._get_repo_group(safe_int(repo_group))
519 519 default_landing_ref, _lbl = ScmModel.backend_landing_ref(repo_type)
520 520 landing_rev = landing_rev or default_landing_ref
521 521
522 522 try:
523 523 repo_name = safe_str(repo_name)
524 524 description = safe_str(description)
525 525 # repo_name is just the name of the repository,
526 526 # while repo_name_full is a fully qualified name that combines
527 527 # the name with the path of the group
528 528 repo_name_full = repo_name
529 529 repo_name = repo_name.split(Repository.NAME_SEP)[-1]
530 530
531 531 new_repo = Repository()
532 532 new_repo.repo_state = state
533 533 new_repo.enable_statistics = False
534 534 new_repo.repo_name = repo_name_full
535 535 new_repo.repo_type = repo_type
536 536 new_repo.user = owner
537 537 new_repo.group = repo_group
538 538 new_repo.description = description or repo_name
539 539 new_repo.private = private
540 540 new_repo.archived = False
541 541 new_repo.clone_uri = clone_uri
542 542 new_repo.landing_rev = landing_rev
543 543
544 544 new_repo.enable_statistics = enable_statistics
545 545 new_repo.enable_locking = enable_locking
546 546 new_repo.enable_downloads = enable_downloads
547 547
548 548 if repo_group:
549 549 new_repo.enable_locking = repo_group.enable_locking
550 550
551 551 if fork_of:
552 552 parent_repo = fork_of
553 553 new_repo.fork = parent_repo
554 554
555 555 events.trigger(events.RepoPreCreateEvent(new_repo))
556 556
557 557 self.sa.add(new_repo)
558 558
559 559 EMPTY_PERM = 'repository.none'
560 560 if fork_of and copy_fork_permissions:
561 561 repo = fork_of
562 562 user_perms = UserRepoToPerm.query() \
563 563 .filter(UserRepoToPerm.repository == repo).all()
564 564 group_perms = UserGroupRepoToPerm.query() \
565 565 .filter(UserGroupRepoToPerm.repository == repo).all()
566 566
567 567 for perm in user_perms:
568 568 UserRepoToPerm.create(
569 569 perm.user, new_repo, perm.permission)
570 570
571 571 for perm in group_perms:
572 572 UserGroupRepoToPerm.create(
573 573 perm.users_group, new_repo, perm.permission)
574 574 # in case we copy permissions and also set this repo to private
575 575 # override the default user permission to make it a private repo
576 576 if private:
577 577 RepoModel(self.sa).grant_user_permission(
578 578 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
579 579
580 580 elif repo_group and copy_group_permissions:
581 581 user_perms = UserRepoGroupToPerm.query() \
582 582 .filter(UserRepoGroupToPerm.group == repo_group).all()
583 583
584 584 group_perms = UserGroupRepoGroupToPerm.query() \
585 585 .filter(UserGroupRepoGroupToPerm.group == repo_group).all()
586 586
587 587 for perm in user_perms:
588 588 perm_name = perm.permission.permission_name.replace(
589 589 'group.', 'repository.')
590 590 perm_obj = Permission.get_by_key(perm_name)
591 591 UserRepoToPerm.create(perm.user, new_repo, perm_obj)
592 592
593 593 for perm in group_perms:
594 594 perm_name = perm.permission.permission_name.replace(
595 595 'group.', 'repository.')
596 596 perm_obj = Permission.get_by_key(perm_name)
597 597 UserGroupRepoToPerm.create(perm.users_group, new_repo, perm_obj)
598 598
599 599 if private:
600 600 RepoModel(self.sa).grant_user_permission(
601 601 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
602 602
603 603 else:
604 604 perm_obj = self._create_default_perms(new_repo, private)
605 605 self.sa.add(perm_obj)
606 606
607 607 # now automatically start following this repository as owner
608 608 ScmModel(self.sa).toggle_following_repo(new_repo.repo_id, owner.user_id)
609 609
610 610 # we need to flush here in order to check that the database won't
611 611 # throw any exceptions; filesystem dirs are created at the very end
612 612 self.sa.flush()
613 613 events.trigger(events.RepoCreateEvent(new_repo, actor=owner))
614 614 return new_repo
615 615
616 616 except Exception:
617 617 log.error(traceback.format_exc())
618 618 raise
619 619
620 620 def create(self, form_data, cur_user):
621 621 """
622 622 Create repository using celery tasks
623 623
624 624 :param form_data:
625 625 :param cur_user:
626 626 """
627 627 from rhodecode.lib.celerylib import tasks, run_task
628 628 return run_task(tasks.create_repo, form_data, cur_user)
629 629
630 630 def update_permissions(self, repo, perm_additions=None, perm_updates=None,
631 631 perm_deletions=None, check_perms=True,
632 632 cur_user=None):
633 633 if not perm_additions:
634 634 perm_additions = []
635 635 if not perm_updates:
636 636 perm_updates = []
637 637 if not perm_deletions:
638 638 perm_deletions = []
639 639
640 640 req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin')
641 641
642 642 changes = {
643 643 'added': [],
644 644 'updated': [],
645 645 'deleted': [],
646 646 'default_user_changed': None
647 647 }
648 648
649 649 repo = self._get_repo(repo)
650 650
651 651 # update permissions
652 652 for member_id, perm, member_type in perm_updates:
653 653 member_id = int(member_id)
654 654 if member_type == 'user':
655 655 member_name = User.get(member_id).username
656 656 if member_name == User.DEFAULT_USER:
657 657 # NOTE(dan): detect if we changed permissions for default user
658 658 perm_obj = self.sa.query(UserRepoToPerm) \
659 659 .filter(UserRepoToPerm.user_id == member_id) \
660 660 .filter(UserRepoToPerm.repository == repo) \
661 661 .scalar()
662 662 if perm_obj and perm_obj.permission.permission_name != perm:
663 663 changes['default_user_changed'] = True
664 664
665 665 # this also updates the current one if found
666 666 self.grant_user_permission(
667 667 repo=repo, user=member_id, perm=perm)
668 668 elif member_type == 'user_group':
669 669 # check if we have permissions to alter this usergroup
670 670 member_name = UserGroup.get(member_id).users_group_name
671 671 if not check_perms or HasUserGroupPermissionAny(
672 672 *req_perms)(member_name, user=cur_user):
673 673 self.grant_user_group_permission(
674 674 repo=repo, group_name=member_id, perm=perm)
675 675 else:
676 676 raise ValueError("member_type must be 'user' or 'user_group' "
677 677 "got {} instead".format(member_type))
678 678 changes['updated'].append({'type': member_type, 'id': member_id,
679 679 'name': member_name, 'new_perm': perm})
680 680
681 681 # set new permissions
682 682 for member_id, perm, member_type in perm_additions:
683 683 member_id = int(member_id)
684 684 if member_type == 'user':
685 685 member_name = User.get(member_id).username
686 686 self.grant_user_permission(
687 687 repo=repo, user=member_id, perm=perm)
688 688 elif member_type == 'user_group':
689 689 # check if we have permissions to alter this usergroup
690 690 member_name = UserGroup.get(member_id).users_group_name
691 691 if not check_perms or HasUserGroupPermissionAny(
692 692 *req_perms)(member_name, user=cur_user):
693 693 self.grant_user_group_permission(
694 694 repo=repo, group_name=member_id, perm=perm)
695 695 else:
696 696 raise ValueError("member_type must be 'user' or 'user_group' "
697 697 "got {} instead".format(member_type))
698 698
699 699 changes['added'].append({'type': member_type, 'id': member_id,
700 700 'name': member_name, 'new_perm': perm})
701 701 # delete permissions
702 702 for member_id, perm, member_type in perm_deletions:
703 703 member_id = int(member_id)
704 704 if member_type == 'user':
705 705 member_name = User.get(member_id).username
706 706 self.revoke_user_permission(repo=repo, user=member_id)
707 707 elif member_type == 'user_group':
708 708 # check if we have permissions to alter this usergroup
709 709 member_name = UserGroup.get(member_id).users_group_name
710 710 if not check_perms or HasUserGroupPermissionAny(
711 711 *req_perms)(member_name, user=cur_user):
712 712 self.revoke_user_group_permission(
713 713 repo=repo, group_name=member_id)
714 714 else:
715 715 raise ValueError("member_type must be 'user' or 'user_group' "
716 716 "got {} instead".format(member_type))
717 717
718 718 changes['deleted'].append({'type': member_type, 'id': member_id,
719 719 'name': member_name, 'new_perm': perm})
720 720 return changes
721 721
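All three permission lists above share the (member_id, perm, member_type) tuple shape; an illustrative call (the ids, names and the admin user are made up):

    changes = RepoModel().update_permissions(
        repo='some-repo',
        perm_additions=[(2, 'repository.read', 'user')],
        perm_updates=[(5, 'repository.admin', 'user_group')],
        perm_deletions=[(7, None, 'user')],  # perm is only echoed back for deletions
        cur_user=admin_user,                 # needed for the usergroup permission check
    )
    # changes -> {'added': [...], 'updated': [...], 'deleted': [...],
    #             'default_user_changed': True or None}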
722 722 def create_fork(self, form_data, cur_user):
723 723 """
724 724 Simple wrapper into executing celery task for fork creation
725 725
726 726 :param form_data:
727 727 :param cur_user:
728 728 """
729 729 from rhodecode.lib.celerylib import tasks, run_task
730 730 return run_task(tasks.create_repo_fork, form_data, cur_user)
731 731
732 732 def archive(self, repo):
733 733 """
734 734 Archive given repository. Set archive flag.
735 735
736 736 :param repo:
737 737 """
738 738 repo = self._get_repo(repo)
739 739 if repo:
740 740
741 741 try:
742 742 repo.archived = True
743 743 self.sa.add(repo)
744 744 self.sa.commit()
745 745 except Exception:
746 746 log.error(traceback.format_exc())
747 747 raise
748 748
749 749 def delete(self, repo, forks=None, pull_requests=None, artifacts=None, fs_remove=True, cur_user=None, call_events=True):
750 750 """
751 751 Delete given repository; the forks parameter defines what to do with
752 752 attached forks. Throws AttachedForksError if the deleted repo has attached
753 753 forks
754 754
755 755 :param repo:
756 756 :param forks: str 'delete' or 'detach'
757 757 :param pull_requests: str 'delete' or None
758 758 :param artifacts: str 'delete' or None
759 759 :param fs_remove: remove(archive) repo from filesystem
760 760 """
761 761 if not cur_user:
762 762 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
763 763 repo = self._get_repo(repo)
764 764 if not repo:
765 765 return False
766 766
767 767 if forks == 'detach':
768 768 for r in repo.forks:
769 769 r.fork = None
770 770 self.sa.add(r)
771 771 elif forks == 'delete':
772 772 for r in repo.forks:
773 773 self.delete(r, forks='delete')
774 774 elif [f for f in repo.forks]:
775 775 raise AttachedForksError()
776 776
777 777 # check for pull requests
778 778 pr_sources = repo.pull_requests_source
779 779 pr_targets = repo.pull_requests_target
780 780 if pull_requests != 'delete' and (pr_sources or pr_targets):
781 781 raise AttachedPullRequestsError()
782 782
783 783 artifacts_objs = repo.artifacts
784 784 if artifacts == 'delete':
785 785 for a in artifacts_objs:
786 786 self.sa.delete(a)
787 787 elif [a for a in artifacts_objs]:
788 788 raise AttachedArtifactsError()
789 789
790 790 old_repo_dict = repo.get_dict()
791 791 if call_events:
792 792 events.trigger(events.RepoPreDeleteEvent(repo))
793 793
794 794 try:
795 795 self.sa.delete(repo)
796 796 if fs_remove:
797 797 self._delete_filesystem_repo(repo)
798 798 else:
799 799 log.debug('skipping removal from filesystem')
800 800 old_repo_dict.update({
801 801 'deleted_by': cur_user,
802 802 'deleted_on': time.time(),
803 803 })
804 804 if call_events:
805 805 hooks_base.delete_repository(**old_repo_dict)
806 806 events.trigger(events.RepoDeleteEvent(repo))
807 807 except Exception:
808 808 log.error(traceback.format_exc())
809 809 raise
810 810
811 811 return True
812 812
813 813 def grant_user_permission(self, repo, user, perm):
814 814 """
815 815 Grant permission for user on given repository, or update existing one
816 816 if found
817 817
818 818 :param repo: Instance of Repository, repository_id, or repository name
819 819 :param user: Instance of User, user_id or username
820 820 :param perm: Instance of Permission, or permission_name
821 821 """
822 822 user = self._get_user(user)
823 823 repo = self._get_repo(repo)
824 824 permission = self._get_perm(perm)
825 825
826 826 # check if we have that permission already
827 827 obj = self.sa.query(UserRepoToPerm) \
828 828 .filter(UserRepoToPerm.user == user) \
829 829 .filter(UserRepoToPerm.repository == repo) \
830 830 .scalar()
831 831 if obj is None:
832 832 # create new !
833 833 obj = UserRepoToPerm()
834 834 obj.repository = repo
835 835 obj.user = user
836 836 obj.permission = permission
837 837 self.sa.add(obj)
838 838 log.debug('Granted perm %s to %s on %s', perm, user, repo)
839 839 action_logger_generic(
840 840 'granted permission: {} to user: {} on repo: {}'.format(
841 841 perm, user, repo), namespace='security.repo')
842 842 return obj
843 843
844 844 def revoke_user_permission(self, repo, user):
845 845 """
846 846 Revoke permission for user on given repository
847 847
848 848 :param repo: Instance of Repository, repository_id, or repository name
849 849 :param user: Instance of User, user_id or username
850 850 """
851 851
852 852 user = self._get_user(user)
853 853 repo = self._get_repo(repo)
854 854
855 855 obj = self.sa.query(UserRepoToPerm) \
856 856 .filter(UserRepoToPerm.repository == repo) \
857 857 .filter(UserRepoToPerm.user == user) \
858 858 .scalar()
859 859 if obj:
860 860 self.sa.delete(obj)
861 861 log.debug('Revoked perm on %s from user %s', repo, user)
862 862 action_logger_generic(
863 863 'revoked permission from user: {} on repo: {}'.format(
864 864 user, repo), namespace='security.repo')
865 865
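Both helpers accept model instances or plain identifiers for the repo and user arguments; a short sketch with made-up names:

    model = RepoModel()
    # creates or updates the UserRepoToPerm row
    model.grant_user_permission(repo='some-repo', user='bob', perm='repository.write')
    # deletes the row again, if one exists
    model.revoke_user_permission(repo='some-repo', user='bob')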
866 866 def grant_user_group_permission(self, repo, group_name, perm):
867 867 """
868 868 Grant permission for user group on given repository, or update
869 869 existing one if found
870 870
871 871 :param repo: Instance of Repository, repository_id, or repository name
872 872 :param group_name: Instance of UserGroup, users_group_id,
873 873 or user group name
874 874 :param perm: Instance of Permission, or permission_name
875 875 """
876 876 repo = self._get_repo(repo)
877 877 group_name = self._get_user_group(group_name)
878 878 permission = self._get_perm(perm)
879 879
880 880 # check if we have that permission already
881 881 obj = self.sa.query(UserGroupRepoToPerm) \
882 882 .filter(UserGroupRepoToPerm.users_group == group_name) \
883 883 .filter(UserGroupRepoToPerm.repository == repo) \
884 884 .scalar()
885 885
886 886 if obj is None:
887 887 # create new
888 888 obj = UserGroupRepoToPerm()
889 889
890 890 obj.repository = repo
891 891 obj.users_group = group_name
892 892 obj.permission = permission
893 893 self.sa.add(obj)
894 894 log.debug('Granted perm %s to %s on %s', perm, group_name, repo)
895 895 action_logger_generic(
896 896 'granted permission: {} to usergroup: {} on repo: {}'.format(
897 897 perm, group_name, repo), namespace='security.repo')
898 898
899 899 return obj
900 900
901 901 def revoke_user_group_permission(self, repo, group_name):
902 902 """
903 903 Revoke permission for user group on given repository
904 904
905 905 :param repo: Instance of Repository, repository_id, or repository name
906 906 :param group_name: Instance of UserGroup, users_group_id,
907 907 or user group name
908 908 """
909 909 repo = self._get_repo(repo)
910 910 group_name = self._get_user_group(group_name)
911 911
912 912 obj = self.sa.query(UserGroupRepoToPerm) \
913 913 .filter(UserGroupRepoToPerm.repository == repo) \
914 914 .filter(UserGroupRepoToPerm.users_group == group_name) \
915 915 .scalar()
916 916 if obj:
917 917 self.sa.delete(obj)
918 918 log.debug('Revoked perm on %s from usergroup %s', repo, group_name)
919 919 action_logger_generic(
920 920 'revoked permission from usergroup: {} on repo: {}'.format(
921 921 group_name, repo), namespace='security.repo')
922 922
923 923 def delete_stats(self, repo_name):
924 924 """
925 925 removes stats for given repo
926 926
927 927 :param repo_name:
928 928 """
929 929 repo = self._get_repo(repo_name)
930 930 try:
931 931 obj = self.sa.query(Statistics) \
932 932 .filter(Statistics.repository == repo).scalar()
933 933 if obj:
934 934 self.sa.delete(obj)
935 935 except Exception:
936 936 log.error(traceback.format_exc())
937 937 raise
938 938
939 939 def add_repo_field(self, repo_name, field_key, field_label, field_value='',
940 940 field_type='str', field_desc=''):
941 941
942 942 repo = self._get_repo(repo_name)
943 943
944 944 new_field = RepositoryField()
945 945 new_field.repository = repo
946 946 new_field.field_key = field_key
947 947 new_field.field_type = field_type # python type
948 948 new_field.field_value = field_value
949 949 new_field.field_desc = field_desc
950 950 new_field.field_label = field_label
951 951 self.sa.add(new_field)
952 952 return new_field
953 953
954 954 def delete_repo_field(self, repo_name, field_key):
955 955 repo = self._get_repo(repo_name)
956 956 field = RepositoryField.get_by_key_name(field_key, repo)
957 957 if field:
958 958 self.sa.delete(field)
959 959
960 960 def set_landing_rev(self, repo, landing_rev_name):
961 961 if landing_rev_name.startswith('branch:'):
962 962 landing_rev_name = landing_rev_name.split('branch:')[-1]
963 963 scm_instance = repo.scm_instance()
964 964 if scm_instance:
965 965 return scm_instance._remote.set_head_ref(landing_rev_name)
966 966
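Only the branch: prefix is stripped before the head ref is updated; a sketch:

    # 'branch:stable' updates the head ref to 'stable'
    RepoModel().set_landing_rev(repo, 'branch:stable')
    # any other value, e.g. 'rev:deadbeef', is passed to set_head_ref() unmodified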
967 967 def _create_filesystem_repo(self, repo_name, repo_type, repo_group,
968 968 clone_uri=None, repo_store_location=None,
969 969 use_global_config=False, install_hooks=True):
970 970 """
971 971 makes repository on filesystem. It is group aware, meaning it will create
972 972 a repository within a group, and alter the paths according to the
973 973 group location
974 974
975 975 :param repo_name:
976 976 :param alias:
977 977 :param parent:
978 978 :param clone_uri:
979 979 :param repo_store_location:
980 980 """
981 981 from rhodecode.lib.utils import is_valid_repo, is_valid_repo_group
982 982 from rhodecode.model.scm import ScmModel
983 983
984 984 if Repository.NAME_SEP in repo_name:
985 985 raise ValueError(
986 986 'repo_name must not contain groups, got `%s`' % repo_name)
987 987
988 988 if isinstance(repo_group, RepoGroup):
989 989 new_parent_path = os.sep.join(repo_group.full_path_splitted)
990 990 else:
991 991 new_parent_path = repo_group or ''
992 992
993 993 if repo_store_location:
994 994 _paths = [repo_store_location]
995 995 else:
996 996 _paths = [self.repos_path, new_parent_path, repo_name]
997 997 # we need to make it str for mercurial
998 998 repo_path = os.path.join(*map(lambda x: safe_str(x), _paths))
999 999
1000 1000 # check if this path is not a repository
1001 1001 if is_valid_repo(repo_path, self.repos_path):
1002 1002 raise Exception(f'This path {repo_path} is already a valid repository')
1003 1003
1004 1004 # check if this path is a group
1005 1005 if is_valid_repo_group(repo_path, self.repos_path):
1006 1006 raise Exception(f'This path {repo_path} is already a valid repo group')
1007 1007
1008 1008 log.info('creating repo %s in %s from url: `%s`',
1009 1009 repo_name, safe_str(repo_path),
1010 1010 obfuscate_url_pw(clone_uri))
1011 1011
1012 1012 backend = get_backend(repo_type)
1013 1013
1014 1014 config_repo = None if use_global_config else repo_name
1015 1015 if config_repo and new_parent_path:
1016 1016 config_repo = Repository.NAME_SEP.join(
1017 1017 (new_parent_path, config_repo))
1018 1018 config = make_db_config(clear_session=False, repo=config_repo)
1019 1019 config.set('extensions', 'largefiles', '')
1020 1020
1021 1021 # patch and reset hooks section of UI config to not run any
1022 1022 # hooks on creating remote repo
1023 1023 config.clear_section('hooks')
1024 1024
1025 1025 # TODO: johbo: Unify this, hardcoded "bare=True" does not look nice
1026 1026 if repo_type == 'git':
1027 1027 repo = backend(
1028 1028 repo_path, config=config, create=True, src_url=clone_uri, bare=True,
1029 1029 with_wire={"cache": False})
1030 1030 else:
1031 1031 repo = backend(
1032 1032 repo_path, config=config, create=True, src_url=clone_uri,
1033 1033 with_wire={"cache": False})
1034 1034
1035 1035 if install_hooks:
1036 1036 repo.install_hooks()
1037 1037
1038 1038 log.debug('Created repo %s with %s backend',
1039 1039 safe_str(repo_name), safe_str(repo_type))
1040 1040 return repo
1041 1041
1042 1042 def _rename_filesystem_repo(self, old, new):
1043 1043 """
1044 1044 renames repository on filesystem
1045 1045
1046 1046 :param old: old name
1047 1047 :param new: new name
1048 1048 """
1049 1049 log.info('renaming repo from %s to %s', old, new)
1050 1050
1051 1051 old_path = os.path.join(self.repos_path, old)
1052 1052 new_path = os.path.join(self.repos_path, new)
1053 1053 if os.path.isdir(new_path):
1054 1054 raise Exception(
1055 1055 'Was trying to rename to already existing dir %s' % new_path
1056 1056 )
1057 1057 shutil.move(old_path, new_path)
1058 1058
1059 1059 def _delete_filesystem_repo(self, repo):
1060 1060 """
1061 1061 removes repo from filesystem. The removal is actually done by adding
1062 1062 an rm__ prefix to the dir and renaming the internal .hg/.git dirs, so this
1063 1063 repository is no longer valid for rhodecode; it can be undeleted later on
1064 1064 by reverting the renames on this repository
1065 1065
1066 1066 :param repo: repo object
1067 1067 """
1068 1068 rm_path = os.path.join(self.repos_path, repo.repo_name)
1069 1069 repo_group = repo.group
1070 1070 log.info("delete_filesystem_repo: removing repository %s", rm_path)
1071 1071 # disable the hg/git internals so the dir doesn't get detected as a repo
1072 1072 alias = repo.repo_type
1073 1073
1074 1074 config = make_db_config(clear_session=False)
1075 1075 config.set('extensions', 'largefiles', '')
1076 1076 bare = getattr(repo.scm_instance(config=config), 'bare', False)
1077 1077
1078 1078 # skip this for bare git repos
1079 1079 if not bare:
1080 1080 # disable VCS repo
1081 1081 vcs_path = os.path.join(rm_path, '.%s' % alias)
1082 1082 if os.path.exists(vcs_path):
1083 1083 shutil.move(vcs_path, os.path.join(rm_path, 'rm__.%s' % alias))
1084 1084
1085 1085 _now = datetime.datetime.now()
1086 1086 _ms = str(_now.microsecond).rjust(6, '0')
1087 1087 _d = 'rm__{}__{}'.format(_now.strftime('%Y%m%d_%H%M%S_' + _ms),
1088 1088 repo.just_name)
1089 1089 if repo_group:
1090 1090 # if repository is in group, prefix the removal path with the group
1091 1091 args = repo_group.full_path_splitted + [_d]
1092 1092 _d = os.path.join(*args)
1093 1093
1094 1094 if os.path.isdir(rm_path):
1095 1095 shutil.move(rm_path, os.path.join(self.repos_path, _d))
1096 1096
1097 1097 # finally cleanup diff-cache if it exists
1098 1098 cached_diffs_dir = repo.cached_diffs_dir
1099 1099 if os.path.isdir(cached_diffs_dir):
1100 1100 shutil.rmtree(cached_diffs_dir)
1101 1101
1102 1102
1103 1103 class ReadmeFinder:
1104 1104 """
1105 1105 Utility which knows how to find a readme for a specific commit.
1106 1106
1107 1107 The main idea is that this is a configurable algorithm. When creating an
1108 1108 instance you can define parameters, currently only the `default_renderer`.
1109 1109 Based on this configuration the method :meth:`search` behaves slightly
1110 1110 differently.
1111 1111 """
1112 1112
1113 1113 readme_re = re.compile(br'^readme(\.[^.]+)?$', re.IGNORECASE)
1114 1114 path_re = re.compile(br'^docs?', re.IGNORECASE)
1115 1115
1116 1116 default_priorities = {
1117 1117 None: 0,
1118 1118 b'.rst': 1,
1119 1119 b'.md': 1,
1120 1120 b'.rest': 2,
1121 1121 b'.mkdn': 2,
1122 1122 b'.text': 2,
1123 1123 b'.txt': 3,
1124 1124 b'.mdown': 3,
1125 1125 b'.markdown': 4,
1126 1126 }
1127 1127
1128 1128 path_priority = {
1129 1129 b'doc': 0,
1130 1130 b'docs': 1,
1131 1131 }
1132 1132
1133 1133 FALLBACK_PRIORITY = 99
1134 1134
1135 1135 RENDERER_TO_EXTENSION = {
1136 1136 'rst': [b'.rst', b'.rest'],
1137 1137 'markdown': [b'.md', b'.mkdn', b'.mdown', b'.markdown'],
1138 1138 }
1139 1139
1140 1140 def __init__(self, default_renderer=None):
1141 1141 self._default_renderer = default_renderer
1142 1142 self._renderer_extensions = self.RENDERER_TO_EXTENSION.get(default_renderer, [])
1143 1143
1144 def search(self, commit, path=b'/'):
1144 def search(self, commit, path=b'/', nodes=None):
1145 1145 """
1146 1146 Find a readme in the given `commit`.
1147 1147 """
1148 1148 # first, check that the given PATH actually is a DIR
1149 1149 bytes_path = safe_bytes(path)
1150 1150 if commit.get_node(bytes_path).kind != NodeKind.DIR:
1151 1151 return None
1152 1152
1153 if not nodes:
1153 1154 nodes = commit.get_nodes(bytes_path)
1155
1154 1156 matches = self._match_readmes(nodes)
1155 1157 matches = self._sort_according_to_priority(matches)
1156 1158 if matches:
1157 1159 return matches[0].node
1158 1160
1159 1161 paths = self._match_paths(nodes)
1160 1162 paths = self._sort_paths_according_to_priority(paths)
1161 1163 for bytes_path in paths:
1162 1164 match = self.search(commit, path=bytes_path)
1163 1165 if match:
1164 1166 return match
1165 1167
1166 1168 return None
1167 1169
1168 1170 def _match_readmes(self, nodes):
1169 1171 for node in nodes:
1170 1172 if not node.is_file():
1171 1173 continue
1172 1174 path = node.bytes_path.rsplit(b'/', 1)[-1]
1173 1175 match = self.readme_re.match(path)
1174 1176 if match:
1175 1177 extension = match.group(1)
1176 1178 yield ReadmeMatch(node, match, self._priority(extension))
1177 1179
1178 1180 def _match_paths(self, nodes):
1179 1181 for node in nodes:
1180 1182 if not node.is_dir():
1181 1183 continue
1182 1184 match = self.path_re.match(node.bytes_path)
1183 1185 if match:
1184 1186 yield node.bytes_path
1185 1187
1186 1188 def _priority(self, extension):
1187 1189 renderer_priority = 0 if extension in self._renderer_extensions else 1
1188 1190 extension_priority = self.default_priorities.get(extension, self.FALLBACK_PRIORITY)
1189 1191 return renderer_priority, extension_priority
1190 1192
1191 1193 def _sort_according_to_priority(self, matches):
1192 1194
1193 1195 def priority_and_path(match):
1194 1196 return match.priority, match.path
1195 1197
1196 1198 return sorted(matches, key=priority_and_path)
1197 1199
1198 1200 def _sort_paths_according_to_priority(self, paths):
1199 1201
1200 1202 def priority_and_path(path):
1201 1203 return self.path_priority.get(path, self.FALLBACK_PRIORITY), path
1202 1204
1203 1205 return sorted(paths, key=priority_and_path)
1204 1206
1205 1207
1206 1208 class ReadmeMatch:
1207 1209
1208 1210 def __init__(self, node, match, priority):
1209 1211 self.node = node
1210 1212 self._match = match
1211 1213 self.priority = priority
1212 1214
1213 1215 @property
1214 1216 def path(self):
1215 1217 return self.node.path
1216 1218
1217 1219 def __repr__(self):
1218 1220 return f'<ReadmeMatch {self.path} priority={self.priority}>'
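A hedged usage sketch of the finder above, assuming commit is a backend commit object exposing get_node()/get_nodes():

    finder = ReadmeFinder(default_renderer='markdown')
    node = finder.search(commit)   # scans '/', then doc/docs subdirectories
    if node is not None:
        print(node.path)           # e.g. README.md wins over readme.txt
                                   # via the (renderer, extension) priorities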
@@ -1,116 +1,118
1 1 <%namespace name="base" file="/base/base.mako"/>
2 2
3 3 <%doc>
4 4 Please note the content of this file is cached, so changes here might not be reflected when editing.
5 5 add ?no-cache=true into the file url to disable caches.
6 6
7 7 e.g
8 8 http://docker-dev:10020/ipython/files/master/IPython/frontend/html/notebook/static?no-cache=1
9 9
10 10 </%doc>
11 11 <%
12 12 at_ref = request.GET.get('at')
13 13 if at_ref:
14 14 query={'at': at_ref}
15 15 default_landing_ref = at_ref or c.rhodecode_db_repo.landing_ref_name
16 16 else:
17 17 query=None
18 18 default_landing_ref = c.commit.raw_id
19 19 %>
20 20 <div id="file-tree-wrapper" class="browser-body ${('full-load' if c.full_load else '')}">
21 21 <table class="code-browser rctable table-bordered">
22 22 <thead>
23 23 <tr>
24 24 <th>${_('Name')}</th>
25 25 <th>${_('Size')}</th>
26 26 <th>${_('Modified')}</th>
27 27 <th>${_('Last Commit')}</th>
28 28 <th>${_('Author')}</th>
29 29 </tr>
30 30 </thead>
31 31
32 32 <tbody id="tbody">
33 33 <tr>
34 34 <td colspan="5">
35 35 ${h.files_breadcrumbs(c.repo_name, c.rhodecode_db_repo.repo_type, c.commit.raw_id, c.file.path, c.rhodecode_db_repo.landing_ref_name, request.GET.get('at'), limit_items=True)}
36 36 </td>
37 37 </tr>
38 38
39 39 <% has_files = False %>
40 40 % if not c.file.is_submodule():
41 % for cnt, node in enumerate(c.file):
41 % for cnt, node in enumerate(c.file_nodes):
42 42 <% has_files = True %>
43 43 <tr class="parity${(cnt % 2)}">
44 44 <td class="td-componentname">
45 45 % if node.is_submodule():
46 46 <span class="submodule-dir">
47 47 % if node.url.startswith('http://') or node.url.startswith('https://'):
48 48 <a href="${node.url}">
49 49 <i class="icon-directory browser-dir"></i><span class="tooltip-hovercard" data-hovercard-alt="${node.url}" data-hovercard-url="javascript:renderTemplate('submoduleHovercard', {'submodule_url':'${node.url}'})">${node.name}</span>
50 50 </a>
51 51 % else:
52 52 <i class="icon-directory browser-dir"></i><span class="tooltip-hovercard" data-hovercard-alt="${node.url}" data-hovercard-url="javascript:renderTemplate('submoduleHovercard', {'submodule_url':'${node.url}'})">${node.name}</span>
53 53 % endif
54 54 </span>
55 55 % else:
56 56 <a href="${h.repo_files_by_ref_url(c.repo_name, c.rhodecode_db_repo.repo_type, f_path=h.safe_str(node.path), ref_name=default_landing_ref, commit_id=c.commit.raw_id, query=query)}">
57 57 <i class="${('icon-file-text browser-file' if node.is_file() else 'icon-directory browser-dir')}"></i>${node.name}
58 58 </a>
59 59 % endif
60 60 </td>
61 61 %if node.is_file():
62 62 <td class="td-size" data-attr-name="size">
63 63 % if c.full_load:
64 64 <span data-size="${node.size}">${h.format_byte_size_binary(node.size)}</span>
65 65 % else:
66 66 ${_('Loading ...')}
67 67 % endif
68 68 </td>
69 69 <td class="td-time" data-attr-name="modified_at">
70 70 % if c.full_load:
71 71 <span data-date="${node.last_commit.date}">${h.age_component(node.last_commit.date)}</span>
72 72 % endif
73 73 </td>
74 74 <td class="td-hash" data-attr-name="commit_id">
75 75 % if c.full_load:
76 76 <div class="tooltip-hovercard" data-hovercard-alt="${node.last_commit.message}" data-hovercard-url="${h.route_path('hovercard_repo_commit', repo_name=c.repo_name, commit_id=node.last_commit.raw_id)}">
77 77 <pre data-commit-id="${node.last_commit.raw_id}">r${node.last_commit.idx}:${node.last_commit.short_id}</pre>
78 78 </div>
79 79 % endif
80 80 </td>
81 81 <td class="td-user" data-attr-name="author">
82 82 % if c.full_load:
83 83 <span data-author="${node.last_commit.author}">${h.gravatar_with_user(request, node.last_commit.author, tooltip=True)|n}</span>
84 84 % endif
85 85 </td>
86 86 %else:
87 87 <td></td>
88 88 <td></td>
89 89 <td></td>
90 90 <td></td>
91 91 %endif
92 92 </tr>
93 93 % endfor
94 94 % endif
95 95
96 96 % if not has_files:
97 97 <tr>
98 98 <td colspan="5">
99 99 ## empty dir (mostly SVN)
100 100
101 101 ## submodule if we somehow end up here
102 102 % if c.file.is_submodule():
103 103 <span class="submodule-dir">
104 104 <strong>Submodule Node</strong><br/>
105 105 ${h.escape(c.file.name)}
106 106 <pre>${c.file.url}</pre>
107 107 </span>
108 % else:
109 <div>${_('Empty directory')}</div>
108 110 %endif
109 111 </td>
110 112 </tr>
111 113 % endif
112 114
113 115 </tbody>
114 116 <tbody id="tbody_filtered"></tbody>
115 117 </table>
116 118 </div>
@@ -1,1249 +1,1251
1 1 # Copyright (C) 2010-2024 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 import datetime
20 20 import mock
21 21 import os
22 22 import sys
23 23 import shutil
24 24
25 25 import pytest
26 26
27 27 from rhodecode.lib.utils import make_db_config
28 28 from rhodecode.lib.vcs.backends.base import Reference
29 29 from rhodecode.lib.vcs.backends.git import GitRepository, GitCommit, discover_git_version
30 30 from rhodecode.lib.vcs.exceptions import RepositoryError, VCSError, NodeDoesNotExistError
31 31 from rhodecode.lib.vcs.nodes import NodeKind, FileNode, DirNode, NodeState, SubModuleNode, RootNode
32 32 from rhodecode.tests import TEST_GIT_REPO, TEST_GIT_REPO_CLONE, get_new_dir
33 33 from rhodecode.tests.vcs.conftest import BackendTestMixin
34 34
35 35
36 36 pytestmark = pytest.mark.backends("git")
37 37
38 38
39 39 DIFF_FROM_REMOTE = rb"""diff --git a/foobar b/foobar
40 40 new file mode 100644
41 41 index 0000000..f6ea049
42 42 --- /dev/null
43 43 +++ b/foobar
44 44 @@ -0,0 +1 @@
45 45 +foobar
46 46 \ No newline at end of file
47 47 diff --git a/foobar2 b/foobar2
48 48 new file mode 100644
49 49 index 0000000..e8c9d6b
50 50 --- /dev/null
51 51 +++ b/foobar2
52 52 @@ -0,0 +1 @@
53 53 +foobar2
54 54 \ No newline at end of file
55 55 """
56 56
57 57
58 58 def callable_get_diff(*args, **kwargs):
59 59 return DIFF_FROM_REMOTE
60 60
61 61
62 62 class TestGitRepository(object):
63 63 @pytest.fixture(autouse=True)
64 64 def prepare(self, request, baseapp):
65 65 self.repo = GitRepository(TEST_GIT_REPO, bare=True)
66 66 self.repo.count()
67 67
68 68 def get_clone_repo(self, tmpdir):
69 69 """
70 70 Return a non-bare clone of the base repo.
71 71 """
72 72 clone_path = str(tmpdir.join("clone-repo"))
73 73 repo_clone = GitRepository(clone_path, create=True, src_url=self.repo.path, bare=False)
74 74
75 75 return repo_clone
76 76
77 77 def get_empty_repo(self, tmpdir, bare=False):
78 78 """
79 79 Return an empty repo, non-bare by default.
80 80 """
81 81 clone_path = str(tmpdir.join("empty-repo"))
82 82 return GitRepository(clone_path, create=True, bare=bare)
83 83
84 84 def test_wrong_repo_path(self):
85 85 wrong_repo_path = "/tmp/errorrepo_git"
86 86 with pytest.raises(RepositoryError):
87 87 GitRepository(wrong_repo_path)
88 88
89 89 def test_repo_clone(self, tmp_path_factory):
90 90 repo = GitRepository(TEST_GIT_REPO)
91 91 clone_path = f"{tmp_path_factory.mktemp('_')}_{TEST_GIT_REPO_CLONE}"
92 92 repo_clone = GitRepository(clone_path, src_url=TEST_GIT_REPO, create=True, do_workspace_checkout=True)
93 93
94 94 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
95 95 # Checking hashes of commits should be enough
96 96 for commit in repo.get_commits():
97 97 raw_id = commit.raw_id
98 98 assert raw_id == repo_clone.get_commit(raw_id).raw_id
99 99
100 100 def test_repo_clone_without_create(self):
101 101 with pytest.raises(RepositoryError):
102 102 GitRepository(TEST_GIT_REPO_CLONE + "_wo_create", src_url=TEST_GIT_REPO)
103 103
104 104 def test_repo_clone_with_update(self, tmp_path_factory):
105 105 repo = GitRepository(TEST_GIT_REPO)
106 106 clone_path = "{}_{}_update".format(tmp_path_factory.mktemp("_"), TEST_GIT_REPO_CLONE)
107 107
108 108 repo_clone = GitRepository(clone_path, create=True, src_url=TEST_GIT_REPO, do_workspace_checkout=True)
109 109 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
110 110
111 111 # check if current workdir was updated
112 112 fpath = os.path.join(clone_path, "MANIFEST.in")
113 113 assert os.path.isfile(fpath)
114 114
115 115 def test_repo_clone_without_update(self, tmp_path_factory):
116 116 repo = GitRepository(TEST_GIT_REPO)
117 117 clone_path = "{}_{}_without_update".format(tmp_path_factory.mktemp("_"), TEST_GIT_REPO_CLONE)
118 118 repo_clone = GitRepository(clone_path, create=True, src_url=TEST_GIT_REPO, do_workspace_checkout=False)
119 119 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
120 120 # check if current workdir was *NOT* updated
121 121 fpath = os.path.join(clone_path, "MANIFEST.in")
122 122 # Make sure it's not bare repo
123 123 assert not repo_clone.bare
124 124 assert not os.path.isfile(fpath)
125 125
126 126 def test_repo_clone_into_bare_repo(self, tmp_path_factory):
127 127 repo = GitRepository(TEST_GIT_REPO)
128 128 clone_path = "{}_{}_bare.git".format(tmp_path_factory.mktemp("_"), TEST_GIT_REPO_CLONE)
129 129 repo_clone = GitRepository(clone_path, create=True, src_url=repo.path, bare=True)
130 130 assert repo_clone.bare
131 131
132 132 def test_create_repo_is_not_bare_by_default(self):
133 133 repo = GitRepository(get_new_dir("not-bare-by-default"), create=True)
134 134 assert not repo.bare
135 135
136 136 def test_create_bare_repo(self):
137 137 repo = GitRepository(get_new_dir("bare-repo"), create=True, bare=True)
138 138 assert repo.bare
139 139
140 140 def test_update_server_info(self):
141 141 self.repo._update_server_info()
142 142
143 143 def test_fetch(self, vcsbackend_git):
144 144 # Note: This is a git specific part of the API, it's only implemented
145 145 # by the git backend.
146 146 source_repo = vcsbackend_git.repo
147 147 target_repo = vcsbackend_git.create_repo(bare=True)
148 148 target_repo.fetch(source_repo.path)
149 149 # Note: Get a fresh instance, avoids caching trouble
150 150 target_repo = vcsbackend_git.backend(target_repo.path)
151 151 assert len(source_repo.commit_ids) == len(target_repo.commit_ids)
152 152
153 153 def test_commit_ids(self):
154 154 # there are 112 commits (by now)
155 155 # so we can assume they would be available from now on
156 156 subset = {
157 157 "c1214f7e79e02fc37156ff215cd71275450cffc3",
158 158 "38b5fe81f109cb111f549bfe9bb6b267e10bc557",
159 159 "fa6600f6848800641328adbf7811fd2372c02ab2",
160 160 "102607b09cdd60e2793929c4f90478be29f85a17",
161 161 "49d3fd156b6f7db46313fac355dca1a0b94a0017",
162 162 "2d1028c054665b962fa3d307adfc923ddd528038",
163 163 "d7e0d30fbcae12c90680eb095a4f5f02505ce501",
164 164 "ff7ca51e58c505fec0dd2491de52c622bb7a806b",
165 165 "dd80b0f6cf5052f17cc738c2951c4f2070200d7f",
166 166 "8430a588b43b5d6da365400117c89400326e7992",
167 167 "d955cd312c17b02143c04fa1099a352b04368118",
168 168 "f67b87e5c629c2ee0ba58f85197e423ff28d735b",
169 169 "add63e382e4aabc9e1afdc4bdc24506c269b7618",
170 170 "f298fe1189f1b69779a4423f40b48edf92a703fc",
171 171 "bd9b619eb41994cac43d67cf4ccc8399c1125808",
172 172 "6e125e7c890379446e98980d8ed60fba87d0f6d1",
173 173 "d4a54db9f745dfeba6933bf5b1e79e15d0af20bd",
174 174 "0b05e4ed56c802098dfc813cbe779b2f49e92500",
175 175 "191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e",
176 176 "45223f8f114c64bf4d6f853e3c35a369a6305520",
177 177 "ca1eb7957a54bce53b12d1a51b13452f95bc7c7e",
178 178 "f5ea29fc42ef67a2a5a7aecff10e1566699acd68",
179 179 "27d48942240f5b91dfda77accd2caac94708cc7d",
180 180 "622f0eb0bafd619d2560c26f80f09e3b0b0d78af",
181 181 "e686b958768ee96af8029fe19c6050b1a8dd3b2b",
182 182 }
183 183 assert subset.issubset(set(self.repo.commit_ids))
184 184
185 185 def test_slicing(self):
186 186 # expected slice sizes: 4, 1, 5, 10, 95
187 187 for sfrom, sto, size in [(0, 4, 4), (1, 2, 1), (10, 15, 5), (10, 20, 10), (5, 100, 95)]:
188 188 commit_ids = list(self.repo[sfrom:sto])
189 189 assert len(commit_ids) == size
190 190 assert commit_ids[0] == self.repo.get_commit(commit_idx=sfrom)
191 191 assert commit_ids[-1] == self.repo.get_commit(commit_idx=sto - 1)
192 192
193 193 def test_branches(self):
194 194 # TODO: Need more tests here
195 195 # Removed (those are 'remotes' branches for cloned repo)
196 196 # assert 'master' in self.repo.branches
197 197 # assert 'gittree' in self.repo.branches
198 198 # assert 'web-branch' in self.repo.branches
199 199 for __, commit_id in self.repo.branches.items():
200 200 assert isinstance(self.repo.get_commit(commit_id), GitCommit)
201 201
202 202 def test_tags(self):
203 203 # TODO: Need more tests here
204 204 assert "v0.1.1" in self.repo.tags
205 205 assert "v0.1.2" in self.repo.tags
206 206 for __, commit_id in self.repo.tags.items():
207 207 assert isinstance(self.repo.get_commit(commit_id), GitCommit)
208 208
209 209 def _test_single_commit_cache(self, commit_id):
210 210 commit = self.repo.get_commit(commit_id)
211 211 assert commit_id in self.repo.commits
212 212 assert commit is self.repo.commits[commit_id]
213 213
214 214 def test_initial_commit(self):
215 215 commit_id = self.repo.commit_ids[0]
216 216 init_commit = self.repo.get_commit(commit_id)
217 217 init_author = init_commit.author
218 218
219 219 assert init_commit.message == "initial import\n"
220 220 assert init_author == "Marcin Kuzminski <marcin@python-blog.com>"
221 221 assert init_author == init_commit.committer
222 222 assert sorted(init_commit.added_paths) == sorted(
223 223 [
224 224 b"vcs/__init__.py",
225 225 b"vcs/backends/BaseRepository.py",
226 226 b"vcs/backends/__init__.py",
227 227 ]
228 228 )
229 229 assert sorted(init_commit.affected_files) == sorted(
230 230 [
231 231 b"vcs/__init__.py",
232 232 b"vcs/backends/BaseRepository.py",
233 233 b"vcs/backends/__init__.py",
234 234 ]
235 235 )
236 236
237 237 for path in (b"vcs/__init__.py", b"vcs/backends/BaseRepository.py", b"vcs/backends/__init__.py"):
238 238 assert isinstance(init_commit.get_node(path), FileNode)
239 239 for path in (b"", b"vcs", b"vcs/backends"):
240 240 assert isinstance(init_commit.get_node(path), DirNode)
241 241
242 242 with pytest.raises(NodeDoesNotExistError):
243 243 init_commit.get_node(path=b"foobar")
244 244
245 245 node = init_commit.get_node(b"vcs/")
246 246 assert hasattr(node, "kind")
247 247 assert node.kind == NodeKind.DIR
248 248
249 249 node = init_commit.get_node(b"vcs")
250 250 assert hasattr(node, "kind")
251 251 assert node.kind == NodeKind.DIR
252 252
253 253 node = init_commit.get_node(b"vcs/__init__.py")
254 254 assert hasattr(node, "kind")
255 255 assert node.kind == NodeKind.FILE
256 256
257 257 def test_not_existing_commit(self):
258 258 with pytest.raises(RepositoryError):
259 259 self.repo.get_commit("f" * 40)
260 260
261 261 def test_commit10(self):
262 262 commit10 = self.repo.get_commit(self.repo.commit_ids[9])
263 263 README = """===
264 264 VCS
265 265 ===
266 266
267 267 Various Version Control System management abstraction layer for Python.
268 268
269 269 Introduction
270 270 ------------
271 271
272 272 TODO: To be written...
273 273
274 274 """
275 275 node = commit10.get_node(b"README.rst")
276 276 assert node.kind == NodeKind.FILE
277 277 assert node.str_content == README
278 278
279 279 def test_head(self):
280 280 assert self.repo.head == self.repo.get_commit().raw_id
281 281
282 282 def test_checkout_with_create(self, tmpdir):
283 283 repo_clone = self.get_clone_repo(tmpdir)
284 284
285 285 new_branch = "new_branch"
286 286 assert repo_clone._current_branch() == "master"
287 287 assert set(repo_clone.branches) == {"master"}
288 288 repo_clone._checkout(new_branch, create=True)
289 289
290 290         # Branches is a lazy property, so we need to recreate the Repo object.
291 291 repo_clone = GitRepository(repo_clone.path)
292 292 assert set(repo_clone.branches) == {"master", new_branch}
293 293 assert repo_clone._current_branch() == new_branch
294 294
295 295 def test_checkout(self, tmpdir):
296 296 repo_clone = self.get_clone_repo(tmpdir)
297 297
298 298 repo_clone._checkout("new_branch", create=True)
299 299 repo_clone._checkout("master")
300 300
301 301 assert repo_clone._current_branch() == "master"
302 302
303 303 def test_checkout_same_branch(self, tmpdir):
304 304 repo_clone = self.get_clone_repo(tmpdir)
305 305
306 306 repo_clone._checkout("master")
307 307 assert repo_clone._current_branch() == "master"
308 308
309 309 def test_checkout_branch_already_exists(self, tmpdir):
310 310 repo_clone = self.get_clone_repo(tmpdir)
311 311
312 312 with pytest.raises(RepositoryError):
313 313 repo_clone._checkout("master", create=True)
314 314
315 315 def test_checkout_bare_repo(self):
316 316 with pytest.raises(RepositoryError):
317 317 self.repo._checkout("master")
318 318
319 319 def test_current_branch_bare_repo(self):
320 320 with pytest.raises(RepositoryError):
321 321 self.repo._current_branch()
322 322
323 323 def test_current_branch_empty_repo(self, tmpdir):
324 324 repo = self.get_empty_repo(tmpdir)
325 325 assert repo._current_branch() is None
326 326
327 327 def test_local_clone(self, tmp_path_factory):
328 328 clone_path = str(tmp_path_factory.mktemp("test-local-clone"))
329 329 self.repo._local_clone(clone_path, "master")
330 330 repo_clone = GitRepository(clone_path)
331 331
332 332 assert self.repo.commit_ids == repo_clone.commit_ids
333 333
334 334 def test_local_clone_with_specific_branch(self, tmpdir):
335 335 source_repo = self.get_clone_repo(tmpdir)
336 336
337 337 # Create a new branch in source repo
338 338 new_branch_commit = source_repo.commit_ids[-3]
339 339 source_repo._checkout(new_branch_commit)
340 340 source_repo._checkout("new_branch", create=True)
341 341
342 342 clone_path = str(tmpdir.join("git-clone-path-1"))
343 343 source_repo._local_clone(clone_path, "new_branch")
344 344 repo_clone = GitRepository(clone_path)
345 345
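        # new_branch was created at commit_ids[-3], so its clone contains every
        # commit up to and including that one, hence the [: -3 + 1] slice (i.e. [:-2])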
346 346 assert source_repo.commit_ids[: -3 + 1] == repo_clone.commit_ids
347 347
348 348 clone_path = str(tmpdir.join("git-clone-path-2"))
349 349 source_repo._local_clone(clone_path, "master")
350 350 repo_clone = GitRepository(clone_path)
351 351
352 352 assert source_repo.commit_ids == repo_clone.commit_ids
353 353
354 354 def test_local_clone_fails_if_target_exists(self):
355 355 with pytest.raises(RepositoryError):
356 356 self.repo._local_clone(self.repo.path, "master")
357 357
358 358 def test_local_fetch(self, tmpdir):
359 359 target_repo = self.get_empty_repo(tmpdir)
360 360 source_repo = self.get_clone_repo(tmpdir)
361 361
362 362 # Create a new branch in source repo
363 363 master_commit = source_repo.commit_ids[-1]
364 364 new_branch_commit = source_repo.commit_ids[-3]
365 365 source_repo._checkout(new_branch_commit)
366 366 source_repo._checkout("new_branch", create=True)
367 367
368 368 target_repo._local_fetch(source_repo.path, "new_branch")
369 369 assert target_repo._last_fetch_heads() == [new_branch_commit]
370 370
371 371 target_repo._local_fetch(source_repo.path, "master")
372 372 assert target_repo._last_fetch_heads() == [master_commit]
373 373
374 374 def test_local_fetch_from_bare_repo(self, tmpdir):
375 375 target_repo = self.get_empty_repo(tmpdir)
376 376 target_repo._local_fetch(self.repo.path, "master")
377 377
378 378 master_commit = self.repo.commit_ids[-1]
379 379 assert target_repo._last_fetch_heads() == [master_commit]
380 380
381 381 def test_local_fetch_from_same_repo(self):
382 382 with pytest.raises(ValueError):
383 383 self.repo._local_fetch(self.repo.path, "master")
384 384
385 385 def test_local_fetch_branch_does_not_exist(self, tmpdir):
386 386 target_repo = self.get_empty_repo(tmpdir)
387 387
388 388 with pytest.raises(RepositoryError):
389 389 target_repo._local_fetch(self.repo.path, "new_branch")
390 390
391 391 def test_local_pull(self, tmpdir):
392 392 target_repo = self.get_empty_repo(tmpdir)
393 393 source_repo = self.get_clone_repo(tmpdir)
394 394
395 395 # Create a new branch in source repo
396 396 master_commit = source_repo.commit_ids[-1]
397 397 new_branch_commit = source_repo.commit_ids[-3]
398 398 source_repo._checkout(new_branch_commit)
399 399 source_repo._checkout("new_branch", create=True)
400 400
401 401 target_repo._local_pull(source_repo.path, "new_branch")
402 402 target_repo = GitRepository(target_repo.path)
403 403 assert target_repo.head == new_branch_commit
404 404
405 405 target_repo._local_pull(source_repo.path, "master")
406 406 target_repo = GitRepository(target_repo.path)
407 407 assert target_repo.head == master_commit
408 408
409 409 def test_local_pull_in_bare_repo(self):
410 410 with pytest.raises(RepositoryError):
411 411 self.repo._local_pull(self.repo.path, "master")
412 412
413 413 def test_local_merge(self, tmpdir):
414 414 target_repo = self.get_empty_repo(tmpdir)
415 415 source_repo = self.get_clone_repo(tmpdir)
416 416
417 417 # Create a new branch in source repo
418 418 master_commit = source_repo.commit_ids[-1]
419 419 new_branch_commit = source_repo.commit_ids[-3]
420 420 source_repo._checkout(new_branch_commit)
421 421 source_repo._checkout("new_branch", create=True)
422 422
423 423         # This is required, as one cannot do a --ff-only merge in an empty repo.
424 424 target_repo._local_pull(source_repo.path, "new_branch")
425 425
426 426 target_repo._local_fetch(source_repo.path, "master")
427 427 merge_message = "Merge message\n\nDescription:..."
428 428 user_name = "Albert Einstein"
429 429 user_email = "albert@einstein.com"
430 430 target_repo._local_merge(merge_message, user_name, user_email, target_repo._last_fetch_heads())
431 431
432 432 target_repo = GitRepository(target_repo.path)
433 433 assert target_repo.commit_ids[-2] == master_commit
434 434 last_commit = target_repo.get_commit(target_repo.head)
435 435 assert last_commit.message.strip() == merge_message
436 436 assert last_commit.author == "%s <%s>" % (user_name, user_email)
437 437
438 438 assert not os.path.exists(os.path.join(target_repo.path, ".git", "MERGE_HEAD"))
439 439
440 440 def test_local_merge_raises_exception_on_conflict(self, vcsbackend_git):
441 441 target_repo = vcsbackend_git.create_repo(number_of_commits=1)
442 442 vcsbackend_git.ensure_file(b"README", b"I will conflict with you!!!")
443 443
444 444 target_repo._local_fetch(self.repo.path, "master")
445 445 with pytest.raises(RepositoryError):
446 446 target_repo._local_merge("merge_message", "user name", "user@name.com", target_repo._last_fetch_heads())
447 447
448 448 # Check we are not left in an intermediate merge state
449 449 assert not os.path.exists(os.path.join(target_repo.path, ".git", "MERGE_HEAD"))
450 450
451 451 def test_local_merge_into_empty_repo(self, tmpdir):
452 452 target_repo = self.get_empty_repo(tmpdir)
453 453
454 454         # This is required, as one cannot do a --ff-only merge in an empty repo.
455 455 target_repo._local_fetch(self.repo.path, "master")
456 456 with pytest.raises(RepositoryError):
457 457 target_repo._local_merge("merge_message", "user name", "user@name.com", target_repo._last_fetch_heads())
458 458
459 459 def test_local_merge_in_bare_repo(self):
460 460 with pytest.raises(RepositoryError):
461 461 self.repo._local_merge("merge_message", "user name", "user@name.com", None)
462 462
463 463 def test_local_push_non_bare(self, tmpdir):
464 464 target_repo = self.get_empty_repo(tmpdir)
465 465
466 466 pushed_branch = "pushed_branch"
467 467 self.repo._local_push("master", target_repo.path, pushed_branch)
468 468         # Fix the HEAD of the target repo; otherwise GitRepository won't
469 469         # report any branches.
470 470 with open(os.path.join(target_repo.path, ".git", "HEAD"), "w") as f:
471 471 f.write("ref: refs/heads/%s" % pushed_branch)
472 472
473 473 target_repo = GitRepository(target_repo.path)
474 474
475 475 assert target_repo.branches[pushed_branch] == self.repo.branches["master"]
476 476
477 477 def test_local_push_bare(self, tmpdir):
478 478 target_repo = self.get_empty_repo(tmpdir, bare=True)
479 479
480 480 pushed_branch = "pushed_branch"
481 481 self.repo._local_push("master", target_repo.path, pushed_branch)
482 482         # Fix the HEAD of the target repo; otherwise GitRepository won't
483 483         # report any branches.
484 484 with open(os.path.join(target_repo.path, "HEAD"), "w") as f:
485 485 f.write("ref: refs/heads/%s" % pushed_branch)
486 486
487 487 target_repo = GitRepository(target_repo.path)
488 488
489 489 assert target_repo.branches[pushed_branch] == self.repo.branches["master"]
490 490
491 491 def test_local_push_non_bare_target_branch_is_checked_out(self, tmpdir):
492 492 target_repo = self.get_clone_repo(tmpdir)
493 493
494 494 pushed_branch = "pushed_branch"
495 495 # Create a new branch in source repo
496 496 new_branch_commit = target_repo.commit_ids[-3]
497 497 target_repo._checkout(new_branch_commit)
498 498 target_repo._checkout(pushed_branch, create=True)
499 499
500 500 self.repo._local_push("master", target_repo.path, pushed_branch)
501 501
502 502 target_repo = GitRepository(target_repo.path)
503 503
504 504 assert target_repo.branches[pushed_branch] == self.repo.branches["master"]
505 505
506 506 def test_local_push_raises_exception_on_conflict(self, vcsbackend_git):
507 507 target_repo = vcsbackend_git.create_repo(number_of_commits=1)
508 508 with pytest.raises(RepositoryError):
509 509 self.repo._local_push("master", target_repo.path, "master")
510 510
511 511 def test_hooks_can_be_enabled_via_env_variable_for_local_push(self, tmpdir):
512 512 target_repo = self.get_empty_repo(tmpdir, bare=True)
513 513
514 514 with mock.patch.object(self.repo, "run_git_command") as run_mock:
515 515 self.repo._local_push("master", target_repo.path, "master", enable_hooks=True)
516 516 env = run_mock.call_args[1]["extra_env"]
517 517 assert "RC_SKIP_HOOKS" not in env
518 518
519 519 def _add_failing_hook(self, repo_path, hook_name, bare=False):
520 520 path_components = ["hooks", hook_name] if bare else [".git", "hooks", hook_name]
521 521 hook_path = os.path.join(repo_path, *path_components)
522 522 with open(hook_path, "w") as f:
523 523 script_lines = [
524 524 "#!%s" % sys.executable,
525 525 "import os",
526 526 "import sys",
527 527 'if os.environ.get("RC_SKIP_HOOKS"):',
528 528 " sys.exit(0)",
529 529 "sys.exit(1)",
530 530 ]
531 531 f.write("\n".join(script_lines))
532 532 os.chmod(hook_path, 0o755)
533 533
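    # The helper above installs a hook script that exits non-zero unless
    # RC_SKIP_HOOKS is set in its environment. The two tests below rely on
    # this: a plain local push appears to skip hooks (so the failing
    # pre-receive does not fire), while enable_hooks=True runs it and the
    # push is rejected.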
534 534 def test_local_push_does_not_execute_hook(self, tmpdir):
535 535 target_repo = self.get_empty_repo(tmpdir)
536 536
537 537 pushed_branch = "pushed_branch"
538 538 self._add_failing_hook(target_repo.path, "pre-receive")
539 539 self.repo._local_push("master", target_repo.path, pushed_branch)
540 540         # Fix the HEAD of the target repo; otherwise GitRepository won't
541 541         # report any branches.
542 542 with open(os.path.join(target_repo.path, ".git", "HEAD"), "w") as f:
543 543 f.write("ref: refs/heads/%s" % pushed_branch)
544 544
545 545 target_repo = GitRepository(target_repo.path)
546 546
547 547 assert target_repo.branches[pushed_branch] == self.repo.branches["master"]
548 548
549 549 def test_local_push_executes_hook(self, tmpdir):
550 550 target_repo = self.get_empty_repo(tmpdir, bare=True)
551 551 self._add_failing_hook(target_repo.path, "pre-receive", bare=True)
552 552 with pytest.raises(RepositoryError):
553 553 self.repo._local_push("master", target_repo.path, "master", enable_hooks=True)
554 554
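    # In the merge-workspace tests below, Reference appears to be a
    # (type, name, commit_id) triple; "unused" marks the commit id, which
    # _maybe_prepare_merge_workspace does not need here.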
555 555 def test_maybe_prepare_merge_workspace(self):
556 556 workspace = self.repo._maybe_prepare_merge_workspace(
557 557 2, "pr2", Reference("branch", "master", "unused"), Reference("branch", "master", "unused")
558 558 )
559 559
560 560 assert os.path.isdir(workspace)
561 561 workspace_repo = GitRepository(workspace)
562 562 assert workspace_repo.branches == self.repo.branches
563 563
564 564 # Calling it a second time should also succeed
565 565 workspace = self.repo._maybe_prepare_merge_workspace(
566 566 2, "pr2", Reference("branch", "master", "unused"), Reference("branch", "master", "unused")
567 567 )
568 568 assert os.path.isdir(workspace)
569 569
570 570 def test_maybe_prepare_merge_workspace_different_refs(self):
571 571 workspace = self.repo._maybe_prepare_merge_workspace(
572 572 2, "pr2", Reference("branch", "master", "unused"), Reference("branch", "develop", "unused")
573 573 )
574 574
575 575 assert os.path.isdir(workspace)
576 576 workspace_repo = GitRepository(workspace)
577 577 assert workspace_repo.branches == self.repo.branches
578 578
579 579 # Calling it a second time should also succeed
580 580 workspace = self.repo._maybe_prepare_merge_workspace(
581 581 2, "pr2", Reference("branch", "master", "unused"), Reference("branch", "develop", "unused")
582 582 )
583 583 assert os.path.isdir(workspace)
584 584
585 585 def test_cleanup_merge_workspace(self):
586 586 workspace = self.repo._maybe_prepare_merge_workspace(
587 587 2, "pr3", Reference("branch", "master", "unused"), Reference("branch", "master", "unused")
588 588 )
589 589 self.repo.cleanup_merge_workspace(2, "pr3")
590 590
591 591 assert not os.path.exists(workspace)
592 592
593 593 def test_cleanup_merge_workspace_invalid_workspace_id(self):
594 594         # No assert: even in the case of a nonexistent workspace this function
595 595         # should still succeed.
596 596 self.repo.cleanup_merge_workspace(1, "pr4")
597 597
598 598 def test_set_refs(self):
599 599 test_ref = "refs/test-refs/abcde"
600 600 test_commit_id = "ecb86e1f424f2608262b130db174a7dfd25a6623"
601 601
602 602 self.repo.set_refs(test_ref, test_commit_id)
603 603 stdout, _ = self.repo.run_git_command(["show-ref"])
604 604 assert test_ref in stdout
605 605 assert test_commit_id in stdout
606 606
607 607 def test_remove_ref(self):
608 608 test_ref = "refs/test-refs/abcde"
609 609 test_commit_id = "ecb86e1f424f2608262b130db174a7dfd25a6623"
610 610 self.repo.set_refs(test_ref, test_commit_id)
611 611 stdout, _ = self.repo.run_git_command(["show-ref"])
612 612 assert test_ref in stdout
613 613 assert test_commit_id in stdout
614 614
615 615 self.repo.remove_ref(test_ref)
616 616 stdout, _ = self.repo.run_git_command(["show-ref"])
617 617 assert test_ref not in stdout
618 618 assert test_commit_id not in stdout
619 619
620 620
621 621 class TestGitCommit(object):
622 622 @pytest.fixture(autouse=True)
623 623 def prepare(self):
624 624 self.repo = GitRepository(TEST_GIT_REPO)
625 625
626 626 def test_default_commit(self):
627 627 tip = self.repo.get_commit()
628 628 assert tip == self.repo.get_commit(None)
629 629 assert tip == self.repo.get_commit("tip")
630 630
631 631 def test_root_node(self):
632 632 tip = self.repo.get_commit()
633 633 assert tip.root is tip.get_node(b"")
634 634
635 635 def test_lazy_fetch(self):
636 636 """
637 637         Test that a commit's nodes expand and are cached as we walk through
638 638         the commit. This test is somewhat hard to write, as the order of
639 639         operations is key here. It was written by running command after command in a shell.
640 640 """
641 641 commit_id = "2a13f185e4525f9d4b59882791a2d397b90d5ddc"
642 642 assert commit_id in self.repo.commit_ids
643 643 commit = self.repo.get_commit(commit_id)
644 644 assert len(commit.nodes) == 0
645 645 root = commit.root
646 646 assert len(commit.nodes) == 1
647 647 assert len(root.nodes) == 8
648 648 # accessing root.nodes updates commit.nodes
649 649 assert len(commit.nodes) == 9
650 650
651 651 docs = commit.get_node(b"docs")
652 652         # we haven't accessed anything new yet, as the docs dir was already cached
653 653 assert len(commit.nodes) == 9
654 654 assert len(docs.nodes) == 8
655 655 # accessing docs.nodes updates commit.nodes
656 656 assert len(commit.nodes) == 17
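        # 9 previously cached nodes + the 8 children of docs = 17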
657 657
658 658 assert docs is commit.get_node(b"docs")
659 659 assert docs is root.nodes[0]
660 660 assert docs is root.dirs[0]
661 661 assert docs is commit.get_node(b"docs")
662 662
663 663 def test_nodes_with_commit(self):
664 664 commit_id = "2a13f185e4525f9d4b59882791a2d397b90d5ddc"
665 665 commit = self.repo.get_commit(commit_id)
666 666 root = commit.root
667 667 assert isinstance(root, RootNode)
668 668 docs = commit.get_node(b"docs")
669 669 assert docs is commit.get_node(b"docs")
670 670 api = commit.get_node(b"docs/api")
671 671 assert api is commit.get_node(b"docs/api")
672 672 index = commit.get_node(b"docs/api/index.rst")
673 673 assert index is commit.get_node(b"docs/api/index.rst")
674 674
675 675 def test_branch_and_tags(self):
676 676 """
677 677 rev0 = self.repo.commit_ids[0]
678 678 commit0 = self.repo.get_commit(rev0)
679 679 assert commit0.branch == 'master'
680 680 assert commit0.tags == []
681 681
682 682 rev10 = self.repo.commit_ids[10]
683 683 commit10 = self.repo.get_commit(rev10)
684 684 assert commit10.branch == 'master'
685 685 assert commit10.tags == []
686 686
687 687 rev44 = self.repo.commit_ids[44]
688 688 commit44 = self.repo.get_commit(rev44)
689 689 assert commit44.branch == 'web-branch'
690 690
691 691 tip = self.repo.get_commit('tip')
692 692 assert 'tip' in tip.tags
693 693 """
694 694         # These tests would fail: branches are going to be changed
695 695         # in the main API in order to support the git backend
696 696 pass
697 697
698 698 def test_file_size(self):
699 699 to_check = (
700 700 ("c1214f7e79e02fc37156ff215cd71275450cffc3", b"vcs/backends/BaseRepository.py", 502),
701 701 ("d7e0d30fbcae12c90680eb095a4f5f02505ce501", b"vcs/backends/hg.py", 854),
702 702 ("6e125e7c890379446e98980d8ed60fba87d0f6d1", b"setup.py", 1068),
703 703 ("d955cd312c17b02143c04fa1099a352b04368118", b"vcs/backends/base.py", 2921),
704 704 ("ca1eb7957a54bce53b12d1a51b13452f95bc7c7e", b"vcs/backends/base.py", 3936),
705 705 ("f50f42baeed5af6518ef4b0cb2f1423f3851a941", b"vcs/backends/base.py", 6189),
706 706 )
707 707 for commit_id, path, size in to_check:
708 708 node = self.repo.get_commit(commit_id).get_node(path)
709 709 assert node.is_file()
710 710 assert node.size == size
711 711
712 712 def test_file_history_from_commits(self):
713 713 node = self.repo[10].get_node(b"setup.py")
714 714 commit_ids = [commit.raw_id for commit in node.history]
715 715 assert ["ff7ca51e58c505fec0dd2491de52c622bb7a806b"] == commit_ids
716 716
717 717 node = self.repo[20].get_node(b"setup.py")
718 718 node_ids = [commit.raw_id for commit in node.history]
719 719 assert ["191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e", "ff7ca51e58c505fec0dd2491de52c622bb7a806b"] == node_ids
720 720
721 721         # special case: we check history from a commit that changed this
722 722         # particular file; this verifies that the commit itself is included as well
723 723 node = self.repo.get_commit("191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e").get_node(b"setup.py")
724 724 node_ids = [commit.raw_id for commit in node.history]
725 725 assert ["191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e", "ff7ca51e58c505fec0dd2491de52c622bb7a806b"] == node_ids
726 726
727 727 def test_file_history(self):
728 728         # we can only check that those commits are present in the history,
729 729         # as we cannot update this test every time the file is changed
730 730 files = {
731 731 b"setup.py": [
732 732 "54386793436c938cff89326944d4c2702340037d",
733 733 "51d254f0ecf5df2ce50c0b115741f4cf13985dab",
734 734 "998ed409c795fec2012b1c0ca054d99888b22090",
735 735 "5e0eb4c47f56564395f76333f319d26c79e2fb09",
736 736 "0115510b70c7229dbc5dc49036b32e7d91d23acd",
737 737 "7cb3fd1b6d8c20ba89e2264f1c8baebc8a52d36e",
738 738 "2a13f185e4525f9d4b59882791a2d397b90d5ddc",
739 739 "191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e",
740 740 "ff7ca51e58c505fec0dd2491de52c622bb7a806b",
741 741 ],
742 742 b"vcs/nodes.py": [
743 743 "33fa3223355104431402a888fa77a4e9956feb3e",
744 744 "fa014c12c26d10ba682fadb78f2a11c24c8118e1",
745 745 "e686b958768ee96af8029fe19c6050b1a8dd3b2b",
746 746 "ab5721ca0a081f26bf43d9051e615af2cc99952f",
747 747 "c877b68d18e792a66b7f4c529ea02c8f80801542",
748 748 "4313566d2e417cb382948f8d9d7c765330356054",
749 749 "6c2303a793671e807d1cfc70134c9ca0767d98c2",
750 750 "54386793436c938cff89326944d4c2702340037d",
751 751 "54000345d2e78b03a99d561399e8e548de3f3203",
752 752 "1c6b3677b37ea064cb4b51714d8f7498f93f4b2b",
753 753 "2d03ca750a44440fb5ea8b751176d1f36f8e8f46",
754 754 "2a08b128c206db48c2f0b8f70df060e6db0ae4f8",
755 755 "30c26513ff1eb8e5ce0e1c6b477ee5dc50e2f34b",
756 756 "ac71e9503c2ca95542839af0ce7b64011b72ea7c",
757 757 "12669288fd13adba2a9b7dd5b870cc23ffab92d2",
758 758 "5a0c84f3e6fe3473e4c8427199d5a6fc71a9b382",
759 759 "12f2f5e2b38e6ff3fbdb5d722efed9aa72ecb0d5",
760 760 "5eab1222a7cd4bfcbabc218ca6d04276d4e27378",
761 761 "f50f42baeed5af6518ef4b0cb2f1423f3851a941",
762 762 "d7e390a45f6aa96f04f5e7f583ad4f867431aa25",
763 763 "f15c21f97864b4f071cddfbf2750ec2e23859414",
764 764 "e906ef056cf539a4e4e5fc8003eaf7cf14dd8ade",
765 765 "ea2b108b48aa8f8c9c4a941f66c1a03315ca1c3b",
766 766 "84dec09632a4458f79f50ddbbd155506c460b4f9",
767 767 "0115510b70c7229dbc5dc49036b32e7d91d23acd",
768 768 "2a13f185e4525f9d4b59882791a2d397b90d5ddc",
769 769 "3bf1c5868e570e39569d094f922d33ced2fa3b2b",
770 770 "b8d04012574729d2c29886e53b1a43ef16dd00a1",
771 771 "6970b057cffe4aab0a792aa634c89f4bebf01441",
772 772 "dd80b0f6cf5052f17cc738c2951c4f2070200d7f",
773 773 "ff7ca51e58c505fec0dd2491de52c622bb7a806b",
774 774 ],
775 775 b"vcs/backends/git.py": [
776 776 "4cf116ad5a457530381135e2f4c453e68a1b0105",
777 777 "9a751d84d8e9408e736329767387f41b36935153",
778 778 "cb681fb539c3faaedbcdf5ca71ca413425c18f01",
779 779 "428f81bb652bcba8d631bce926e8834ff49bdcc6",
780 780 "180ab15aebf26f98f714d8c68715e0f05fa6e1c7",
781 781 "2b8e07312a2e89e92b90426ab97f349f4bce2a3a",
782 782 "50e08c506174d8645a4bb517dd122ac946a0f3bf",
783 783 "54000345d2e78b03a99d561399e8e548de3f3203",
784 784 ],
785 785 }
786 786 for path, commit_ids in files.items():
787 787 node = self.repo.get_commit(commit_ids[0]).get_node(path)
788 788 node_ids = [commit.raw_id for commit in node.history]
789 789 assert set(commit_ids).issubset(set(node_ids)), (
790 790                 "We assumed that %s is a subset of the commit_ids for which file %s "
791 791                 "has been changed; history of that node returned: %s" % (commit_ids, path, node_ids)
792 792 )
793 793
794 794 def test_file_annotate(self):
795 795 files = {
796 796 b"vcs/backends/__init__.py": {
797 797 "c1214f7e79e02fc37156ff215cd71275450cffc3": {
798 798 "lines_no": 1,
799 799 "commits": [
800 800 "c1214f7e79e02fc37156ff215cd71275450cffc3",
801 801 ],
802 802 },
803 803 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647": {
804 804 "lines_no": 21,
805 805 "commits": [
806 806 "49d3fd156b6f7db46313fac355dca1a0b94a0017",
807 807 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
808 808 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
809 809 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
810 810 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
811 811 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
812 812 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
813 813 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
814 814 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
815 815 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
816 816 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
817 817 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
818 818 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
819 819 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
820 820 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
821 821 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
822 822 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
823 823 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
824 824 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
825 825 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
826 826 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
827 827 ],
828 828 },
829 829 "e29b67bd158580fc90fc5e9111240b90e6e86064": {
830 830 "lines_no": 32,
831 831 "commits": [
832 832 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
833 833 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
834 834 "5eab1222a7cd4bfcbabc218ca6d04276d4e27378",
835 835 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
836 836 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
837 837 "992f38217b979d0b0987d0bae3cc26dac85d9b19",
838 838 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
839 839 "54000345d2e78b03a99d561399e8e548de3f3203",
840 840 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
841 841 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
842 842 "78c3f0c23b7ee935ec276acb8b8212444c33c396",
843 843 "992f38217b979d0b0987d0bae3cc26dac85d9b19",
844 844 "992f38217b979d0b0987d0bae3cc26dac85d9b19",
845 845 "992f38217b979d0b0987d0bae3cc26dac85d9b19",
846 846 "992f38217b979d0b0987d0bae3cc26dac85d9b19",
847 847 "2a13f185e4525f9d4b59882791a2d397b90d5ddc",
848 848 "992f38217b979d0b0987d0bae3cc26dac85d9b19",
849 849 "78c3f0c23b7ee935ec276acb8b8212444c33c396",
850 850 "992f38217b979d0b0987d0bae3cc26dac85d9b19",
851 851 "992f38217b979d0b0987d0bae3cc26dac85d9b19",
852 852 "992f38217b979d0b0987d0bae3cc26dac85d9b19",
853 853 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
854 854 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
855 855 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
856 856 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
857 857 "992f38217b979d0b0987d0bae3cc26dac85d9b19",
858 858 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
859 859 "992f38217b979d0b0987d0bae3cc26dac85d9b19",
860 860 "992f38217b979d0b0987d0bae3cc26dac85d9b19",
861 861 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
862 862 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
863 863 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
864 864 ],
865 865 },
866 866 },
867 867 }
868 868
869 869 for fname, commit_dict in files.items():
870 870 for commit_id, __ in commit_dict.items():
871 871 commit = self.repo.get_commit(commit_id)
872 872
873 873 l1_1 = [x[1] for x in commit.get_file_annotate(fname)]
874 874 l1_2 = [x[2]().raw_id for x in commit.get_file_annotate(fname)]
875 875 assert l1_1 == l1_2
876 876 l1 = l1_1
877 877 l2 = files[fname][commit_id]["commits"]
878 878 assert l1 == l2, (
879 879                 "The lists of commit_ids for %s@commit_id %s "
880 880 "from annotation list should match each other, "
881 881 "got \n%s \nvs \n%s " % (fname, commit_id, l1, l2)
882 882 )
883 883
884 884 def test_files_state(self):
885 885 """
886 886 Tests state of FileNodes.
887 887 """
888 888 commit = self.repo.get_commit("e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0")
889 889 node = commit.get_node(b"vcs/utils/diffs.py")
890 890 assert node.bytes_path in commit.added_paths
891 891
892 892 commit = self.repo.get_commit("33fa3223355104431402a888fa77a4e9956feb3e")
893 893 node = commit.get_node(b".hgignore")
894 894 assert node.bytes_path in commit.changed_paths
895 895
896 896 commit = self.repo.get_commit("e29b67bd158580fc90fc5e9111240b90e6e86064")
897 897 node = commit.get_node(b"setup.py")
898 898 assert node.bytes_path not in commit.affected_files
899 899
900 900         # If a node has been removed, trying to fetch it raises a
901 901         # NodeDoesNotExistError exception
902 902 commit = self.repo.get_commit("fa6600f6848800641328adbf7811fd2372c02ab2")
903 903 path = b"vcs/backends/BaseRepository.py"
904 904 with pytest.raises(NodeDoesNotExistError):
905 905 commit.get_node(path)
906 906
907 907         # but it is listed in the commit's ``removed_paths`` attribute
908 908         assert path in commit.removed_paths
909 909
910 910 commit = self.repo.get_commit("54386793436c938cff89326944d4c2702340037d")
911 911 changed = [b"setup.py", b"tests/test_nodes.py", b"vcs/backends/hg.py", b"vcs/nodes.py"]
912 912         assert set(changed) == set(commit.changed_paths)
913 913
914 914 def test_unicode_branch_refs(self):
915 915 unicode_branches = {
916 916 "refs/heads/unicode": "6c0ce52b229aa978889e91b38777f800e85f330b",
917 917             "refs/heads/uniçö∂e": "ürl",
918 918 }
919 919 with mock.patch(("rhodecode.lib.vcs.backends.git.repository" ".GitRepository._refs"), unicode_branches):
920 920 branches = self.repo.branches
921 921
922 922 assert "unicode" in branches
923 923         assert "uniçö∂e" in branches
924 924
925 925 def test_unicode_tag_refs(self):
926 926 unicode_tags = {
927 927 "refs/tags/unicode": "6c0ce52b229aa978889e91b38777f800e85f330b",
928 928             "refs/tags/uniçö∂e": "6c0ce52b229aa978889e91b38777f800e85f330b",
929 929 }
930 930 with mock.patch(("rhodecode.lib.vcs.backends.git.repository" ".GitRepository._refs"), unicode_tags):
931 931 tags = self.repo.tags
932 932
933 933 assert "unicode" in tags
934 934         assert "uniçö∂e" in tags
935 935
936 936 def test_commit_message_is_unicode(self):
937 937 for commit in self.repo:
938 938 assert type(commit.message) == str
939 939
940 940 def test_commit_author_is_unicode(self):
941 941 for commit in self.repo:
942 942 assert type(commit.author) == str
943 943
944 944 def test_repo_files_content_types(self):
945 945 commit = self.repo.get_commit()
946 946 for node in commit.get_node(b"/"):
947 947 if node.is_file():
948 948 assert type(node.content) == bytes
949 949 assert type(node.str_content) == str
950 950
951 951 def test_wrong_path(self):
952 952 # There is 'setup.py' in the root dir but not there:
953 953 path = b"foo/bar/setup.py"
954 954 tip = self.repo.get_commit()
955 955 with pytest.raises(VCSError):
956 956 tip.get_node(path)
957 957
958 958 @pytest.mark.parametrize(
959 959 "author_email, commit_id",
960 960 [
961 961 ("marcin@python-blog.com", "c1214f7e79e02fc37156ff215cd71275450cffc3"),
962 962 ("lukasz.balcerzak@python-center.pl", "ff7ca51e58c505fec0dd2491de52c622bb7a806b"),
963 963 ("none@none", "8430a588b43b5d6da365400117c89400326e7992"),
964 964 ],
965 965 )
966 966 def test_author_email(self, author_email, commit_id):
967 967 commit = self.repo.get_commit(commit_id)
968 968 assert author_email == commit.author_email
969 969
970 970 @pytest.mark.parametrize(
971 971 "author, commit_id",
972 972 [
973 973 ("Marcin Kuzminski", "c1214f7e79e02fc37156ff215cd71275450cffc3"),
974 974 ("Lukasz Balcerzak", "ff7ca51e58c505fec0dd2491de52c622bb7a806b"),
975 975 ("marcink", "8430a588b43b5d6da365400117c89400326e7992"),
976 976 ],
977 977 )
978 978 def test_author_username(self, author, commit_id):
979 979 commit = self.repo.get_commit(commit_id)
980 980 assert author == commit.author_name
981 981
982 982
983 983 class TestLargeFileRepo(object):
984 984 def test_large_file(self, backend_git):
985 985 conf = make_db_config()
986 986 git_largefiles_store = conf.get("vcs_git_lfs", "store_location")
987 987
988 988 repo = backend_git.create_test_repo("largefiles", conf)
989 989
990 990 tip = repo.scm_instance().get_commit()
991 991 node = tip.get_node(b"1MB.zip")
992 992
993 993 # extract stored LF node into the origin cache
994 994 repo_lfs_store: str = os.path.join(repo.repo_path, repo.repo_name, "lfs_store")
995 995
996 996 oid: str = "7b331c02e313c7599d5a90212e17e6d3cb729bd2e1c9b873c302a63c95a2f9bf"
997 997 # where the OID actually is INSIDE the repo...
998 998 oid_path = os.path.join(repo_lfs_store, oid)
999 999
1000 1000         # Note: the oid path depends on LFSOidStore.store_suffix. If that changes, update the line below accordingly.
1001 1001 oid_destination = os.path.join(git_largefiles_store, f"objects/{oid[:2]}/{oid[2:4]}/{oid}")
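        # i.e. the store shards objects into objects/<oid[:2]>/<oid[2:4]>/<oid>,
        # e.g. objects/7b/33/7b33... for the oid above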
1002 1002
1003 1003 spec_path = os.path.dirname(oid_destination)
1004 1004 os.makedirs(spec_path, exist_ok=True)
1005 1005 shutil.copy(oid_path, oid_destination)
1006 1006
1007 1007 lf_node = node.get_largefile_node()
1008 1008
1009 1009 assert lf_node.is_largefile() is True
1010 1010 assert lf_node.size == 1024000
1011 1011 assert lf_node.name == b"1MB.zip"
1012 1012
1013 1013
1014 1014 @pytest.mark.usefixtures("vcs_repository_support")
1015 1015 class TestGitSpecificWithRepo(BackendTestMixin):
1016 1016 @classmethod
1017 1017 def _get_commits(cls):
1018 1018 return [
1019 1019 {
1020 1020 "message": "Initial",
1021 1021 "author": "Joe Doe <joe.doe@example.com>",
1022 1022 "date": datetime.datetime(2010, 1, 1, 20),
1023 1023 "added": [
1024 1024 FileNode(b"foobar/static/js/admin/base.js", content=b"base"),
1025 1025 FileNode(b"foobar/static/admin", content=b"admin", mode=0o120000), # this is a link
1026 1026 FileNode(b"foo", content=b"foo"),
1027 1027 ],
1028 1028 },
1029 1029 {
1030 1030 "message": "Second",
1031 1031 "author": "Joe Doe <joe.doe@example.com>",
1032 1032 "date": datetime.datetime(2010, 1, 1, 22),
1033 1033 "added": [
1034 1034 FileNode(b"foo2", content=b"foo2"),
1035 1035 ],
1036 1036 },
1037 1037 ]
1038 1038
1039 1039 def test_paths_slow_traversing(self):
1040 1040 commit = self.repo.get_commit()
1041 1041 assert commit.get_node(b"foobar/static/js/admin/base.js").content == b"base"
1042 1042
1043 1043 def test_paths_fast_traversing(self):
1044 1044 commit = self.repo.get_commit()
1045 1045 assert commit.get_node(b"foobar/static/js/admin/base.js").content == b"base"
1046 1046
1047 1047 def test_get_diff_runs_git_command_with_hashes(self):
1048 1048 comm1 = self.repo[0]
1049 1049 comm2 = self.repo[1]
1050 1050
1051 1051 with mock.patch.object(self.repo, "_remote", return_value=mock.Mock()) as remote_mock:
1052 1052 remote_mock.diff = mock.MagicMock(side_effect=callable_get_diff)
1053 1053 self.repo.get_diff(comm1, comm2)
1054 1054
1055 1055 remote_mock.diff.assert_called_once_with(
1056 1056 comm1.raw_id, comm2.raw_id, file_filter=None, opt_ignorews=False, context=3
1057 1057 )
1058 1058
1059 1059 def test_get_diff_runs_git_command_with_str_hashes(self):
1060 1060 comm2 = self.repo[1]
1061 1061
1062 1062 with mock.patch.object(self.repo, "_remote", return_value=mock.Mock()) as remote_mock:
1063 1063 remote_mock.diff = mock.MagicMock(side_effect=callable_get_diff)
1064 1064 self.repo.get_diff(self.repo.EMPTY_COMMIT, comm2)
1065 1065
1066 1066 remote_mock.diff.assert_called_once_with(
1067 1067 self.repo.EMPTY_COMMIT.raw_id, comm2.raw_id, file_filter=None, opt_ignorews=False, context=3
1068 1068 )
1069 1069
1070 1070 def test_get_diff_runs_git_command_with_path_if_its_given(self):
1071 1071 comm1 = self.repo[0]
1072 1072 comm2 = self.repo[1]
1073 1073
1074 1074 with mock.patch.object(self.repo, "_remote", return_value=mock.Mock()) as remote_mock:
1075 1075 remote_mock.diff = mock.MagicMock(side_effect=callable_get_diff)
1076 1076 self.repo.get_diff(comm1, comm2, "foo")
1077 1077
1078 1078 remote_mock.diff.assert_called_once_with(
1079 1079 self.repo._lookup_commit(0), comm2.raw_id, file_filter="foo", opt_ignorews=False, context=3
1080 1080 )
1081 1081
1082 1082
1083 1083 @pytest.mark.usefixtures("vcs_repository_support")
1084 1084 class TestGitRegression(BackendTestMixin):
1085 1085 @classmethod
1086 1086 def _get_commits(cls):
1087 1087 return [
1088 1088 {
1089 1089 "message": "Initial",
1090 1090 "author": "Joe Doe <joe.doe@example.com>",
1091 1091 "date": datetime.datetime(2010, 1, 1, 20),
1092 1092 "added": [
1093 1093 FileNode(b"bot/__init__.py", content=b"base"),
1094 1094 FileNode(b"bot/templates/404.html", content=b"base"),
1095 1095 FileNode(b"bot/templates/500.html", content=b"base"),
1096 1096 ],
1097 1097 },
1098 1098 {
1099 1099 "message": "Second",
1100 1100 "author": "Joe Doe <joe.doe@example.com>",
1101 1101 "date": datetime.datetime(2010, 1, 1, 22),
1102 1102 "added": [
1103 1103 FileNode(b"bot/build/migrations/1.py", content=b"foo2"),
1104 1104 FileNode(b"bot/build/migrations/2.py", content=b"foo2"),
1105 1105 FileNode(b"bot/build/static/templates/f.html", content=b"foo2"),
1106 1106 FileNode(b"bot/build/static/templates/f1.html", content=b"foo2"),
1107 1107 FileNode(b"bot/build/templates/err.html", content=b"foo2"),
1108 1108 FileNode(b"bot/build/templates/err2.html", content=b"foo2"),
1109 1109 ],
1110 1110 },
1111 1111 ]
1112 1112
1113 1113 @pytest.mark.parametrize(
1114 1114 "path, expected_paths",
1115 1115 [
1116 1116 (b"bot", ["bot/build", "bot/templates", "bot/__init__.py"]),
1117 1117 (b"bot/build", ["bot/build/migrations", "bot/build/static", "bot/build/templates"]),
1118 1118 (b"bot/build/static", ["bot/build/static/templates"]),
1119 1119 (
1120 1120 b"bot/build/static/templates",
1121 1121 ["bot/build/static/templates/f.html", "bot/build/static/templates/f1.html"],
1122 1122 ),
1123 1123 (b"bot/build/templates", ["bot/build/templates/err.html", "bot/build/templates/err2.html"]),
1124 1124 (b"bot/templates/", ["bot/templates/404.html", "bot/templates/500.html"]),
1125 1125 ],
1126 1126 )
1127 1127 def test_similar_paths(self, path, expected_paths):
1128 1128 commit = self.repo.get_commit()
1129 1129 paths = [n.path for n in commit.get_nodes(path)]
1130 1130 assert paths == expected_paths
1131 1131
1132 1132
1133 1133 class TestDiscoverGitVersion(object):
1134 1134 def test_returns_git_version(self, baseapp):
1135 1135 version = discover_git_version()
1136 1136 assert version
1137 1137
1138 1138 def test_returns_empty_string_without_vcsserver(self):
1139 1139 mock_connection = mock.Mock()
1140 1140 mock_connection.discover_git_version = mock.Mock(side_effect=Exception)
1141 1141 with mock.patch("rhodecode.lib.vcs.connection.Git", mock_connection):
1142 1142 version = discover_git_version()
1143 1143 assert version == ""
1144 1144
1145 1145
1146 1146 class TestGetSubmoduleUrl(object):
1147 1147 def test_submodules_file_found(self):
1148 1148 commit = GitCommit(repository=mock.Mock(), raw_id="abcdef12", idx=1)
1149 1149 node = mock.Mock()
1150 1150
1151 1151 with mock.patch.object(commit, "get_node", return_value=node) as get_node_mock:
1152 1152 node.str_content = (
1153 1153 '[submodule "subrepo1"]\n' "\tpath = subrepo1\n" "\turl = https://code.rhodecode.com/dulwich\n"
1154 1154 )
1155 1155 result = commit._get_submodule_url(b"subrepo1")
1156 1156 get_node_mock.assert_called_once_with(b".gitmodules")
1157 1157 assert result == "https://code.rhodecode.com/dulwich"
1158 1158
1159 1159 def test_complex_submodule_path(self):
1160 1160 commit = GitCommit(repository=mock.Mock(), raw_id="abcdef12", idx=1)
1161 1161 node = mock.Mock()
1162 1162
1163 1163 with mock.patch.object(commit, "get_node", return_value=node) as get_node_mock:
1164 1164 node.str_content = (
1165 1165 '[submodule "complex/subrepo/path"]\n'
1166 1166 "\tpath = complex/subrepo/path\n"
1167 1167 "\turl = https://code.rhodecode.com/dulwich\n"
1168 1168 )
1169 1169 result = commit._get_submodule_url(b"complex/subrepo/path")
1170 1170 get_node_mock.assert_called_once_with(b".gitmodules")
1171 1171 assert result == "https://code.rhodecode.com/dulwich"
1172 1172
1173 1173 def test_submodules_file_not_found(self):
1174 1174 commit = GitCommit(repository=mock.Mock(), raw_id="abcdef12", idx=1)
1175 1175 with mock.patch.object(commit, "get_node", side_effect=NodeDoesNotExistError):
1176 1176 result = commit._get_submodule_url(b"complex/subrepo/path")
1177 1177 assert result is None
1178 1178
1179 1179 def test_path_not_found(self):
1180 1180 commit = GitCommit(repository=mock.Mock(), raw_id="abcdef12", idx=1)
1181 1181 node = mock.Mock()
1182 1182
1183 1183 with mock.patch.object(commit, "get_node", return_value=node) as get_node_mock:
1184 1184 node.str_content = (
1185 1185 '[submodule "subrepo1"]\n' "\tpath = subrepo1\n" "\turl = https://code.rhodecode.com/dulwich\n"
1186 1186 )
1187 1187 result = commit._get_submodule_url(b"subrepo2")
1188 1188 get_node_mock.assert_called_once_with(b".gitmodules")
1189 1189 assert result is None
1190 1190
1191 1191 def test_returns_cached_values(self):
1192 1192 commit = GitCommit(repository=mock.Mock(), raw_id="abcdef12", idx=1)
1193 1193 node = mock.Mock()
1194 1194
1195 1195 with mock.patch.object(commit, "get_node", return_value=node) as get_node_mock:
1196 1196 node.str_content = (
1197 1197 '[submodule "subrepo1"]\n' "\tpath = subrepo1\n" "\turl = https://code.rhodecode.com/dulwich\n"
1198 1198 )
1199 1199 for _ in range(3):
1200 1200 commit._get_submodule_url(b"subrepo1")
1201 1201 get_node_mock.assert_called_once_with(b".gitmodules")
1202 1202
1203 1203 def test_get_node_returns_a_link(self):
1204 1204 repository = mock.Mock()
1205 1205 repository.alias = "git"
1206 1206 commit = GitCommit(repository=repository, raw_id="abcdef12", idx=1)
1207 1207 submodule_url = "https://code.rhodecode.com/dulwich"
1208 1208 get_id_patch = mock.patch.object(commit, "_get_path_tree_id_and_type", return_value=(1, NodeKind.SUBMODULE))
1209 1209 get_submodule_patch = mock.patch.object(commit, "_get_submodule_url", return_value=submodule_url)
1210 1210
1211 1211 with get_id_patch, get_submodule_patch as submodule_mock:
1212 1212 node = commit.get_node(b"/abcde")
1213 1213
1214 1214 submodule_mock.assert_called_once_with(b"/abcde")
1215 1215 assert type(node) == SubModuleNode
1216 1216 assert node.url == submodule_url
1217 1217
1218 1218 def test_get_nodes_returns_links(self):
1219 1219 repository = mock.MagicMock()
1220 1220 repository.alias = "git"
1221 repository._remote.tree_items.return_value = [(b"subrepo", "stat", 1, NodeKind.SUBMODULE)]
1221 # obj_name, stat_, tree_item_id, node_kind, pre_load_data
1222 repository._remote.get_nodes.return_value = [(b"subrepo", "stat", 1, NodeKind.SUBMODULE, [])]
1222 1223 commit = GitCommit(repository=repository, raw_id="abcdef12", idx=1)
1223 1224 submodule_url = "https://code.rhodecode.com/dulwich"
1225
1224 1226 get_id_patch = mock.patch.object(commit, "_get_path_tree_id_and_type", return_value=(1, NodeKind.DIR))
1225 1227 get_submodule_patch = mock.patch.object(commit, "_get_submodule_url", return_value=submodule_url)
1226 1228
1227 1229 with get_id_patch, get_submodule_patch as submodule_mock:
1228 1230 nodes = commit.get_nodes(b"/abcde")
1229 1231
1230 submodule_mock.assert_called_once_with(b"/abcde/subrepo")
1231 1232 assert len(nodes) == 1
1232 1233 assert type(nodes[0]) == SubModuleNode
1233 1234 assert nodes[0].url == submodule_url
1235 submodule_mock.assert_called_once_with(b"/abcde/subrepo")
1234 1236
1235 1237
1236 1238 class TestGetShadowInstance(object):
1237 1239 @pytest.fixture()
1238 1240 def repo(self, vcsbackend_git):
1239 1241 _git_repo = vcsbackend_git.repo
1240 1242
1241 1243 mock.patch.object(_git_repo, "config", mock.Mock())
1242 1244 connection_mock = mock.Mock(unsafe=True, name="connection.Hg")
1243 1245
1244 1246 mock.patch("rhodecode.lib.vcs.connection.Git", connection_mock)
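        # note: these patchers are created but never started (no ``with`` block
        # or ``.start()`` call), so they do not actually patch anything here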
1245 1247 return _git_repo
1246 1248
1247 1249 def test_getting_shadow_instance_copies_config(self, repo):
1248 1250 shadow = repo.get_shadow_instance(repo.path)
1249 1251 assert shadow.config.serialize() == repo.config.serialize()