auth: use a single instance of the auth checker to speed up permission checks by not creating a new instance in each loop iteration.
marcink
r4142:79ad8339 default
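The gist of the change: _PermCheckIterator previously stored the checker class and built a fresh instance inside __iter__, so every walk over the list paid the construction cost again. After this commit the instance is created once in __init__ and reused for every object. A minimal, self-contained sketch of the pattern follows; DummyPermChecker is a hypothetical stand-in for checkers such as HasRepoPermissionAny and is not part of RhodeCode.

# Hypothetical stand-in for permission checkers such as HasRepoPermissionAny.
class DummyPermChecker(object):
    def __init__(self, *perm_set):
        # the construction cost (e.g. normalizing the permission set)
        # is paid once, in here
        self.perm_set = set(perm_set)

    def __call__(self, name, check_location):
        # a real checker would consult the user's permission tree
        return name is not None


class PermCheckIterator(object):
    def __init__(self, obj_list, perm_set, perm_checker):
        self.obj_list = obj_list
        # after this commit: instantiate the checker once, up front ...
        self.perm_checker = perm_checker(*perm_set)

    def __iter__(self):
        # ... instead of rebuilding it in here before every pass
        for obj in self.obj_list:
            if not self.perm_checker(obj, self.__class__.__name__):
                continue
            yield obj


readable = list(PermCheckIterator(['repo-a', 'repo-b'],
                                  ['repository.read'], DummyPermChecker))

Since the permission set never changes between iterations, hoisting the construction out of the hot path is safe and shaves work off every permission-filtered listing.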
@@ -1,1021 +1,1020 @@
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Scm model for RhodeCode
23 23 """
24 24
25 25 import os.path
26 26 import traceback
27 27 import logging
28 28 import cStringIO
29 29
30 30 from sqlalchemy import func
31 31 from zope.cachedescriptors.property import Lazy as LazyProperty
32 32
33 33 import rhodecode
34 34 from rhodecode.lib.vcs import get_backend
35 35 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
36 36 from rhodecode.lib.vcs.nodes import FileNode
37 37 from rhodecode.lib.vcs.backends.base import EmptyCommit
38 38 from rhodecode.lib import helpers as h, rc_cache
39 39 from rhodecode.lib.auth import (
40 40 HasRepoPermissionAny, HasRepoGroupPermissionAny,
41 41 HasUserGroupPermissionAny)
42 42 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
43 43 from rhodecode.lib import hooks_utils
44 44 from rhodecode.lib.utils import (
45 45 get_filesystem_repos, make_db_config)
46 46 from rhodecode.lib.utils2 import (safe_str, safe_unicode)
47 47 from rhodecode.lib.system_info import get_system_info
48 48 from rhodecode.model import BaseModel
49 49 from rhodecode.model.db import (
50 50 or_, false,
51 51 Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
52 52 PullRequest, FileStore)
53 53 from rhodecode.model.settings import VcsSettingsModel
54 54 from rhodecode.model.validation_schema.validators import url_validator, InvalidCloneUrl
55 55
56 56 log = logging.getLogger(__name__)
57 57
58 58
59 59 class UserTemp(object):
60 60 def __init__(self, user_id):
61 61 self.user_id = user_id
62 62
63 63 def __repr__(self):
64 64 return "<%s('id:%s')>" % (self.__class__.__name__, self.user_id)
65 65
66 66
67 67 class RepoTemp(object):
68 68 def __init__(self, repo_id):
69 69 self.repo_id = repo_id
70 70
71 71 def __repr__(self):
72 72 return "<%s('id:%s')>" % (self.__class__.__name__, self.repo_id)
73 73
74 74
75 75 class SimpleCachedRepoList(object):
76 76 """
77 77 Lighter version of iteration over repos, without the scm initialisation
78 78 and with cache usage
79 79 """
80 80 def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
81 81 self.db_repo_list = db_repo_list
82 82 self.repos_path = repos_path
83 83 self.order_by = order_by
84 84 self.reversed = (order_by or '').startswith('-')
85 85 if not perm_set:
86 86 perm_set = ['repository.read', 'repository.write',
87 87 'repository.admin']
88 88 self.perm_set = perm_set
89 89
90 90 def __len__(self):
91 91 return len(self.db_repo_list)
92 92
93 93 def __repr__(self):
94 94 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
95 95
96 96 def __iter__(self):
97 97 for dbr in self.db_repo_list:
98 98 # check permission at this level
99 99 has_perm = HasRepoPermissionAny(*self.perm_set)(
100 100 dbr.repo_name, 'SimpleCachedRepoList check')
101 101 if not has_perm:
102 102 continue
103 103
104 104 tmp_d = {
105 105 'name': dbr.repo_name,
106 106 'dbrepo': dbr.get_dict(),
107 107 'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {}
108 108 }
109 109 yield tmp_d
110 110
111 111
112 112 class _PermCheckIterator(object):
113 113
114 114 def __init__(
115 115 self, obj_list, obj_attr, perm_set, perm_checker,
116 116 extra_kwargs=None):
117 117 """
118 118 Creates an iterator from the given list of objects, additionally
119 119 checking permissions on each of them against perm_set
120 120
121 121 :param obj_list: list of db objects
122 122 :param obj_attr: attribute of object to pass into perm_checker
123 123 :param perm_set: list of permissions to check
124 124 :param perm_checker: callable to check permissions against
125 125 """
126 126 self.obj_list = obj_list
127 127 self.obj_attr = obj_attr
128 128 self.perm_set = perm_set
129 - self.perm_checker = perm_checker
129 + self.perm_checker = perm_checker(*self.perm_set)
130 130 self.extra_kwargs = extra_kwargs or {}
131 131
132 132 def __len__(self):
133 133 return len(self.obj_list)
134 134
135 135 def __repr__(self):
136 136 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
137 137
138 138 def __iter__(self):
139 - checker = self.perm_checker(*self.perm_set)
140 139 for db_obj in self.obj_list:
141 140 # check permission at this level
142 141 name = getattr(db_obj, self.obj_attr, None)
143 - if not checker(name, self.__class__.__name__, **self.extra_kwargs):
142 + if not self.perm_checker(name, self.__class__.__name__, **self.extra_kwargs):
144 143 continue
145 144
146 145 yield db_obj
147 146
148 147
149 148 class RepoList(_PermCheckIterator):
150 149
151 150 def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
152 151 if not perm_set:
153 152 perm_set = [
154 153 'repository.read', 'repository.write', 'repository.admin']
155 154
156 155 super(RepoList, self).__init__(
157 156 obj_list=db_repo_list,
158 157 obj_attr='repo_name', perm_set=perm_set,
159 158 perm_checker=HasRepoPermissionAny,
160 159 extra_kwargs=extra_kwargs)
161 160
162 161
163 162 class RepoGroupList(_PermCheckIterator):
164 163
165 164 def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
166 165 if not perm_set:
167 166 perm_set = ['group.read', 'group.write', 'group.admin']
168 167
169 168 super(RepoGroupList, self).__init__(
170 169 obj_list=db_repo_group_list,
171 170 obj_attr='group_name', perm_set=perm_set,
172 171 perm_checker=HasRepoGroupPermissionAny,
173 172 extra_kwargs=extra_kwargs)
174 173
175 174
176 175 class UserGroupList(_PermCheckIterator):
177 176
178 177 def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
179 178 if not perm_set:
180 179 perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
181 180
182 181 super(UserGroupList, self).__init__(
183 182 obj_list=db_user_group_list,
184 183 obj_attr='users_group_name', perm_set=perm_set,
185 184 perm_checker=HasUserGroupPermissionAny,
186 185 extra_kwargs=extra_kwargs)
187 186
188 187
189 188 class ScmModel(BaseModel):
190 189 """
191 190 Generic Scm Model
192 191 """
193 192
194 193 @LazyProperty
195 194 def repos_path(self):
196 195 """
197 196 Gets the repositories root path from database
198 197 """
199 198
200 199 settings_model = VcsSettingsModel(sa=self.sa)
201 200 return settings_model.get_repos_location()
202 201
203 202 def repo_scan(self, repos_path=None):
204 203 """
205 204 Listing of repositories in the given path. This path should not be a
206 205 repository itself. Return a dictionary of repository objects
207 206
208 207 :param repos_path: path to directory containing repositories
209 208 """
210 209
211 210 if repos_path is None:
212 211 repos_path = self.repos_path
213 212
214 213 log.info('scanning for repositories in %s', repos_path)
215 214
216 215 config = make_db_config()
217 216 config.set('extensions', 'largefiles', '')
218 217 repos = {}
219 218
220 219 for name, path in get_filesystem_repos(repos_path, recursive=True):
221 220 # name needs to be decomposed and put back together using the /
222 221 # since this is the internal storage separator for rhodecode
223 222 name = Repository.normalize_repo_name(name)
224 223
225 224 try:
226 225 if name in repos:
227 226 raise RepositoryError('Duplicate repository name %s '
228 227 'found in %s' % (name, path))
229 228 elif path[0] in rhodecode.BACKENDS:
230 229 backend = get_backend(path[0])
231 230 repos[name] = backend(path[1], config=config,
232 231 with_wire={"cache": False})
233 232 except OSError:
234 233 continue
235 234 log.debug('found %s paths with repositories', len(repos))
236 235 return repos
237 236
238 237 def get_repos(self, all_repos=None, sort_key=None):
239 238 """
240 239 Get all repositories from db and for each repo create its
241 240 backend instance and fill that backend with information from the database
242 241
243 242 :param all_repos: list of repository names as strings
244 243 to give a specific list of repositories, useful for filtering
245 244
246 245 :param sort_key: initial sorting of repositories
247 246 """
248 247 if all_repos is None:
249 248 all_repos = self.sa.query(Repository)\
250 249 .filter(Repository.group_id == None)\
251 250 .order_by(func.lower(Repository.repo_name)).all()
252 251 repo_iter = SimpleCachedRepoList(
253 252 all_repos, repos_path=self.repos_path, order_by=sort_key)
254 253 return repo_iter
255 254
256 255 def get_repo_groups(self, all_groups=None):
257 256 if all_groups is None:
258 257 all_groups = RepoGroup.query()\
259 258 .filter(RepoGroup.group_parent_id == None).all()
260 259 return [x for x in RepoGroupList(all_groups)]
261 260
262 261 def mark_for_invalidation(self, repo_name, delete=False):
263 262 """
264 263 Mark caches of this repo invalid in the database. `delete` flag
265 264 removes the cache entries
266 265
267 266 :param repo_name: the repo_name for which caches should be marked
268 267 invalid, or deleted
269 268 :param delete: delete the entry keys instead of setting a bool
270 269 flag on them, and also purge caches used by dogpile
271 270 """
272 271 repo = Repository.get_by_repo_name(repo_name)
273 272
274 273 if repo:
275 274 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
276 275 repo_id=repo.repo_id)
277 276 CacheKey.set_invalidate(invalidation_namespace, delete=delete)
278 277
279 278 repo_id = repo.repo_id
280 279 config = repo._config
281 280 config.set('extensions', 'largefiles', '')
282 281 repo.update_commit_cache(config=config, cs_cache=None)
283 282 if delete:
284 283 cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
285 284 rc_cache.clear_cache_namespace('cache_repo', cache_namespace_uid)
286 285
287 286 def toggle_following_repo(self, follow_repo_id, user_id):
288 287
289 288 f = self.sa.query(UserFollowing)\
290 289 .filter(UserFollowing.follows_repo_id == follow_repo_id)\
291 290 .filter(UserFollowing.user_id == user_id).scalar()
292 291
293 292 if f is not None:
294 293 try:
295 294 self.sa.delete(f)
296 295 return
297 296 except Exception:
298 297 log.error(traceback.format_exc())
299 298 raise
300 299
301 300 try:
302 301 f = UserFollowing()
303 302 f.user_id = user_id
304 303 f.follows_repo_id = follow_repo_id
305 304 self.sa.add(f)
306 305 except Exception:
307 306 log.error(traceback.format_exc())
308 307 raise
309 308
310 309 def toggle_following_user(self, follow_user_id, user_id):
311 310 f = self.sa.query(UserFollowing)\
312 311 .filter(UserFollowing.follows_user_id == follow_user_id)\
313 312 .filter(UserFollowing.user_id == user_id).scalar()
314 313
315 314 if f is not None:
316 315 try:
317 316 self.sa.delete(f)
318 317 return
319 318 except Exception:
320 319 log.error(traceback.format_exc())
321 320 raise
322 321
323 322 try:
324 323 f = UserFollowing()
325 324 f.user_id = user_id
326 325 f.follows_user_id = follow_user_id
327 326 self.sa.add(f)
328 327 except Exception:
329 328 log.error(traceback.format_exc())
330 329 raise
331 330
332 331 def is_following_repo(self, repo_name, user_id, cache=False):
333 332 r = self.sa.query(Repository)\
334 333 .filter(Repository.repo_name == repo_name).scalar()
335 334
336 335 f = self.sa.query(UserFollowing)\
337 336 .filter(UserFollowing.follows_repository == r)\
338 337 .filter(UserFollowing.user_id == user_id).scalar()
339 338
340 339 return f is not None
341 340
342 341 def is_following_user(self, username, user_id, cache=False):
343 342 u = User.get_by_username(username)
344 343
345 344 f = self.sa.query(UserFollowing)\
346 345 .filter(UserFollowing.follows_user == u)\
347 346 .filter(UserFollowing.user_id == user_id).scalar()
348 347
349 348 return f is not None
350 349
351 350 def get_followers(self, repo):
352 351 repo = self._get_repo(repo)
353 352
354 353 return self.sa.query(UserFollowing)\
355 354 .filter(UserFollowing.follows_repository == repo).count()
356 355
357 356 def get_forks(self, repo):
358 357 repo = self._get_repo(repo)
359 358 return self.sa.query(Repository)\
360 359 .filter(Repository.fork == repo).count()
361 360
362 361 def get_pull_requests(self, repo):
363 362 repo = self._get_repo(repo)
364 363 return self.sa.query(PullRequest)\
365 364 .filter(PullRequest.target_repo == repo)\
366 365 .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
367 366
368 367 def get_artifacts(self, repo):
369 368 repo = self._get_repo(repo)
370 369 return self.sa.query(FileStore)\
371 370 .filter(FileStore.repo == repo)\
372 371 .filter(or_(FileStore.hidden == None, FileStore.hidden == false())).count()
373 372
374 373 def mark_as_fork(self, repo, fork, user):
375 374 repo = self._get_repo(repo)
376 375 fork = self._get_repo(fork)
377 376 if fork and repo.repo_id == fork.repo_id:
378 377 raise Exception("Cannot set repository as fork of itself")
379 378
380 379 if fork and repo.repo_type != fork.repo_type:
381 380 raise RepositoryError(
382 381 "Cannot set repository as fork of repository with other type")
383 382
384 383 repo.fork = fork
385 384 self.sa.add(repo)
386 385 return repo
387 386
388 387 def pull_changes(self, repo, username, remote_uri=None, validate_uri=True):
389 388 dbrepo = self._get_repo(repo)
390 389 remote_uri = remote_uri or dbrepo.clone_uri
391 390 if not remote_uri:
392 391 raise Exception("This repository doesn't have a clone uri")
393 392
394 393 repo = dbrepo.scm_instance(cache=False)
395 394 repo.config.clear_section('hooks')
396 395
397 396 try:
398 397 # NOTE(marcink): add extra validation so we skip invalid urls
399 398 # this is because these tasks can be executed via the scheduler without
400 399 # proper validation of remote_uri
401 400 if validate_uri:
402 401 config = make_db_config(clear_session=False)
403 402 url_validator(remote_uri, dbrepo.repo_type, config)
404 403 except InvalidCloneUrl:
405 404 raise
406 405
407 406 repo_name = dbrepo.repo_name
408 407 try:
409 408 # TODO: we need to make sure those operations call proper hooks !
410 409 repo.fetch(remote_uri)
411 410
412 411 self.mark_for_invalidation(repo_name)
413 412 except Exception:
414 413 log.error(traceback.format_exc())
415 414 raise
416 415
417 416 def push_changes(self, repo, username, remote_uri=None, validate_uri=True):
418 417 dbrepo = self._get_repo(repo)
419 418 remote_uri = remote_uri or dbrepo.push_uri
420 419 if not remote_uri:
421 420 raise Exception("This repository doesn't have a push uri")
422 421
423 422 repo = dbrepo.scm_instance(cache=False)
424 423 repo.config.clear_section('hooks')
425 424
426 425 try:
427 426 # NOTE(marcink): add extra validation so we skip invalid urls
428 427 # this is because these tasks can be executed via the scheduler without
429 428 # proper validation of remote_uri
430 429 if validate_uri:
431 430 config = make_db_config(clear_session=False)
432 431 url_validator(remote_uri, dbrepo.repo_type, config)
433 432 except InvalidCloneUrl:
434 433 raise
435 434
436 435 try:
437 436 repo.push(remote_uri)
438 437 except Exception:
439 438 log.error(traceback.format_exc())
440 439 raise
441 440
442 441 def commit_change(self, repo, repo_name, commit, user, author, message,
443 442 content, f_path):
444 443 """
445 444 Commits changes
446 445
447 446 :param repo: SCM instance
448 447
449 448 """
450 449 user = self._get_user(user)
451 450
452 451 # decoding here ensures that we have properly encoded values;
453 452 # in any other case this will throw exceptions and deny the commit
454 453 content = safe_str(content)
455 454 path = safe_str(f_path)
456 455 # message and author need to be unicode
457 456 # the proper backend should then translate that into the required type
458 457 message = safe_unicode(message)
459 458 author = safe_unicode(author)
460 459 imc = repo.in_memory_commit
461 460 imc.change(FileNode(path, content, mode=commit.get_file_mode(f_path)))
462 461 try:
463 462 # TODO: handle pre-push action !
464 463 tip = imc.commit(
465 464 message=message, author=author, parents=[commit],
466 465 branch=commit.branch)
467 466 except Exception as e:
468 467 log.error(traceback.format_exc())
469 468 raise IMCCommitError(str(e))
470 469 finally:
471 470 # always clear caches; if the commit fails we still want a fresh object
472 471 self.mark_for_invalidation(repo_name)
473 472
474 473 # We trigger the post-push action
475 474 hooks_utils.trigger_post_push_hook(
476 475 username=user.username, action='push_local', hook_type='post_push',
477 476 repo_name=repo_name, repo_alias=repo.alias, commit_ids=[tip.raw_id])
478 477 return tip
479 478
480 479 def _sanitize_path(self, f_path):
481 480 if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path:
482 481 raise NonRelativePathError('%s is not a relative path' % f_path)
483 482 if f_path:
484 483 f_path = os.path.normpath(f_path)
485 484 return f_path
486 485
487 486 def get_dirnode_metadata(self, request, commit, dir_node):
488 487 if not dir_node.is_dir():
489 488 return []
490 489
491 490 data = []
492 491 for node in dir_node:
493 492 if not node.is_file():
494 493 # we skip anything that is not a file
495 494 continue
496 495
497 496 last_commit = node.last_commit
498 497 last_commit_date = last_commit.date
499 498 data.append({
500 499 'name': node.name,
501 500 'size': h.format_byte_size_binary(node.size),
502 501 'modified_at': h.format_date(last_commit_date),
503 502 'modified_ts': last_commit_date.isoformat(),
504 503 'revision': last_commit.revision,
505 504 'short_id': last_commit.short_id,
506 505 'message': h.escape(last_commit.message),
507 506 'author': h.escape(last_commit.author),
508 507 'user_profile': h.gravatar_with_user(
509 508 request, last_commit.author),
510 509 })
511 510
512 511 return data
513 512
514 513 def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
515 514 extended_info=False, content=False, max_file_bytes=None):
516 515 """
517 516 recursive walk in root dir and return a set of all paths in that dir,
518 517 based on the repository walk function
519 518
520 519 :param repo_name: name of repository
521 520 :param commit_id: commit id for which to list nodes
522 521 :param root_path: root path to list
523 522 :param flat: return as a list; if False, return a dict with descriptions
524 523 :param extended_info: show additional info such as md5, binary, size etc
525 524 :param content: add nodes content to the return data
526 525 :param max_file_bytes: will not return file contents over this limit
527 526
528 527 """
529 528 _files = list()
530 529 _dirs = list()
531 530 try:
532 531 _repo = self._get_repo(repo_name)
533 532 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
534 533 root_path = root_path.lstrip('/')
535 534 for __, dirs, files in commit.walk(root_path):
536 535
537 536 for f in files:
538 537 _content = None
539 538 _data = f_name = f.unicode_path
540 539
541 540 if not flat:
542 541 _data = {
543 542 "name": h.escape(f_name),
544 543 "type": "file",
545 544 }
546 545 if extended_info:
547 546 _data.update({
548 547 "md5": f.md5,
549 548 "binary": f.is_binary,
550 549 "size": f.size,
551 550 "extension": f.extension,
552 551 "mimetype": f.mimetype,
553 552 "lines": f.lines()[0]
554 553 })
555 554
556 555 if content:
557 556 over_size_limit = (max_file_bytes is not None
558 557 and f.size > max_file_bytes)
559 558 full_content = None
560 559 if not f.is_binary and not over_size_limit:
561 560 full_content = safe_str(f.content)
562 561
563 562 _data.update({
564 563 "content": full_content,
565 564 })
566 565 _files.append(_data)
567 566
568 567 for d in dirs:
569 568 _data = d_name = d.unicode_path
570 569 if not flat:
571 570 _data = {
572 571 "name": h.escape(d_name),
573 572 "type": "dir",
574 573 }
575 574 if extended_info:
576 575 _data.update({
577 576 "md5": None,
578 577 "binary": None,
579 578 "size": None,
580 579 "extension": None,
581 580 })
582 581 if content:
583 582 _data.update({
584 583 "content": None
585 584 })
586 585 _dirs.append(_data)
587 586 except RepositoryError:
588 587 log.exception("Exception in get_nodes")
589 588 raise
590 589
591 590 return _dirs, _files
592 591
593 592 def get_quick_filter_nodes(self, repo_name, commit_id, root_path='/'):
594 593 """
595 594 Generate files for quick filter in files view
596 595 """
597 596
598 597 _files = list()
599 598 _dirs = list()
600 599 try:
601 600 _repo = self._get_repo(repo_name)
602 601 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
603 602 root_path = root_path.lstrip('/')
604 603 for __, dirs, files in commit.walk(root_path):
605 604
606 605 for f in files:
607 606
608 607 _data = {
609 608 "name": h.escape(f.unicode_path),
610 609 "type": "file",
611 610 }
612 611
613 612 _files.append(_data)
614 613
615 614 for d in dirs:
616 615
617 616 _data = {
618 617 "name": h.escape(d.unicode_path),
619 618 "type": "dir",
620 619 }
621 620
622 621 _dirs.append(_data)
623 622 except RepositoryError:
624 623 log.exception("Exception in get_quick_filter_nodes")
625 624 raise
626 625
627 626 return _dirs, _files
628 627
629 628 def get_node(self, repo_name, commit_id, file_path,
630 629 extended_info=False, content=False, max_file_bytes=None, cache=True):
631 630 """
632 631 retrieve a single node from a commit
633 632 """
634 633 try:
635 634
636 635 _repo = self._get_repo(repo_name)
637 636 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
638 637
639 638 file_node = commit.get_node(file_path)
640 639 if file_node.is_dir():
641 640 raise RepositoryError('The given path is a directory')
642 641
643 642 _content = None
644 643 f_name = file_node.unicode_path
645 644
646 645 file_data = {
647 646 "name": h.escape(f_name),
648 647 "type": "file",
649 648 }
650 649
651 650 if extended_info:
652 651 file_data.update({
653 652 "extension": file_node.extension,
654 653 "mimetype": file_node.mimetype,
655 654 })
656 655
657 656 if cache:
658 657 md5 = file_node.md5
659 658 is_binary = file_node.is_binary
660 659 size = file_node.size
661 660 else:
662 661 is_binary, md5, size, _content = file_node.metadata_uncached()
663 662
664 663 file_data.update({
665 664 "md5": md5,
666 665 "binary": is_binary,
667 666 "size": size,
668 667 })
669 668
670 669 if content and cache:
671 670 # get content + cache
672 671 size = file_node.size
673 672 over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
674 673 full_content = None
675 674 all_lines = 0
676 675 if not file_node.is_binary and not over_size_limit:
677 676 full_content = safe_unicode(file_node.content)
678 677 all_lines, empty_lines = file_node.count_lines(full_content)
679 678
680 679 file_data.update({
681 680 "content": full_content,
682 681 "lines": all_lines
683 682 })
684 683 elif content:
685 684 # get content *without* cache
686 685 if _content is None:
687 686 is_binary, md5, size, _content = file_node.metadata_uncached()
688 687
689 688 over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
690 689 full_content = None
691 690 all_lines = 0
692 691 if not is_binary and not over_size_limit:
693 692 full_content = safe_unicode(_content)
694 693 all_lines, empty_lines = file_node.count_lines(full_content)
695 694
696 695 file_data.update({
697 696 "content": full_content,
698 697 "lines": all_lines
699 698 })
700 699
701 700 except RepositoryError:
702 701 log.exception("Exception in get_node")
703 702 raise
704 703
705 704 return file_data
706 705
707 706 def get_fts_data(self, repo_name, commit_id, root_path='/'):
708 707 """
709 708 Fetch node tree for usage in full text search
710 709 """
711 710
712 711 tree_info = list()
713 712
714 713 try:
715 714 _repo = self._get_repo(repo_name)
716 715 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
717 716 root_path = root_path.lstrip('/')
718 717 for __, dirs, files in commit.walk(root_path):
719 718
720 719 for f in files:
721 720 is_binary, md5, size, _content = f.metadata_uncached()
722 721 _data = {
723 722 "name": f.unicode_path,
724 723 "md5": md5,
725 724 "extension": f.extension,
726 725 "binary": is_binary,
727 726 "size": size
728 727 }
729 728
730 729 tree_info.append(_data)
731 730
732 731 except RepositoryError:
733 732 log.exception("Exception in get_nodes")
734 733 raise
735 734
736 735 return tree_info
737 736
738 737 def create_nodes(self, user, repo, message, nodes, parent_commit=None,
739 738 author=None, trigger_push_hook=True):
740 739 """
741 740 Commits the given nodes, as multiple files, into the repo
742 741
743 742 :param user: RhodeCode User object or user_id, the committer
744 743 :param repo: RhodeCode Repository object
745 744 :param message: commit message
746 745 :param nodes: mapping {filename:{'content':content},...}
747 746 :param parent_commit: parent commit; can be empty, in which case
748 747 it's the initial commit
749 748 :param author: author of the commit; can be different than the
750 749 committer, but only for git
751 750 :param trigger_push_hook: trigger push hooks
752 751
753 752 :returns: the newly committed commit
754 753 """
755 754
756 755 user = self._get_user(user)
757 756 scm_instance = repo.scm_instance(cache=False)
758 757
759 758 processed_nodes = []
760 759 for f_path in nodes:
761 760 f_path = self._sanitize_path(f_path)
762 761 content = nodes[f_path]['content']
763 762 f_path = safe_str(f_path)
764 763 # decoding here ensures that we have properly encoded values;
765 764 # in any other case this will throw exceptions and deny the commit
766 765 if isinstance(content, (basestring,)):
767 766 content = safe_str(content)
768 767 elif isinstance(content, (file, cStringIO.OutputType,)):
769 768 content = content.read()
770 769 else:
771 770 raise Exception('Content is of unrecognized type %s' % (
772 771 type(content)
773 772 ))
774 773 processed_nodes.append((f_path, content))
775 774
776 775 message = safe_unicode(message)
777 776 commiter = user.full_contact
778 777 author = safe_unicode(author) if author else commiter
779 778
780 779 imc = scm_instance.in_memory_commit
781 780
782 781 if not parent_commit:
783 782 parent_commit = EmptyCommit(alias=scm_instance.alias)
784 783
785 784 if isinstance(parent_commit, EmptyCommit):
786 785 # EmptyCommit means we're editing an empty repository
787 786 parents = None
788 787 else:
789 788 parents = [parent_commit]
790 789 # add multiple nodes
791 790 for path, content in processed_nodes:
792 791 imc.add(FileNode(path, content=content))
793 792 # TODO: handle pre push scenario
794 793 tip = imc.commit(message=message,
795 794 author=author,
796 795 parents=parents,
797 796 branch=parent_commit.branch)
798 797
799 798 self.mark_for_invalidation(repo.repo_name)
800 799 if trigger_push_hook:
801 800 hooks_utils.trigger_post_push_hook(
802 801 username=user.username, action='push_local',
803 802 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
804 803 hook_type='post_push',
805 804 commit_ids=[tip.raw_id])
806 805 return tip
807 806
808 807 def update_nodes(self, user, repo, message, nodes, parent_commit=None,
809 808 author=None, trigger_push_hook=True):
810 809 user = self._get_user(user)
811 810 scm_instance = repo.scm_instance(cache=False)
812 811
813 812 message = safe_unicode(message)
814 813 commiter = user.full_contact
815 814 author = safe_unicode(author) if author else commiter
816 815
817 816 imc = scm_instance.in_memory_commit
818 817
819 818 if not parent_commit:
820 819 parent_commit = EmptyCommit(alias=scm_instance.alias)
821 820
822 821 if isinstance(parent_commit, EmptyCommit):
823 822 # EmptyCommit means we're editing an empty repository
824 823 parents = None
825 824 else:
826 825 parents = [parent_commit]
827 826
828 827 # add multiple nodes
829 828 for _filename, data in nodes.items():
830 829 # new filename, can be renamed from the old one; also sanitize
831 830 # the path against any hacks with relative paths like ../../ etc.
832 831 filename = self._sanitize_path(data['filename'])
833 832 old_filename = self._sanitize_path(_filename)
834 833 content = data['content']
835 834 file_mode = data.get('mode')
836 835 filenode = FileNode(old_filename, content=content, mode=file_mode)
837 836 op = data['op']
838 837 if op == 'add':
839 838 imc.add(filenode)
840 839 elif op == 'del':
841 840 imc.remove(filenode)
842 841 elif op == 'mod':
843 842 if filename != old_filename:
844 843 # TODO: handle renames more efficiently, needs vcs lib changes
845 844 imc.remove(filenode)
846 845 imc.add(FileNode(filename, content=content, mode=file_mode))
847 846 else:
848 847 imc.change(filenode)
849 848
850 849 try:
851 850 # TODO: handle pre push scenario commit changes
852 851 tip = imc.commit(message=message,
853 852 author=author,
854 853 parents=parents,
855 854 branch=parent_commit.branch)
856 855 except NodeNotChangedError:
857 856 raise
858 857 except Exception as e:
859 858 log.exception("Unexpected exception during call to imc.commit")
860 859 raise IMCCommitError(str(e))
861 860 finally:
862 861 # always clear caches; if the commit fails we still want a fresh object
863 862 self.mark_for_invalidation(repo.repo_name)
864 863
865 864 if trigger_push_hook:
866 865 hooks_utils.trigger_post_push_hook(
867 866 username=user.username, action='push_local', hook_type='post_push',
868 867 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
869 868 commit_ids=[tip.raw_id])
870 869
871 870 return tip
872 871
873 872 def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
874 873 author=None, trigger_push_hook=True):
875 874 """
876 875 Deletes the given nodes, as multiple files, from `repo`
877 876
878 877 :param user: RhodeCode User object or user_id, the committer
879 878 :param repo: RhodeCode Repository object
880 879 :param message: commit message
881 880 :param nodes: mapping {filename:{'content':content},...}
882 881 :param parent_commit: parent commit; can be empty, in which case
883 882 it's the initial commit
884 883 :param author: author of the commit; can be different than the
885 884 committer, but only for git
886 885 :param trigger_push_hook: trigger push hooks
887 886
888 887 :returns: new commit after deletion
889 888 """
890 889
891 890 user = self._get_user(user)
892 891 scm_instance = repo.scm_instance(cache=False)
893 892
894 893 processed_nodes = []
895 894 for f_path in nodes:
896 895 f_path = self._sanitize_path(f_path)
897 896 # content can be empty, but for compatibility it allows the same
898 897 # dict structure as create_nodes
899 898 content = nodes[f_path].get('content')
900 899 processed_nodes.append((f_path, content))
901 900
902 901 message = safe_unicode(message)
903 902 commiter = user.full_contact
904 903 author = safe_unicode(author) if author else commiter
905 904
906 905 imc = scm_instance.in_memory_commit
907 906
908 907 if not parent_commit:
909 908 parent_commit = EmptyCommit(alias=scm_instance.alias)
910 909
911 910 if isinstance(parent_commit, EmptyCommit):
912 911 # EmptyCommit means we're editing an empty repository
913 912 parents = None
914 913 else:
915 914 parents = [parent_commit]
916 915 # add multiple nodes
917 916 for path, content in processed_nodes:
918 917 imc.remove(FileNode(path, content=content))
919 918
920 919 # TODO: handle pre push scenario
921 920 tip = imc.commit(message=message,
922 921 author=author,
923 922 parents=parents,
924 923 branch=parent_commit.branch)
925 924
926 925 self.mark_for_invalidation(repo.repo_name)
927 926 if trigger_push_hook:
928 927 hooks_utils.trigger_post_push_hook(
929 928 username=user.username, action='push_local', hook_type='post_push',
930 929 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
931 930 commit_ids=[tip.raw_id])
932 931 return tip
933 932
934 933 def strip(self, repo, commit_id, branch):
935 934 scm_instance = repo.scm_instance(cache=False)
936 935 scm_instance.config.clear_section('hooks')
937 936 scm_instance.strip(commit_id, branch)
938 937 self.mark_for_invalidation(repo.repo_name)
939 938
940 939 def get_unread_journal(self):
941 940 return self.sa.query(UserLog).count()
942 941
943 942 @classmethod
944 943 def backend_landing_ref(cls, repo_type):
945 944 """
946 945 Return a default landing ref based on a repository type.
947 946 """
948 947
949 948 landing_ref = {
950 949 'hg': ('branch:default', 'default'),
951 950 'git': ('branch:master', 'master'),
952 951 'svn': ('rev:tip', 'latest tip'),
953 952 'default': ('rev:tip', 'latest tip'),
954 953 }
955 954
956 955 return landing_ref.get(repo_type) or landing_ref['default']
957 956
958 957 def get_repo_landing_revs(self, translator, repo=None):
959 958 """
960 959 Generates select options with tags, branches and bookmarks
961 960 (bookmarks are for hg only), grouped by type
962 961
963 962 :param repo:
964 963 """
965 964 _ = translator
966 965 repo = self._get_repo(repo)
967 966
968 967 if repo:
969 968 repo_type = repo.repo_type
970 969 else:
971 970 repo_type = 'default'
972 971
973 972 default_landing_ref, landing_ref_lbl = self.backend_landing_ref(repo_type)
974 973
975 974 default_ref_options = [
976 975 [default_landing_ref, landing_ref_lbl]
977 976 ]
978 977 default_choices = [
979 978 default_landing_ref
980 979 ]
981 980
982 981 if not repo:
983 982 return default_choices, default_ref_options
984 983
985 984 repo = repo.scm_instance()
986 985
987 986 ref_options = [('rev:tip', 'latest tip')]
988 987 choices = ['rev:tip']
989 988
990 989 # branches
991 990 branch_group = [(u'branch:%s' % safe_unicode(b), safe_unicode(b)) for b in repo.branches]
992 991 if not branch_group:
993 992 # new repo, or maybe a repo without any branch?
994 993 branch_group = default_ref_options
995 994
996 995 branches_group = (branch_group, _("Branches"))
997 996 ref_options.append(branches_group)
998 997 choices.extend([x[0] for x in branches_group[0]])
999 998
1000 999 # bookmarks for HG
1001 1000 if repo.alias == 'hg':
1002 1001 bookmarks_group = (
1003 1002 [(u'book:%s' % safe_unicode(b), safe_unicode(b))
1004 1003 for b in repo.bookmarks],
1005 1004 _("Bookmarks"))
1006 1005 ref_options.append(bookmarks_group)
1007 1006 choices.extend([x[0] for x in bookmarks_group[0]])
1008 1007
1009 1008 # tags
1010 1009 tags_group = (
1011 1010 [(u'tag:%s' % safe_unicode(t), safe_unicode(t))
1012 1011 for t in repo.tags],
1013 1012 _("Tags"))
1014 1013 ref_options.append(tags_group)
1015 1014 choices.extend([x[0] for x in tags_group[0]])
1016 1015
1017 1016 return choices, ref_options
1018 1017
1019 1018 def get_server_info(self, environ=None):
1020 1019 server_info = get_system_info(environ)
1021 1020 return server_info
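For context, a rough sketch of how the wrappers fixed above are usually consumed; the names follow the diff, but treat the query and session handling as illustrative rather than canonical:

# Illustrative only: filter DB repositories down to those the current
# user may access, using the RepoList wrapper defined in this file.
from rhodecode.model.db import Repository
from rhodecode.model.scm import RepoList

all_repos = Repository.query().all()
readable = RepoList(all_repos, perm_set=['repository.read',
                                         'repository.write',
                                         'repository.admin'])
for repo in readable:  # the single checker instance runs once per repo
    print(repo.repo_name)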