##// END OF EJS Templates
auth: optimize attribute fetching in AuthList iterators
marcink -
r4149:117ca338 default
parent child Browse files
Show More
@@ -1,1019 +1,1020 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Scm model for RhodeCode
23 23 """
24 24
25 25 import os.path
26 26 import traceback
27 27 import logging
28 28 import cStringIO
29 29
30 30 from sqlalchemy import func
31 31 from zope.cachedescriptors.property import Lazy as LazyProperty
32 32
33 33 import rhodecode
34 34 from rhodecode.lib.vcs import get_backend
35 35 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
36 36 from rhodecode.lib.vcs.nodes import FileNode
37 37 from rhodecode.lib.vcs.backends.base import EmptyCommit
38 38 from rhodecode.lib import helpers as h, rc_cache
39 39 from rhodecode.lib.auth import (
40 40 HasRepoPermissionAny, HasRepoGroupPermissionAny,
41 41 HasUserGroupPermissionAny)
42 42 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
43 43 from rhodecode.lib import hooks_utils
44 44 from rhodecode.lib.utils import (
45 45 get_filesystem_repos, make_db_config)
46 46 from rhodecode.lib.utils2 import (safe_str, safe_unicode)
47 47 from rhodecode.lib.system_info import get_system_info
48 48 from rhodecode.model import BaseModel
49 49 from rhodecode.model.db import (
50 50 or_, false,
51 51 Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
52 52 PullRequest, FileStore)
53 53 from rhodecode.model.settings import VcsSettingsModel
54 54 from rhodecode.model.validation_schema.validators import url_validator, InvalidCloneUrl
55 55
56 56 log = logging.getLogger(__name__)
57 57
58 58
class UserTemp(object):
    """Lightweight stand-in for a User row, carrying only the user id."""

    def __init__(self, user_id):
        self.user_id = user_id

    def __repr__(self):
        cls_name = self.__class__.__name__
        return "<%s('id:%s')>" % (cls_name, self.user_id)
65 65
66 66
class RepoTemp(object):
    """Lightweight stand-in for a Repository row, carrying only the repo id."""

    def __init__(self, repo_id):
        self.repo_id = repo_id

    def __repr__(self):
        cls_name = self.__class__.__name__
        return "<%s('id:%s')>" % (cls_name, self.repo_id)
73 73
74 74
class SimpleCachedRepoList(object):
    """
    Iterate repository db objects without instantiating scm backends.

    Permission filtering happens lazily at iteration time; each yielded
    item is a small dict describing the repository and its fork (if any).
    """

    def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
        self.db_repo_list = db_repo_list
        self.repos_path = repos_path
        self.order_by = order_by
        # a leading '-' on the sort key means descending order
        self.reversed = (order_by or '').startswith('-')
        self.perm_set = perm_set or [
            'repository.read', 'repository.write', 'repository.admin']

    def __len__(self):
        return len(self.db_repo_list)

    def __repr__(self):
        return '<%s (%s)>' % (self.__class__.__name__, len(self))

    def __iter__(self):
        for dbr in self.db_repo_list:
            # permission check happens per-item, at this level
            allowed = HasRepoPermissionAny(*self.perm_set)(
                dbr.repo_name, 'SimpleCachedRepoList check')
            if not allowed:
                continue

            yield {
                'name': dbr.repo_name,
                'dbrepo': dbr.get_dict(),
                'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {},
            }
110 110
111 111
class _PermCheckIterator(object):
    """
    Wrap a list of db objects and yield only those that pass a
    permission check built from *perm_set* and *perm_checker*.
    """

    def __init__(
            self, obj_list, obj_attr, perm_set, perm_checker,
            extra_kwargs=None):
        """
        Creates iterator from given list of objects, additionally
        checking permission for them from perm_set var

        :param obj_list: list of db objects
        :param obj_attr: attribute of object to pass into perm_checker
        :param perm_set: list of permissions to check
        :param perm_checker: callable to check permissions against
        """
        self.obj_list = obj_list
        self.obj_attr = obj_attr
        self.perm_set = perm_set
        # the checker is instantiated once and reused for every object
        self.perm_checker = perm_checker(*self.perm_set)
        self.extra_kwargs = extra_kwargs or {}

    def __len__(self):
        return len(self.obj_list)

    def __repr__(self):
        return '<%s (%s)>' % (self.__class__.__name__, len(self))

    def __iter__(self):
        checker = self.perm_checker
        attr_name = self.obj_attr
        caller = self.__class__.__name__
        for db_obj in self.obj_list:
            # NOTE(marcink): the __dict__.get() is ~4x faster then getattr()
            name = db_obj.__dict__.get(attr_name, None)
            if checker(name, caller, **self.extra_kwargs):
                yield db_obj
146 147
147 148
class RepoList(_PermCheckIterator):
    """Iterator over Repository db objects filtered by repo permissions."""

    def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
        if not perm_set:
            perm_set = ['repository.read', 'repository.write', 'repository.admin']

        super(RepoList, self).__init__(
            obj_list=db_repo_list,
            # '_repo_name' is the underlying column attribute stored in the
            # instance __dict__, which the fast-path lookup in the base
            # iterator reads directly
            obj_attr='_repo_name', perm_set=perm_set,
            perm_checker=HasRepoPermissionAny,
            extra_kwargs=extra_kwargs)
159 160
160 161
class RepoGroupList(_PermCheckIterator):
    """Iterator over RepoGroup db objects filtered by group permissions."""

    def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
        if not perm_set:
            perm_set = ['group.read', 'group.write', 'group.admin']

        super(RepoGroupList, self).__init__(
            obj_list=db_repo_group_list,
            # '_group_name' is the underlying column attribute stored in the
            # instance __dict__, read via the base iterator's fast path
            obj_attr='_group_name', perm_set=perm_set,
            perm_checker=HasRepoGroupPermissionAny,
            extra_kwargs=extra_kwargs)
172 173
173 174
class UserGroupList(_PermCheckIterator):
    """Iterator over UserGroup db objects filtered by user-group permissions."""

    def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
        if not perm_set:
            perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']

        super(UserGroupList, self).__init__(
            obj_list=db_user_group_list,
            # NOTE(review): unlike RepoList/RepoGroupList this uses the plain
            # attribute name (no underscore prefix) — presumably
            # 'users_group_name' is a direct column, not a hybrid; confirm
            obj_attr='users_group_name', perm_set=perm_set,
            perm_checker=HasUserGroupPermissionAny,
            extra_kwargs=extra_kwargs)
185 186
186 187
class ScmModel(BaseModel):
    """
    Generic Scm Model
    """

    @LazyProperty
    def repos_path(self):
        """
        Gets the repositories root path from database
        """
        # LazyProperty: computed once per instance, cached afterwards
        settings_model = VcsSettingsModel(sa=self.sa)
        return settings_model.get_repos_location()
200 201
    def repo_scan(self, repos_path=None):
        """
        Listing of repositories in given path. This path should not be a
        repository itself. Return a dictionary of repository objects

        :param repos_path: path to directory containing repositories
        :return: dict mapping normalized repo name -> backend instance
        """

        if repos_path is None:
            repos_path = self.repos_path

        log.info('scanning for repositories in %s', repos_path)

        config = make_db_config()
        config.set('extensions', 'largefiles', '')
        repos = {}

        for name, path in get_filesystem_repos(repos_path, recursive=True):
            # name need to be decomposed and put back together using the /
            # since this is internal storage separator for rhodecode
            name = Repository.normalize_repo_name(name)

            try:
                if name in repos:
                    raise RepositoryError('Duplicate repository name %s '
                                          'found in %s' % (name, path))
                elif path[0] in rhodecode.BACKENDS:
                    backend = get_backend(path[0])
                    repos[name] = backend(path[1], config=config,
                                          with_wire={"cache": False})
            # unreadable paths are skipped rather than failing the whole scan
            except OSError:
                continue
        log.debug('found %s paths with repositories', len(repos))
        return repos
235 236
    def get_repos(self, all_repos=None, sort_key=None):
        """
        Get all repositories from db and for each repo create it's
        backend instance and fill that backed with information from database

        :param all_repos: list of repository names as strings
            give specific repositories list, good for filtering

        :param sort_key: initial sorting of repositories
        :return: SimpleCachedRepoList wrapping the db objects
        """
        if all_repos is None:
            # default: all top-level (non-grouped) repositories, sorted
            # case-insensitively by name
            all_repos = self.sa.query(Repository)\
                .filter(Repository.group_id == None)\
                .order_by(func.lower(Repository.repo_name)).all()
        repo_iter = SimpleCachedRepoList(
            all_repos, repos_path=self.repos_path, order_by=sort_key)
        return repo_iter
253 254
254 255 def get_repo_groups(self, all_groups=None):
255 256 if all_groups is None:
256 257 all_groups = RepoGroup.query()\
257 258 .filter(RepoGroup.group_parent_id == None).all()
258 259 return [x for x in RepoGroupList(all_groups)]
259 260
    def mark_for_invalidation(self, repo_name, delete=False):
        """
        Mark caches of this repo invalid in the database. `delete` flag
        removes the cache entries

        :param repo_name: the repo_name for which caches should be marked
            invalid, or deleted
        :param delete: delete the entry keys instead of setting bool
            flag on them, and also purge caches used by the dogpile
        """
        repo = Repository.get_by_repo_name(repo_name)

        if repo:
            invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
                repo_id=repo.repo_id)
            CacheKey.set_invalidate(invalidation_namespace, delete=delete)

            repo_id = repo.repo_id
            config = repo._config
            config.set('extensions', 'largefiles', '')
            # refresh the cached commit data right after invalidation
            repo.update_commit_cache(config=config, cs_cache=None)
            if delete:
                # also purge the dogpile cache region for this repo
                cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
                rc_cache.clear_cache_namespace('cache_repo', cache_namespace_uid)
284 285
    def toggle_following_repo(self, follow_repo_id, user_id):
        """
        Toggle the user's "following" state for a repository: remove the
        follow entry when one exists, otherwise create it.

        :param follow_repo_id: id of the repository to (un)follow
        :param user_id: id of the acting user
        """
        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_repo_id == follow_repo_id)\
            .filter(UserFollowing.user_id == user_id).scalar()

        if f is not None:
            try:
                self.sa.delete(f)
                return
            except Exception:
                log.error(traceback.format_exc())
                raise

        try:
            f = UserFollowing()
            f.user_id = user_id
            f.follows_repo_id = follow_repo_id
            self.sa.add(f)
        except Exception:
            log.error(traceback.format_exc())
            raise
307 308
    def toggle_following_user(self, follow_user_id, user_id):
        """
        Toggle the user's "following" state for another user: remove the
        follow entry when one exists, otherwise create it.

        :param follow_user_id: id of the user to (un)follow
        :param user_id: id of the acting user
        """
        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_user_id == follow_user_id)\
            .filter(UserFollowing.user_id == user_id).scalar()

        if f is not None:
            try:
                self.sa.delete(f)
                return
            except Exception:
                log.error(traceback.format_exc())
                raise

        try:
            f = UserFollowing()
            f.user_id = user_id
            f.follows_user_id = follow_user_id
            self.sa.add(f)
        except Exception:
            log.error(traceback.format_exc())
            raise
329 330
    def is_following_repo(self, repo_name, user_id, cache=False):
        """
        Return True when the given user follows the named repository.

        :param cache: unused; kept for interface compatibility
        """
        r = self.sa.query(Repository)\
            .filter(Repository.repo_name == repo_name).scalar()

        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_repository == r)\
            .filter(UserFollowing.user_id == user_id).scalar()

        return f is not None
339 340
    def is_following_user(self, username, user_id, cache=False):
        """
        Return True when the given user follows the named user.

        :param cache: unused; kept for interface compatibility
        """
        u = User.get_by_username(username)

        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_user == u)\
            .filter(UserFollowing.user_id == user_id).scalar()

        return f is not None
348 349
    def get_followers(self, repo):
        """Return the number of users following the given repository."""
        repo = self._get_repo(repo)

        return self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_repository == repo).count()
354 355
    def get_forks(self, repo):
        """Return the number of forks of the given repository."""
        repo = self._get_repo(repo)
        return self.sa.query(Repository)\
            .filter(Repository.fork == repo).count()
359 360
    def get_pull_requests(self, repo):
        """Return the number of open (non-closed) pull requests targeting repo."""
        repo = self._get_repo(repo)
        return self.sa.query(PullRequest)\
            .filter(PullRequest.target_repo == repo)\
            .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
365 366
    def get_artifacts(self, repo):
        """Return the number of visible (non-hidden) file-store artifacts of repo."""
        repo = self._get_repo(repo)
        return self.sa.query(FileStore)\
            .filter(FileStore.repo == repo)\
            .filter(or_(FileStore.hidden == None, FileStore.hidden == false())).count()
371 372
    def mark_as_fork(self, repo, fork, user):
        """
        Mark `repo` as a fork of `fork` (pass a falsy `fork` to clear).

        :raises Exception: when marking a repository as its own fork
        :raises RepositoryError: when the two repositories differ in type
        :return: the updated Repository object
        """
        repo = self._get_repo(repo)
        fork = self._get_repo(fork)
        if fork and repo.repo_id == fork.repo_id:
            raise Exception("Cannot set repository as fork of itself")

        if fork and repo.repo_type != fork.repo_type:
            raise RepositoryError(
                "Cannot set repository as fork of repository with other type")

        repo.fork = fork
        self.sa.add(repo)
        return repo
385 386
    def pull_changes(self, repo, username, remote_uri=None, validate_uri=True):
        """
        Fetch changes from a remote uri into the repository.

        :param repo: repository name or db object
        :param username: acting user's name (not used in this method body)
        :param remote_uri: optional explicit source uri; falls back to the
            repository's configured clone_uri
        :param validate_uri: validate the remote uri before fetching
        :raises InvalidCloneUrl: when uri validation fails
        """
        dbrepo = self._get_repo(repo)
        remote_uri = remote_uri or dbrepo.clone_uri
        if not remote_uri:
            raise Exception("This repository doesn't have a clone uri")

        repo = dbrepo.scm_instance(cache=False)
        # run without hooks for this internal sync operation
        repo.config.clear_section('hooks')

        try:
            # NOTE(marcink): add extra validation so we skip invalid urls
            # this is due this tasks can be executed via scheduler without
            # proper validation of remote_uri
            if validate_uri:
                config = make_db_config(clear_session=False)
                url_validator(remote_uri, dbrepo.repo_type, config)
        except InvalidCloneUrl:
            raise

        repo_name = dbrepo.repo_name
        try:
            # TODO: we need to make sure those operations call proper hooks !
            repo.fetch(remote_uri)

            self.mark_for_invalidation(repo_name)
        except Exception:
            log.error(traceback.format_exc())
            raise
414 415
415 416 def push_changes(self, repo, username, remote_uri=None, validate_uri=True):
416 417 dbrepo = self._get_repo(repo)
417 418 remote_uri = remote_uri or dbrepo.push_uri
418 419 if not remote_uri:
419 420 raise Exception("This repository doesn't have a clone uri")
420 421
421 422 repo = dbrepo.scm_instance(cache=False)
422 423 repo.config.clear_section('hooks')
423 424
424 425 try:
425 426 # NOTE(marcink): add extra validation so we skip invalid urls
426 427 # this is due this tasks can be executed via scheduler without
427 428 # proper validation of remote_uri
428 429 if validate_uri:
429 430 config = make_db_config(clear_session=False)
430 431 url_validator(remote_uri, dbrepo.repo_type, config)
431 432 except InvalidCloneUrl:
432 433 raise
433 434
434 435 try:
435 436 repo.push(remote_uri)
436 437 except Exception:
437 438 log.error(traceback.format_exc())
438 439 raise
439 440
    def commit_change(self, repo, repo_name, commit, user, author, message,
                      content, f_path):
        """
        Commits changes

        :param repo: SCM instance
        :param repo_name: repository name (used for cache invalidation
            and the post-push hook)
        :param commit: parent commit the change is based on
        :param user: committing user (db object or id)
        :param author: commit author string
        :param message: commit message
        :param content: new file content
        :param f_path: path of the file being changed
        :return: the newly created commit (tip)
        :raises IMCCommitError: when the in-memory commit fails
        """
        user = self._get_user(user)

        # decoding here will force that we have proper encoded values
        # in any other case this will throw exceptions and deny commit
        content = safe_str(content)
        path = safe_str(f_path)
        # message and author needs to be unicode
        # proper backend should then translate that into required type
        message = safe_unicode(message)
        author = safe_unicode(author)
        imc = repo.in_memory_commit
        imc.change(FileNode(path, content, mode=commit.get_file_mode(f_path)))
        try:
            # TODO: handle pre-push action !
            tip = imc.commit(
                message=message, author=author, parents=[commit],
                branch=commit.branch)
        except Exception as e:
            log.error(traceback.format_exc())
            raise IMCCommitError(str(e))
        finally:
            # always clear caches, if commit fails we want fresh object also
            self.mark_for_invalidation(repo_name)

        # We trigger the post-push action
        hooks_utils.trigger_post_push_hook(
            username=user.username, action='push_local', hook_type='post_push',
            repo_name=repo_name, repo_alias=repo.alias, commit_ids=[tip.raw_id])
        return tip
477 478
478 479 def _sanitize_path(self, f_path):
479 480 if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path:
480 481 raise NonRelativePathError('%s is not an relative path' % f_path)
481 482 if f_path:
482 483 f_path = os.path.normpath(f_path)
483 484 return f_path
484 485
    def get_dirnode_metadata(self, request, commit, dir_node):
        """
        Collect per-file metadata (size, last-commit info, author gravatar)
        for the entries of a directory node.

        :return: list of dicts; empty list when *dir_node* is not a directory
        """
        if not dir_node.is_dir():
            return []

        data = []
        for node in dir_node:
            if not node.is_file():
                # skip anything that is not a file node
                continue

            last_commit = node.last_commit
            last_commit_date = last_commit.date
            data.append({
                'name': node.name,
                'size': h.format_byte_size_binary(node.size),
                'modified_at': h.format_date(last_commit_date),
                'modified_ts': last_commit_date.isoformat(),
                'revision': last_commit.revision,
                'short_id': last_commit.short_id,
                'message': h.escape(last_commit.message),
                'author': h.escape(last_commit.author),
                'user_profile': h.gravatar_with_user(
                    request, last_commit.author),
            })

        return data
511 512
512 513 def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
513 514 extended_info=False, content=False, max_file_bytes=None):
514 515 """
515 516 recursive walk in root dir and return a set of all path in that dir
516 517 based on repository walk function
517 518
518 519 :param repo_name: name of repository
519 520 :param commit_id: commit id for which to list nodes
520 521 :param root_path: root path to list
521 522 :param flat: return as a list, if False returns a dict with description
522 523 :param extended_info: show additional info such as md5, binary, size etc
523 524 :param content: add nodes content to the return data
524 525 :param max_file_bytes: will not return file contents over this limit
525 526
526 527 """
527 528 _files = list()
528 529 _dirs = list()
529 530 try:
530 531 _repo = self._get_repo(repo_name)
531 532 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
532 533 root_path = root_path.lstrip('/')
533 534 for __, dirs, files in commit.walk(root_path):
534 535
535 536 for f in files:
536 537 _content = None
537 538 _data = f_name = f.unicode_path
538 539
539 540 if not flat:
540 541 _data = {
541 542 "name": h.escape(f_name),
542 543 "type": "file",
543 544 }
544 545 if extended_info:
545 546 _data.update({
546 547 "md5": f.md5,
547 548 "binary": f.is_binary,
548 549 "size": f.size,
549 550 "extension": f.extension,
550 551 "mimetype": f.mimetype,
551 552 "lines": f.lines()[0]
552 553 })
553 554
554 555 if content:
555 556 over_size_limit = (max_file_bytes is not None
556 557 and f.size > max_file_bytes)
557 558 full_content = None
558 559 if not f.is_binary and not over_size_limit:
559 560 full_content = safe_str(f.content)
560 561
561 562 _data.update({
562 563 "content": full_content,
563 564 })
564 565 _files.append(_data)
565 566
566 567 for d in dirs:
567 568 _data = d_name = d.unicode_path
568 569 if not flat:
569 570 _data = {
570 571 "name": h.escape(d_name),
571 572 "type": "dir",
572 573 }
573 574 if extended_info:
574 575 _data.update({
575 576 "md5": None,
576 577 "binary": None,
577 578 "size": None,
578 579 "extension": None,
579 580 })
580 581 if content:
581 582 _data.update({
582 583 "content": None
583 584 })
584 585 _dirs.append(_data)
585 586 except RepositoryError:
586 587 log.exception("Exception in get_nodes")
587 588 raise
588 589
589 590 return _dirs, _files
590 591
591 592 def get_quick_filter_nodes(self, repo_name, commit_id, root_path='/'):
592 593 """
593 594 Generate files for quick filter in files view
594 595 """
595 596
596 597 _files = list()
597 598 _dirs = list()
598 599 try:
599 600 _repo = self._get_repo(repo_name)
600 601 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
601 602 root_path = root_path.lstrip('/')
602 603 for __, dirs, files in commit.walk(root_path):
603 604
604 605 for f in files:
605 606
606 607 _data = {
607 608 "name": h.escape(f.unicode_path),
608 609 "type": "file",
609 610 }
610 611
611 612 _files.append(_data)
612 613
613 614 for d in dirs:
614 615
615 616 _data = {
616 617 "name": h.escape(d.unicode_path),
617 618 "type": "dir",
618 619 }
619 620
620 621 _dirs.append(_data)
621 622 except RepositoryError:
622 623 log.exception("Exception in get_quick_filter_nodes")
623 624 raise
624 625
625 626 return _dirs, _files
626 627
    def get_node(self, repo_name, commit_id, file_path,
                 extended_info=False, content=False, max_file_bytes=None, cache=True):
        """
        retrieve single node from commit

        :param repo_name: name of the repository
        :param commit_id: commit id to fetch the node from
        :param file_path: path of the file node
        :param extended_info: include extension/mimetype metadata
        :param content: include file content and line count
        :param max_file_bytes: content is omitted for files larger than this
        :param cache: use the node's cached metadata/content when True
        :raises RepositoryError: when the path points to a directory
        :return: dict describing the file node
        """
        try:

            _repo = self._get_repo(repo_name)
            commit = _repo.scm_instance().get_commit(commit_id=commit_id)

            file_node = commit.get_node(file_path)
            if file_node.is_dir():
                raise RepositoryError('The given path is a directory')

            _content = None
            f_name = file_node.unicode_path

            file_data = {
                "name": h.escape(f_name),
                "type": "file",
            }

            if extended_info:
                file_data.update({
                    "extension": file_node.extension,
                    "mimetype": file_node.mimetype,
                })

            if cache:
                # cached node properties
                md5 = file_node.md5
                is_binary = file_node.is_binary
                size = file_node.size
            else:
                # single uncached call; may also yield the content up-front
                is_binary, md5, size, _content = file_node.metadata_uncached()

            file_data.update({
                "md5": md5,
                "binary": is_binary,
                "size": size,
            })

            if content and cache:
                # get content + cache
                size = file_node.size
                over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
                full_content = None
                all_lines = 0
                if not file_node.is_binary and not over_size_limit:
                    full_content = safe_unicode(file_node.content)
                    all_lines, empty_lines = file_node.count_lines(full_content)

                file_data.update({
                    "content": full_content,
                    "lines": all_lines
                })
            elif content:
                # get content *without* cache
                if _content is None:
                    is_binary, md5, size, _content = file_node.metadata_uncached()

                over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
                full_content = None
                all_lines = 0
                if not is_binary and not over_size_limit:
                    full_content = safe_unicode(_content)
                    all_lines, empty_lines = file_node.count_lines(full_content)

                file_data.update({
                    "content": full_content,
                    "lines": all_lines
                })

        except RepositoryError:
            log.exception("Exception in get_node")
            raise

        return file_data
704 705
705 706 def get_fts_data(self, repo_name, commit_id, root_path='/'):
706 707 """
707 708 Fetch node tree for usage in full text search
708 709 """
709 710
710 711 tree_info = list()
711 712
712 713 try:
713 714 _repo = self._get_repo(repo_name)
714 715 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
715 716 root_path = root_path.lstrip('/')
716 717 for __, dirs, files in commit.walk(root_path):
717 718
718 719 for f in files:
719 720 is_binary, md5, size, _content = f.metadata_uncached()
720 721 _data = {
721 722 "name": f.unicode_path,
722 723 "md5": md5,
723 724 "extension": f.extension,
724 725 "binary": is_binary,
725 726 "size": size
726 727 }
727 728
728 729 tree_info.append(_data)
729 730
730 731 except RepositoryError:
731 732 log.exception("Exception in get_nodes")
732 733 raise
733 734
734 735 return tree_info
735 736
    def create_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
        """
        Commits given multiple nodes into repo

        :param user: RhodeCode User object or user_id, the committer
        :param repo: RhodeCode Repository object
        :param message: commit message
        :param nodes: mapping {filename:{'content':content},...}
        :param parent_commit: parent commit, can be empty than it's
            initial commit
        :param author: author of commit, can be different than the committer
            only for git
        :param trigger_push_hook: trigger push hooks

        :returns: new committed commit
        """

        user = self._get_user(user)
        scm_instance = repo.scm_instance(cache=False)

        processed_nodes = []
        for f_path in nodes:
            f_path = self._sanitize_path(f_path)
            # NOTE(review): lookup uses the sanitized path as key — a path
            # changed by normpath would KeyError here; presumably callers
            # pass pre-normalized paths, confirm
            content = nodes[f_path]['content']
            f_path = safe_str(f_path)
            # decoding here will force that we have proper encoded values
            # in any other case this will throw exceptions and deny commit
            if isinstance(content, (basestring,)):
                content = safe_str(content)
            elif isinstance(content, (file, cStringIO.OutputType,)):
                content = content.read()
            else:
                raise Exception('Content is of unrecognized type %s' % (
                    type(content)
                ))
            processed_nodes.append((f_path, content))

        message = safe_unicode(message)
        commiter = user.full_contact
        author = safe_unicode(author) if author else commiter

        imc = scm_instance.in_memory_commit

        if not parent_commit:
            parent_commit = EmptyCommit(alias=scm_instance.alias)

        if isinstance(parent_commit, EmptyCommit):
            # EmptyCommit means we're editing an empty (new) repository
            parents = None
        else:
            parents = [parent_commit]
        # add multiple nodes
        for path, content in processed_nodes:
            imc.add(FileNode(path, content=content))
        # TODO: handle pre push scenario
        tip = imc.commit(message=message,
                         author=author,
                         parents=parents,
                         branch=parent_commit.branch)

        self.mark_for_invalidation(repo.repo_name)
        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local',
                repo_name=repo.repo_name, repo_alias=scm_instance.alias,
                hook_type='post_push',
                commit_ids=[tip.raw_id])
        return tip
805 806
    def update_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
        """
        Commit add/delete/modify operations for multiple nodes in one commit.

        :param user: RhodeCode User object or user_id, the committer
        :param repo: RhodeCode Repository object
        :param message: commit message
        :param nodes: mapping {old_filename: {'filename': new_name,
            'content': ..., 'mode': optional mode,
            'op': one of 'add'|'del'|'mod'}, ...}
        :param parent_commit: parent commit, empty means initial commit
        :param author: author of commit, can be different than the committer
        :param trigger_push_hook: trigger push hooks
        :returns: the new commit
        :raises IMCCommitError: on unexpected commit failure
        """
        user = self._get_user(user)
        scm_instance = repo.scm_instance(cache=False)

        message = safe_unicode(message)
        commiter = user.full_contact
        author = safe_unicode(author) if author else commiter

        imc = scm_instance.in_memory_commit

        if not parent_commit:
            parent_commit = EmptyCommit(alias=scm_instance.alias)

        if isinstance(parent_commit, EmptyCommit):
            # EmptyCommit means we're editing an empty (new) repository
            parents = None
        else:
            parents = [parent_commit]

        # add multiple nodes
        for _filename, data in nodes.items():
            # new filename, can be renamed from the old one, also sanitize
            # the path for any hack around relative paths like ../../ etc.
            filename = self._sanitize_path(data['filename'])
            old_filename = self._sanitize_path(_filename)
            content = data['content']
            file_mode = data.get('mode')
            filenode = FileNode(old_filename, content=content, mode=file_mode)
            op = data['op']
            if op == 'add':
                imc.add(filenode)
            elif op == 'del':
                imc.remove(filenode)
            elif op == 'mod':
                if filename != old_filename:
                    # TODO: handle renames more efficient, needs vcs lib changes
                    imc.remove(filenode)
                    imc.add(FileNode(filename, content=content, mode=file_mode))
                else:
                    imc.change(filenode)

        try:
            # TODO: handle pre push scenario commit changes
            tip = imc.commit(message=message,
                             author=author,
                             parents=parents,
                             branch=parent_commit.branch)
        except NodeNotChangedError:
            raise
        except Exception as e:
            log.exception("Unexpected exception during call to imc.commit")
            raise IMCCommitError(str(e))
        finally:
            # always clear caches, if commit fails we want fresh object also
            self.mark_for_invalidation(repo.repo_name)

        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local', hook_type='post_push',
                repo_name=repo.repo_name, repo_alias=scm_instance.alias,
                commit_ids=[tip.raw_id])

        return tip
870 871
    def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
        """
        Deletes given multiple nodes into `repo`

        :param user: RhodeCode User object or user_id, the committer
        :param repo: RhodeCode Repository object
        :param message: commit message
        :param nodes: mapping {filename:{'content':content},...}
        :param parent_commit: parent commit, can be empty than it's initial
            commit
        :param author: author of commit, can be different than the committer
            only for git
        :param trigger_push_hook: trigger push hooks

        :returns: new commit after deletion
        """

        user = self._get_user(user)
        scm_instance = repo.scm_instance(cache=False)

        processed_nodes = []
        for f_path in nodes:
            f_path = self._sanitize_path(f_path)
            # content can be empty but for compatibility it allows same dicts
            # structure as add_nodes
            content = nodes[f_path].get('content')
            processed_nodes.append((f_path, content))

        message = safe_unicode(message)
        commiter = user.full_contact
        author = safe_unicode(author) if author else commiter

        imc = scm_instance.in_memory_commit

        if not parent_commit:
            parent_commit = EmptyCommit(alias=scm_instance.alias)

        if isinstance(parent_commit, EmptyCommit):
            # EmptyCommit means we're editing an empty (new) repository
            parents = None
        else:
            parents = [parent_commit]
        # add multiple nodes
        for path, content in processed_nodes:
            imc.remove(FileNode(path, content=content))

        # TODO: handle pre push scenario
        tip = imc.commit(message=message,
                         author=author,
                         parents=parents,
                         branch=parent_commit.branch)

        self.mark_for_invalidation(repo.repo_name)
        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local', hook_type='post_push',
                repo_name=repo.repo_name, repo_alias=scm_instance.alias,
                commit_ids=[tip.raw_id])
        return tip
931 932
    def strip(self, repo, commit_id, branch):
        """
        Strip the given commit from the repository (hooks disabled),
        then invalidate the repository caches.
        """
        scm_instance = repo.scm_instance(cache=False)
        scm_instance.config.clear_section('hooks')
        scm_instance.strip(commit_id, branch)
        self.mark_for_invalidation(repo.repo_name)
937 938
    def get_unread_journal(self):
        """Return the total number of journal (UserLog) entries."""
        return self.sa.query(UserLog).count()
940 941
941 942 @classmethod
942 943 def backend_landing_ref(cls, repo_type):
943 944 """
944 945 Return a default landing ref based on a repository type.
945 946 """
946 947
947 948 landing_ref = {
948 949 'hg': ('branch:default', 'default'),
949 950 'git': ('branch:master', 'master'),
950 951 'svn': ('rev:tip', 'latest tip'),
951 952 'default': ('rev:tip', 'latest tip'),
952 953 }
953 954
954 955 return landing_ref.get(repo_type) or landing_ref['default']
955 956
    def get_repo_landing_revs(self, translator, repo=None):
        """
        Generates select option with tags branches and bookmarks (for hg only)
        grouped by type

        :param translator: i18n translator callable (bound to ``_``)
        :param repo: repository name or db object; when None only the
            backend defaults are returned
        :return: tuple of (choices, ref_options)
        """
        _ = translator
        repo = self._get_repo(repo)

        if repo:
            repo_type = repo.repo_type
        else:
            repo_type = 'default'

        default_landing_ref, landing_ref_lbl = self.backend_landing_ref(repo_type)

        default_ref_options = [
            [default_landing_ref, landing_ref_lbl]
        ]
        default_choices = [
            default_landing_ref
        ]

        if not repo:
            return default_choices, default_ref_options

        repo = repo.scm_instance()

        ref_options = [('rev:tip', 'latest tip')]
        choices = ['rev:tip']

        # branches
        branch_group = [(u'branch:%s' % safe_unicode(b), safe_unicode(b)) for b in repo.branches]
        if not branch_group:
            # new repo, or without maybe a branch?
            branch_group = default_ref_options

        branches_group = (branch_group, _("Branches"))
        ref_options.append(branches_group)
        choices.extend([x[0] for x in branches_group[0]])

        # bookmarks for HG
        if repo.alias == 'hg':
            bookmarks_group = (
                [(u'book:%s' % safe_unicode(b), safe_unicode(b))
                 for b in repo.bookmarks],
                _("Bookmarks"))
            ref_options.append(bookmarks_group)
            choices.extend([x[0] for x in bookmarks_group[0]])

        # tags
        tags_group = (
            [(u'tag:%s' % safe_unicode(t), safe_unicode(t))
             for t in repo.tags],
            _("Tags"))
        ref_options.append(tags_group)
        choices.extend([x[0] for x in tags_group[0]])

        return choices, ref_options
1016 1017
1017 1018 def get_server_info(self, environ=None):
1018 1019 server_info = get_system_info(environ)
1019 1020 return server_info
General Comments 0
You need to be logged in to leave comments. Login now