##// END OF EJS Templates
system-info: fix possible float division by zero in calculating % value
marcink -
r1088:7d451b5a default
parent child Browse files
Show More
@@ -1,1120 +1,1123 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Scm model for RhodeCode
23 23 """
24 24
25 25 import os.path
26 26 import re
27 27 import sys
28 28 import time
29 29 import traceback
30 30 import logging
31 31 import cStringIO
32 32 import pkg_resources
33 33
34 34 import pylons
35 35 from pylons.i18n.translation import _
36 36 from sqlalchemy import func
37 37 from zope.cachedescriptors.property import Lazy as LazyProperty
38 38
39 39 import rhodecode
40 40 from rhodecode.lib.vcs import get_backend
41 41 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
42 42 from rhodecode.lib.vcs.nodes import FileNode
43 43 from rhodecode.lib.vcs.backends.base import EmptyCommit
44 44 from rhodecode.lib import helpers as h
45 45
46 46 from rhodecode.lib.auth import (
47 47 HasRepoPermissionAny, HasRepoGroupPermissionAny,
48 48 HasUserGroupPermissionAny)
49 49 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
50 50 from rhodecode.lib import hooks_utils, caches
51 51 from rhodecode.lib.utils import (
52 52 get_filesystem_repos, action_logger, make_db_config)
53 53 from rhodecode.lib.utils2 import (
54 54 safe_str, safe_unicode, get_server_url, md5)
55 55 from rhodecode.model import BaseModel
56 56 from rhodecode.model.db import (
57 57 Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
58 58 PullRequest, DbMigrateVersion)
59 59 from rhodecode.model.settings import VcsSettingsModel
60 60
61 61 log = logging.getLogger(__name__)
62 62
63 63
class UserTemp(object):
    """Lightweight stand-in carrying only a user id, passed to action_logger."""

    def __init__(self, user_id):
        self.user_id = user_id

    def __repr__(self):
        cls_name = type(self).__name__
        return "<%s('id:%s')>" % (cls_name, self.user_id)
71 71
class RepoTemp(object):
    """Lightweight stand-in carrying only a repo id, passed to action_logger."""

    def __init__(self, repo_id):
        self.repo_id = repo_id

    def __repr__(self):
        cls_name = type(self).__name__
        return "<%s('id:%s')>" % (cls_name, self.repo_id)
79 79
class SimpleCachedRepoList(object):
    """
    Lightweight iteration over db repositories that skips SCM
    initialisation and yields only repos the caller has permission on.
    """
    def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
        self.db_repo_list = db_repo_list
        self.repos_path = repos_path
        self.order_by = order_by
        # a leading '-' on the sort key marks descending order
        self.reversed = (order_by or '').startswith('-')
        self.perm_set = perm_set or [
            'repository.read', 'repository.write', 'repository.admin']

    def __len__(self):
        return len(self.db_repo_list)

    def __repr__(self):
        return '<%s (%s)>' % (self.__class__.__name__, len(self))

    def __iter__(self):
        checker = HasRepoPermissionAny(*self.perm_set)
        for db_repo in self.db_repo_list:
            # yield only repositories the current user may access
            if not checker(db_repo.repo_name, 'SimpleCachedRepoList check'):
                continue
            yield {
                'name': db_repo.repo_name,
                'dbrepo': db_repo.get_dict(),
                'dbrepo_fork': db_repo.fork.get_dict() if db_repo.fork else {},
            }
116 116
117 117 class _PermCheckIterator(object):
118 118
119 119 def __init__(
120 120 self, obj_list, obj_attr, perm_set, perm_checker,
121 121 extra_kwargs=None):
122 122 """
123 123 Creates iterator from given list of objects, additionally
124 124 checking permission for them from perm_set var
125 125
126 126 :param obj_list: list of db objects
127 127 :param obj_attr: attribute of object to pass into perm_checker
128 128 :param perm_set: list of permissions to check
129 129 :param perm_checker: callable to check permissions against
130 130 """
131 131 self.obj_list = obj_list
132 132 self.obj_attr = obj_attr
133 133 self.perm_set = perm_set
134 134 self.perm_checker = perm_checker
135 135 self.extra_kwargs = extra_kwargs or {}
136 136
137 137 def __len__(self):
138 138 return len(self.obj_list)
139 139
140 140 def __repr__(self):
141 141 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
142 142
143 143 def __iter__(self):
144 144 checker = self.perm_checker(*self.perm_set)
145 145 for db_obj in self.obj_list:
146 146 # check permission at this level
147 147 name = getattr(db_obj, self.obj_attr, None)
148 148 if not checker(name, self.__class__.__name__, **self.extra_kwargs):
149 149 continue
150 150
151 151 yield db_obj
152 152
153 153
class RepoList(_PermCheckIterator):
    """Iterate db repositories filtered by repository-level permissions."""

    def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
        perm_set = perm_set or [
            'repository.read', 'repository.write', 'repository.admin']
        super(RepoList, self).__init__(
            obj_list=db_repo_list, obj_attr='repo_name',
            perm_set=perm_set, perm_checker=HasRepoPermissionAny,
            extra_kwargs=extra_kwargs)
167 167
class RepoGroupList(_PermCheckIterator):
    """Iterate db repository groups filtered by group-level permissions."""

    def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
        perm_set = perm_set or ['group.read', 'group.write', 'group.admin']
        super(RepoGroupList, self).__init__(
            obj_list=db_repo_group_list, obj_attr='group_name',
            perm_set=perm_set, perm_checker=HasRepoGroupPermissionAny,
            extra_kwargs=extra_kwargs)
180 180
class UserGroupList(_PermCheckIterator):
    """Iterate db user groups filtered by user-group-level permissions."""

    def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
        perm_set = perm_set or [
            'usergroup.read', 'usergroup.write', 'usergroup.admin']
        super(UserGroupList, self).__init__(
            obj_list=db_user_group_list, obj_attr='users_group_name',
            perm_set=perm_set, perm_checker=HasUserGroupPermissionAny,
            extra_kwargs=extra_kwargs)
193 193
194 194 class ScmModel(BaseModel):
195 195 """
196 196 Generic Scm Model
197 197 """
198 198
199 199 @LazyProperty
200 200 def repos_path(self):
201 201 """
202 202 Gets the repositories root path from database
203 203 """
204 204
205 205 settings_model = VcsSettingsModel(sa=self.sa)
206 206 return settings_model.get_repos_location()
207 207
    def repo_scan(self, repos_path=None):
        """
        Listing of repositories in given path. This path should not be a
        repository itself. Return a dictionary of repository objects

        :param repos_path: path to directory containing repositories
            (defaults to the configured repository root)
        :returns: dict mapping normalized repo name -> vcs backend instance
        :raises RepositoryError: when the same repository name appears twice
        """

        if repos_path is None:
            repos_path = self.repos_path

        log.info('scanning for repositories in %s', repos_path)

        config = make_db_config()
        config.set('extensions', 'largefiles', '')
        repos = {}

        for name, path in get_filesystem_repos(repos_path, recursive=True):
            # name need to be decomposed and put back together using the /
            # since this is internal storage separator for rhodecode
            name = Repository.normalize_repo_name(name)

            try:
                if name in repos:
                    raise RepositoryError('Duplicate repository name %s '
                                          'found in %s' % (name, path))
                elif path[0] in rhodecode.BACKENDS:
                    # `path` is a (backend_alias, repo_path) pair here
                    klass = get_backend(path[0])
                    repos[name] = klass(path[1], config=config)
            except OSError:
                # skip repositories that are unreadable on disk; note the
                # RepositoryError raised above is NOT swallowed by this
                continue
        log.debug('found %s paths with repositories', len(repos))
        return repos
241 241
242 242 def get_repos(self, all_repos=None, sort_key=None):
243 243 """
244 244 Get all repositories from db and for each repo create it's
245 245 backend instance and fill that backed with information from database
246 246
247 247 :param all_repos: list of repository names as strings
248 248 give specific repositories list, good for filtering
249 249
250 250 :param sort_key: initial sorting of repositories
251 251 """
252 252 if all_repos is None:
253 253 all_repos = self.sa.query(Repository)\
254 254 .filter(Repository.group_id == None)\
255 255 .order_by(func.lower(Repository.repo_name)).all()
256 256 repo_iter = SimpleCachedRepoList(
257 257 all_repos, repos_path=self.repos_path, order_by=sort_key)
258 258 return repo_iter
259 259
260 260 def get_repo_groups(self, all_groups=None):
261 261 if all_groups is None:
262 262 all_groups = RepoGroup.query()\
263 263 .filter(RepoGroup.group_parent_id == None).all()
264 264 return [x for x in RepoGroupList(all_groups)]
265 265
    def mark_for_invalidation(self, repo_name, delete=False):
        """
        Mark caches of this repo invalid in the database. `delete` flag
        removes the cache entries

        :param repo_name: the repo_name for which caches should be marked
            invalid, or deleted
        :param delete: delete the entry keys instead of setting bool
            flag on them
        """
        CacheKey.set_invalidate(repo_name, delete=delete)
        repo = Repository.get_by_repo_name(repo_name)

        if repo:
            # largefiles is disabled here, presumably so the cache refresh
            # does not trigger largefiles processing -- TODO confirm
            config = repo._config
            config.set('extensions', 'largefiles', '')
            # refresh the cached commit metadata for this repository
            repo.update_commit_cache(config=config, cs_cache=None)
            caches.clear_repo_caches(repo_name)
284 284
    def toggle_following_repo(self, follow_repo_id, user_id):
        """
        Toggle the follow state of a repository for a user: when a following
        row exists it is removed, otherwise one is created. Both transitions
        are recorded via action_logger.

        :param follow_repo_id: id of the repository to (un)follow
        :param user_id: id of the acting user
        """
        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_repo_id == follow_repo_id)\
            .filter(UserFollowing.user_id == user_id).scalar()

        if f is not None:
            try:
                self.sa.delete(f)
                action_logger(UserTemp(user_id),
                              'stopped_following_repo',
                              RepoTemp(follow_repo_id))
                return
            except Exception:
                log.error(traceback.format_exc())
                raise

        try:
            f = UserFollowing()
            f.user_id = user_id
            f.follows_repo_id = follow_repo_id
            self.sa.add(f)

            action_logger(UserTemp(user_id),
                          'started_following_repo',
                          RepoTemp(follow_repo_id))
        except Exception:
            log.error(traceback.format_exc())
            raise
314 314
    def toggle_following_user(self, follow_user_id, user_id):
        """
        Toggle the follow state of another user for `user_id`: unfollow when
        a following row exists, otherwise start following. Unlike
        toggle_following_repo, no action log entry is written here.

        :param follow_user_id: id of the user to (un)follow
        :param user_id: id of the acting user
        """
        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_user_id == follow_user_id)\
            .filter(UserFollowing.user_id == user_id).scalar()

        if f is not None:
            try:
                self.sa.delete(f)
                return
            except Exception:
                log.error(traceback.format_exc())
                raise

        try:
            f = UserFollowing()
            f.user_id = user_id
            f.follows_user_id = follow_user_id
            self.sa.add(f)
        except Exception:
            log.error(traceback.format_exc())
            raise
336 336
337 337 def is_following_repo(self, repo_name, user_id, cache=False):
338 338 r = self.sa.query(Repository)\
339 339 .filter(Repository.repo_name == repo_name).scalar()
340 340
341 341 f = self.sa.query(UserFollowing)\
342 342 .filter(UserFollowing.follows_repository == r)\
343 343 .filter(UserFollowing.user_id == user_id).scalar()
344 344
345 345 return f is not None
346 346
347 347 def is_following_user(self, username, user_id, cache=False):
348 348 u = User.get_by_username(username)
349 349
350 350 f = self.sa.query(UserFollowing)\
351 351 .filter(UserFollowing.follows_user == u)\
352 352 .filter(UserFollowing.user_id == user_id).scalar()
353 353
354 354 return f is not None
355 355
356 356 def get_followers(self, repo):
357 357 repo = self._get_repo(repo)
358 358
359 359 return self.sa.query(UserFollowing)\
360 360 .filter(UserFollowing.follows_repository == repo).count()
361 361
362 362 def get_forks(self, repo):
363 363 repo = self._get_repo(repo)
364 364 return self.sa.query(Repository)\
365 365 .filter(Repository.fork == repo).count()
366 366
367 367 def get_pull_requests(self, repo):
368 368 repo = self._get_repo(repo)
369 369 return self.sa.query(PullRequest)\
370 370 .filter(PullRequest.target_repo == repo)\
371 371 .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
372 372
373 373 def mark_as_fork(self, repo, fork, user):
374 374 repo = self._get_repo(repo)
375 375 fork = self._get_repo(fork)
376 376 if fork and repo.repo_id == fork.repo_id:
377 377 raise Exception("Cannot set repository as fork of itself")
378 378
379 379 if fork and repo.repo_type != fork.repo_type:
380 380 raise RepositoryError(
381 381 "Cannot set repository as fork of repository with other type")
382 382
383 383 repo.fork = fork
384 384 self.sa.add(repo)
385 385 return repo
386 386
    def pull_changes(self, repo, username):
        """
        Pull new commits from the repository's configured clone uri, then
        invalidate its caches.

        :param repo: repository (id, name or db object) to update
        :param username: name of the user triggering the pull (not used in
            the body; kept for API compatibility)
        :raises Exception: when the repository has no clone uri configured
        """
        dbrepo = self._get_repo(repo)
        clone_uri = dbrepo.clone_uri
        if not clone_uri:
            raise Exception("This repository doesn't have a clone uri")

        repo = dbrepo.scm_instance(cache=False)
        # TODO: marcink fix this an re-enable since we need common logic
        # for hg/git remove hooks so we don't trigger them on fetching
        # commits from remote
        repo.config.clear_section('hooks')

        repo_name = dbrepo.repo_name
        try:
            # TODO: we need to make sure those operations call proper hooks !
            repo.pull(clone_uri)

            self.mark_for_invalidation(repo_name)
        except Exception:
            log.error(traceback.format_exc())
            raise
408 408
    def commit_change(self, repo, repo_name, commit, user, author, message,
                      content, f_path):
        """
        Commits changes

        :param repo: SCM instance
        :param repo_name: repository name, used for cache invalidation and
            the post-push hook
        :param commit: parent commit on top of which the change is made
        :param user: RhodeCode user (or user_id) performing the commit
        :param author: author string of the new commit
        :param message: commit message
        :param content: new content of the file
        :param f_path: path of the file to change
        :returns: the newly created commit (tip)
        :raises IMCCommitError: when the in-memory commit fails
        """
        user = self._get_user(user)

        # decoding here will force that we have proper encoded values
        # in any other case this will throw exceptions and deny commit
        content = safe_str(content)
        path = safe_str(f_path)
        # message and author needs to be unicode
        # proper backend should then translate that into required type
        message = safe_unicode(message)
        author = safe_unicode(author)
        imc = repo.in_memory_commit
        imc.change(FileNode(path, content, mode=commit.get_file_mode(f_path)))
        try:
            # TODO: handle pre-push action !
            tip = imc.commit(
                message=message, author=author, parents=[commit],
                branch=commit.branch)
        except Exception as e:
            log.error(traceback.format_exc())
            raise IMCCommitError(str(e))
        finally:
            # always clear caches, if commit fails we want fresh object also
            self.mark_for_invalidation(repo_name)

        # We trigger the post-push action
        hooks_utils.trigger_post_push_hook(
            username=user.username, action='push_local', repo_name=repo_name,
            repo_alias=repo.alias, commit_ids=[tip.raw_id])
        return tip
446 446
447 447 def _sanitize_path(self, f_path):
448 448 if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path:
449 449 raise NonRelativePathError('%s is not an relative path' % f_path)
450 450 if f_path:
451 451 f_path = os.path.normpath(f_path)
452 452 return f_path
453 453
    def get_dirnode_metadata(self, commit, dir_node):
        """
        Collect display metadata (size, last-commit info, author) for every
        file directly inside `dir_node`.

        :param commit: commit the nodes belong to (not used in the body)
        :param dir_node: directory node to inspect
        :returns: list of metadata dicts, one per file; [] if not a directory
        """
        if not dir_node.is_dir():
            return []

        data = []
        for node in dir_node:
            if not node.is_file():
                # skip anything that is not a file (e.g. sub-directories)
                continue

            last_commit = node.last_commit
            last_commit_date = last_commit.date
            data.append({
                'name': node.name,
                'size': h.format_byte_size_binary(node.size),
                'modified_at': h.format_date(last_commit_date),
                'modified_ts': last_commit_date.isoformat(),
                'revision': last_commit.revision,
                'short_id': last_commit.short_id,
                'message': h.escape(last_commit.message),
                'author': h.escape(last_commit.author),
                'user_profile': h.gravatar_with_user(last_commit.author),
            })

        return data
479 479
    def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
                  extended_info=False, content=False, max_file_bytes=None):
        """
        recursive walk in root dir and return a set of all path in that dir
        based on repository walk function

        :param repo_name: name of repository
        :param commit_id: commit id for which to list nodes
        :param root_path: root path to list
        :param flat: return as a list, if False returns a dict with description
        :param extended_info: when not flat, include md5/binary/size/... keys
        :param content: when not flat, include file content (None for binary
            files or files over the size limit)
        :param max_file_bytes: will not return file contents over this limit

        :returns: tuple of (dirs, files) lists
        """
        _files = list()
        _dirs = list()
        try:
            _repo = self._get_repo(repo_name)
            commit = _repo.scm_instance().get_commit(commit_id=commit_id)
            root_path = root_path.lstrip('/')
            for __, dirs, files in commit.walk(root_path):
                for f in files:
                    _content = None
                    # flat mode yields plain paths; otherwise a dict
                    _data = f.unicode_path
                    over_size_limit = (max_file_bytes is not None
                                       and f.size > max_file_bytes)

                    if not flat:
                        _data = {
                            "name": f.unicode_path,
                            "type": "file",
                        }
                        if extended_info:
                            _data.update({
                                "md5": f.md5,
                                "binary": f.is_binary,
                                "size": f.size,
                                "extension": f.extension,
                                "mimetype": f.mimetype,
                                "lines": f.lines()[0]
                            })

                        if content:
                            full_content = None
                            # binary and over-limit files get content=None
                            if not f.is_binary and not over_size_limit:
                                full_content = safe_str(f.content)

                            _data.update({
                                "content": full_content,
                            })
                    _files.append(_data)
                for d in dirs:
                    _data = d.unicode_path
                    if not flat:
                        _data = {
                            "name": d.unicode_path,
                            "type": "dir",
                        }
                        if extended_info:
                            # dirs carry placeholder keys so the shape
                            # matches the file entries
                            _data.update({
                                "md5": None,
                                "binary": None,
                                "size": None,
                                "extension": None,
                            })
                        if content:
                            _data.update({
                                "content": None
                            })
                    _dirs.append(_data)
        except RepositoryError:
            log.debug("Exception in get_nodes", exc_info=True)
            raise

        return _dirs, _files
554 554
    def create_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
        """
        Commits given multiple nodes into repo

        :param user: RhodeCode User object or user_id, the committer
        :param repo: RhodeCode Repository object
        :param message: commit message
        :param nodes: mapping {filename:{'content':content},...}
        :param parent_commit: parent commit, can be empty then it's
            initial commit
        :param author: author of commit, can be different than committer
            only for git
        :param trigger_push_hook: trigger push hooks

        :returns: new committed commit
        """

        user = self._get_user(user)
        scm_instance = repo.scm_instance(cache=False)

        processed_nodes = []
        for f_path in nodes:
            f_path = self._sanitize_path(f_path)
            content = nodes[f_path]['content']
            f_path = safe_str(f_path)
            # decoding here will force that we have proper encoded values
            # in any other case this will throw exceptions and deny commit
            if isinstance(content, (basestring,)):
                content = safe_str(content)
            elif isinstance(content, (file, cStringIO.OutputType,)):
                content = content.read()
            else:
                raise Exception('Content is of unrecognized type %s' % (
                    type(content)
                ))
            processed_nodes.append((f_path, content))

        message = safe_unicode(message)
        commiter = user.full_contact
        author = safe_unicode(author) if author else commiter

        imc = scm_instance.in_memory_commit

        if not parent_commit:
            parent_commit = EmptyCommit(alias=scm_instance.alias)

        if isinstance(parent_commit, EmptyCommit):
            # EmptyCommit means we we're editing empty repository
            parents = None
        else:
            parents = [parent_commit]
        # add multiple nodes
        for path, content in processed_nodes:
            imc.add(FileNode(path, content=content))
        # TODO: handle pre push scenario
        tip = imc.commit(message=message,
                         author=author,
                         parents=parents,
                         branch=parent_commit.branch)

        self.mark_for_invalidation(repo.repo_name)
        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local',
                repo_name=repo.repo_name, repo_alias=scm_instance.alias,
                commit_ids=[tip.raw_id])
        return tip
623 623
    def update_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
        """
        Modifies multiple nodes in `repo` within a single commit.

        :param user: RhodeCode User object or user_id, the committer
        :param repo: RhodeCode Repository object
        :param message: commit message
        :param nodes: mapping of old filename to a dict with keys
            'filename' (new name, possibly a rename), 'content' and
            'op' ('add', 'del' or 'mod')
        :param parent_commit: parent commit, can be empty for the initial
            commit of an empty repository
        :param author: author of commit, can be different than committer
            only for git
        :param trigger_push_hook: trigger push hooks
        :raises IMCCommitError: on unexpected commit failure
        """
        user = self._get_user(user)
        scm_instance = repo.scm_instance(cache=False)

        message = safe_unicode(message)
        commiter = user.full_contact
        author = safe_unicode(author) if author else commiter

        imc = scm_instance.in_memory_commit

        if not parent_commit:
            parent_commit = EmptyCommit(alias=scm_instance.alias)

        if isinstance(parent_commit, EmptyCommit):
            # EmptyCommit means we we're editing empty repository
            parents = None
        else:
            parents = [parent_commit]

        # add multiple nodes
        for _filename, data in nodes.items():
            # new filename, can be renamed from the old one, also sanitaze
            # the path for any hack around relative paths like ../../ etc.
            filename = self._sanitize_path(data['filename'])
            old_filename = self._sanitize_path(_filename)
            content = data['content']

            filenode = FileNode(old_filename, content=content)
            op = data['op']
            if op == 'add':
                imc.add(filenode)
            elif op == 'del':
                imc.remove(filenode)
            elif op == 'mod':
                if filename != old_filename:
                    # TODO: handle renames more efficient, needs vcs lib
                    # changes
                    imc.remove(filenode)
                    imc.add(FileNode(filename, content=content))
                else:
                    imc.change(filenode)

        try:
            # TODO: handle pre push scenario
            # commit changes
            tip = imc.commit(message=message,
                             author=author,
                             parents=parents,
                             branch=parent_commit.branch)
        except NodeNotChangedError:
            raise
        except Exception as e:
            log.exception("Unexpected exception during call to imc.commit")
            raise IMCCommitError(str(e))
        finally:
            # always clear caches, if commit fails we want fresh object also
            self.mark_for_invalidation(repo.repo_name)

        # NOTE(review): unlike create_nodes/delete_nodes, `tip` is not
        # returned here -- the method returns None
        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local',
                repo_name=repo.repo_name, repo_alias=scm_instance.alias,
                commit_ids=[tip.raw_id])
688 688
    def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
        """
        Deletes given multiple nodes from `repo`

        :param user: RhodeCode User object or user_id, the committer
        :param repo: RhodeCode Repository object
        :param message: commit message
        :param nodes: mapping {filename:{'content':content},...}
        :param parent_commit: parent commit, can be empty for the initial
            commit of an empty repository
        :param author: author of commit, can be different than committer
            only for git
        :param trigger_push_hook: trigger push hooks

        :returns: new commit after deletion
        """

        user = self._get_user(user)
        scm_instance = repo.scm_instance(cache=False)

        processed_nodes = []
        for f_path in nodes:
            f_path = self._sanitize_path(f_path)
            # content can be empty but for compatibility it allows the same
            # dict structure as add_nodes
            content = nodes[f_path].get('content')
            processed_nodes.append((f_path, content))

        message = safe_unicode(message)
        commiter = user.full_contact
        author = safe_unicode(author) if author else commiter

        imc = scm_instance.in_memory_commit

        if not parent_commit:
            parent_commit = EmptyCommit(alias=scm_instance.alias)

        if isinstance(parent_commit, EmptyCommit):
            # EmptyCommit means we we're editing empty repository
            parents = None
        else:
            parents = [parent_commit]
        # add multiple nodes
        for path, content in processed_nodes:
            imc.remove(FileNode(path, content=content))

        # TODO: handle pre push scenario
        tip = imc.commit(message=message,
                         author=author,
                         parents=parents,
                         branch=parent_commit.branch)

        self.mark_for_invalidation(repo.repo_name)
        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local',
                repo_name=repo.repo_name, repo_alias=scm_instance.alias,
                commit_ids=[tip.raw_id])
        return tip
749 749
    def strip(self, repo, commit_id, branch):
        """
        Strip `commit_id` from `repo` on the given branch, then invalidate
        the repository caches.
        """
        scm_instance = repo.scm_instance(cache=False)
        # disable rhodecode hooks for this administrative operation
        scm_instance.config.clear_section('hooks')
        scm_instance.strip(commit_id, branch)
        self.mark_for_invalidation(repo.repo_name)
755 755
    def get_unread_journal(self):
        """Return the journal entry count."""
        # NOTE(review): this counts *all* UserLog rows; no "unread" filter is
        # visible here -- confirm the intended semantics against callers
        return self.sa.query(UserLog).count()
758 758
759 759 def get_repo_landing_revs(self, repo=None):
760 760 """
761 761 Generates select option with tags branches and bookmarks (for hg only)
762 762 grouped by type
763 763
764 764 :param repo:
765 765 """
766 766
767 767 hist_l = []
768 768 choices = []
769 769 repo = self._get_repo(repo)
770 770 hist_l.append(['rev:tip', _('latest tip')])
771 771 choices.append('rev:tip')
772 772 if not repo:
773 773 return choices, hist_l
774 774
775 775 repo = repo.scm_instance()
776 776
777 777 branches_group = (
778 778 [(u'branch:%s' % safe_unicode(b), safe_unicode(b))
779 779 for b in repo.branches],
780 780 _("Branches"))
781 781 hist_l.append(branches_group)
782 782 choices.extend([x[0] for x in branches_group[0]])
783 783
784 784 if repo.alias == 'hg':
785 785 bookmarks_group = (
786 786 [(u'book:%s' % safe_unicode(b), safe_unicode(b))
787 787 for b in repo.bookmarks],
788 788 _("Bookmarks"))
789 789 hist_l.append(bookmarks_group)
790 790 choices.extend([x[0] for x in bookmarks_group[0]])
791 791
792 792 tags_group = (
793 793 [(u'tag:%s' % safe_unicode(t), safe_unicode(t))
794 794 for t in repo.tags],
795 795 _("Tags"))
796 796 hist_l.append(tags_group)
797 797 choices.extend([x[0] for x in tags_group[0]])
798 798
799 799 return choices, hist_l
800 800
    def install_git_hook(self, repo, force_create=False):
        """
        Creates a rhodecode hook inside a git repository

        :param repo: Instance of VCS repo
        :param force_create: Create even if same name hook exists
        """

        # bare repos keep hooks at <path>/hooks, non-bare at <path>/.git/hooks
        loc = os.path.join(repo.path, 'hooks')
        if not repo.bare:
            loc = os.path.join(repo.path, '.git', 'hooks')
        if not os.path.isdir(loc):
            os.makedirs(loc, mode=0777)  # py2 octal literal

        tmpl_post = pkg_resources.resource_string(
            'rhodecode', '/'.join(
                ('config', 'hook_templates', 'git_post_receive.py.tmpl')))
        tmpl_pre = pkg_resources.resource_string(
            'rhodecode', '/'.join(
                ('config', 'hook_templates', 'git_pre_receive.py.tmpl')))

        for h_type, tmpl in [('pre', tmpl_pre), ('post', tmpl_post)]:
            _hook_file = os.path.join(loc, '%s-receive' % h_type)
            log.debug('Installing git hook in repo %s', repo)
            # only overwrite hooks that rhodecode itself installed,
            # unless force_create is set
            _rhodecode_hook = _check_rhodecode_hook(_hook_file)

            if _rhodecode_hook or force_create:
                log.debug('writing %s hook file !', h_type)
                try:
                    with open(_hook_file, 'wb') as f:
                        # substitute version and python interpreter path
                        tmpl = tmpl.replace('_TMPL_', rhodecode.__version__)
                        tmpl = tmpl.replace('_ENV_', sys.executable)
                        f.write(tmpl)
                    os.chmod(_hook_file, 0755)  # py2 octal literal
                except IOError:
                    log.exception('error writing hook file %s', _hook_file)
            else:
                log.debug('skipping writing hook file')
839 839
    def install_svn_hooks(self, repo, force_create=False):
        """
        Creates rhodecode hooks inside a svn repository

        :param repo: Instance of VCS repo
        :param force_create: Create even if same name hook exists
        """
        hooks_path = os.path.join(repo.path, 'hooks')
        if not os.path.isdir(hooks_path):
            os.makedirs(hooks_path)
        post_commit_tmpl = pkg_resources.resource_string(
            'rhodecode', '/'.join(
                ('config', 'hook_templates', 'svn_post_commit_hook.py.tmpl')))
        pre_commit_template = pkg_resources.resource_string(
            'rhodecode', '/'.join(
                ('config', 'hook_templates', 'svn_pre_commit_hook.py.tmpl')))
        templates = {
            'post-commit': post_commit_tmpl,
            'pre-commit': pre_commit_template
        }
        for filename in templates:
            _hook_file = os.path.join(hooks_path, filename)
            # only overwrite hooks that rhodecode itself installed,
            # unless force_create is set
            _rhodecode_hook = _check_rhodecode_hook(_hook_file)
            if _rhodecode_hook or force_create:
                log.debug('writing %s hook file !', filename)
                template = templates[filename]
                try:
                    with open(_hook_file, 'wb') as f:
                        # substitute version and python interpreter path
                        template = template.replace(
                            '_TMPL_', rhodecode.__version__)
                        template = template.replace('_ENV_', sys.executable)
                        f.write(template)
                    os.chmod(_hook_file, 0755)  # py2 octal literal
                except IOError:
                    log.exception('error writing hook file %s', filename)
            else:
                log.debug('skipping writing hook file')
877 877
878 878 def install_hooks(self, repo, repo_type):
879 879 if repo_type == 'git':
880 880 self.install_git_hook(repo)
881 881 elif repo_type == 'svn':
882 882 self.install_svn_hooks(repo)
883 883
884 884 def get_server_info(self, environ=None):
885 885 import platform
886 886 import rhodecode
887 887 import pkg_resources
888 888 from rhodecode.model.meta import Base as sql_base, Session
889 889 from sqlalchemy.engine import url
890 890 from rhodecode.lib.base import get_server_ip_addr, get_server_port
891 891 from rhodecode.lib.vcs.backends.git import discover_git_version
892 892 from rhodecode.model.gist import GIST_STORE_LOC
893 893
894 894 def percentage(part, whole):
895 return 100 * float(part) / float(whole)
895 whole = float(whole)
896 if whole > 0:
897 return 100 * float(part) / whole
898 return 0
896 899
897 900 try:
898 901 # cygwin cannot have yet psutil support.
899 902 import psutil
900 903 except ImportError:
901 904 psutil = None
902 905
903 906 environ = environ or {}
904 907 _NA = 'NOT AVAILABLE'
905 908 _memory = _NA
906 909 _uptime = _NA
907 910 _boot_time = _NA
908 911 _cpu = _NA
909 912 _disk = dict(percent=0, used=0, total=0, error='')
910 913 _disk_inodes = dict(percent=0, free=0, used=0, total=0, error='')
911 914 _load = {'1_min': _NA, '5_min': _NA, '15_min': _NA}
912 915
913 916 model = VcsSettingsModel()
914 917 storage_path = model.get_repos_location()
915 918 gist_storage_path = os.path.join(storage_path, GIST_STORE_LOC)
916 919 archive_storage_path = rhodecode.CONFIG.get('archive_cache_dir', '')
917 920 search_index_storage_path = rhodecode.CONFIG.get('search.location', '')
918 921
919 922 if psutil:
920 923 # disk storage
921 924 try:
922 925 _disk = dict(psutil.disk_usage(storage_path)._asdict())
923 926 except Exception as e:
924 927 log.exception('Failed to fetch disk info')
925 928 _disk = {'percent': 0, 'used': 0, 'total': 0, 'error': str(e)}
926 929
927 930 # disk inodes usage
928 931 try:
929 932 i_stat = os.statvfs(storage_path)
930 933
931 934 _disk_inodes['used'] = i_stat.f_ffree
932 935 _disk_inodes['free'] = i_stat.f_favail
933 936 _disk_inodes['total'] = i_stat.f_files
934 937 _disk_inodes['percent'] = percentage(
935 938 _disk_inodes['used'], _disk_inodes['total'])
936 939 except Exception as e:
937 940 log.exception('Failed to fetch disk inodes info')
938 941 _disk_inodes['error'] = str(e)
939 942
940 943 # memory
941 944 _memory = dict(psutil.virtual_memory()._asdict())
942 945 _memory['percent2'] = psutil._common.usage_percent(
943 946 (_memory['total'] - _memory['free']),
944 947 _memory['total'], 1)
945 948
946 949 # load averages
947 950 if hasattr(psutil.os, 'getloadavg'):
948 951 _load = dict(zip(
949 952 ['1_min', '5_min', '15_min'], psutil.os.getloadavg()))
950 953 _uptime = time.time() - psutil.boot_time()
951 954 _boot_time = psutil.boot_time()
952 955 _cpu = psutil.cpu_percent(0.5)
953 956
954 957 mods = dict([(p.project_name, p.version)
955 958 for p in pkg_resources.working_set])
956 959
957 960 def get_storage_size(storage_path):
958 961 sizes = []
959 962 for file_ in os.listdir(storage_path):
960 963 storage_file = os.path.join(storage_path, file_)
961 964 if os.path.isfile(storage_file):
962 965 try:
963 966 sizes.append(os.path.getsize(storage_file))
964 967 except OSError:
965 968 log.exception('Failed to get size of storage file %s',
966 969 storage_file)
967 970 pass
968 971
969 972 return sum(sizes)
970 973
971 974 # archive cache storage
972 975 _disk_archive = {'percent': 0, 'used': 0, 'total': 0}
973 976 try:
974 977 archive_storage_path_exists = os.path.isdir(
975 978 archive_storage_path)
976 979 if archive_storage_path and archive_storage_path_exists:
977 980 used = get_storage_size(archive_storage_path)
978 981 _disk_archive.update({
979 982 'used': used,
980 983 'total': used,
981 984 })
982 985 except Exception as e:
983 986 log.exception('failed to fetch archive cache storage')
984 987 _disk_archive['error'] = str(e)
985 988
986 989 # search index storage
987 990 _disk_index = {'percent': 0, 'used': 0, 'total': 0}
988 991 try:
989 992 search_index_storage_path_exists = os.path.isdir(
990 993 search_index_storage_path)
991 994 if search_index_storage_path_exists:
992 995 used = get_storage_size(search_index_storage_path)
993 996 _disk_index.update({
994 997 'percent': 100,
995 998 'used': used,
996 999 'total': used,
997 1000 })
998 1001 except Exception as e:
999 1002 log.exception('failed to fetch search index storage')
1000 1003 _disk_index['error'] = str(e)
1001 1004
1002 1005 # gist storage
1003 1006 _disk_gist = {'percent': 0, 'used': 0, 'total': 0, 'items': 0}
1004 1007 try:
1005 1008 items_count = 0
1006 1009 used = 0
1007 1010 for root, dirs, files in os.walk(safe_str(gist_storage_path)):
1008 1011 if root == gist_storage_path:
1009 1012 items_count = len(dirs)
1010 1013
1011 1014 for f in files:
1012 1015 try:
1013 1016 used += os.path.getsize(os.path.join(root, f))
1014 1017 except OSError:
1015 1018 pass
1016 1019 _disk_gist.update({
1017 1020 'percent': 100,
1018 1021 'used': used,
1019 1022 'total': used,
1020 1023 'items': items_count
1021 1024 })
1022 1025 except Exception as e:
1023 1026 log.exception('failed to fetch gist storage items')
1024 1027 _disk_gist['error'] = str(e)
1025 1028
1026 1029 # GIT info
1027 1030 git_ver = discover_git_version()
1028 1031
1029 1032 # SVN info
1030 1033 # TODO: johbo: Add discover_svn_version to replace this code.
1031 1034 try:
1032 1035 import svn.core
1033 1036 svn_ver = svn.core.SVN_VERSION
1034 1037 except ImportError:
1035 1038 svn_ver = None
1036 1039
1037 1040 # DB stuff
1038 1041 db_info = url.make_url(rhodecode.CONFIG['sqlalchemy.db1.url'])
1039 1042 db_type = db_info.__to_string__()
1040 1043 try:
1041 1044 engine = sql_base.metadata.bind
1042 1045 db_server_info = engine.dialect._get_server_version_info(
1043 1046 Session.connection(bind=engine))
1044 1047 db_version = '%s %s' % (db_info.drivername,
1045 1048 '.'.join(map(str, db_server_info)))
1046 1049 except Exception:
1047 1050 log.exception('failed to fetch db version')
1048 1051 db_version = '%s %s' % (db_info.drivername, '?')
1049 1052
1050 1053 db_migrate = DbMigrateVersion.query().filter(
1051 1054 DbMigrateVersion.repository_id == 'rhodecode_db_migrations').one()
1052 1055 db_migrate_version = db_migrate.version
1053 1056
1054 1057 info = {
1055 1058 'py_version': ' '.join(platform._sys_version()),
1056 1059 'py_path': sys.executable,
1057 1060 'py_modules': sorted(mods.items(), key=lambda k: k[0].lower()),
1058 1061
1059 1062 'platform': safe_unicode(platform.platform()),
1060 1063 'storage': storage_path,
1061 1064 'archive_storage': archive_storage_path,
1062 1065 'index_storage': search_index_storage_path,
1063 1066 'gist_storage': gist_storage_path,
1064 1067
1065 1068
1066 1069 'db_type': db_type,
1067 1070 'db_version': db_version,
1068 1071 'db_migrate_version': db_migrate_version,
1069 1072
1070 1073 'rhodecode_version': rhodecode.__version__,
1071 1074 'rhodecode_config_ini': rhodecode.CONFIG.get('__file__'),
1072 1075 'server_ip': '%s:%s' % (
1073 1076 get_server_ip_addr(environ, log_errors=False),
1074 1077 get_server_port(environ)
1075 1078 ),
1076 1079 'server_id': rhodecode.CONFIG.get('instance_id'),
1077 1080
1078 1081 'git_version': safe_unicode(git_ver),
1079 1082 'hg_version': mods.get('mercurial'),
1080 1083 'svn_version': svn_ver,
1081 1084
1082 1085 'uptime': _uptime,
1083 1086 'boot_time': _boot_time,
1084 1087 'load': _load,
1085 1088 'cpu': _cpu,
1086 1089 'memory': _memory,
1087 1090 'disk': _disk,
1088 1091 'disk_inodes': _disk_inodes,
1089 1092 'disk_archive': _disk_archive,
1090 1093 'disk_gist': _disk_gist,
1091 1094 'disk_index': _disk_index,
1092 1095 }
1093 1096 return info
1094 1097
1095 1098
def _check_rhodecode_hook(hook_path):
    """
    Check if the hook was created by RhodeCode

    Returns ``True`` when it is safe to (over)write ``hook_path``: either
    no file exists there yet, or the existing one carries the RhodeCode
    ``RC_HOOK_VER`` marker. Returns ``False`` for custom (foreign) hooks.
    """
    if not os.path.exists(hook_path):
        return True

    log.debug('hook exists, checking if it is from rhodecode')
    content = _read_hook(hook_path)
    match = re.search(r'(?:RC_HOOK_VER)\s*=\s*(.*)', content)
    if match is not None:
        try:
            log.debug('got %s, it is rhodecode', match.group(1))
            return True
        except Exception:
            log.exception("Exception while reading the hook version.")

    return False
1115 1118
1116 1119
1117 1120 def _read_hook(hook_path):
1118 1121 with open(hook_path, 'rb') as f:
1119 1122 content = f.read()
1120 1123 return content
General Comments 0
You need to be logged in to leave comments. Login now