##// END OF EJS Templates
scm: clear the cs cache on delete/remap
dan -
r337:e35f8ec4 default
parent child Browse files
Show More
@@ -1,1105 +1,1101 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Scm model for RhodeCode
23 23 """
24 24
25 25 import os.path
26 26 import re
27 27 import sys
28 28 import time
29 29 import traceback
30 30 import logging
31 31 import cStringIO
32 32 import pkg_resources
33 33
34 34 import pylons
35 35 from pylons.i18n.translation import _
36 36 from sqlalchemy import func
37 37 from zope.cachedescriptors.property import Lazy as LazyProperty
38 38
39 39 import rhodecode
40 40 from rhodecode.lib.vcs import get_backend
41 41 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
42 42 from rhodecode.lib.vcs.nodes import FileNode
43 43 from rhodecode.lib.vcs.backends.base import EmptyCommit
44 44 from rhodecode.lib import helpers as h
45 45
46 46 from rhodecode.lib.auth import (
47 47 HasRepoPermissionAny, HasRepoGroupPermissionAny,
48 48 HasUserGroupPermissionAny)
49 49 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
50 50 from rhodecode.lib import hooks_utils, caches
51 51 from rhodecode.lib.utils import (
52 52 get_filesystem_repos, action_logger, make_db_config)
53 53 from rhodecode.lib.utils2 import (
54 54 safe_str, safe_unicode, get_server_url, md5)
55 55 from rhodecode.model import BaseModel
56 56 from rhodecode.model.db import (
57 57 Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
58 58 PullRequest, DbMigrateVersion)
59 59 from rhodecode.model.settings import VcsSettingsModel
60 60
61 61 log = logging.getLogger(__name__)
62 62
63 63
class UserTemp(object):
    """Lightweight stand-in for a user row, carrying only the user id
    (used by ``action_logger`` where a full User object is not needed)."""

    def __init__(self, user_id):
        self.user_id = user_id

    def __repr__(self):
        cls_name = self.__class__.__name__
        return "<%s('id:%s')>" % (cls_name, self.user_id)
70 70
71 71
class RepoTemp(object):
    """Lightweight stand-in for a repository row, carrying only the repo
    id (used by ``action_logger`` where a full Repository is not needed)."""

    def __init__(self, repo_id):
        self.repo_id = repo_id

    def __repr__(self):
        cls_name = self.__class__.__name__
        return "<%s('id:%s')>" % (cls_name, self.repo_id)
78 78
79 79
class SimpleCachedRepoList(object):
    """
    Lighter iteration over db repositories: no scm backend initialisation,
    permission-filtered, yielding plain dicts per repository.
    """

    def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
        self.db_repo_list = db_repo_list
        self.repos_path = repos_path
        self.order_by = order_by
        # a leading '-' on the sort key means descending order
        self.reversed = (order_by or '').startswith('-')
        self.perm_set = perm_set or [
            'repository.read', 'repository.write', 'repository.admin']

    def __len__(self):
        return len(self.db_repo_list)

    def __repr__(self):
        return '<%s (%s)>' % (self.__class__.__name__, len(self))

    def __iter__(self):
        for dbr in self.db_repo_list:
            # skip repositories the current user may not access
            checker = HasRepoPermissionAny(*self.perm_set)
            if not checker(dbr.repo_name, 'SimpleCachedRepoList check'):
                continue

            yield {
                'name': dbr.repo_name,
                'dbrepo': dbr.get_dict(),
                'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {},
            }
115 115
116 116
class _PermCheckIterator(object):
    """Iterator over db objects that yields only entries passing a
    permission check built from *perm_set* via *perm_checker*."""

    def __init__(
            self, obj_list, obj_attr, perm_set, perm_checker,
            extra_kwargs=None):
        """
        Creates iterator from given list of objects, additionally
        checking permission for them from perm_set var

        :param obj_list: list of db objects
        :param obj_attr: attribute of object to pass into perm_checker
        :param perm_set: list of permissions to check
        :param perm_checker: callable to check permissions against
        """
        self.obj_list = obj_list
        self.obj_attr = obj_attr
        self.perm_set = perm_set
        self.perm_checker = perm_checker
        self.extra_kwargs = extra_kwargs if extra_kwargs else {}

    def __len__(self):
        return len(self.obj_list)

    def __repr__(self):
        return '<%s (%s)>' % (self.__class__.__name__, len(self))

    def __iter__(self):
        # the checker is built once and reused for every object
        allowed = self.perm_checker(*self.perm_set)
        for obj in self.obj_list:
            obj_name = getattr(obj, self.obj_attr, None)
            if allowed(obj_name, self.__class__.__name__,
                       **self.extra_kwargs):
                yield obj
152 152
153 153
class RepoList(_PermCheckIterator):
    """Permission-filtered iterator over repository db objects."""

    def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
        # default to any read-or-better repository permission
        perm_set = perm_set or [
            'repository.read', 'repository.write', 'repository.admin']

        super(RepoList, self).__init__(
            obj_list=db_repo_list,
            obj_attr='repo_name', perm_set=perm_set,
            perm_checker=HasRepoPermissionAny,
            extra_kwargs=extra_kwargs)
166 166
167 167
class RepoGroupList(_PermCheckIterator):
    """Permission-filtered iterator over repository-group db objects."""

    def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
        # default to any read-or-better group permission
        perm_set = perm_set or ['group.read', 'group.write', 'group.admin']

        super(RepoGroupList, self).__init__(
            obj_list=db_repo_group_list,
            obj_attr='group_name', perm_set=perm_set,
            perm_checker=HasRepoGroupPermissionAny,
            extra_kwargs=extra_kwargs)
179 179
180 180
class UserGroupList(_PermCheckIterator):
    """Permission-filtered iterator over user-group db objects."""

    def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
        # default to any read-or-better usergroup permission
        perm_set = perm_set or [
            'usergroup.read', 'usergroup.write', 'usergroup.admin']

        super(UserGroupList, self).__init__(
            obj_list=db_user_group_list,
            obj_attr='users_group_name', perm_set=perm_set,
            perm_checker=HasUserGroupPermissionAny,
            extra_kwargs=extra_kwargs)
192 192
193 193
194 194 class ScmModel(BaseModel):
195 195 """
196 196 Generic Scm Model
197 197 """
198 198
199 199 @LazyProperty
200 200 def repos_path(self):
201 201 """
202 202 Gets the repositories root path from database
203 203 """
204 204
205 205 settings_model = VcsSettingsModel(sa=self.sa)
206 206 return settings_model.get_repos_location()
207 207
208 208 def repo_scan(self, repos_path=None):
209 209 """
210 210 Listing of repositories in given path. This path should not be a
211 211 repository itself. Return a dictionary of repository objects
212 212
213 213 :param repos_path: path to directory containing repositories
214 214 """
215 215
216 216 if repos_path is None:
217 217 repos_path = self.repos_path
218 218
219 219 log.info('scanning for repositories in %s', repos_path)
220 220
221 221 config = make_db_config()
222 222 config.set('extensions', 'largefiles', '')
223 223 repos = {}
224 224
225 225 for name, path in get_filesystem_repos(repos_path, recursive=True):
226 226 # name need to be decomposed and put back together using the /
227 227 # since this is internal storage separator for rhodecode
228 228 name = Repository.normalize_repo_name(name)
229 229
230 230 try:
231 231 if name in repos:
232 232 raise RepositoryError('Duplicate repository name %s '
233 233 'found in %s' % (name, path))
234 234 elif path[0] in rhodecode.BACKENDS:
235 235 klass = get_backend(path[0])
236 236 repos[name] = klass(path[1], config=config)
237 237 except OSError:
238 238 continue
239 239 log.debug('found %s paths with repositories', len(repos))
240 240 return repos
241 241
242 242 def get_repos(self, all_repos=None, sort_key=None):
243 243 """
244 244 Get all repositories from db and for each repo create it's
245 245 backend instance and fill that backed with information from database
246 246
247 247 :param all_repos: list of repository names as strings
248 248 give specific repositories list, good for filtering
249 249
250 250 :param sort_key: initial sorting of repositories
251 251 """
252 252 if all_repos is None:
253 253 all_repos = self.sa.query(Repository)\
254 254 .filter(Repository.group_id == None)\
255 255 .order_by(func.lower(Repository.repo_name)).all()
256 256 repo_iter = SimpleCachedRepoList(
257 257 all_repos, repos_path=self.repos_path, order_by=sort_key)
258 258 return repo_iter
259 259
260 260 def get_repo_groups(self, all_groups=None):
261 261 if all_groups is None:
262 262 all_groups = RepoGroup.query()\
263 263 .filter(RepoGroup.group_parent_id == None).all()
264 264 return [x for x in RepoGroupList(all_groups)]
265 265
266 266 def mark_for_invalidation(self, repo_name, delete=False):
267 267 """
268 268 Mark caches of this repo invalid in the database. `delete` flag
269 269 removes the cache entries
270 270
271 271 :param repo_name: the repo_name for which caches should be marked
272 272 invalid, or deleted
273 273 :param delete: delete the entry keys instead of setting bool
274 274 flag on them
275 275 """
276 276 CacheKey.set_invalidate(repo_name, delete=delete)
277 277 repo = Repository.get_by_repo_name(repo_name)
278 278
279 279 if repo:
280 280 config = repo._config
281 281 config.set('extensions', 'largefiles', '')
282 cs_cache = None
283 if delete:
284 # if we do a hard clear, reset last-commit to Empty
285 cs_cache = EmptyCommit()
286 repo.update_commit_cache(config=config, cs_cache=cs_cache)
282 repo.update_commit_cache(config=config, cs_cache=None)
287 283 caches.clear_repo_caches(repo_name)
288 284
289 285 def toggle_following_repo(self, follow_repo_id, user_id):
290 286
291 287 f = self.sa.query(UserFollowing)\
292 288 .filter(UserFollowing.follows_repo_id == follow_repo_id)\
293 289 .filter(UserFollowing.user_id == user_id).scalar()
294 290
295 291 if f is not None:
296 292 try:
297 293 self.sa.delete(f)
298 294 action_logger(UserTemp(user_id),
299 295 'stopped_following_repo',
300 296 RepoTemp(follow_repo_id))
301 297 return
302 298 except Exception:
303 299 log.error(traceback.format_exc())
304 300 raise
305 301
306 302 try:
307 303 f = UserFollowing()
308 304 f.user_id = user_id
309 305 f.follows_repo_id = follow_repo_id
310 306 self.sa.add(f)
311 307
312 308 action_logger(UserTemp(user_id),
313 309 'started_following_repo',
314 310 RepoTemp(follow_repo_id))
315 311 except Exception:
316 312 log.error(traceback.format_exc())
317 313 raise
318 314
319 315 def toggle_following_user(self, follow_user_id, user_id):
320 316 f = self.sa.query(UserFollowing)\
321 317 .filter(UserFollowing.follows_user_id == follow_user_id)\
322 318 .filter(UserFollowing.user_id == user_id).scalar()
323 319
324 320 if f is not None:
325 321 try:
326 322 self.sa.delete(f)
327 323 return
328 324 except Exception:
329 325 log.error(traceback.format_exc())
330 326 raise
331 327
332 328 try:
333 329 f = UserFollowing()
334 330 f.user_id = user_id
335 331 f.follows_user_id = follow_user_id
336 332 self.sa.add(f)
337 333 except Exception:
338 334 log.error(traceback.format_exc())
339 335 raise
340 336
341 337 def is_following_repo(self, repo_name, user_id, cache=False):
342 338 r = self.sa.query(Repository)\
343 339 .filter(Repository.repo_name == repo_name).scalar()
344 340
345 341 f = self.sa.query(UserFollowing)\
346 342 .filter(UserFollowing.follows_repository == r)\
347 343 .filter(UserFollowing.user_id == user_id).scalar()
348 344
349 345 return f is not None
350 346
351 347 def is_following_user(self, username, user_id, cache=False):
352 348 u = User.get_by_username(username)
353 349
354 350 f = self.sa.query(UserFollowing)\
355 351 .filter(UserFollowing.follows_user == u)\
356 352 .filter(UserFollowing.user_id == user_id).scalar()
357 353
358 354 return f is not None
359 355
360 356 def get_followers(self, repo):
361 357 repo = self._get_repo(repo)
362 358
363 359 return self.sa.query(UserFollowing)\
364 360 .filter(UserFollowing.follows_repository == repo).count()
365 361
366 362 def get_forks(self, repo):
367 363 repo = self._get_repo(repo)
368 364 return self.sa.query(Repository)\
369 365 .filter(Repository.fork == repo).count()
370 366
371 367 def get_pull_requests(self, repo):
372 368 repo = self._get_repo(repo)
373 369 return self.sa.query(PullRequest)\
374 370 .filter(PullRequest.target_repo == repo)\
375 371 .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
376 372
377 373 def mark_as_fork(self, repo, fork, user):
378 374 repo = self._get_repo(repo)
379 375 fork = self._get_repo(fork)
380 376 if fork and repo.repo_id == fork.repo_id:
381 377 raise Exception("Cannot set repository as fork of itself")
382 378
383 379 if fork and repo.repo_type != fork.repo_type:
384 380 raise RepositoryError(
385 381 "Cannot set repository as fork of repository with other type")
386 382
387 383 repo.fork = fork
388 384 self.sa.add(repo)
389 385 return repo
390 386
    def pull_changes(self, repo, username):
        """
        Pull new commits from the repository's configured ``clone_uri``
        and invalidate its caches afterwards.

        :param repo: Repository object, repo_id or repo_name
        :param username: user the pull is performed for; currently unused
            in the body, kept for API compatibility
        :raises Exception: when the repository has no clone uri configured
        """
        dbrepo = self._get_repo(repo)
        clone_uri = dbrepo.clone_uri
        if not clone_uri:
            raise Exception("This repository doesn't have a clone uri")

        # fresh (uncached) scm instance so we pull against current state
        repo = dbrepo.scm_instance(cache=False)
        # TODO: marcink fix this an re-enable since we need common logic
        # for hg/git remove hooks so we don't trigger them on fetching
        # commits from remote
        repo.config.clear_section('hooks')

        repo_name = dbrepo.repo_name
        try:
            # TODO: we need to make sure those operations call proper hooks !
            repo.pull(clone_uri)

            self.mark_for_invalidation(repo_name)
        except Exception:
            log.error(traceback.format_exc())
            raise
412 408
    def commit_change(self, repo, repo_name, commit, user, author, message,
                      content, f_path):
        """
        Commits a single file change on top of *commit*.

        :param repo: SCM (vcs backend) instance to commit into
        :param repo_name: repository name, used for cache invalidation and
            the post-push hook
        :param commit: parent commit the change is based on
        :param user: RhodeCode User object or user_id, the committer
        :param author: commit author
        :param message: commit message
        :param content: new content of the file at ``f_path``
        :param f_path: path of the file being changed
        :raises IMCCommitError: when the in-memory commit fails
        :returns: the newly created commit
        """
        user = self._get_user(user)

        # decoding here will force that we have proper encoded values
        # in any other case this will throw exceptions and deny commit
        content = safe_str(content)
        path = safe_str(f_path)
        # message and author needs to be unicode
        # proper backend should then translate that into required type
        message = safe_unicode(message)
        author = safe_unicode(author)
        imc = repo.in_memory_commit
        # preserve the original file mode of the changed file
        imc.change(FileNode(path, content, mode=commit.get_file_mode(f_path)))
        try:
            # TODO: handle pre-push action !
            tip = imc.commit(
                message=message, author=author, parents=[commit],
                branch=commit.branch)
        except Exception as e:
            log.error(traceback.format_exc())
            raise IMCCommitError(str(e))
        finally:
            # always clear caches, if commit fails we want fresh object also
            self.mark_for_invalidation(repo_name)

        # We trigger the post-push action
        hooks_utils.trigger_post_push_hook(
            username=user.username, action='push_local', repo_name=repo_name,
            repo_alias=repo.alias, commit_ids=[tip.raw_id])
        return tip
450 446
451 447 def _sanitize_path(self, f_path):
452 448 if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path:
453 449 raise NonRelativePathError('%s is not an relative path' % f_path)
454 450 if f_path:
455 451 f_path = os.path.normpath(f_path)
456 452 return f_path
457 453
458 454 def get_dirnode_metadata(self, commit, dir_node):
459 455 if not dir_node.is_dir():
460 456 return []
461 457
462 458 data = []
463 459 for node in dir_node:
464 460 if not node.is_file():
465 461 # we skip file-nodes
466 462 continue
467 463
468 464 last_commit = node.last_commit
469 465 last_commit_date = last_commit.date
470 466 data.append({
471 467 'name': node.name,
472 468 'size': h.format_byte_size_binary(node.size),
473 469 'modified_at': h.format_date(last_commit_date),
474 470 'modified_ts': last_commit_date.isoformat(),
475 471 'revision': last_commit.revision,
476 472 'short_id': last_commit.short_id,
477 473 'message': h.escape(last_commit.message),
478 474 'author': h.escape(last_commit.author),
479 475 'user_profile': h.gravatar_with_user(last_commit.author),
480 476 })
481 477
482 478 return data
483 479
    def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
                  extended_info=False, content=False):
        """
        recursive walk in root dir and return a set of all path in that dir
        based on repository walk function

        :param repo_name: name of repository
        :param commit_id: commit id for which to list nodes
        :param root_path: root path to list
        :param flat: return as a list, if False returns a dict with decription
        :param extended_info: when non-flat, also include md5/binary/size/
            extension/mimetype/line-count for files (None placeholders
            for directories)
        :param content: when non-flat, also include full file content
            (None for binary files and for directories)
        :returns: tuple ``(_dirs, _files)`` of collected entries
        """
        _files = list()
        _dirs = list()
        try:
            _repo = self._get_repo(repo_name)
            commit = _repo.scm_instance().get_commit(commit_id=commit_id)
            root_path = root_path.lstrip('/')
            for __, dirs, files in commit.walk(root_path):
                for f in files:
                    _content = None
                    # flat mode yields bare paths, otherwise a dict
                    _data = f.unicode_path
                    if not flat:
                        _data = {
                            "name": f.unicode_path,
                            "type": "file",
                        }
                        if extended_info:
                            _content = safe_str(f.content)
                            _data.update({
                                "md5": md5(_content),
                                "binary": f.is_binary,
                                "size": f.size,
                                "extension": f.extension,

                                "mimetype": f.mimetype,
                                "lines": f.lines()[0]
                            })
                        if content:
                            full_content = None
                            if not f.is_binary:
                                # in case we loaded the _content already
                                # re-use it, or load from f[ile]
                                full_content = _content or safe_str(f.content)

                            _data.update({
                                "content": full_content
                            })
                    _files.append(_data)
                for d in dirs:
                    _data = d.unicode_path
                    if not flat:
                        _data = {
                            "name": d.unicode_path,
                            "type": "dir",
                        }
                        if extended_info:
                            # directories get placeholder metadata
                            _data.update({
                                "md5": None,
                                "binary": None,
                                "size": None,
                                "extension": None,
                            })
                        if content:
                            _data.update({
                                "content": None
                            })
                    _dirs.append(_data)
        except RepositoryError:
            log.debug("Exception in get_nodes", exc_info=True)
            raise

        return _dirs, _files
557 553
558 554 def create_nodes(self, user, repo, message, nodes, parent_commit=None,
559 555 author=None, trigger_push_hook=True):
560 556 """
561 557 Commits given multiple nodes into repo
562 558
563 559 :param user: RhodeCode User object or user_id, the commiter
564 560 :param repo: RhodeCode Repository object
565 561 :param message: commit message
566 562 :param nodes: mapping {filename:{'content':content},...}
567 563 :param parent_commit: parent commit, can be empty than it's
568 564 initial commit
569 565 :param author: author of commit, cna be different that commiter
570 566 only for git
571 567 :param trigger_push_hook: trigger push hooks
572 568
573 569 :returns: new commited commit
574 570 """
575 571
576 572 user = self._get_user(user)
577 573 scm_instance = repo.scm_instance(cache=False)
578 574
579 575 processed_nodes = []
580 576 for f_path in nodes:
581 577 f_path = self._sanitize_path(f_path)
582 578 content = nodes[f_path]['content']
583 579 f_path = safe_str(f_path)
584 580 # decoding here will force that we have proper encoded values
585 581 # in any other case this will throw exceptions and deny commit
586 582 if isinstance(content, (basestring,)):
587 583 content = safe_str(content)
588 584 elif isinstance(content, (file, cStringIO.OutputType,)):
589 585 content = content.read()
590 586 else:
591 587 raise Exception('Content is of unrecognized type %s' % (
592 588 type(content)
593 589 ))
594 590 processed_nodes.append((f_path, content))
595 591
596 592 message = safe_unicode(message)
597 593 commiter = user.full_contact
598 594 author = safe_unicode(author) if author else commiter
599 595
600 596 imc = scm_instance.in_memory_commit
601 597
602 598 if not parent_commit:
603 599 parent_commit = EmptyCommit(alias=scm_instance.alias)
604 600
605 601 if isinstance(parent_commit, EmptyCommit):
606 602 # EmptyCommit means we we're editing empty repository
607 603 parents = None
608 604 else:
609 605 parents = [parent_commit]
610 606 # add multiple nodes
611 607 for path, content in processed_nodes:
612 608 imc.add(FileNode(path, content=content))
613 609 # TODO: handle pre push scenario
614 610 tip = imc.commit(message=message,
615 611 author=author,
616 612 parents=parents,
617 613 branch=parent_commit.branch)
618 614
619 615 self.mark_for_invalidation(repo.repo_name)
620 616 if trigger_push_hook:
621 617 hooks_utils.trigger_post_push_hook(
622 618 username=user.username, action='push_local',
623 619 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
624 620 commit_ids=[tip.raw_id])
625 621 return tip
626 622
    def update_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
        """
        Commits add/delete/modify operations on multiple nodes in a single
        commit.

        :param user: RhodeCode User object or user_id, the committer
        :param repo: RhodeCode Repository object
        :param message: commit message
        :param nodes: mapping of ``{old_filename: {'filename': new_name,
            'content': content, 'op': 'add'|'del'|'mod'}, ...}``; a 'mod'
            with a changed filename is treated as a rename
        :param parent_commit: parent commit; when empty an EmptyCommit is
            used (initial commit)
        :param author: author of commit, can be different than committer
            only for git
        :param trigger_push_hook: trigger the post-push hooks
        :raises NodeNotChangedError: when a 'mod' did not actually change
            anything
        :raises IMCCommitError: on any other commit failure
        """
        user = self._get_user(user)
        scm_instance = repo.scm_instance(cache=False)

        message = safe_unicode(message)
        commiter = user.full_contact
        author = safe_unicode(author) if author else commiter

        imc = scm_instance.in_memory_commit

        if not parent_commit:
            parent_commit = EmptyCommit(alias=scm_instance.alias)

        if isinstance(parent_commit, EmptyCommit):
            # EmptyCommit means we we're editing empty repository
            parents = None
        else:
            parents = [parent_commit]

        # add multiple nodes
        for _filename, data in nodes.items():
            # new filename, can be renamed from the old one, also sanitaze
            # the path for any hack around relative paths like ../../ etc.
            filename = self._sanitize_path(data['filename'])
            old_filename = self._sanitize_path(_filename)
            content = data['content']

            filenode = FileNode(old_filename, content=content)
            op = data['op']
            if op == 'add':
                imc.add(filenode)
            elif op == 'del':
                imc.remove(filenode)
            elif op == 'mod':
                if filename != old_filename:
                    # TODO: handle renames more efficient, needs vcs lib
                    # changes
                    imc.remove(filenode)
                    imc.add(FileNode(filename, content=content))
                else:
                    imc.change(filenode)

        try:
            # TODO: handle pre push scenario
            # commit changes
            tip = imc.commit(message=message,
                             author=author,
                             parents=parents,
                             branch=parent_commit.branch)
        except NodeNotChangedError:
            # expected, propagated for callers to handle explicitly
            raise
        except Exception as e:
            log.exception("Unexpected exception during call to imc.commit")
            raise IMCCommitError(str(e))
        finally:
            # always clear caches, if commit fails we want fresh object also
            self.mark_for_invalidation(repo.repo_name)

        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local',
                repo_name=repo.repo_name, repo_alias=scm_instance.alias,
                commit_ids=[tip.raw_id])
691 687
692 688 def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
693 689 author=None, trigger_push_hook=True):
694 690 """
695 691 Deletes given multiple nodes into `repo`
696 692
697 693 :param user: RhodeCode User object or user_id, the committer
698 694 :param repo: RhodeCode Repository object
699 695 :param message: commit message
700 696 :param nodes: mapping {filename:{'content':content},...}
701 697 :param parent_commit: parent commit, can be empty than it's initial
702 698 commit
703 699 :param author: author of commit, cna be different that commiter only
704 700 for git
705 701 :param trigger_push_hook: trigger push hooks
706 702
707 703 :returns: new commit after deletion
708 704 """
709 705
710 706 user = self._get_user(user)
711 707 scm_instance = repo.scm_instance(cache=False)
712 708
713 709 processed_nodes = []
714 710 for f_path in nodes:
715 711 f_path = self._sanitize_path(f_path)
716 712 # content can be empty but for compatabilty it allows same dicts
717 713 # structure as add_nodes
718 714 content = nodes[f_path].get('content')
719 715 processed_nodes.append((f_path, content))
720 716
721 717 message = safe_unicode(message)
722 718 commiter = user.full_contact
723 719 author = safe_unicode(author) if author else commiter
724 720
725 721 imc = scm_instance.in_memory_commit
726 722
727 723 if not parent_commit:
728 724 parent_commit = EmptyCommit(alias=scm_instance.alias)
729 725
730 726 if isinstance(parent_commit, EmptyCommit):
731 727 # EmptyCommit means we we're editing empty repository
732 728 parents = None
733 729 else:
734 730 parents = [parent_commit]
735 731 # add multiple nodes
736 732 for path, content in processed_nodes:
737 733 imc.remove(FileNode(path, content=content))
738 734
739 735 # TODO: handle pre push scenario
740 736 tip = imc.commit(message=message,
741 737 author=author,
742 738 parents=parents,
743 739 branch=parent_commit.branch)
744 740
745 741 self.mark_for_invalidation(repo.repo_name)
746 742 if trigger_push_hook:
747 743 hooks_utils.trigger_post_push_hook(
748 744 username=user.username, action='push_local',
749 745 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
750 746 commit_ids=[tip.raw_id])
751 747 return tip
752 748
753 749 def strip(self, repo, commit_id, branch):
754 750 scm_instance = repo.scm_instance(cache=False)
755 751 scm_instance.config.clear_section('hooks')
756 752 scm_instance.strip(commit_id, branch)
757 753 self.mark_for_invalidation(repo.repo_name)
758 754
759 755 def get_unread_journal(self):
760 756 return self.sa.query(UserLog).count()
761 757
    def get_repo_landing_revs(self, repo=None):
        """
        Generates select option with tags branches and bookmarks (for hg only)
        grouped by type

        :param repo: Repository object, repo_id or repo_name; may be None,
            in which case only the 'rev:tip' entry is returned
        :returns: tuple ``(choices, hist_l)`` — flat list of selectable
            values and the grouped (value, label) structure for the select
        """

        hist_l = []
        choices = []
        repo = self._get_repo(repo)
        # 'rev:tip' is always present and is the default choice
        hist_l.append(['rev:tip', _('latest tip')])
        choices.append('rev:tip')
        if not repo:
            return choices, hist_l

        repo = repo.scm_instance()

        branches_group = (
            [(u'branch:%s' % safe_unicode(b), safe_unicode(b))
             for b in repo.branches],
            _("Branches"))
        hist_l.append(branches_group)
        choices.extend([x[0] for x in branches_group[0]])

        # bookmarks only exist in mercurial repositories
        if repo.alias == 'hg':
            bookmarks_group = (
                [(u'book:%s' % safe_unicode(b), safe_unicode(b))
                 for b in repo.bookmarks],
                _("Bookmarks"))
            hist_l.append(bookmarks_group)
            choices.extend([x[0] for x in bookmarks_group[0]])

        tags_group = (
            [(u'tag:%s' % safe_unicode(t), safe_unicode(t))
             for t in repo.tags],
            _("Tags"))
        hist_l.append(tags_group)
        choices.extend([x[0] for x in tags_group[0]])

        return choices, hist_l
803 799
    def install_git_hook(self, repo, force_create=False):
        """
        Creates a rhodecode hook inside a git repository

        :param repo: Instance of VCS repo
        :param force_create: Create even if same name hook exists
        """
        # bare repos keep hooks in <path>/hooks, non-bare in <path>/.git/hooks
        loc = os.path.join(repo.path, 'hooks')
        if not repo.bare:
            loc = os.path.join(repo.path, '.git', 'hooks')
        if not os.path.isdir(loc):
            os.makedirs(loc, mode=0777)

        # load the pre/post receive hook templates shipped with rhodecode
        tmpl_post = pkg_resources.resource_string(
            'rhodecode', '/'.join(
                ('config', 'hook_templates', 'git_post_receive.py.tmpl')))
        tmpl_pre = pkg_resources.resource_string(
            'rhodecode', '/'.join(
                ('config', 'hook_templates', 'git_pre_receive.py.tmpl')))

        for h_type, tmpl in [('pre', tmpl_pre), ('post', tmpl_post)]:
            _hook_file = os.path.join(loc, '%s-receive' % h_type)
            log.debug('Installing git hook in repo %s', repo)
            # NOTE(review): _check_rhodecode_hook is defined elsewhere in
            # this module; presumably truthy when the existing hook was
            # written by rhodecode (or is absent) — confirm before relying
            _rhodecode_hook = _check_rhodecode_hook(_hook_file)

            if _rhodecode_hook or force_create:
                log.debug('writing %s hook file !', h_type)
                try:
                    with open(_hook_file, 'wb') as f:
                        # substitute version and python interpreter into
                        # the template placeholders
                        tmpl = tmpl.replace('_TMPL_', rhodecode.__version__)
                        tmpl = tmpl.replace('_ENV_', sys.executable)
                        f.write(tmpl)
                    os.chmod(_hook_file, 0755)
                except IOError:
                    log.exception('error writing hook file %s', _hook_file)
            else:
                # foreign (user-written) hook present and no force flag
                log.debug('skipping writing hook file')
842 838
    def install_svn_hooks(self, repo, force_create=False):
        """
        Creates rhodecode hooks inside a svn repository

        :param repo: Instance of VCS repo
        :param force_create: Create even if same name hook exists
        """
        hooks_path = os.path.join(repo.path, 'hooks')
        if not os.path.isdir(hooks_path):
            os.makedirs(hooks_path)
        # load the pre/post commit hook templates shipped with rhodecode
        post_commit_tmpl = pkg_resources.resource_string(
            'rhodecode', '/'.join(
                ('config', 'hook_templates', 'svn_post_commit_hook.py.tmpl')))
        pre_commit_template = pkg_resources.resource_string(
            'rhodecode', '/'.join(
                ('config', 'hook_templates', 'svn_pre_commit_hook.py.tmpl')))
        templates = {
            'post-commit': post_commit_tmpl,
            'pre-commit': pre_commit_template
        }
        for filename in templates:
            _hook_file = os.path.join(hooks_path, filename)
            # NOTE(review): _check_rhodecode_hook is defined elsewhere in
            # this module; presumably truthy when the existing hook was
            # written by rhodecode (or is absent) — confirm before relying
            _rhodecode_hook = _check_rhodecode_hook(_hook_file)
            if _rhodecode_hook or force_create:
                log.debug('writing %s hook file !', filename)
                template = templates[filename]
                try:
                    with open(_hook_file, 'wb') as f:
                        # substitute version and python interpreter into
                        # the template placeholders
                        template = template.replace(
                            '_TMPL_', rhodecode.__version__)
                        template = template.replace('_ENV_', sys.executable)
                        f.write(template)
                    os.chmod(_hook_file, 0755)
                except IOError:
                    log.exception('error writing hook file %s', filename)
            else:
                # foreign (user-written) hook present and no force flag
                log.debug('skipping writing hook file')
880 876
881 877 def install_hooks(self, repo, repo_type):
882 878 if repo_type == 'git':
883 879 self.install_git_hook(repo)
884 880 elif repo_type == 'svn':
885 881 self.install_svn_hooks(repo)
886 882
    def get_server_info(self, environ=None):
        """
        Collect a diagnostics snapshot of this RhodeCode instance and its
        host system, returned as a flat dict (consumed by the admin "system
        info" view — presumably; verify against callers).

        :param environ: optional WSGI environ used only to derive the
            server ip/port; defaults to an empty dict.
        :return: dict with python/platform info, storage paths and sizes,
            database info, VCS backend versions and (when psutil is
            importable) cpu/memory/disk/load metrics.
        """
        # Imports are kept local to this method in the original code,
        # presumably to avoid import cycles at module load — do not hoist.
        import platform
        import rhodecode
        import pkg_resources
        from rhodecode.model.meta import Base as sql_base, Session
        from sqlalchemy.engine import url
        from rhodecode.lib.base import get_server_ip_addr, get_server_port
        from rhodecode.lib.vcs.backends.git import discover_git_version
        from rhodecode.model.gist import GIST_STORE_LOC

        try:
            # cygwin cannot have yet psutil support.
            import psutil
        except ImportError:
            # psutil is optional; all system metrics below degrade to
            # the 'NOT AVAILABLE' / zeroed placeholders.
            psutil = None

        environ = environ or {}
        # Placeholder defaults used whenever psutil is missing or a probe fails.
        _NA = 'NOT AVAILABLE'
        _memory = _NA
        _uptime = _NA
        _boot_time = _NA
        _cpu = _NA
        _disk = dict(percent=0, used=0, total=0, error='')
        _load = {'1_min': _NA, '5_min': _NA, '15_min': _NA}

        # Resolve the storage locations this instance uses.
        model = VcsSettingsModel()
        storage_path = model.get_repos_location()
        gist_storage_path = os.path.join(storage_path, GIST_STORE_LOC)
        archive_storage_path = rhodecode.CONFIG.get('archive_cache_dir', '')
        search_index_storage_path = rhodecode.CONFIG.get('search.location', '')

        if psutil:
            # disk storage
            try:
                _disk = dict(psutil.disk_usage(storage_path)._asdict())
            except Exception as e:
                # best-effort: report the error instead of failing the page
                log.exception('Failed to fetch disk info')
                _disk = {'percent': 0, 'used': 0, 'total': 0, 'error': str(e)}

            # memory
            _memory = dict(psutil.virtual_memory()._asdict())
            # NOTE(review): uses psutil's private ``_common`` helper to compute
            # a "used" percentage from total-free; may break across psutil
            # versions — confirm against the pinned psutil release.
            _memory['percent2'] = psutil._common.usage_percent(
                (_memory['total'] - _memory['free']),
                _memory['total'], 1)

            # load averages (not available on all platforms, e.g. Windows)
            if hasattr(psutil.os, 'getloadavg'):
                _load = dict(zip(
                    ['1_min', '5_min', '15_min'], psutil.os.getloadavg()))
            _uptime = time.time() - psutil.boot_time()
            _boot_time = psutil.boot_time()
            # blocking sample over a 0.5s interval
            _cpu = psutil.cpu_percent(0.5)

        # Installed python packages: {project_name: version}
        mods = dict([(p.project_name, p.version)
                     for p in pkg_resources.working_set])

        def get_storage_size(storage_path):
            # Sum sizes of the *direct* files of storage_path (non-recursive);
            # unreadable files are logged and skipped.
            sizes = []
            for file_ in os.listdir(storage_path):
                storage_file = os.path.join(storage_path, file_)
                if os.path.isfile(storage_file):
                    try:
                        sizes.append(os.path.getsize(storage_file))
                    except OSError:
                        log.exception('Failed to get size of storage file %s',
                                      storage_file)
                        pass

            return sum(sizes)

        # archive cache storage
        _disk_archive = {'percent': 0, 'used': 0, 'total': 0}
        try:
            archive_storage_path_exists = os.path.isdir(
                archive_storage_path)
            if archive_storage_path and archive_storage_path_exists:
                used = get_storage_size(archive_storage_path)
                # 'total' mirrors 'used' here: only consumption is reported
                _disk_archive.update({
                    'used': used,
                    'total': used,
                })
        except Exception as e:
            log.exception('failed to fetch archive cache storage')
            _disk_archive['error'] = str(e)

        # search index storage
        _disk_index = {'percent': 0, 'used': 0, 'total': 0}
        try:
            search_index_storage_path_exists = os.path.isdir(
                search_index_storage_path)
            if search_index_storage_path_exists:
                used = get_storage_size(search_index_storage_path)
                _disk_index.update({
                    'percent': 100,
                    'used': used,
                    'total': used,
                })
        except Exception as e:
            log.exception('failed to fetch search index storage')
            _disk_index['error'] = str(e)

        # gist storage: recursive walk, also counting top-level gist dirs
        _disk_gist = {'percent': 0, 'used': 0, 'total': 0, 'items': 0}
        try:
            items_count = 0
            used = 0
            for root, dirs, files in os.walk(safe_str(gist_storage_path)):
                if root == gist_storage_path:
                    # each top-level directory is one gist
                    items_count = len(dirs)

                for f in files:
                    try:
                        used += os.path.getsize(os.path.join(root, f))
                    except OSError:
                        pass
            _disk_gist.update({
                'percent': 100,
                'used': used,
                'total': used,
                'items': items_count
            })
        except Exception as e:
            log.exception('failed to fetch gist storage items')
            _disk_gist['error'] = str(e)

        # GIT info
        git_ver = discover_git_version()

        # SVN info
        # TODO: johbo: Add discover_svn_version to replace this code.
        try:
            import svn.core
            svn_ver = svn.core.SVN_VERSION
        except ImportError:
            svn_ver = None

        # DB stuff
        db_info = url.make_url(rhodecode.CONFIG['sqlalchemy.db1.url'])
        db_type = db_info.__to_string__()
        try:
            # query the live server for its version via the dialect's
            # (private) version probe
            engine = sql_base.metadata.bind
            db_server_info = engine.dialect._get_server_version_info(
                Session.connection(bind=engine))
            db_version = '%s %s' % (db_info.drivername,
                                    '.'.join(map(str, db_server_info)))
        except Exception:
            log.exception('failed to fetch db version')
            db_version = '%s %s' % (db_info.drivername, '?')

        # current schema migration version from the migrations table
        db_migrate = DbMigrateVersion.query().filter(
            DbMigrateVersion.repository_id == 'rhodecode_db_migrations').one()
        db_migrate_version = db_migrate.version

        info = {
            'py_version': ' '.join(platform._sys_version()),
            'py_path': sys.executable,
            'py_modules': sorted(mods.items(), key=lambda k: k[0].lower()),

            'platform': safe_unicode(platform.platform()),
            'storage': storage_path,
            'archive_storage': archive_storage_path,
            'index_storage': search_index_storage_path,
            'gist_storage': gist_storage_path,


            'db_type': db_type,
            'db_version': db_version,
            'db_migrate_version': db_migrate_version,

            'rhodecode_version': rhodecode.__version__,
            'rhodecode_config_ini': rhodecode.CONFIG.get('__file__'),
            'server_ip': '%s:%s' % (
                get_server_ip_addr(environ, log_errors=False),
                get_server_port(environ)
            ),
            'server_id': rhodecode.CONFIG.get('instance_id'),

            'git_version': safe_unicode(git_ver),
            'hg_version': mods.get('mercurial'),
            'svn_version': svn_ver,

            'uptime': _uptime,
            'boot_time': _boot_time,
            'load': _load,
            'cpu': _cpu,
            'memory': _memory,
            'disk': _disk,
            'disk_archive': _disk_archive,
            'disk_gist': _disk_gist,
            'disk_index': _disk_index,
        }
        return info
1079 1075
1080 1076
def _check_rhodecode_hook(hook_path):
    """
    Tell whether it is safe to (over)write the hook at ``hook_path``.

    Returns True when no file exists there yet, or when the existing file
    was generated by RhodeCode (it contains an ``RC_HOOK_VER`` assignment).
    Returns False for a foreign, user-provided hook.
    """
    if not os.path.exists(hook_path):
        # nothing on disk yet -- free to create our hook
        return True

    log.debug('hook exists, checking if it is from rhodecode')
    content = _read_hook(hook_path)
    match = re.search(r'(?:RC_HOOK_VER)\s*=\s*(.*)', content)
    if match is None:
        return False

    try:
        log.debug('got %s, it is rhodecode', match.group(1))
        return True
    except Exception:
        log.exception("Exception while reading the hook version.")
    return False
1100 1096
1101 1097
1102 1098 def _read_hook(hook_path):
1103 1099 with open(hook_path, 'rb') as f:
1104 1100 content = f.read()
1105 1101 return content
General Comments 0
You need to be logged in to leave comments. Login now