action_logger: removed the not really required start/stop repository-following actions....
marcink -
r1804:4a92b08c default
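In short, this changeset drops the `action_logger` import from `rhodecode.lib.utils` and removes the `started_following_repo` / `stopped_following_repo` journal entries from `ScmModel.toggle_following_repo`; the method now only adds or deletes the `UserFollowing` row. Below is a minimal usage sketch of the affected API after the change; the ids and the bare `ScmModel()` construction are illustrative assumptions, not part of the changeset:

from rhodecode.model.scm import ScmModel

scm = ScmModel()           # assumes a configured SQLAlchemy session is available
user_id, repo_id = 2, 5    # placeholder ids, for illustration only

# Adds a UserFollowing row if none exists, otherwise deletes it; after this
# changeset neither direction writes a journal (UserLog) action any more.
scm.toggle_following_repo(follow_repo_id=repo_id, user_id=user_id)
print(scm.is_following_repo('some/repo-name', user_id))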
@@ -1,915 +1,908 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Scm model for RhodeCode
23 23 """
24 24
25 25 import os.path
26 26 import re
27 27 import sys
28 28 import traceback
29 29 import logging
30 30 import cStringIO
31 31 import pkg_resources
32 32
33 33 from pylons.i18n.translation import _
34 34 from sqlalchemy import func
35 35 from zope.cachedescriptors.property import Lazy as LazyProperty
36 36
37 37 import rhodecode
38 38 from rhodecode.lib.vcs import get_backend
39 39 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
40 40 from rhodecode.lib.vcs.nodes import FileNode
41 41 from rhodecode.lib.vcs.backends.base import EmptyCommit
42 42 from rhodecode.lib import helpers as h
43 43
44 44 from rhodecode.lib.auth import (
45 45 HasRepoPermissionAny, HasRepoGroupPermissionAny,
46 46 HasUserGroupPermissionAny)
47 47 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
48 48 from rhodecode.lib import hooks_utils, caches
49 49 from rhodecode.lib.utils import (
50 get_filesystem_repos, action_logger, make_db_config)
50 get_filesystem_repos, make_db_config)
51 51 from rhodecode.lib.utils2 import (safe_str, safe_unicode)
52 52 from rhodecode.lib.system_info import get_system_info
53 53 from rhodecode.model import BaseModel
54 54 from rhodecode.model.db import (
55 55 Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
56 56 PullRequest)
57 57 from rhodecode.model.settings import VcsSettingsModel
58 58
59 59 log = logging.getLogger(__name__)
60 60
61 61
62 62 class UserTemp(object):
63 63 def __init__(self, user_id):
64 64 self.user_id = user_id
65 65
66 66 def __repr__(self):
67 67 return "<%s('id:%s')>" % (self.__class__.__name__, self.user_id)
68 68
69 69
70 70 class RepoTemp(object):
71 71 def __init__(self, repo_id):
72 72 self.repo_id = repo_id
73 73
74 74 def __repr__(self):
75 75 return "<%s('id:%s')>" % (self.__class__.__name__, self.repo_id)
76 76
77 77
78 78 class SimpleCachedRepoList(object):
79 79 """
80 80 Lighter version of iteration of repos without the scm initialisation,
81 81 and with cache usage
82 82 """
83 83 def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
84 84 self.db_repo_list = db_repo_list
85 85 self.repos_path = repos_path
86 86 self.order_by = order_by
87 87 self.reversed = (order_by or '').startswith('-')
88 88 if not perm_set:
89 89 perm_set = ['repository.read', 'repository.write',
90 90 'repository.admin']
91 91 self.perm_set = perm_set
92 92
93 93 def __len__(self):
94 94 return len(self.db_repo_list)
95 95
96 96 def __repr__(self):
97 97 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
98 98
99 99 def __iter__(self):
100 100 for dbr in self.db_repo_list:
101 101 # check permission at this level
102 102 has_perm = HasRepoPermissionAny(*self.perm_set)(
103 103 dbr.repo_name, 'SimpleCachedRepoList check')
104 104 if not has_perm:
105 105 continue
106 106
107 107 tmp_d = {
108 108 'name': dbr.repo_name,
109 109 'dbrepo': dbr.get_dict(),
110 110 'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {}
111 111 }
112 112 yield tmp_d
113 113
114 114
115 115 class _PermCheckIterator(object):
116 116
117 117 def __init__(
118 118 self, obj_list, obj_attr, perm_set, perm_checker,
119 119 extra_kwargs=None):
120 120 """
121 121 Creates an iterator from the given list of objects, additionally
122 122 checking permissions for them against the perm_set var
123 123
124 124 :param obj_list: list of db objects
125 125 :param obj_attr: attribute of object to pass into perm_checker
126 126 :param perm_set: list of permissions to check
127 127 :param perm_checker: callable to check permissions against
128 128 """
129 129 self.obj_list = obj_list
130 130 self.obj_attr = obj_attr
131 131 self.perm_set = perm_set
132 132 self.perm_checker = perm_checker
133 133 self.extra_kwargs = extra_kwargs or {}
134 134
135 135 def __len__(self):
136 136 return len(self.obj_list)
137 137
138 138 def __repr__(self):
139 139 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
140 140
141 141 def __iter__(self):
142 142 checker = self.perm_checker(*self.perm_set)
143 143 for db_obj in self.obj_list:
144 144 # check permission at this level
145 145 name = getattr(db_obj, self.obj_attr, None)
146 146 if not checker(name, self.__class__.__name__, **self.extra_kwargs):
147 147 continue
148 148
149 149 yield db_obj
150 150
151 151
152 152 class RepoList(_PermCheckIterator):
153 153
154 154 def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
155 155 if not perm_set:
156 156 perm_set = [
157 157 'repository.read', 'repository.write', 'repository.admin']
158 158
159 159 super(RepoList, self).__init__(
160 160 obj_list=db_repo_list,
161 161 obj_attr='repo_name', perm_set=perm_set,
162 162 perm_checker=HasRepoPermissionAny,
163 163 extra_kwargs=extra_kwargs)
164 164
165 165
166 166 class RepoGroupList(_PermCheckIterator):
167 167
168 168 def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
169 169 if not perm_set:
170 170 perm_set = ['group.read', 'group.write', 'group.admin']
171 171
172 172 super(RepoGroupList, self).__init__(
173 173 obj_list=db_repo_group_list,
174 174 obj_attr='group_name', perm_set=perm_set,
175 175 perm_checker=HasRepoGroupPermissionAny,
176 176 extra_kwargs=extra_kwargs)
177 177
178 178
179 179 class UserGroupList(_PermCheckIterator):
180 180
181 181 def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
182 182 if not perm_set:
183 183 perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
184 184
185 185 super(UserGroupList, self).__init__(
186 186 obj_list=db_user_group_list,
187 187 obj_attr='users_group_name', perm_set=perm_set,
188 188 perm_checker=HasUserGroupPermissionAny,
189 189 extra_kwargs=extra_kwargs)
190 190
191 191
192 192 class ScmModel(BaseModel):
193 193 """
194 194 Generic Scm Model
195 195 """
196 196
197 197 @LazyProperty
198 198 def repos_path(self):
199 199 """
200 200 Gets the repositories root path from database
201 201 """
202 202
203 203 settings_model = VcsSettingsModel(sa=self.sa)
204 204 return settings_model.get_repos_location()
205 205
206 206 def repo_scan(self, repos_path=None):
207 207 """
208 208 Listing of repositories in the given path. This path should not be a
209 209 repository itself. Returns a dictionary of repository objects
210 210
211 211 :param repos_path: path to directory containing repositories
212 212 """
213 213
214 214 if repos_path is None:
215 215 repos_path = self.repos_path
216 216
217 217 log.info('scanning for repositories in %s', repos_path)
218 218
219 219 config = make_db_config()
220 220 config.set('extensions', 'largefiles', '')
221 221 repos = {}
222 222
223 223 for name, path in get_filesystem_repos(repos_path, recursive=True):
224 224 # name needs to be decomposed and put back together using the /
225 225 # since this is the internal storage separator for rhodecode
226 226 name = Repository.normalize_repo_name(name)
227 227
228 228 try:
229 229 if name in repos:
230 230 raise RepositoryError('Duplicate repository name %s '
231 231 'found in %s' % (name, path))
232 232 elif path[0] in rhodecode.BACKENDS:
233 233 klass = get_backend(path[0])
234 234 repos[name] = klass(path[1], config=config)
235 235 except OSError:
236 236 continue
237 237 log.debug('found %s paths with repositories', len(repos))
238 238 return repos
239 239
240 240 def get_repos(self, all_repos=None, sort_key=None):
241 241 """
242 242 Get all repositories from the db and for each repo create its
243 243 backend instance and fill that backend with information from the database
244 244
245 245 :param all_repos: list of repository names as strings
246 246 give a specific list of repositories, useful for filtering
247 247
248 248 :param sort_key: initial sorting of repositories
249 249 """
250 250 if all_repos is None:
251 251 all_repos = self.sa.query(Repository)\
252 252 .filter(Repository.group_id == None)\
253 253 .order_by(func.lower(Repository.repo_name)).all()
254 254 repo_iter = SimpleCachedRepoList(
255 255 all_repos, repos_path=self.repos_path, order_by=sort_key)
256 256 return repo_iter
257 257
258 258 def get_repo_groups(self, all_groups=None):
259 259 if all_groups is None:
260 260 all_groups = RepoGroup.query()\
261 261 .filter(RepoGroup.group_parent_id == None).all()
262 262 return [x for x in RepoGroupList(all_groups)]
263 263
264 264 def mark_for_invalidation(self, repo_name, delete=False):
265 265 """
266 266 Mark caches of this repo invalid in the database. `delete` flag
267 267 removes the cache entries
268 268
269 269 :param repo_name: the repo_name for which caches should be marked
270 270 invalid, or deleted
271 271 :param delete: delete the entry keys instead of setting bool
272 272 flag on them
273 273 """
274 274 CacheKey.set_invalidate(repo_name, delete=delete)
275 275 repo = Repository.get_by_repo_name(repo_name)
276 276
277 277 if repo:
278 278 config = repo._config
279 279 config.set('extensions', 'largefiles', '')
280 280 repo.update_commit_cache(config=config, cs_cache=None)
281 281 caches.clear_repo_caches(repo_name)
282 282
283 283 def toggle_following_repo(self, follow_repo_id, user_id):
284 284
285 285 f = self.sa.query(UserFollowing)\
286 286 .filter(UserFollowing.follows_repo_id == follow_repo_id)\
287 287 .filter(UserFollowing.user_id == user_id).scalar()
288 288
289 289 if f is not None:
290 290 try:
291 291 self.sa.delete(f)
292 action_logger(UserTemp(user_id),
293 'stopped_following_repo',
294 RepoTemp(follow_repo_id))
295 292 return
296 293 except Exception:
297 294 log.error(traceback.format_exc())
298 295 raise
299 296
300 297 try:
301 298 f = UserFollowing()
302 299 f.user_id = user_id
303 300 f.follows_repo_id = follow_repo_id
304 301 self.sa.add(f)
305
306 action_logger(UserTemp(user_id),
307 'started_following_repo',
308 RepoTemp(follow_repo_id))
309 302 except Exception:
310 303 log.error(traceback.format_exc())
311 304 raise
312 305
313 306 def toggle_following_user(self, follow_user_id, user_id):
314 307 f = self.sa.query(UserFollowing)\
315 308 .filter(UserFollowing.follows_user_id == follow_user_id)\
316 309 .filter(UserFollowing.user_id == user_id).scalar()
317 310
318 311 if f is not None:
319 312 try:
320 313 self.sa.delete(f)
321 314 return
322 315 except Exception:
323 316 log.error(traceback.format_exc())
324 317 raise
325 318
326 319 try:
327 320 f = UserFollowing()
328 321 f.user_id = user_id
329 322 f.follows_user_id = follow_user_id
330 323 self.sa.add(f)
331 324 except Exception:
332 325 log.error(traceback.format_exc())
333 326 raise
334 327
335 328 def is_following_repo(self, repo_name, user_id, cache=False):
336 329 r = self.sa.query(Repository)\
337 330 .filter(Repository.repo_name == repo_name).scalar()
338 331
339 332 f = self.sa.query(UserFollowing)\
340 333 .filter(UserFollowing.follows_repository == r)\
341 334 .filter(UserFollowing.user_id == user_id).scalar()
342 335
343 336 return f is not None
344 337
345 338 def is_following_user(self, username, user_id, cache=False):
346 339 u = User.get_by_username(username)
347 340
348 341 f = self.sa.query(UserFollowing)\
349 342 .filter(UserFollowing.follows_user == u)\
350 343 .filter(UserFollowing.user_id == user_id).scalar()
351 344
352 345 return f is not None
353 346
354 347 def get_followers(self, repo):
355 348 repo = self._get_repo(repo)
356 349
357 350 return self.sa.query(UserFollowing)\
358 351 .filter(UserFollowing.follows_repository == repo).count()
359 352
360 353 def get_forks(self, repo):
361 354 repo = self._get_repo(repo)
362 355 return self.sa.query(Repository)\
363 356 .filter(Repository.fork == repo).count()
364 357
365 358 def get_pull_requests(self, repo):
366 359 repo = self._get_repo(repo)
367 360 return self.sa.query(PullRequest)\
368 361 .filter(PullRequest.target_repo == repo)\
369 362 .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
370 363
371 364 def mark_as_fork(self, repo, fork, user):
372 365 repo = self._get_repo(repo)
373 366 fork = self._get_repo(fork)
374 367 if fork and repo.repo_id == fork.repo_id:
375 368 raise Exception("Cannot set repository as fork of itself")
376 369
377 370 if fork and repo.repo_type != fork.repo_type:
378 371 raise RepositoryError(
379 372 "Cannot set repository as fork of repository with other type")
380 373
381 374 repo.fork = fork
382 375 self.sa.add(repo)
383 376 return repo
384 377
385 378 def pull_changes(self, repo, username):
386 379 dbrepo = self._get_repo(repo)
387 380 clone_uri = dbrepo.clone_uri
388 381 if not clone_uri:
389 382 raise Exception("This repository doesn't have a clone uri")
390 383
391 384 repo = dbrepo.scm_instance(cache=False)
392 385 # TODO: marcink fix this and re-enable since we need common logic
393 386 # for hg/git remove hooks so we don't trigger them on fetching
394 387 # commits from remote
395 388 repo.config.clear_section('hooks')
396 389
397 390 repo_name = dbrepo.repo_name
398 391 try:
399 392 # TODO: we need to make sure those operations call proper hooks !
400 393 repo.pull(clone_uri)
401 394
402 395 self.mark_for_invalidation(repo_name)
403 396 except Exception:
404 397 log.error(traceback.format_exc())
405 398 raise
406 399
407 400 def commit_change(self, repo, repo_name, commit, user, author, message,
408 401 content, f_path):
409 402 """
410 403 Commits changes
411 404
412 405 :param repo: SCM instance
413 406
414 407 """
415 408 user = self._get_user(user)
416 409
417 410 # decoding here will ensure that we have properly encoded values
418 411 # in any other case this will throw exceptions and deny commit
419 412 content = safe_str(content)
420 413 path = safe_str(f_path)
421 414 # message and author need to be unicode
422 415 # the proper backend should then translate that into the required type
423 416 message = safe_unicode(message)
424 417 author = safe_unicode(author)
425 418 imc = repo.in_memory_commit
426 419 imc.change(FileNode(path, content, mode=commit.get_file_mode(f_path)))
427 420 try:
428 421 # TODO: handle pre-push action !
429 422 tip = imc.commit(
430 423 message=message, author=author, parents=[commit],
431 424 branch=commit.branch)
432 425 except Exception as e:
433 426 log.error(traceback.format_exc())
434 427 raise IMCCommitError(str(e))
435 428 finally:
436 429 # always clear caches; if the commit fails we still want a fresh object
437 430 self.mark_for_invalidation(repo_name)
438 431
439 432 # We trigger the post-push action
440 433 hooks_utils.trigger_post_push_hook(
441 434 username=user.username, action='push_local', repo_name=repo_name,
442 435 repo_alias=repo.alias, commit_ids=[tip.raw_id])
443 436 return tip
444 437
445 438 def _sanitize_path(self, f_path):
446 439 if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path:
447 440 raise NonRelativePathError('%s is not a relative path' % f_path)
448 441 if f_path:
449 442 f_path = os.path.normpath(f_path)
450 443 return f_path
451 444
452 445 def get_dirnode_metadata(self, commit, dir_node):
453 446 if not dir_node.is_dir():
454 447 return []
455 448
456 449 data = []
457 450 for node in dir_node:
458 451 if not node.is_file():
459 452 # we skip file-nodes
460 453 continue
461 454
462 455 last_commit = node.last_commit
463 456 last_commit_date = last_commit.date
464 457 data.append({
465 458 'name': node.name,
466 459 'size': h.format_byte_size_binary(node.size),
467 460 'modified_at': h.format_date(last_commit_date),
468 461 'modified_ts': last_commit_date.isoformat(),
469 462 'revision': last_commit.revision,
470 463 'short_id': last_commit.short_id,
471 464 'message': h.escape(last_commit.message),
472 465 'author': h.escape(last_commit.author),
473 466 'user_profile': h.gravatar_with_user(last_commit.author),
474 467 })
475 468
476 469 return data
477 470
478 471 def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
479 472 extended_info=False, content=False, max_file_bytes=None):
480 473 """
481 474 recursive walk in the root dir, returning a set of all paths in that dir
482 475 based on the repository walk function
483 476
484 477 :param repo_name: name of repository
485 478 :param commit_id: commit id for which to list nodes
486 479 :param root_path: root path to list
487 480 :param flat: return as a list, if False returns a dict with description
488 481 :param max_file_bytes: will not return file contents over this limit
489 482
490 483 """
491 484 _files = list()
492 485 _dirs = list()
493 486 try:
494 487 _repo = self._get_repo(repo_name)
495 488 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
496 489 root_path = root_path.lstrip('/')
497 490 for __, dirs, files in commit.walk(root_path):
498 491 for f in files:
499 492 _content = None
500 493 _data = f.unicode_path
501 494 over_size_limit = (max_file_bytes is not None
502 495 and f.size > max_file_bytes)
503 496
504 497 if not flat:
505 498 _data = {
506 499 "name": f.unicode_path,
507 500 "type": "file",
508 501 }
509 502 if extended_info:
510 503 _data.update({
511 504 "md5": f.md5,
512 505 "binary": f.is_binary,
513 506 "size": f.size,
514 507 "extension": f.extension,
515 508 "mimetype": f.mimetype,
516 509 "lines": f.lines()[0]
517 510 })
518 511
519 512 if content:
520 513 full_content = None
521 514 if not f.is_binary and not over_size_limit:
522 515 full_content = safe_str(f.content)
523 516
524 517 _data.update({
525 518 "content": full_content,
526 519 })
527 520 _files.append(_data)
528 521 for d in dirs:
529 522 _data = d.unicode_path
530 523 if not flat:
531 524 _data = {
532 525 "name": d.unicode_path,
533 526 "type": "dir",
534 527 }
535 528 if extended_info:
536 529 _data.update({
537 530 "md5": None,
538 531 "binary": None,
539 532 "size": None,
540 533 "extension": None,
541 534 })
542 535 if content:
543 536 _data.update({
544 537 "content": None
545 538 })
546 539 _dirs.append(_data)
547 540 except RepositoryError:
548 541 log.debug("Exception in get_nodes", exc_info=True)
549 542 raise
550 543
551 544 return _dirs, _files
552 545
553 546 def create_nodes(self, user, repo, message, nodes, parent_commit=None,
554 547 author=None, trigger_push_hook=True):
555 548 """
556 549 Commits given multiple nodes into repo
557 550
558 551 :param user: RhodeCode User object or user_id, the committer
559 552 :param repo: RhodeCode Repository object
560 553 :param message: commit message
561 554 :param nodes: mapping {filename:{'content':content},...}
562 555 :param parent_commit: parent commit, can be empty, then it's the
563 556 initial commit
564 557 :param author: author of the commit, can be different than the committer,
565 558 only for git
566 559 :param trigger_push_hook: trigger push hooks
567 560
568 561 :returns: new committed commit
569 562 """
570 563
571 564 user = self._get_user(user)
572 565 scm_instance = repo.scm_instance(cache=False)
573 566
574 567 processed_nodes = []
575 568 for f_path in nodes:
576 569 f_path = self._sanitize_path(f_path)
577 570 content = nodes[f_path]['content']
578 571 f_path = safe_str(f_path)
579 572 # decoding here will ensure that we have properly encoded values
580 573 # in any other case this will throw exceptions and deny commit
581 574 if isinstance(content, (basestring,)):
582 575 content = safe_str(content)
583 576 elif isinstance(content, (file, cStringIO.OutputType,)):
584 577 content = content.read()
585 578 else:
586 579 raise Exception('Content is of unrecognized type %s' % (
587 580 type(content)
588 581 ))
589 582 processed_nodes.append((f_path, content))
590 583
591 584 message = safe_unicode(message)
592 585 commiter = user.full_contact
593 586 author = safe_unicode(author) if author else commiter
594 587
595 588 imc = scm_instance.in_memory_commit
596 589
597 590 if not parent_commit:
598 591 parent_commit = EmptyCommit(alias=scm_instance.alias)
599 592
600 593 if isinstance(parent_commit, EmptyCommit):
601 594 # EmptyCommit means we're editing an empty repository
602 595 parents = None
603 596 else:
604 597 parents = [parent_commit]
605 598 # add multiple nodes
606 599 for path, content in processed_nodes:
607 600 imc.add(FileNode(path, content=content))
608 601 # TODO: handle pre push scenario
609 602 tip = imc.commit(message=message,
610 603 author=author,
611 604 parents=parents,
612 605 branch=parent_commit.branch)
613 606
614 607 self.mark_for_invalidation(repo.repo_name)
615 608 if trigger_push_hook:
616 609 hooks_utils.trigger_post_push_hook(
617 610 username=user.username, action='push_local',
618 611 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
619 612 commit_ids=[tip.raw_id])
620 613 return tip
621 614
622 615 def update_nodes(self, user, repo, message, nodes, parent_commit=None,
623 616 author=None, trigger_push_hook=True):
624 617 user = self._get_user(user)
625 618 scm_instance = repo.scm_instance(cache=False)
626 619
627 620 message = safe_unicode(message)
628 621 commiter = user.full_contact
629 622 author = safe_unicode(author) if author else commiter
630 623
631 624 imc = scm_instance.in_memory_commit
632 625
633 626 if not parent_commit:
634 627 parent_commit = EmptyCommit(alias=scm_instance.alias)
635 628
636 629 if isinstance(parent_commit, EmptyCommit):
637 630 # EmptyCommit means we're editing an empty repository
638 631 parents = None
639 632 else:
640 633 parents = [parent_commit]
641 634
642 635 # add multiple nodes
643 636 for _filename, data in nodes.items():
644 637 # new filename, can be renamed from the old one, also sanitize
645 638 # the path for any hack around relative paths like ../../ etc.
646 639 filename = self._sanitize_path(data['filename'])
647 640 old_filename = self._sanitize_path(_filename)
648 641 content = data['content']
649 642
650 643 filenode = FileNode(old_filename, content=content)
651 644 op = data['op']
652 645 if op == 'add':
653 646 imc.add(filenode)
654 647 elif op == 'del':
655 648 imc.remove(filenode)
656 649 elif op == 'mod':
657 650 if filename != old_filename:
658 651 # TODO: handle renames more efficient, needs vcs lib
659 652 # changes
660 653 imc.remove(filenode)
661 654 imc.add(FileNode(filename, content=content))
662 655 else:
663 656 imc.change(filenode)
664 657
665 658 try:
666 659 # TODO: handle pre push scenario
667 660 # commit changes
668 661 tip = imc.commit(message=message,
669 662 author=author,
670 663 parents=parents,
671 664 branch=parent_commit.branch)
672 665 except NodeNotChangedError:
673 666 raise
674 667 except Exception as e:
675 668 log.exception("Unexpected exception during call to imc.commit")
676 669 raise IMCCommitError(str(e))
677 670 finally:
678 671 # always clear caches; if the commit fails we still want a fresh object
679 672 self.mark_for_invalidation(repo.repo_name)
680 673
681 674 if trigger_push_hook:
682 675 hooks_utils.trigger_post_push_hook(
683 676 username=user.username, action='push_local',
684 677 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
685 678 commit_ids=[tip.raw_id])
686 679
687 680 def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
688 681 author=None, trigger_push_hook=True):
689 682 """
690 683 Deletes the given nodes from `repo`
691 684
692 685 :param user: RhodeCode User object or user_id, the committer
693 686 :param repo: RhodeCode Repository object
694 687 :param message: commit message
695 688 :param nodes: mapping {filename:{'content':content},...}
696 689 :param parent_commit: parent commit, can be empty, then it's the initial
697 690 commit
698 691 :param author: author of the commit, can be different than the committer,
699 692 only for git
700 693 :param trigger_push_hook: trigger push hooks
701 694
702 695 :returns: new commit after deletion
703 696 """
704 697
705 698 user = self._get_user(user)
706 699 scm_instance = repo.scm_instance(cache=False)
707 700
708 701 processed_nodes = []
709 702 for f_path in nodes:
710 703 f_path = self._sanitize_path(f_path)
711 704 # content can be empty but for compatibility it allows the same dict
712 705 # structure as add_nodes
713 706 content = nodes[f_path].get('content')
714 707 processed_nodes.append((f_path, content))
715 708
716 709 message = safe_unicode(message)
717 710 commiter = user.full_contact
718 711 author = safe_unicode(author) if author else commiter
719 712
720 713 imc = scm_instance.in_memory_commit
721 714
722 715 if not parent_commit:
723 716 parent_commit = EmptyCommit(alias=scm_instance.alias)
724 717
725 718 if isinstance(parent_commit, EmptyCommit):
726 719 # EmptyCommit means we're editing an empty repository
727 720 parents = None
728 721 else:
729 722 parents = [parent_commit]
730 723 # add multiple nodes
731 724 for path, content in processed_nodes:
732 725 imc.remove(FileNode(path, content=content))
733 726
734 727 # TODO: handle pre push scenario
735 728 tip = imc.commit(message=message,
736 729 author=author,
737 730 parents=parents,
738 731 branch=parent_commit.branch)
739 732
740 733 self.mark_for_invalidation(repo.repo_name)
741 734 if trigger_push_hook:
742 735 hooks_utils.trigger_post_push_hook(
743 736 username=user.username, action='push_local',
744 737 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
745 738 commit_ids=[tip.raw_id])
746 739 return tip
747 740
748 741 def strip(self, repo, commit_id, branch):
749 742 scm_instance = repo.scm_instance(cache=False)
750 743 scm_instance.config.clear_section('hooks')
751 744 scm_instance.strip(commit_id, branch)
752 745 self.mark_for_invalidation(repo.repo_name)
753 746
754 747 def get_unread_journal(self):
755 748 return self.sa.query(UserLog).count()
756 749
757 750 def get_repo_landing_revs(self, repo=None):
758 751 """
759 752 Generates select options with tags, branches and bookmarks (for hg only),
760 753 grouped by type
761 754
762 755 :param repo:
763 756 """
764 757
765 758 repo = self._get_repo(repo)
766 759
767 760 hist_l = [
768 761 ['rev:tip', _('latest tip')]
769 762 ]
770 763 choices = [
771 764 'rev:tip'
772 765 ]
773 766
774 767 if not repo:
775 768 return choices, hist_l
776 769
777 770 repo = repo.scm_instance()
778 771
779 772 branches_group = (
780 773 [(u'branch:%s' % safe_unicode(b), safe_unicode(b))
781 774 for b in repo.branches],
782 775 _("Branches"))
783 776 hist_l.append(branches_group)
784 777 choices.extend([x[0] for x in branches_group[0]])
785 778
786 779 if repo.alias == 'hg':
787 780 bookmarks_group = (
788 781 [(u'book:%s' % safe_unicode(b), safe_unicode(b))
789 782 for b in repo.bookmarks],
790 783 _("Bookmarks"))
791 784 hist_l.append(bookmarks_group)
792 785 choices.extend([x[0] for x in bookmarks_group[0]])
793 786
794 787 tags_group = (
795 788 [(u'tag:%s' % safe_unicode(t), safe_unicode(t))
796 789 for t in repo.tags],
797 790 _("Tags"))
798 791 hist_l.append(tags_group)
799 792 choices.extend([x[0] for x in tags_group[0]])
800 793
801 794 return choices, hist_l
802 795
803 796 def install_git_hook(self, repo, force_create=False):
804 797 """
805 798 Creates a rhodecode hook inside a git repository
806 799
807 800 :param repo: Instance of VCS repo
808 801 :param force_create: Create even if same name hook exists
809 802 """
810 803
811 804 loc = os.path.join(repo.path, 'hooks')
812 805 if not repo.bare:
813 806 loc = os.path.join(repo.path, '.git', 'hooks')
814 807 if not os.path.isdir(loc):
815 808 os.makedirs(loc, mode=0777)
816 809
817 810 tmpl_post = pkg_resources.resource_string(
818 811 'rhodecode', '/'.join(
819 812 ('config', 'hook_templates', 'git_post_receive.py.tmpl')))
820 813 tmpl_pre = pkg_resources.resource_string(
821 814 'rhodecode', '/'.join(
822 815 ('config', 'hook_templates', 'git_pre_receive.py.tmpl')))
823 816
824 817 for h_type, tmpl in [('pre', tmpl_pre), ('post', tmpl_post)]:
825 818 _hook_file = os.path.join(loc, '%s-receive' % h_type)
826 819 log.debug('Installing git hook in repo %s', repo)
827 820 _rhodecode_hook = _check_rhodecode_hook(_hook_file)
828 821
829 822 if _rhodecode_hook or force_create:
830 823 log.debug('writing %s hook file !', h_type)
831 824 try:
832 825 with open(_hook_file, 'wb') as f:
833 826 tmpl = tmpl.replace('_TMPL_', rhodecode.__version__)
834 827 tmpl = tmpl.replace('_ENV_', sys.executable)
835 828 f.write(tmpl)
836 829 os.chmod(_hook_file, 0755)
837 830 except IOError:
838 831 log.exception('error writing hook file %s', _hook_file)
839 832 else:
840 833 log.debug('skipping writing hook file')
841 834
842 835 def install_svn_hooks(self, repo, force_create=False):
843 836 """
844 837 Creates rhodecode hooks inside a svn repository
845 838
846 839 :param repo: Instance of VCS repo
847 840 :param force_create: Create even if same name hook exists
848 841 """
849 842 hooks_path = os.path.join(repo.path, 'hooks')
850 843 if not os.path.isdir(hooks_path):
851 844 os.makedirs(hooks_path)
852 845 post_commit_tmpl = pkg_resources.resource_string(
853 846 'rhodecode', '/'.join(
854 847 ('config', 'hook_templates', 'svn_post_commit_hook.py.tmpl')))
855 848 pre_commit_template = pkg_resources.resource_string(
856 849 'rhodecode', '/'.join(
857 850 ('config', 'hook_templates', 'svn_pre_commit_hook.py.tmpl')))
858 851 templates = {
859 852 'post-commit': post_commit_tmpl,
860 853 'pre-commit': pre_commit_template
861 854 }
862 855 for filename in templates:
863 856 _hook_file = os.path.join(hooks_path, filename)
864 857 _rhodecode_hook = _check_rhodecode_hook(_hook_file)
865 858 if _rhodecode_hook or force_create:
866 859 log.debug('writing %s hook file !', filename)
867 860 template = templates[filename]
868 861 try:
869 862 with open(_hook_file, 'wb') as f:
870 863 template = template.replace(
871 864 '_TMPL_', rhodecode.__version__)
872 865 template = template.replace('_ENV_', sys.executable)
873 866 f.write(template)
874 867 os.chmod(_hook_file, 0755)
875 868 except IOError:
876 869 log.exception('error writing hook file %s', filename)
877 870 else:
878 871 log.debug('skipping writing hook file')
879 872
880 873 def install_hooks(self, repo, repo_type):
881 874 if repo_type == 'git':
882 875 self.install_git_hook(repo)
883 876 elif repo_type == 'svn':
884 877 self.install_svn_hooks(repo)
885 878
886 879 def get_server_info(self, environ=None):
887 880 server_info = get_system_info(environ)
888 881 return server_info
889 882
890 883
891 884 def _check_rhodecode_hook(hook_path):
892 885 """
893 886 Check if the hook was created by RhodeCode
894 887 """
895 888 if not os.path.exists(hook_path):
896 889 return True
897 890
898 891 log.debug('hook exists, checking if it is from rhodecode')
899 892 hook_content = _read_hook(hook_path)
900 893 matches = re.search(r'(?:RC_HOOK_VER)\s*=\s*(.*)', hook_content)
901 894 if matches:
902 895 try:
903 896 version = matches.groups()[0]
904 897 log.debug('got %s, it is rhodecode', version)
905 898 return True
906 899 except Exception:
907 900 log.exception("Exception while reading the hook version.")
908 901
909 902 return False
910 903
911 904
912 905 def _read_hook(hook_path):
913 906 with open(hook_path, 'rb') as f:
914 907 content = f.read()
915 908 return content
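As a reading aid for the create_nodes / update_nodes / delete_nodes docstrings above, a hedged sketch of the `nodes` mapping those methods expect; the file names, contents and keyword values here are placeholders:

# create_nodes / delete_nodes take {filename: {'content': content}, ...}
nodes = {
    'docs/README.rst': {'content': 'Hello from RhodeCode\n'},
    'src/app.py': {'content': 'print("hello")\n'},
}
# update_nodes additionally expects 'op' ('add', 'mod' or 'del') and 'filename'
# keys per entry, as read in its loop over nodes.items() above.
# Hypothetical call, assuming `db_repo` is a rhodecode.model.db.Repository
# instance and `user_id` an existing user id:
# tip = ScmModel().create_nodes(
#     user=user_id, repo=db_repo, message=u'add initial files',
#     nodes=nodes, trigger_push_hook=False)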