##// END OF EJS Templates
scm-model: fix the reference to the proper default url used for pushing
marcink -
r2561:d072c29a default
parent child Browse files
Show More
@@ -1,922 +1,922 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Scm model for RhodeCode
23 23 """
24 24
25 25 import os.path
26 26 import re
27 27 import sys
28 28 import traceback
29 29 import logging
30 30 import cStringIO
31 31 import pkg_resources
32 32
33 33 from sqlalchemy import func
34 34 from zope.cachedescriptors.property import Lazy as LazyProperty
35 35
36 36 import rhodecode
37 37 from rhodecode.lib.vcs import get_backend
38 38 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
39 39 from rhodecode.lib.vcs.nodes import FileNode
40 40 from rhodecode.lib.vcs.backends.base import EmptyCommit
41 41 from rhodecode.lib import helpers as h
42 42 from rhodecode.lib.auth import (
43 43 HasRepoPermissionAny, HasRepoGroupPermissionAny,
44 44 HasUserGroupPermissionAny)
45 45 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
46 46 from rhodecode.lib import hooks_utils, caches
47 47 from rhodecode.lib.utils import (
48 48 get_filesystem_repos, make_db_config)
49 49 from rhodecode.lib.utils2 import (safe_str, safe_unicode)
50 50 from rhodecode.lib.system_info import get_system_info
51 51 from rhodecode.model import BaseModel
52 52 from rhodecode.model.db import (
53 53 Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
54 54 PullRequest)
55 55 from rhodecode.model.settings import VcsSettingsModel
56 56
# Module-level logger shared by all classes and helpers in this module.
log = logging.getLogger(__name__)
58 58
59 59
class UserTemp(object):
    """Lightweight stand-in object that carries only a user id."""

    def __init__(self, user_id):
        self.user_id = user_id

    def __repr__(self):
        cls_name = type(self).__name__
        return "<%s('id:%s')>" % (cls_name, self.user_id)
66 66
67 67
class RepoTemp(object):
    """Lightweight stand-in object that carries only a repository id."""

    def __init__(self, repo_id):
        self.repo_id = repo_id

    def __repr__(self):
        cls_name = type(self).__name__
        return "<%s('id:%s')>" % (cls_name, self.repo_id)
74 74
75 75
class SimpleCachedRepoList(object):
    """
    Lighter-weight iteration over repositories: no scm initialisation,
    relies on cached db data and yields permission-filtered dicts.
    """

    def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
        self.db_repo_list = db_repo_list
        self.repos_path = repos_path
        self.order_by = order_by
        # a leading '-' in order_by marks descending order
        self.reversed = (order_by or '').startswith('-')
        # default to any read-or-better repository permission
        self.perm_set = perm_set or [
            'repository.read', 'repository.write', 'repository.admin']

    def __len__(self):
        return len(self.db_repo_list)

    def __repr__(self):
        return '<%s (%s)>' % (self.__class__.__name__, len(self))

    def __iter__(self):
        for dbr in self.db_repo_list:
            # permission check happens per repository, at iteration time
            if not HasRepoPermissionAny(*self.perm_set)(
                    dbr.repo_name, 'SimpleCachedRepoList check'):
                continue

            yield {
                'name': dbr.repo_name,
                'dbrepo': dbr.get_dict(),
                'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {},
            }
111 111
112 112
class _PermCheckIterator(object):

    def __init__(
            self, obj_list, obj_attr, perm_set, perm_checker,
            extra_kwargs=None):
        """
        Iterator over db objects that only yields entries for which the
        given permission check passes.

        :param obj_list: list of db objects
        :param obj_attr: attribute of object to pass into perm_checker
        :param perm_set: list of permissions to check
        :param perm_checker: callable to check permissions against
        :param extra_kwargs: extra keyword args forwarded to the checker
        """
        self.obj_list = obj_list
        self.obj_attr = obj_attr
        self.perm_set = perm_set
        self.perm_checker = perm_checker
        self.extra_kwargs = extra_kwargs or {}

    def __len__(self):
        return len(self.obj_list)

    def __repr__(self):
        return '<%s (%s)>' % (self.__class__.__name__, len(self))

    def __iter__(self):
        # the checker instance is built once and reused for all objects
        checker = self.perm_checker(*self.perm_set)
        context = self.__class__.__name__
        for db_obj in self.obj_list:
            name = getattr(db_obj, self.obj_attr, None)
            if checker(name, context, **self.extra_kwargs):
                yield db_obj
148 148
149 149
class RepoList(_PermCheckIterator):
    """Permission-filtered iterator over repository db objects."""

    def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
        # default to any read-or-better repository permission
        perm_set = perm_set or [
            'repository.read', 'repository.write', 'repository.admin']

        super(RepoList, self).__init__(
            obj_list=db_repo_list,
            obj_attr='repo_name', perm_set=perm_set,
            perm_checker=HasRepoPermissionAny,
            extra_kwargs=extra_kwargs)
162 162
163 163
class RepoGroupList(_PermCheckIterator):
    """Permission-filtered iterator over repository-group db objects."""

    def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
        # default to any read-or-better group permission
        perm_set = perm_set or ['group.read', 'group.write', 'group.admin']

        super(RepoGroupList, self).__init__(
            obj_list=db_repo_group_list,
            obj_attr='group_name', perm_set=perm_set,
            perm_checker=HasRepoGroupPermissionAny,
            extra_kwargs=extra_kwargs)
175 175
176 176
class UserGroupList(_PermCheckIterator):
    """Permission-filtered iterator over user-group db objects."""

    def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
        # default to any read-or-better user-group permission
        perm_set = perm_set or [
            'usergroup.read', 'usergroup.write', 'usergroup.admin']

        super(UserGroupList, self).__init__(
            obj_list=db_user_group_list,
            obj_attr='users_group_name', perm_set=perm_set,
            perm_checker=HasUserGroupPermissionAny,
            extra_kwargs=extra_kwargs)
188 188
189 189
190 190 class ScmModel(BaseModel):
191 191 """
192 192 Generic Scm Model
193 193 """
194 194
195 195 @LazyProperty
196 196 def repos_path(self):
197 197 """
198 198 Gets the repositories root path from database
199 199 """
200 200
201 201 settings_model = VcsSettingsModel(sa=self.sa)
202 202 return settings_model.get_repos_location()
203 203
    def repo_scan(self, repos_path=None):
        """
        Listing of repositories in given path. This path should not be a
        repository itself. Return a dictionary of repository objects

        :param repos_path: path to directory containing repositories;
            defaults to the configured repositories root
        :return: dict mapping normalized repo name -> backend repo instance
        """

        if repos_path is None:
            repos_path = self.repos_path

        log.info('scanning for repositories in %s', repos_path)

        # internal scan config; largefiles extension disabled on purpose
        config = make_db_config()
        config.set('extensions', 'largefiles', '')
        repos = {}

        for name, path in get_filesystem_repos(repos_path, recursive=True):
            # name need to be decomposed and put back together using the /
            # since this is internal storage separator for rhodecode
            name = Repository.normalize_repo_name(name)

            try:
                if name in repos:
                    raise RepositoryError('Duplicate repository name %s '
                                          'found in %s' % (name, path))
                elif path[0] in rhodecode.BACKENDS:
                    # path is a (vcs alias, filesystem path) pair here
                    klass = get_backend(path[0])
                    repos[name] = klass(path[1], config=config)
            except OSError:
                # repo vanished or is unreadable during the scan; skip it
                continue
        log.debug('found %s paths with repositories', len(repos))
        return repos
237 237
238 238 def get_repos(self, all_repos=None, sort_key=None):
239 239 """
240 240 Get all repositories from db and for each repo create it's
241 241 backend instance and fill that backed with information from database
242 242
243 243 :param all_repos: list of repository names as strings
244 244 give specific repositories list, good for filtering
245 245
246 246 :param sort_key: initial sorting of repositories
247 247 """
248 248 if all_repos is None:
249 249 all_repos = self.sa.query(Repository)\
250 250 .filter(Repository.group_id == None)\
251 251 .order_by(func.lower(Repository.repo_name)).all()
252 252 repo_iter = SimpleCachedRepoList(
253 253 all_repos, repos_path=self.repos_path, order_by=sort_key)
254 254 return repo_iter
255 255
256 256 def get_repo_groups(self, all_groups=None):
257 257 if all_groups is None:
258 258 all_groups = RepoGroup.query()\
259 259 .filter(RepoGroup.group_parent_id == None).all()
260 260 return [x for x in RepoGroupList(all_groups)]
261 261
    def mark_for_invalidation(self, repo_name, delete=False):
        """
        Mark caches of this repo invalid in the database. `delete` flag
        removes the cache entries

        :param repo_name: the repo_name for which caches should be marked
            invalid, or deleted
        :param delete: delete the entry keys instead of setting bool
            flag on them
        """
        CacheKey.set_invalidate(repo_name, delete=delete)
        repo = Repository.get_by_repo_name(repo_name)

        if repo:
            # refresh the cached commit info right away; the largefiles
            # extension is disabled for this internal config read
            config = repo._config
            config.set('extensions', 'largefiles', '')
            repo.update_commit_cache(config=config, cs_cache=None)
            caches.clear_repo_caches(repo_name)
280 280
281 281 def toggle_following_repo(self, follow_repo_id, user_id):
282 282
283 283 f = self.sa.query(UserFollowing)\
284 284 .filter(UserFollowing.follows_repo_id == follow_repo_id)\
285 285 .filter(UserFollowing.user_id == user_id).scalar()
286 286
287 287 if f is not None:
288 288 try:
289 289 self.sa.delete(f)
290 290 return
291 291 except Exception:
292 292 log.error(traceback.format_exc())
293 293 raise
294 294
295 295 try:
296 296 f = UserFollowing()
297 297 f.user_id = user_id
298 298 f.follows_repo_id = follow_repo_id
299 299 self.sa.add(f)
300 300 except Exception:
301 301 log.error(traceback.format_exc())
302 302 raise
303 303
304 304 def toggle_following_user(self, follow_user_id, user_id):
305 305 f = self.sa.query(UserFollowing)\
306 306 .filter(UserFollowing.follows_user_id == follow_user_id)\
307 307 .filter(UserFollowing.user_id == user_id).scalar()
308 308
309 309 if f is not None:
310 310 try:
311 311 self.sa.delete(f)
312 312 return
313 313 except Exception:
314 314 log.error(traceback.format_exc())
315 315 raise
316 316
317 317 try:
318 318 f = UserFollowing()
319 319 f.user_id = user_id
320 320 f.follows_user_id = follow_user_id
321 321 self.sa.add(f)
322 322 except Exception:
323 323 log.error(traceback.format_exc())
324 324 raise
325 325
326 326 def is_following_repo(self, repo_name, user_id, cache=False):
327 327 r = self.sa.query(Repository)\
328 328 .filter(Repository.repo_name == repo_name).scalar()
329 329
330 330 f = self.sa.query(UserFollowing)\
331 331 .filter(UserFollowing.follows_repository == r)\
332 332 .filter(UserFollowing.user_id == user_id).scalar()
333 333
334 334 return f is not None
335 335
336 336 def is_following_user(self, username, user_id, cache=False):
337 337 u = User.get_by_username(username)
338 338
339 339 f = self.sa.query(UserFollowing)\
340 340 .filter(UserFollowing.follows_user == u)\
341 341 .filter(UserFollowing.user_id == user_id).scalar()
342 342
343 343 return f is not None
344 344
345 345 def get_followers(self, repo):
346 346 repo = self._get_repo(repo)
347 347
348 348 return self.sa.query(UserFollowing)\
349 349 .filter(UserFollowing.follows_repository == repo).count()
350 350
351 351 def get_forks(self, repo):
352 352 repo = self._get_repo(repo)
353 353 return self.sa.query(Repository)\
354 354 .filter(Repository.fork == repo).count()
355 355
356 356 def get_pull_requests(self, repo):
357 357 repo = self._get_repo(repo)
358 358 return self.sa.query(PullRequest)\
359 359 .filter(PullRequest.target_repo == repo)\
360 360 .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
361 361
362 362 def mark_as_fork(self, repo, fork, user):
363 363 repo = self._get_repo(repo)
364 364 fork = self._get_repo(fork)
365 365 if fork and repo.repo_id == fork.repo_id:
366 366 raise Exception("Cannot set repository as fork of itself")
367 367
368 368 if fork and repo.repo_type != fork.repo_type:
369 369 raise RepositoryError(
370 370 "Cannot set repository as fork of repository with other type")
371 371
372 372 repo.fork = fork
373 373 self.sa.add(repo)
374 374 return repo
375 375
    def pull_changes(self, repo, username, remote_uri=None):
        """
        Pull incoming changes into `repo` from a remote url.

        :param repo: repository name, id or db object
        :param username: name of the user performing the pull (currently
            unused in this method)
        :param remote_uri: optional explicit source url; falls back to the
            repository's stored ``clone_uri``
        :raises Exception: when no url is available or the vcs pull fails
        """
        dbrepo = self._get_repo(repo)
        remote_uri = remote_uri or dbrepo.clone_uri
        if not remote_uri:
            raise Exception("This repository doesn't have a clone uri")

        repo = dbrepo.scm_instance(cache=False)
        # TODO: marcink fix this an re-enable since we need common logic
        # for hg/git remove hooks so we don't trigger them on fetching
        # commits from remote
        repo.config.clear_section('hooks')

        repo_name = dbrepo.repo_name
        try:
            # TODO: we need to make sure those operations call proper hooks !
            repo.pull(remote_uri)

            # caches are stale after pulling new commits
            self.mark_for_invalidation(repo_name)
        except Exception:
            log.error(traceback.format_exc())
            raise
397 397
398 398 def push_changes(self, repo, username, remote_uri=None):
399 399 dbrepo = self._get_repo(repo)
400 remote_uri = remote_uri or dbrepo.clone_uri
400 remote_uri = remote_uri or dbrepo.push_uri
401 401 if not remote_uri:
402 402 raise Exception("This repository doesn't have a clone uri")
403 403
404 404 repo = dbrepo.scm_instance(cache=False)
405 405 repo.config.clear_section('hooks')
406 406
407 407 try:
408 408 repo.push(remote_uri)
409 409 except Exception:
410 410 log.error(traceback.format_exc())
411 411 raise
412 412
    def commit_change(self, repo, repo_name, commit, user, author, message,
                      content, f_path):
        """
        Commit a change of a single file into `repo`.

        :param repo: SCM (vcs) repository instance
        :param repo_name: repository name, used for cache invalidation and
            the post-push hook
        :param commit: parent commit the change is based on
        :param user: RhodeCode User object or user_id, the committer
        :param author: author of the commit
        :param message: commit message
        :param content: new file content
        :param f_path: path of the changed file
        :returns: the newly created commit
        :raises IMCCommitError: when the in-memory commit fails
        """
        user = self._get_user(user)

        # decoding here will force that we have proper encoded values
        # in any other case this will throw exceptions and deny commit
        content = safe_str(content)
        path = safe_str(f_path)
        # message and author needs to be unicode
        # proper backend should then translate that into required type
        message = safe_unicode(message)
        author = safe_unicode(author)
        imc = repo.in_memory_commit
        # keep the original file mode of the changed file
        imc.change(FileNode(path, content, mode=commit.get_file_mode(f_path)))
        try:
            # TODO: handle pre-push action !
            tip = imc.commit(
                message=message, author=author, parents=[commit],
                branch=commit.branch)
        except Exception as e:
            log.error(traceback.format_exc())
            raise IMCCommitError(str(e))
        finally:
            # always clear caches, if commit fails we want fresh object also
            self.mark_for_invalidation(repo_name)

        # We trigger the post-push action
        hooks_utils.trigger_post_push_hook(
            username=user.username, action='push_local', repo_name=repo_name,
            repo_alias=repo.alias, commit_ids=[tip.raw_id])
        return tip
450 450
451 451 def _sanitize_path(self, f_path):
452 452 if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path:
453 453 raise NonRelativePathError('%s is not an relative path' % f_path)
454 454 if f_path:
455 455 f_path = os.path.normpath(f_path)
456 456 return f_path
457 457
    def get_dirnode_metadata(self, request, commit, dir_node):
        """
        Collect per-file metadata (size, last-commit info, author) for every
        file directly inside `dir_node`.

        :param request: current request, used for gravatar rendering
        :param commit: commit the nodes belong to (the nodes themselves
            carry their own `last_commit`)
        :param dir_node: directory node to inspect
        :returns: list of metadata dicts, one per file; empty list when
            `dir_node` is not a directory
        """
        if not dir_node.is_dir():
            return []

        data = []
        for node in dir_node:
            if not node.is_file():
                # skip anything that is not a file (e.g. sub-directories)
                continue

            last_commit = node.last_commit
            last_commit_date = last_commit.date
            data.append({
                'name': node.name,
                'size': h.format_byte_size_binary(node.size),
                'modified_at': h.format_date(last_commit_date),
                'modified_ts': last_commit_date.isoformat(),
                'revision': last_commit.revision,
                'short_id': last_commit.short_id,
                'message': h.escape(last_commit.message),
                'author': h.escape(last_commit.author),
                'user_profile': h.gravatar_with_user(
                    request, last_commit.author),
            })

        return data
484 484
    def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
                  extended_info=False, content=False, max_file_bytes=None):
        """
        recursive walk in root dir and return a set of all path in that dir
        based on repository walk function

        :param repo_name: name of repository
        :param commit_id: commit id for which to list nodes
        :param root_path: root path to list
        :param flat: return as a list, if False returns a dict with description
        :param extended_info: include md5/binary/size/extension/mimetype/lines
            metadata for every entry
        :param content: include full file content (None for binary files or
            files above `max_file_bytes`)
        :param max_file_bytes: will not return file contents over this limit

        :returns: tuple of (dirs, files) lists
        :raises RepositoryError: propagated from the vcs layer
        """
        _files = list()
        _dirs = list()
        try:
            _repo = self._get_repo(repo_name)
            commit = _repo.scm_instance().get_commit(commit_id=commit_id)
            root_path = root_path.lstrip('/')
            for __, dirs, files in commit.walk(root_path):
                for f in files:
                    _content = None
                    # flat mode yields plain paths instead of dicts
                    _data = f.unicode_path
                    over_size_limit = (max_file_bytes is not None
                                       and f.size > max_file_bytes)

                    if not flat:
                        _data = {
                            "name": h.escape(f.unicode_path),
                            "type": "file",
                        }
                        if extended_info:
                            _data.update({
                                "md5": f.md5,
                                "binary": f.is_binary,
                                "size": f.size,
                                "extension": f.extension,
                                "mimetype": f.mimetype,
                                "lines": f.lines()[0]
                            })

                        if content:
                            # content only for non-binary, size-limited files
                            full_content = None
                            if not f.is_binary and not over_size_limit:
                                full_content = safe_str(f.content)

                            _data.update({
                                "content": full_content,
                            })
                    _files.append(_data)
                for d in dirs:
                    _data = d.unicode_path
                    if not flat:
                        _data = {
                            "name": h.escape(d.unicode_path),
                            "type": "dir",
                        }
                    if extended_info:
                        # dirs carry no file metadata; keep the keys for a
                        # uniform shape with the file entries
                        _data.update({
                            "md5": None,
                            "binary": None,
                            "size": None,
                            "extension": None,
                        })
                    if content:
                        _data.update({
                            "content": None
                        })
                    _dirs.append(_data)
        except RepositoryError:
            log.debug("Exception in get_nodes", exc_info=True)
            raise

        return _dirs, _files
559 559
    def create_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
        """
        Commits given multiple nodes into repo

        :param user: RhodeCode User object or user_id, the commiter
        :param repo: RhodeCode Repository object
        :param message: commit message
        :param nodes: mapping {filename:{'content':content},...}
        :param parent_commit: parent commit, can be empty than it's
            initial commit
        :param author: author of commit, can be different that commiter
            only for git
        :param trigger_push_hook: trigger push hooks

        :returns: new commited commit
        """

        user = self._get_user(user)
        scm_instance = repo.scm_instance(cache=False)

        processed_nodes = []
        for f_path in nodes:
            f_path = self._sanitize_path(f_path)
            content = nodes[f_path]['content']
            f_path = safe_str(f_path)
            # decoding here will force that we have proper encoded values
            # in any other case this will throw exceptions and deny commit
            if isinstance(content, (basestring,)):
                content = safe_str(content)
            elif isinstance(content, (file, cStringIO.OutputType,)):
                # file-like objects are read in full before committing
                content = content.read()
            else:
                raise Exception('Content is of unrecognized type %s' % (
                    type(content)
                ))
            processed_nodes.append((f_path, content))

        message = safe_unicode(message)
        commiter = user.full_contact
        author = safe_unicode(author) if author else commiter

        imc = scm_instance.in_memory_commit

        if not parent_commit:
            parent_commit = EmptyCommit(alias=scm_instance.alias)

        if isinstance(parent_commit, EmptyCommit):
            # EmptyCommit means we're editing an empty repository
            parents = None
        else:
            parents = [parent_commit]
        # add multiple nodes
        for path, content in processed_nodes:
            imc.add(FileNode(path, content=content))
        # TODO: handle pre push scenario
        tip = imc.commit(message=message,
                         author=author,
                         parents=parents,
                         branch=parent_commit.branch)

        self.mark_for_invalidation(repo.repo_name)
        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local',
                repo_name=repo.repo_name, repo_alias=scm_instance.alias,
                commit_ids=[tip.raw_id])
        return tip
628 628
629 629 def update_nodes(self, user, repo, message, nodes, parent_commit=None,
630 630 author=None, trigger_push_hook=True):
631 631 user = self._get_user(user)
632 632 scm_instance = repo.scm_instance(cache=False)
633 633
634 634 message = safe_unicode(message)
635 635 commiter = user.full_contact
636 636 author = safe_unicode(author) if author else commiter
637 637
638 638 imc = scm_instance.in_memory_commit
639 639
640 640 if not parent_commit:
641 641 parent_commit = EmptyCommit(alias=scm_instance.alias)
642 642
643 643 if isinstance(parent_commit, EmptyCommit):
644 644 # EmptyCommit means we we're editing empty repository
645 645 parents = None
646 646 else:
647 647 parents = [parent_commit]
648 648
649 649 # add multiple nodes
650 650 for _filename, data in nodes.items():
651 651 # new filename, can be renamed from the old one, also sanitaze
652 652 # the path for any hack around relative paths like ../../ etc.
653 653 filename = self._sanitize_path(data['filename'])
654 654 old_filename = self._sanitize_path(_filename)
655 655 content = data['content']
656 656
657 657 filenode = FileNode(old_filename, content=content)
658 658 op = data['op']
659 659 if op == 'add':
660 660 imc.add(filenode)
661 661 elif op == 'del':
662 662 imc.remove(filenode)
663 663 elif op == 'mod':
664 664 if filename != old_filename:
665 665 # TODO: handle renames more efficient, needs vcs lib
666 666 # changes
667 667 imc.remove(filenode)
668 668 imc.add(FileNode(filename, content=content))
669 669 else:
670 670 imc.change(filenode)
671 671
672 672 try:
673 673 # TODO: handle pre push scenario
674 674 # commit changes
675 675 tip = imc.commit(message=message,
676 676 author=author,
677 677 parents=parents,
678 678 branch=parent_commit.branch)
679 679 except NodeNotChangedError:
680 680 raise
681 681 except Exception as e:
682 682 log.exception("Unexpected exception during call to imc.commit")
683 683 raise IMCCommitError(str(e))
684 684 finally:
685 685 # always clear caches, if commit fails we want fresh object also
686 686 self.mark_for_invalidation(repo.repo_name)
687 687
688 688 if trigger_push_hook:
689 689 hooks_utils.trigger_post_push_hook(
690 690 username=user.username, action='push_local',
691 691 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
692 692 commit_ids=[tip.raw_id])
693 693
    def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
        """
        Deletes given multiple nodes into `repo`

        :param user: RhodeCode User object or user_id, the committer
        :param repo: RhodeCode Repository object
        :param message: commit message
        :param nodes: mapping {filename:{'content':content},...}
        :param parent_commit: parent commit, can be empty than it's initial
            commit
        :param author: author of commit, can be different that commiter only
            for git
        :param trigger_push_hook: trigger push hooks

        :returns: new commit after deletion
        """

        user = self._get_user(user)
        scm_instance = repo.scm_instance(cache=False)

        processed_nodes = []
        for f_path in nodes:
            f_path = self._sanitize_path(f_path)
            # content can be empty but for compatabilty it allows same dicts
            # structure as add_nodes
            content = nodes[f_path].get('content')
            processed_nodes.append((f_path, content))

        message = safe_unicode(message)
        commiter = user.full_contact
        author = safe_unicode(author) if author else commiter

        imc = scm_instance.in_memory_commit

        if not parent_commit:
            parent_commit = EmptyCommit(alias=scm_instance.alias)

        if isinstance(parent_commit, EmptyCommit):
            # EmptyCommit means we're editing an empty repository
            parents = None
        else:
            parents = [parent_commit]
        # remove the given nodes in a single commit
        for path, content in processed_nodes:
            imc.remove(FileNode(path, content=content))

        # TODO: handle pre push scenario
        tip = imc.commit(message=message,
                         author=author,
                         parents=parents,
                         branch=parent_commit.branch)

        self.mark_for_invalidation(repo.repo_name)
        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local',
                repo_name=repo.repo_name, repo_alias=scm_instance.alias,
                commit_ids=[tip.raw_id])
        return tip
754 754
755 755 def strip(self, repo, commit_id, branch):
756 756 scm_instance = repo.scm_instance(cache=False)
757 757 scm_instance.config.clear_section('hooks')
758 758 scm_instance.strip(commit_id, branch)
759 759 self.mark_for_invalidation(repo.repo_name)
760 760
761 761 def get_unread_journal(self):
762 762 return self.sa.query(UserLog).count()
763 763
764 764 def get_repo_landing_revs(self, translator, repo=None):
765 765 """
766 766 Generates select option with tags branches and bookmarks (for hg only)
767 767 grouped by type
768 768
769 769 :param repo:
770 770 """
771 771 _ = translator
772 772 repo = self._get_repo(repo)
773 773
774 774 hist_l = [
775 775 ['rev:tip', _('latest tip')]
776 776 ]
777 777 choices = [
778 778 'rev:tip'
779 779 ]
780 780
781 781 if not repo:
782 782 return choices, hist_l
783 783
784 784 repo = repo.scm_instance()
785 785
786 786 branches_group = (
787 787 [(u'branch:%s' % safe_unicode(b), safe_unicode(b))
788 788 for b in repo.branches],
789 789 _("Branches"))
790 790 hist_l.append(branches_group)
791 791 choices.extend([x[0] for x in branches_group[0]])
792 792
793 793 if repo.alias == 'hg':
794 794 bookmarks_group = (
795 795 [(u'book:%s' % safe_unicode(b), safe_unicode(b))
796 796 for b in repo.bookmarks],
797 797 _("Bookmarks"))
798 798 hist_l.append(bookmarks_group)
799 799 choices.extend([x[0] for x in bookmarks_group[0]])
800 800
801 801 tags_group = (
802 802 [(u'tag:%s' % safe_unicode(t), safe_unicode(t))
803 803 for t in repo.tags],
804 804 _("Tags"))
805 805 hist_l.append(tags_group)
806 806 choices.extend([x[0] for x in tags_group[0]])
807 807
808 808 return choices, hist_l
809 809
    def install_git_hook(self, repo, force_create=False):
        """
        Creates a rhodecode hook inside a git repository

        :param repo: Instance of VCS repo
        :param force_create: Create even if same name hook exists
        """

        # bare repos keep hooks at top level, non-bare repos under .git/
        loc = os.path.join(repo.path, 'hooks')
        if not repo.bare:
            loc = os.path.join(repo.path, '.git', 'hooks')
        if not os.path.isdir(loc):
            # NOTE(review): mode 0777 makes the hooks dir world-writable;
            # confirm this permissive mode is intentional
            os.makedirs(loc, mode=0777)

        tmpl_post = pkg_resources.resource_string(
            'rhodecode', '/'.join(
                ('config', 'hook_templates', 'git_post_receive.py.tmpl')))
        tmpl_pre = pkg_resources.resource_string(
            'rhodecode', '/'.join(
                ('config', 'hook_templates', 'git_pre_receive.py.tmpl')))

        for h_type, tmpl in [('pre', tmpl_pre), ('post', tmpl_post)]:
            _hook_file = os.path.join(loc, '%s-receive' % h_type)
            log.debug('Installing git hook in repo %s', repo)
            # only overwrite hooks that are missing or rhodecode-managed
            _rhodecode_hook = _check_rhodecode_hook(_hook_file)

            if _rhodecode_hook or force_create:
                log.debug('writing %s hook file !', h_type)
                try:
                    with open(_hook_file, 'wb') as f:
                        # fill in version and interpreter placeholders
                        tmpl = tmpl.replace('_TMPL_', rhodecode.__version__)
                        tmpl = tmpl.replace('_ENV_', sys.executable)
                        f.write(tmpl)
                    os.chmod(_hook_file, 0755)
                except IOError:
                    log.exception('error writing hook file %s', _hook_file)
            else:
                log.debug('skipping writing hook file')
848 848
    def install_svn_hooks(self, repo, force_create=False):
        """
        Creates rhodecode hooks inside a svn repository

        :param repo: Instance of VCS repo
        :param force_create: Create even if same name hook exists
        """
        hooks_path = os.path.join(repo.path, 'hooks')
        if not os.path.isdir(hooks_path):
            os.makedirs(hooks_path)
        post_commit_tmpl = pkg_resources.resource_string(
            'rhodecode', '/'.join(
                ('config', 'hook_templates', 'svn_post_commit_hook.py.tmpl')))
        pre_commit_template = pkg_resources.resource_string(
            'rhodecode', '/'.join(
                ('config', 'hook_templates', 'svn_pre_commit_hook.py.tmpl')))
        templates = {
            'post-commit': post_commit_tmpl,
            'pre-commit': pre_commit_template
        }
        for filename in templates:
            _hook_file = os.path.join(hooks_path, filename)
            # only overwrite hooks that are missing or rhodecode-managed
            _rhodecode_hook = _check_rhodecode_hook(_hook_file)
            if _rhodecode_hook or force_create:
                log.debug('writing %s hook file !', filename)
                template = templates[filename]
                try:
                    with open(_hook_file, 'wb') as f:
                        # fill in version and interpreter placeholders
                        template = template.replace(
                            '_TMPL_', rhodecode.__version__)
                        template = template.replace('_ENV_', sys.executable)
                        f.write(template)
                    os.chmod(_hook_file, 0755)
                except IOError:
                    log.exception('error writing hook file %s', filename)
            else:
                log.debug('skipping writing hook file')
886 886
887 887 def install_hooks(self, repo, repo_type):
888 888 if repo_type == 'git':
889 889 self.install_git_hook(repo)
890 890 elif repo_type == 'svn':
891 891 self.install_svn_hooks(repo)
892 892
893 893 def get_server_info(self, environ=None):
894 894 server_info = get_system_info(environ)
895 895 return server_info
896 896
897 897
def _check_rhodecode_hook(hook_path):
    """
    Check if the hook was created by RhodeCode

    Returns True when the hook file is missing (safe to create) or when
    its content carries the RC_HOOK_VER marker; False for foreign hooks.
    """
    if not os.path.exists(hook_path):
        # nothing there yet, safe to (re)create
        return True

    log.debug('hook exists, checking if it is from rhodecode')
    hook_content = _read_hook(hook_path)
    match = re.search(r'(?:RC_HOOK_VER)\s*=\s*(.*)', hook_content)
    if not match:
        return False
    try:
        version = match.groups()[0]
        log.debug('got %s, it is rhodecode', version)
        return True
    except Exception:
        log.exception("Exception while reading the hook version.")
    return False
917 917
918 918
def _read_hook(hook_path):
    """Return the raw content of the hook file at `hook_path`."""
    with open(hook_path, 'rb') as hook_file:
        return hook_file.read()
General Comments 0
You need to be logged in to leave comments. Login now