security: escape the returned paths of files and directories....
ergo
r1827:9e60361c default
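This commit wraps the file and directory names returned by ScmModel.get_nodes() in h.escape() (rhodecode.lib.helpers). Those names come straight from repository content, so an attacker-controlled file name rendered unescaped by a file browser or an API consumer becomes a stored XSS vector. The sketch below is illustrative only and is not RhodeCode code; cgi.escape stands in for h.escape, which is assumed to perform equivalent HTML escaping.

# Illustrative sketch of what the escaping guards against; cgi.escape is a
# stand-in for h.escape from rhodecode.lib.helpers.
import cgi

malicious_path = u'docs/<img src=x onerror=alert(1)>.txt'

# before the fix: the raw path is returned as the node name
unsafe_node = {"name": malicious_path, "type": "file"}

# after the fix: the path is HTML-escaped before get_nodes() returns it
safe_node = {"name": cgi.escape(malicious_path, quote=True), "type": "file"}

print safe_node["name"]
# docs/&lt;img src=x onerror=alert(1)&gt;.txt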
@@ -1,908 +1,908 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Scm model for RhodeCode
23 23 """
24 24
25 25 import os.path
26 26 import re
27 27 import sys
28 28 import traceback
29 29 import logging
30 30 import cStringIO
31 31 import pkg_resources
32 32
33 33 from pylons.i18n.translation import _
34 34 from sqlalchemy import func
35 35 from zope.cachedescriptors.property import Lazy as LazyProperty
36 36
37 37 import rhodecode
38 38 from rhodecode.lib.vcs import get_backend
39 39 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
40 40 from rhodecode.lib.vcs.nodes import FileNode
41 41 from rhodecode.lib.vcs.backends.base import EmptyCommit
42 42 from rhodecode.lib import helpers as h
43 43
44 44 from rhodecode.lib.auth import (
45 45 HasRepoPermissionAny, HasRepoGroupPermissionAny,
46 46 HasUserGroupPermissionAny)
47 47 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
48 48 from rhodecode.lib import hooks_utils, caches
49 49 from rhodecode.lib.utils import (
50 50 get_filesystem_repos, make_db_config)
51 51 from rhodecode.lib.utils2 import (safe_str, safe_unicode)
52 52 from rhodecode.lib.system_info import get_system_info
53 53 from rhodecode.model import BaseModel
54 54 from rhodecode.model.db import (
55 55 Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
56 56 PullRequest)
57 57 from rhodecode.model.settings import VcsSettingsModel
58 58
59 59 log = logging.getLogger(__name__)
60 60
61 61
62 62 class UserTemp(object):
63 63 def __init__(self, user_id):
64 64 self.user_id = user_id
65 65
66 66 def __repr__(self):
67 67 return "<%s('id:%s')>" % (self.__class__.__name__, self.user_id)
68 68
69 69
70 70 class RepoTemp(object):
71 71 def __init__(self, repo_id):
72 72 self.repo_id = repo_id
73 73
74 74 def __repr__(self):
75 75 return "<%s('id:%s')>" % (self.__class__.__name__, self.repo_id)
76 76
77 77
78 78 class SimpleCachedRepoList(object):
79 79 """
80 80 Lighter version of iteration of repos without the scm initialisation,
81 81 and with cache usage
82 82 """
83 83 def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
84 84 self.db_repo_list = db_repo_list
85 85 self.repos_path = repos_path
86 86 self.order_by = order_by
87 87 self.reversed = (order_by or '').startswith('-')
88 88 if not perm_set:
89 89 perm_set = ['repository.read', 'repository.write',
90 90 'repository.admin']
91 91 self.perm_set = perm_set
92 92
93 93 def __len__(self):
94 94 return len(self.db_repo_list)
95 95
96 96 def __repr__(self):
97 97 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
98 98
99 99 def __iter__(self):
100 100 for dbr in self.db_repo_list:
101 101 # check permission at this level
102 102 has_perm = HasRepoPermissionAny(*self.perm_set)(
103 103 dbr.repo_name, 'SimpleCachedRepoList check')
104 104 if not has_perm:
105 105 continue
106 106
107 107 tmp_d = {
108 108 'name': dbr.repo_name,
109 109 'dbrepo': dbr.get_dict(),
110 110 'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {}
111 111 }
112 112 yield tmp_d
113 113
114 114
115 115 class _PermCheckIterator(object):
116 116
117 117 def __init__(
118 118 self, obj_list, obj_attr, perm_set, perm_checker,
119 119 extra_kwargs=None):
120 120 """
121 121 Creates an iterator from the given list of objects, additionally
122 122 checking permissions for them against the perm_set var
123 123
124 124 :param obj_list: list of db objects
125 125 :param obj_attr: attribute of object to pass into perm_checker
126 126 :param perm_set: list of permissions to check
127 127 :param perm_checker: callable to check permissions against
128 128 """
129 129 self.obj_list = obj_list
130 130 self.obj_attr = obj_attr
131 131 self.perm_set = perm_set
132 132 self.perm_checker = perm_checker
133 133 self.extra_kwargs = extra_kwargs or {}
134 134
135 135 def __len__(self):
136 136 return len(self.obj_list)
137 137
138 138 def __repr__(self):
139 139 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
140 140
141 141 def __iter__(self):
142 142 checker = self.perm_checker(*self.perm_set)
143 143 for db_obj in self.obj_list:
144 144 # check permission at this level
145 145 name = getattr(db_obj, self.obj_attr, None)
146 146 if not checker(name, self.__class__.__name__, **self.extra_kwargs):
147 147 continue
148 148
149 149 yield db_obj
150 150
151 151
152 152 class RepoList(_PermCheckIterator):
153 153
154 154 def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
155 155 if not perm_set:
156 156 perm_set = [
157 157 'repository.read', 'repository.write', 'repository.admin']
158 158
159 159 super(RepoList, self).__init__(
160 160 obj_list=db_repo_list,
161 161 obj_attr='repo_name', perm_set=perm_set,
162 162 perm_checker=HasRepoPermissionAny,
163 163 extra_kwargs=extra_kwargs)
164 164
165 165
166 166 class RepoGroupList(_PermCheckIterator):
167 167
168 168 def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
169 169 if not perm_set:
170 170 perm_set = ['group.read', 'group.write', 'group.admin']
171 171
172 172 super(RepoGroupList, self).__init__(
173 173 obj_list=db_repo_group_list,
174 174 obj_attr='group_name', perm_set=perm_set,
175 175 perm_checker=HasRepoGroupPermissionAny,
176 176 extra_kwargs=extra_kwargs)
177 177
178 178
179 179 class UserGroupList(_PermCheckIterator):
180 180
181 181 def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
182 182 if not perm_set:
183 183 perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
184 184
185 185 super(UserGroupList, self).__init__(
186 186 obj_list=db_user_group_list,
187 187 obj_attr='users_group_name', perm_set=perm_set,
188 188 perm_checker=HasUserGroupPermissionAny,
189 189 extra_kwargs=extra_kwargs)
190 190
191 191
192 192 class ScmModel(BaseModel):
193 193 """
194 194 Generic Scm Model
195 195 """
196 196
197 197 @LazyProperty
198 198 def repos_path(self):
199 199 """
200 200 Gets the repositories root path from database
201 201 """
202 202
203 203 settings_model = VcsSettingsModel(sa=self.sa)
204 204 return settings_model.get_repos_location()
205 205
206 206 def repo_scan(self, repos_path=None):
207 207 """
208 208 Listing of repositories in given path. This path should not be a
209 209 repository itself. Return a dictionary of repository objects
210 210
211 211 :param repos_path: path to directory containing repositories
212 212 """
213 213
214 214 if repos_path is None:
215 215 repos_path = self.repos_path
216 216
217 217 log.info('scanning for repositories in %s', repos_path)
218 218
219 219 config = make_db_config()
220 220 config.set('extensions', 'largefiles', '')
221 221 repos = {}
222 222
223 223 for name, path in get_filesystem_repos(repos_path, recursive=True):
224 224 # name needs to be decomposed and put back together using the /
225 225 # since this is the internal storage separator for rhodecode
226 226 name = Repository.normalize_repo_name(name)
227 227
228 228 try:
229 229 if name in repos:
230 230 raise RepositoryError('Duplicate repository name %s '
231 231 'found in %s' % (name, path))
232 232 elif path[0] in rhodecode.BACKENDS:
233 233 klass = get_backend(path[0])
234 234 repos[name] = klass(path[1], config=config)
235 235 except OSError:
236 236 continue
237 237 log.debug('found %s paths with repositories', len(repos))
238 238 return repos
239 239
240 240 def get_repos(self, all_repos=None, sort_key=None):
241 241 """
242 242 Get all repositories from db and for each repo create its
243 243 backend instance and fill that backend with information from database
244 244
245 245 :param all_repos: list of repository names as strings
246 246 give a specific list of repositories, good for filtering
247 247
248 248 :param sort_key: initial sorting of repositories
249 249 """
250 250 if all_repos is None:
251 251 all_repos = self.sa.query(Repository)\
252 252 .filter(Repository.group_id == None)\
253 253 .order_by(func.lower(Repository.repo_name)).all()
254 254 repo_iter = SimpleCachedRepoList(
255 255 all_repos, repos_path=self.repos_path, order_by=sort_key)
256 256 return repo_iter
257 257
258 258 def get_repo_groups(self, all_groups=None):
259 259 if all_groups is None:
260 260 all_groups = RepoGroup.query()\
261 261 .filter(RepoGroup.group_parent_id == None).all()
262 262 return [x for x in RepoGroupList(all_groups)]
263 263
264 264 def mark_for_invalidation(self, repo_name, delete=False):
265 265 """
266 266 Mark caches of this repo invalid in the database. `delete` flag
267 267 removes the cache entries
268 268
269 269 :param repo_name: the repo_name for which caches should be marked
270 270 invalid, or deleted
271 271 :param delete: delete the entry keys instead of setting bool
272 272 flag on them
273 273 """
274 274 CacheKey.set_invalidate(repo_name, delete=delete)
275 275 repo = Repository.get_by_repo_name(repo_name)
276 276
277 277 if repo:
278 278 config = repo._config
279 279 config.set('extensions', 'largefiles', '')
280 280 repo.update_commit_cache(config=config, cs_cache=None)
281 281 caches.clear_repo_caches(repo_name)
282 282
283 283 def toggle_following_repo(self, follow_repo_id, user_id):
284 284
285 285 f = self.sa.query(UserFollowing)\
286 286 .filter(UserFollowing.follows_repo_id == follow_repo_id)\
287 287 .filter(UserFollowing.user_id == user_id).scalar()
288 288
289 289 if f is not None:
290 290 try:
291 291 self.sa.delete(f)
292 292 return
293 293 except Exception:
294 294 log.error(traceback.format_exc())
295 295 raise
296 296
297 297 try:
298 298 f = UserFollowing()
299 299 f.user_id = user_id
300 300 f.follows_repo_id = follow_repo_id
301 301 self.sa.add(f)
302 302 except Exception:
303 303 log.error(traceback.format_exc())
304 304 raise
305 305
306 306 def toggle_following_user(self, follow_user_id, user_id):
307 307 f = self.sa.query(UserFollowing)\
308 308 .filter(UserFollowing.follows_user_id == follow_user_id)\
309 309 .filter(UserFollowing.user_id == user_id).scalar()
310 310
311 311 if f is not None:
312 312 try:
313 313 self.sa.delete(f)
314 314 return
315 315 except Exception:
316 316 log.error(traceback.format_exc())
317 317 raise
318 318
319 319 try:
320 320 f = UserFollowing()
321 321 f.user_id = user_id
322 322 f.follows_user_id = follow_user_id
323 323 self.sa.add(f)
324 324 except Exception:
325 325 log.error(traceback.format_exc())
326 326 raise
327 327
328 328 def is_following_repo(self, repo_name, user_id, cache=False):
329 329 r = self.sa.query(Repository)\
330 330 .filter(Repository.repo_name == repo_name).scalar()
331 331
332 332 f = self.sa.query(UserFollowing)\
333 333 .filter(UserFollowing.follows_repository == r)\
334 334 .filter(UserFollowing.user_id == user_id).scalar()
335 335
336 336 return f is not None
337 337
338 338 def is_following_user(self, username, user_id, cache=False):
339 339 u = User.get_by_username(username)
340 340
341 341 f = self.sa.query(UserFollowing)\
342 342 .filter(UserFollowing.follows_user == u)\
343 343 .filter(UserFollowing.user_id == user_id).scalar()
344 344
345 345 return f is not None
346 346
347 347 def get_followers(self, repo):
348 348 repo = self._get_repo(repo)
349 349
350 350 return self.sa.query(UserFollowing)\
351 351 .filter(UserFollowing.follows_repository == repo).count()
352 352
353 353 def get_forks(self, repo):
354 354 repo = self._get_repo(repo)
355 355 return self.sa.query(Repository)\
356 356 .filter(Repository.fork == repo).count()
357 357
358 358 def get_pull_requests(self, repo):
359 359 repo = self._get_repo(repo)
360 360 return self.sa.query(PullRequest)\
361 361 .filter(PullRequest.target_repo == repo)\
362 362 .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
363 363
364 364 def mark_as_fork(self, repo, fork, user):
365 365 repo = self._get_repo(repo)
366 366 fork = self._get_repo(fork)
367 367 if fork and repo.repo_id == fork.repo_id:
368 368 raise Exception("Cannot set repository as fork of itself")
369 369
370 370 if fork and repo.repo_type != fork.repo_type:
371 371 raise RepositoryError(
372 372 "Cannot set repository as fork of repository with other type")
373 373
374 374 repo.fork = fork
375 375 self.sa.add(repo)
376 376 return repo
377 377
378 378 def pull_changes(self, repo, username):
379 379 dbrepo = self._get_repo(repo)
380 380 clone_uri = dbrepo.clone_uri
381 381 if not clone_uri:
382 382 raise Exception("This repository doesn't have a clone uri")
383 383
384 384 repo = dbrepo.scm_instance(cache=False)
385 385 # TODO: marcink fix this and re-enable since we need common logic
386 386 # for hg/git to remove hooks so we don't trigger them on fetching
387 387 # commits from remote
388 388 repo.config.clear_section('hooks')
389 389
390 390 repo_name = dbrepo.repo_name
391 391 try:
392 392 # TODO: we need to make sure those operations call proper hooks !
393 393 repo.pull(clone_uri)
394 394
395 395 self.mark_for_invalidation(repo_name)
396 396 except Exception:
397 397 log.error(traceback.format_exc())
398 398 raise
399 399
400 400 def commit_change(self, repo, repo_name, commit, user, author, message,
401 401 content, f_path):
402 402 """
403 403 Commits changes
404 404
405 405 :param repo: SCM instance
406 406
407 407 """
408 408 user = self._get_user(user)
409 409
410 410 # decoding here ensures that we have properly encoded values;
411 411 # in any other case this will throw an exception and deny the commit
412 412 content = safe_str(content)
413 413 path = safe_str(f_path)
414 414 # message and author needs to be unicode
415 415 # proper backend should then translate that into required type
416 416 message = safe_unicode(message)
417 417 author = safe_unicode(author)
418 418 imc = repo.in_memory_commit
419 419 imc.change(FileNode(path, content, mode=commit.get_file_mode(f_path)))
420 420 try:
421 421 # TODO: handle pre-push action !
422 422 tip = imc.commit(
423 423 message=message, author=author, parents=[commit],
424 424 branch=commit.branch)
425 425 except Exception as e:
426 426 log.error(traceback.format_exc())
427 427 raise IMCCommitError(str(e))
428 428 finally:
429 429 # always clear caches; even if the commit fails we want a fresh object
430 430 self.mark_for_invalidation(repo_name)
431 431
432 432 # We trigger the post-push action
433 433 hooks_utils.trigger_post_push_hook(
434 434 username=user.username, action='push_local', repo_name=repo_name,
435 435 repo_alias=repo.alias, commit_ids=[tip.raw_id])
436 436 return tip
437 437
438 438 def _sanitize_path(self, f_path):
439 439 if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path:
440 440 raise NonRelativePathError('%s is not a relative path' % f_path)
441 441 if f_path:
442 442 f_path = os.path.normpath(f_path)
443 443 return f_path
444 444
445 445 def get_dirnode_metadata(self, commit, dir_node):
446 446 if not dir_node.is_dir():
447 447 return []
448 448
449 449 data = []
450 450 for node in dir_node:
451 451 if not node.is_file():
452 452 # we skip everything that is not a file node
453 453 continue
454 454
455 455 last_commit = node.last_commit
456 456 last_commit_date = last_commit.date
457 457 data.append({
458 458 'name': node.name,
459 459 'size': h.format_byte_size_binary(node.size),
460 460 'modified_at': h.format_date(last_commit_date),
461 461 'modified_ts': last_commit_date.isoformat(),
462 462 'revision': last_commit.revision,
463 463 'short_id': last_commit.short_id,
464 464 'message': h.escape(last_commit.message),
465 465 'author': h.escape(last_commit.author),
466 466 'user_profile': h.gravatar_with_user(last_commit.author),
467 467 })
468 468
469 469 return data
470 470
471 471 def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
472 472 extended_info=False, content=False, max_file_bytes=None):
473 473 """
474 474 recursive walk in the root dir; returns a set of all paths in that dir,
475 475 based on the repository walk function
476 476
477 477 :param repo_name: name of repository
478 478 :param commit_id: commit id for which to list nodes
479 479 :param root_path: root path to list
480 480 :param flat: return paths as a flat list; if False, return dicts with details
481 481 :param max_file_bytes: will not return file contents over this limit
482 482
483 483 """
484 484 _files = list()
485 485 _dirs = list()
486 486 try:
487 487 _repo = self._get_repo(repo_name)
488 488 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
489 489 root_path = root_path.lstrip('/')
490 490 for __, dirs, files in commit.walk(root_path):
491 491 for f in files:
492 492 _content = None
493 493 _data = f.unicode_path
494 494 over_size_limit = (max_file_bytes is not None
495 495 and f.size > max_file_bytes)
496 496
497 497 if not flat:
498 498 _data = {
499 "name": f.unicode_path,
499 "name": h.escape(f.unicode_path),
500 500 "type": "file",
501 501 }
502 502 if extended_info:
503 503 _data.update({
504 504 "md5": f.md5,
505 505 "binary": f.is_binary,
506 506 "size": f.size,
507 507 "extension": f.extension,
508 508 "mimetype": f.mimetype,
509 509 "lines": f.lines()[0]
510 510 })
511 511
512 512 if content:
513 513 full_content = None
514 514 if not f.is_binary and not over_size_limit:
515 515 full_content = safe_str(f.content)
516 516
517 517 _data.update({
518 518 "content": full_content,
519 519 })
520 520 _files.append(_data)
521 521 for d in dirs:
522 522 _data = d.unicode_path
523 523 if not flat:
524 524 _data = {
525 "name": d.unicode_path,
525 "name": h.escape(d.unicode_path),
526 526 "type": "dir",
527 527 }
528 528 if extended_info:
529 529 _data.update({
530 530 "md5": None,
531 531 "binary": None,
532 532 "size": None,
533 533 "extension": None,
534 534 })
535 535 if content:
536 536 _data.update({
537 537 "content": None
538 538 })
539 539 _dirs.append(_data)
540 540 except RepositoryError:
541 541 log.debug("Exception in get_nodes", exc_info=True)
542 542 raise
543 543
544 544 return _dirs, _files
545 545
546 546 def create_nodes(self, user, repo, message, nodes, parent_commit=None,
547 547 author=None, trigger_push_hook=True):
548 548 """
549 549 Commits the given nodes into repo
550 550
551 551 :param user: RhodeCode User object or user_id, the committer
552 552 :param repo: RhodeCode Repository object
553 553 :param message: commit message
554 554 :param nodes: mapping {filename:{'content':content},...}
555 555 :param parent_commit: parent commit; can be empty, in which case this is the
556 556 initial commit
557 557 :param author: author of the commit; can be different than the committer,
558 558 but only for git
559 559 :param trigger_push_hook: trigger push hooks
560 560
561 561 :returns: the newly committed commit
562 562 """
563 563
564 564 user = self._get_user(user)
565 565 scm_instance = repo.scm_instance(cache=False)
566 566
567 567 processed_nodes = []
568 568 for f_path in nodes:
569 569 f_path = self._sanitize_path(f_path)
570 570 content = nodes[f_path]['content']
571 571 f_path = safe_str(f_path)
572 572 # decoding here ensures that we have properly encoded values;
573 573 # in any other case this will throw an exception and deny the commit
574 574 if isinstance(content, (basestring,)):
575 575 content = safe_str(content)
576 576 elif isinstance(content, (file, cStringIO.OutputType,)):
577 577 content = content.read()
578 578 else:
579 579 raise Exception('Content is of unrecognized type %s' % (
580 580 type(content)
581 581 ))
582 582 processed_nodes.append((f_path, content))
583 583
584 584 message = safe_unicode(message)
585 585 commiter = user.full_contact
586 586 author = safe_unicode(author) if author else commiter
587 587
588 588 imc = scm_instance.in_memory_commit
589 589
590 590 if not parent_commit:
591 591 parent_commit = EmptyCommit(alias=scm_instance.alias)
592 592
593 593 if isinstance(parent_commit, EmptyCommit):
594 594 # EmptyCommit means we're editing an empty repository
595 595 parents = None
596 596 else:
597 597 parents = [parent_commit]
598 598 # add multiple nodes
599 599 for path, content in processed_nodes:
600 600 imc.add(FileNode(path, content=content))
601 601 # TODO: handle pre push scenario
602 602 tip = imc.commit(message=message,
603 603 author=author,
604 604 parents=parents,
605 605 branch=parent_commit.branch)
606 606
607 607 self.mark_for_invalidation(repo.repo_name)
608 608 if trigger_push_hook:
609 609 hooks_utils.trigger_post_push_hook(
610 610 username=user.username, action='push_local',
611 611 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
612 612 commit_ids=[tip.raw_id])
613 613 return tip
614 614
615 615 def update_nodes(self, user, repo, message, nodes, parent_commit=None,
616 616 author=None, trigger_push_hook=True):
617 617 user = self._get_user(user)
618 618 scm_instance = repo.scm_instance(cache=False)
619 619
620 620 message = safe_unicode(message)
621 621 commiter = user.full_contact
622 622 author = safe_unicode(author) if author else commiter
623 623
624 624 imc = scm_instance.in_memory_commit
625 625
626 626 if not parent_commit:
627 627 parent_commit = EmptyCommit(alias=scm_instance.alias)
628 628
629 629 if isinstance(parent_commit, EmptyCommit):
630 630 # EmptyCommit means we're editing an empty repository
631 631 parents = None
632 632 else:
633 633 parents = [parent_commit]
634 634
635 635 # process multiple nodes
636 636 for _filename, data in nodes.items():
637 637 # new filename, may be renamed from the old one; also sanitize
638 638 # the path against any tricks with relative paths like ../../ etc.
639 639 filename = self._sanitize_path(data['filename'])
640 640 old_filename = self._sanitize_path(_filename)
641 641 content = data['content']
642 642
643 643 filenode = FileNode(old_filename, content=content)
644 644 op = data['op']
645 645 if op == 'add':
646 646 imc.add(filenode)
647 647 elif op == 'del':
648 648 imc.remove(filenode)
649 649 elif op == 'mod':
650 650 if filename != old_filename:
651 651 # TODO: handle renames more efficiently, needs vcs lib
652 652 # changes
653 653 imc.remove(filenode)
654 654 imc.add(FileNode(filename, content=content))
655 655 else:
656 656 imc.change(filenode)
657 657
658 658 try:
659 659 # TODO: handle pre push scenario
660 660 # commit changes
661 661 tip = imc.commit(message=message,
662 662 author=author,
663 663 parents=parents,
664 664 branch=parent_commit.branch)
665 665 except NodeNotChangedError:
666 666 raise
667 667 except Exception as e:
668 668 log.exception("Unexpected exception during call to imc.commit")
669 669 raise IMCCommitError(str(e))
670 670 finally:
671 671 # always clear caches; even if the commit fails we want a fresh object
672 672 self.mark_for_invalidation(repo.repo_name)
673 673
674 674 if trigger_push_hook:
675 675 hooks_utils.trigger_post_push_hook(
676 676 username=user.username, action='push_local',
677 677 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
678 678 commit_ids=[tip.raw_id])
679 679
680 680 def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
681 681 author=None, trigger_push_hook=True):
682 682 """
683 683 Deletes the given nodes from `repo`
684 684
685 685 :param user: RhodeCode User object or user_id, the committer
686 686 :param repo: RhodeCode Repository object
687 687 :param message: commit message
688 688 :param nodes: mapping {filename:{'content':content},...}
689 689 :param parent_commit: parent commit; can be empty, in which case this is the
690 690 initial commit
691 691 :param author: author of the commit; can be different than the committer, but only
692 692 for git
693 693 :param trigger_push_hook: trigger push hooks
694 694
695 695 :returns: new commit after deletion
696 696 """
697 697
698 698 user = self._get_user(user)
699 699 scm_instance = repo.scm_instance(cache=False)
700 700
701 701 processed_nodes = []
702 702 for f_path in nodes:
703 703 f_path = self._sanitize_path(f_path)
704 704 # content can be empty, but for compatibility it allows the same dict
705 705 # structure as add_nodes
706 706 content = nodes[f_path].get('content')
707 707 processed_nodes.append((f_path, content))
708 708
709 709 message = safe_unicode(message)
710 710 commiter = user.full_contact
711 711 author = safe_unicode(author) if author else commiter
712 712
713 713 imc = scm_instance.in_memory_commit
714 714
715 715 if not parent_commit:
716 716 parent_commit = EmptyCommit(alias=scm_instance.alias)
717 717
718 718 if isinstance(parent_commit, EmptyCommit):
719 719 # EmptyCommit means we're editing an empty repository
720 720 parents = None
721 721 else:
722 722 parents = [parent_commit]
723 723 # remove multiple nodes
724 724 for path, content in processed_nodes:
725 725 imc.remove(FileNode(path, content=content))
726 726
727 727 # TODO: handle pre push scenario
728 728 tip = imc.commit(message=message,
729 729 author=author,
730 730 parents=parents,
731 731 branch=parent_commit.branch)
732 732
733 733 self.mark_for_invalidation(repo.repo_name)
734 734 if trigger_push_hook:
735 735 hooks_utils.trigger_post_push_hook(
736 736 username=user.username, action='push_local',
737 737 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
738 738 commit_ids=[tip.raw_id])
739 739 return tip
740 740
741 741 def strip(self, repo, commit_id, branch):
742 742 scm_instance = repo.scm_instance(cache=False)
743 743 scm_instance.config.clear_section('hooks')
744 744 scm_instance.strip(commit_id, branch)
745 745 self.mark_for_invalidation(repo.repo_name)
746 746
747 747 def get_unread_journal(self):
748 748 return self.sa.query(UserLog).count()
749 749
750 750 def get_repo_landing_revs(self, repo=None):
751 751 """
752 752 Generates select options with tags, branches and bookmarks (for hg only),
753 753 grouped by type
754 754
755 755 :param repo:
756 756 """
757 757
758 758 repo = self._get_repo(repo)
759 759
760 760 hist_l = [
761 761 ['rev:tip', _('latest tip')]
762 762 ]
763 763 choices = [
764 764 'rev:tip'
765 765 ]
766 766
767 767 if not repo:
768 768 return choices, hist_l
769 769
770 770 repo = repo.scm_instance()
771 771
772 772 branches_group = (
773 773 [(u'branch:%s' % safe_unicode(b), safe_unicode(b))
774 774 for b in repo.branches],
775 775 _("Branches"))
776 776 hist_l.append(branches_group)
777 777 choices.extend([x[0] for x in branches_group[0]])
778 778
779 779 if repo.alias == 'hg':
780 780 bookmarks_group = (
781 781 [(u'book:%s' % safe_unicode(b), safe_unicode(b))
782 782 for b in repo.bookmarks],
783 783 _("Bookmarks"))
784 784 hist_l.append(bookmarks_group)
785 785 choices.extend([x[0] for x in bookmarks_group[0]])
786 786
787 787 tags_group = (
788 788 [(u'tag:%s' % safe_unicode(t), safe_unicode(t))
789 789 for t in repo.tags],
790 790 _("Tags"))
791 791 hist_l.append(tags_group)
792 792 choices.extend([x[0] for x in tags_group[0]])
793 793
794 794 return choices, hist_l
795 795
796 796 def install_git_hook(self, repo, force_create=False):
797 797 """
798 798 Creates a rhodecode hook inside a git repository
799 799
800 800 :param repo: Instance of VCS repo
801 801 :param force_create: Create even if same name hook exists
802 802 """
803 803
804 804 loc = os.path.join(repo.path, 'hooks')
805 805 if not repo.bare:
806 806 loc = os.path.join(repo.path, '.git', 'hooks')
807 807 if not os.path.isdir(loc):
808 808 os.makedirs(loc, mode=0777)
809 809
810 810 tmpl_post = pkg_resources.resource_string(
811 811 'rhodecode', '/'.join(
812 812 ('config', 'hook_templates', 'git_post_receive.py.tmpl')))
813 813 tmpl_pre = pkg_resources.resource_string(
814 814 'rhodecode', '/'.join(
815 815 ('config', 'hook_templates', 'git_pre_receive.py.tmpl')))
816 816
817 817 for h_type, tmpl in [('pre', tmpl_pre), ('post', tmpl_post)]:
818 818 _hook_file = os.path.join(loc, '%s-receive' % h_type)
819 819 log.debug('Installing git hook in repo %s', repo)
820 820 _rhodecode_hook = _check_rhodecode_hook(_hook_file)
821 821
822 822 if _rhodecode_hook or force_create:
823 823 log.debug('writing %s hook file !', h_type)
824 824 try:
825 825 with open(_hook_file, 'wb') as f:
826 826 tmpl = tmpl.replace('_TMPL_', rhodecode.__version__)
827 827 tmpl = tmpl.replace('_ENV_', sys.executable)
828 828 f.write(tmpl)
829 829 os.chmod(_hook_file, 0755)
830 830 except IOError:
831 831 log.exception('error writing hook file %s', _hook_file)
832 832 else:
833 833 log.debug('skipping writing hook file')
834 834
835 835 def install_svn_hooks(self, repo, force_create=False):
836 836 """
837 837 Creates rhodecode hooks inside an svn repository
838 838
839 839 :param repo: Instance of VCS repo
840 840 :param force_create: Create even if same name hook exists
841 841 """
842 842 hooks_path = os.path.join(repo.path, 'hooks')
843 843 if not os.path.isdir(hooks_path):
844 844 os.makedirs(hooks_path)
845 845 post_commit_tmpl = pkg_resources.resource_string(
846 846 'rhodecode', '/'.join(
847 847 ('config', 'hook_templates', 'svn_post_commit_hook.py.tmpl')))
848 848 pre_commit_template = pkg_resources.resource_string(
849 849 'rhodecode', '/'.join(
850 850 ('config', 'hook_templates', 'svn_pre_commit_hook.py.tmpl')))
851 851 templates = {
852 852 'post-commit': post_commit_tmpl,
853 853 'pre-commit': pre_commit_template
854 854 }
855 855 for filename in templates:
856 856 _hook_file = os.path.join(hooks_path, filename)
857 857 _rhodecode_hook = _check_rhodecode_hook(_hook_file)
858 858 if _rhodecode_hook or force_create:
859 859 log.debug('writing %s hook file !', filename)
860 860 template = templates[filename]
861 861 try:
862 862 with open(_hook_file, 'wb') as f:
863 863 template = template.replace(
864 864 '_TMPL_', rhodecode.__version__)
865 865 template = template.replace('_ENV_', sys.executable)
866 866 f.write(template)
867 867 os.chmod(_hook_file, 0755)
868 868 except IOError:
869 869 log.exception('error writing hook file %s', filename)
870 870 else:
871 871 log.debug('skipping writing hook file')
872 872
873 873 def install_hooks(self, repo, repo_type):
874 874 if repo_type == 'git':
875 875 self.install_git_hook(repo)
876 876 elif repo_type == 'svn':
877 877 self.install_svn_hooks(repo)
878 878
879 879 def get_server_info(self, environ=None):
880 880 server_info = get_system_info(environ)
881 881 return server_info
882 882
883 883
884 884 def _check_rhodecode_hook(hook_path):
885 885 """
886 886 Check if the hook was created by RhodeCode (a missing hook also counts, since it is safe to write)
887 887 """
888 888 if not os.path.exists(hook_path):
889 889 return True
890 890
891 891 log.debug('hook exists, checking if it is from rhodecode')
892 892 hook_content = _read_hook(hook_path)
893 893 matches = re.search(r'(?:RC_HOOK_VER)\s*=\s*(.*)', hook_content)
894 894 if matches:
895 895 try:
896 896 version = matches.groups()[0]
897 897 log.debug('got %s, it is rhodecode', version)
898 898 return True
899 899 except Exception:
900 900 log.exception("Exception while reading the hook version.")
901 901
902 902 return False
903 903
904 904
905 905 def _read_hook(hook_path):
906 906 with open(hook_path, 'rb') as f:
907 907 content = f.read()
908 908 return content
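For context on where the escaping above surfaces, a hedged usage sketch of ScmModel.get_nodes() follows; the repository name and commit id are placeholders and the call assumes a fully configured RhodeCode session. With flat=False the returned dicts carry the already-escaped names, so templates and JSON consumers can render them directly (consumers that expected raw paths will now see entity-encoded < and > characters).

# Hypothetical caller; 'my-repo' and 'tip' are placeholder values.
from rhodecode.model.scm import ScmModel

dirs, files = ScmModel().get_nodes(
    'my-repo', commit_id='tip', root_path='/',
    flat=False, extended_info=True)

for node in files:
    # node['name'] is already HTML-escaped after this change
    print node['name'], node['size']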