##// END OF EJS Templates
bugfix: sanitize wouldn't allow files starting with . - fixes #3936
dan -
r91:88f65698 default
parent child Browse files
Show More
@@ -1,1105 +1,1105 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Scm model for RhodeCode
23 23 """
24 24
25 25 import os.path
26 26 import re
27 27 import sys
28 28 import time
29 29 import traceback
30 30 import logging
31 31 import cStringIO
32 32 import pkg_resources
33 33
34 34 import pylons
35 35 from pylons.i18n.translation import _
36 36 from sqlalchemy import func
37 37 from zope.cachedescriptors.property import Lazy as LazyProperty
38 38
39 39 import rhodecode
40 40 from rhodecode.lib.vcs import get_backend
41 41 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
42 42 from rhodecode.lib.vcs.nodes import FileNode
43 43 from rhodecode.lib.vcs.backends.base import EmptyCommit
44 44 from rhodecode.lib import helpers as h
45 45
46 46 from rhodecode.lib.auth import (
47 47 HasRepoPermissionAny, HasRepoGroupPermissionAny,
48 48 HasUserGroupPermissionAny)
49 49 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
50 50 from rhodecode.lib import hooks_utils, caches
51 51 from rhodecode.lib.utils import (
52 52 get_filesystem_repos, action_logger, make_db_config)
53 53 from rhodecode.lib.utils2 import (
54 54 safe_str, safe_unicode, get_server_url, md5)
55 55 from rhodecode.model import BaseModel
56 56 from rhodecode.model.db import (
57 57 Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
58 58 PullRequest, DbMigrateVersion)
59 59 from rhodecode.model.settings import VcsSettingsModel
60 60
61 61 log = logging.getLogger(__name__)
62 62
63 63
class UserTemp(object):
    """Lightweight stand-in for a user row that carries only the id.

    Passed to ``action_logger`` where a full ``User`` object is unnecessary.
    """

    def __init__(self, user_id):
        self.user_id = user_id

    def __repr__(self):
        cls_name = self.__class__.__name__
        return "<%s('id:%s')>" % (cls_name, self.user_id)
70 70
71 71
class RepoTemp(object):
    """Lightweight stand-in for a repository row that carries only the id.

    Passed to ``action_logger`` where a full ``Repository`` object is
    unnecessary.
    """

    def __init__(self, repo_id):
        self.repo_id = repo_id

    def __repr__(self):
        cls_name = self.__class__.__name__
        return "<%s('id:%s')>" % (cls_name, self.repo_id)
78 78
79 79
class SimpleCachedRepoList(object):
    """
    Lighter version of iteration over repositories: no scm initialisation,
    relies on cached database rows. Yields dicts, filtered by permission.
    """

    def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
        self.db_repo_list = db_repo_list
        self.repos_path = repos_path
        self.order_by = order_by
        # a leading '-' on the sort key marks descending order
        self.reversed = (order_by or '').startswith('-')
        # default: any permission that grants at least read access
        self.perm_set = perm_set or [
            'repository.read', 'repository.write', 'repository.admin']

    def __len__(self):
        return len(self.db_repo_list)

    def __repr__(self):
        return '<%s (%s)>' % (self.__class__.__name__, self.__len__())

    def __iter__(self):
        for dbr in self.db_repo_list:
            # permission is evaluated per repository, at iteration time
            if not HasRepoPermissionAny(*self.perm_set)(
                    dbr.repo_name, 'SimpleCachedRepoList check'):
                continue
            yield {
                'name': dbr.repo_name,
                'dbrepo': dbr.get_dict(),
                'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {},
            }
115 115
116 116
117 117 class _PermCheckIterator(object):
118 118
119 119 def __init__(
120 120 self, obj_list, obj_attr, perm_set, perm_checker,
121 121 extra_kwargs=None):
122 122 """
123 123 Creates iterator from given list of objects, additionally
124 124 checking permission for them from perm_set var
125 125
126 126 :param obj_list: list of db objects
127 127 :param obj_attr: attribute of object to pass into perm_checker
128 128 :param perm_set: list of permissions to check
129 129 :param perm_checker: callable to check permissions against
130 130 """
131 131 self.obj_list = obj_list
132 132 self.obj_attr = obj_attr
133 133 self.perm_set = perm_set
134 134 self.perm_checker = perm_checker
135 135 self.extra_kwargs = extra_kwargs or {}
136 136
137 137 def __len__(self):
138 138 return len(self.obj_list)
139 139
140 140 def __repr__(self):
141 141 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
142 142
143 143 def __iter__(self):
144 144 checker = self.perm_checker(*self.perm_set)
145 145 for db_obj in self.obj_list:
146 146 # check permission at this level
147 147 name = getattr(db_obj, self.obj_attr, None)
148 148 if not checker(name, self.__class__.__name__, **self.extra_kwargs):
149 149 continue
150 150
151 151 yield db_obj
152 152
153 153
class RepoList(_PermCheckIterator):
    """Permission-filtered iterator over Repository objects."""

    def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
        # default to any repository permission of read level or better
        perm_set = perm_set or [
            'repository.read', 'repository.write', 'repository.admin']

        super(RepoList, self).__init__(
            obj_list=db_repo_list,
            obj_attr='repo_name', perm_set=perm_set,
            perm_checker=HasRepoPermissionAny,
            extra_kwargs=extra_kwargs)
166 166
167 167
class RepoGroupList(_PermCheckIterator):
    """Permission-filtered iterator over RepoGroup objects."""

    def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
        # default to any group permission of read level or better
        perm_set = perm_set or ['group.read', 'group.write', 'group.admin']

        super(RepoGroupList, self).__init__(
            obj_list=db_repo_group_list,
            obj_attr='group_name', perm_set=perm_set,
            perm_checker=HasRepoGroupPermissionAny,
            extra_kwargs=extra_kwargs)
179 179
180 180
class UserGroupList(_PermCheckIterator):
    """Permission-filtered iterator over UserGroup objects."""

    def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
        # default to any user-group permission of read level or better
        perm_set = perm_set or [
            'usergroup.read', 'usergroup.write', 'usergroup.admin']

        super(UserGroupList, self).__init__(
            obj_list=db_user_group_list,
            obj_attr='users_group_name', perm_set=perm_set,
            perm_checker=HasUserGroupPermissionAny,
            extra_kwargs=extra_kwargs)
192 192
193 193
194 194 class ScmModel(BaseModel):
195 195 """
196 196 Generic Scm Model
197 197 """
198 198
199 199 @LazyProperty
200 200 def repos_path(self):
201 201 """
202 202 Gets the repositories root path from database
203 203 """
204 204
205 205 settings_model = VcsSettingsModel(sa=self.sa)
206 206 return settings_model.get_repos_location()
207 207
208 208 def repo_scan(self, repos_path=None):
209 209 """
210 210 Listing of repositories in given path. This path should not be a
211 211 repository itself. Return a dictionary of repository objects
212 212
213 213 :param repos_path: path to directory containing repositories
214 214 """
215 215
216 216 if repos_path is None:
217 217 repos_path = self.repos_path
218 218
219 219 log.info('scanning for repositories in %s', repos_path)
220 220
221 221 config = make_db_config()
222 222 config.set('extensions', 'largefiles', '')
223 223 repos = {}
224 224
225 225 for name, path in get_filesystem_repos(repos_path, recursive=True):
226 226 # name need to be decomposed and put back together using the /
227 227 # since this is internal storage separator for rhodecode
228 228 name = Repository.normalize_repo_name(name)
229 229
230 230 try:
231 231 if name in repos:
232 232 raise RepositoryError('Duplicate repository name %s '
233 233 'found in %s' % (name, path))
234 234 elif path[0] in rhodecode.BACKENDS:
235 235 klass = get_backend(path[0])
236 236 repos[name] = klass(path[1], config=config)
237 237 except OSError:
238 238 continue
239 239 log.debug('found %s paths with repositories', len(repos))
240 240 return repos
241 241
242 242 def get_repos(self, all_repos=None, sort_key=None):
243 243 """
244 244 Get all repositories from db and for each repo create it's
245 245 backend instance and fill that backed with information from database
246 246
247 247 :param all_repos: list of repository names as strings
248 248 give specific repositories list, good for filtering
249 249
250 250 :param sort_key: initial sorting of repositories
251 251 """
252 252 if all_repos is None:
253 253 all_repos = self.sa.query(Repository)\
254 254 .filter(Repository.group_id == None)\
255 255 .order_by(func.lower(Repository.repo_name)).all()
256 256 repo_iter = SimpleCachedRepoList(
257 257 all_repos, repos_path=self.repos_path, order_by=sort_key)
258 258 return repo_iter
259 259
260 260 def get_repo_groups(self, all_groups=None):
261 261 if all_groups is None:
262 262 all_groups = RepoGroup.query()\
263 263 .filter(RepoGroup.group_parent_id == None).all()
264 264 return [x for x in RepoGroupList(all_groups)]
265 265
    def mark_for_invalidation(self, repo_name, delete=False):
        """
        Mark caches of this repo invalid in the database. `delete` flag
        removes the cache entries

        :param repo_name: the repo_name for which caches should be marked
            invalid, or deleted
        :param delete: delete the entry keys instead of setting bool
            flag on them
        """
        # flip (or remove) the invalidation flags in the DB first
        CacheKey.set_invalidate(repo_name, delete=delete)
        repo = Repository.get_by_repo_name(repo_name)

        if repo:
            config = repo._config
            # NOTE(review): largefiles extension is disabled here,
            # presumably to avoid extra work while refreshing the commit
            # cache — confirm against the vcs layer.
            config.set('extensions', 'largefiles', '')
            cs_cache = None
            if delete:
                # if we do a hard clear, reset last-commit to Empty
                cs_cache = EmptyCommit()
            repo.update_commit_cache(config=config, cs_cache=cs_cache)
            # finally drop the in-memory/regional caches for this repo
            caches.clear_repo_caches(repo_name)
288 288
    def toggle_following_repo(self, follow_repo_id, user_id):
        """
        Toggle the follow state of a repository for a user: delete the
        UserFollowing row if it exists, otherwise create it. Both
        transitions are recorded via action_logger.

        :param follow_repo_id: id of the repository to (un)follow
        :param user_id: id of the acting user
        """
        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_repo_id == follow_repo_id)\
            .filter(UserFollowing.user_id == user_id).scalar()

        if f is not None:
            # already following -> unfollow and log the action
            try:
                self.sa.delete(f)
                action_logger(UserTemp(user_id),
                              'stopped_following_repo',
                              RepoTemp(follow_repo_id))
                return
            except Exception:
                log.error(traceback.format_exc())
                raise

        # not following yet -> create the following entry and log it
        try:
            f = UserFollowing()
            f.user_id = user_id
            f.follows_repo_id = follow_repo_id
            self.sa.add(f)

            action_logger(UserTemp(user_id),
                          'started_following_repo',
                          RepoTemp(follow_repo_id))
        except Exception:
            log.error(traceback.format_exc())
            raise
318 318
319 319 def toggle_following_user(self, follow_user_id, user_id):
320 320 f = self.sa.query(UserFollowing)\
321 321 .filter(UserFollowing.follows_user_id == follow_user_id)\
322 322 .filter(UserFollowing.user_id == user_id).scalar()
323 323
324 324 if f is not None:
325 325 try:
326 326 self.sa.delete(f)
327 327 return
328 328 except Exception:
329 329 log.error(traceback.format_exc())
330 330 raise
331 331
332 332 try:
333 333 f = UserFollowing()
334 334 f.user_id = user_id
335 335 f.follows_user_id = follow_user_id
336 336 self.sa.add(f)
337 337 except Exception:
338 338 log.error(traceback.format_exc())
339 339 raise
340 340
341 341 def is_following_repo(self, repo_name, user_id, cache=False):
342 342 r = self.sa.query(Repository)\
343 343 .filter(Repository.repo_name == repo_name).scalar()
344 344
345 345 f = self.sa.query(UserFollowing)\
346 346 .filter(UserFollowing.follows_repository == r)\
347 347 .filter(UserFollowing.user_id == user_id).scalar()
348 348
349 349 return f is not None
350 350
351 351 def is_following_user(self, username, user_id, cache=False):
352 352 u = User.get_by_username(username)
353 353
354 354 f = self.sa.query(UserFollowing)\
355 355 .filter(UserFollowing.follows_user == u)\
356 356 .filter(UserFollowing.user_id == user_id).scalar()
357 357
358 358 return f is not None
359 359
360 360 def get_followers(self, repo):
361 361 repo = self._get_repo(repo)
362 362
363 363 return self.sa.query(UserFollowing)\
364 364 .filter(UserFollowing.follows_repository == repo).count()
365 365
366 366 def get_forks(self, repo):
367 367 repo = self._get_repo(repo)
368 368 return self.sa.query(Repository)\
369 369 .filter(Repository.fork == repo).count()
370 370
371 371 def get_pull_requests(self, repo):
372 372 repo = self._get_repo(repo)
373 373 return self.sa.query(PullRequest)\
374 374 .filter(PullRequest.target_repo == repo)\
375 375 .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
376 376
377 377 def mark_as_fork(self, repo, fork, user):
378 378 repo = self._get_repo(repo)
379 379 fork = self._get_repo(fork)
380 380 if fork and repo.repo_id == fork.repo_id:
381 381 raise Exception("Cannot set repository as fork of itself")
382 382
383 383 if fork and repo.repo_type != fork.repo_type:
384 384 raise RepositoryError(
385 385 "Cannot set repository as fork of repository with other type")
386 386
387 387 repo.fork = fork
388 388 self.sa.add(repo)
389 389 return repo
390 390
391 391 def pull_changes(self, repo, username):
392 392 dbrepo = self._get_repo(repo)
393 393 clone_uri = dbrepo.clone_uri
394 394 if not clone_uri:
395 395 raise Exception("This repository doesn't have a clone uri")
396 396
397 397 repo = dbrepo.scm_instance(cache=False)
398 398 # TODO: marcink fix this an re-enable since we need common logic
399 399 # for hg/git remove hooks so we don't trigger them on fetching
400 400 # commits from remote
401 401 repo.config.clear_section('hooks')
402 402
403 403 repo_name = dbrepo.repo_name
404 404 try:
405 405 # TODO: we need to make sure those operations call proper hooks !
406 406 repo.pull(clone_uri)
407 407
408 408 self.mark_for_invalidation(repo_name)
409 409 except Exception:
410 410 log.error(traceback.format_exc())
411 411 raise
412 412
    def commit_change(self, repo, repo_name, commit, user, author, message,
                      content, f_path):
        """
        Commits changes

        :param repo: SCM instance
        :param repo_name: repository name, used for cache invalidation and
            the post-push hook
        :param commit: parent commit the change is based on
        :param user: RhodeCode User object or user_id, the committer
        :param author: commit author; may differ from the committer
        :param message: commit message
        :param content: new content of the file at `f_path`
        :param f_path: path of the file to change
        :returns: the new tip commit
        """
        user = self._get_user(user)

        # decoding here will force that we have proper encoded values
        # in any other case this will throw exceptions and deny commit
        content = safe_str(content)
        path = safe_str(f_path)
        # message and author needs to be unicode
        # proper backend should then translate that into required type
        message = safe_unicode(message)
        author = safe_unicode(author)
        imc = repo.in_memory_commit
        # 'change' requires the file to already exist in the parent commit
        imc.change(FileNode(path, content, mode=commit.get_file_mode(f_path)))
        try:
            # TODO: handle pre-push action !
            tip = imc.commit(
                message=message, author=author, parents=[commit],
                branch=commit.branch)
        except Exception as e:
            log.error(traceback.format_exc())
            raise IMCCommitError(str(e))
        finally:
            # always clear caches, if commit fails we want fresh object also
            self.mark_for_invalidation(repo_name)

        # We trigger the post-push action
        hooks_utils.trigger_post_push_hook(
            username=user.username, action='push_local', repo_name=repo_name,
            repo_alias=repo.alias, commit_ids=[tip.raw_id])
        return tip
450 450
451 451 def _sanitize_path(self, f_path):
452 if f_path.startswith('/') or f_path.startswith('.') or '../' in f_path:
452 if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path:
453 453 raise NonRelativePathError('%s is not an relative path' % f_path)
454 454 if f_path:
455 455 f_path = os.path.normpath(f_path)
456 456 return f_path
457 457
458 458 def get_dirnode_metadata(self, commit, dir_node):
459 459 if not dir_node.is_dir():
460 460 return []
461 461
462 462 data = []
463 463 for node in dir_node:
464 464 if not node.is_file():
465 465 # we skip file-nodes
466 466 continue
467 467
468 468 last_commit = node.last_commit
469 469 last_commit_date = last_commit.date
470 470 data.append({
471 471 'name': node.name,
472 472 'size': h.format_byte_size_binary(node.size),
473 473 'modified_at': h.format_date(last_commit_date),
474 474 'modified_ts': last_commit_date.isoformat(),
475 475 'revision': last_commit.revision,
476 476 'short_id': last_commit.short_id,
477 477 'message': h.escape(last_commit.message),
478 478 'author': h.escape(last_commit.author),
479 479 'user_profile': h.gravatar_with_user(last_commit.author),
480 480 })
481 481
482 482 return data
483 483
    def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
                  extended_info=False, content=False):
        """
        recursive walk in root dir and return a set of all path in that dir
        based on repository walk function

        :param repo_name: name of repository
        :param commit_id: commit id for which to list nodes
        :param root_path: root path to list
        :param flat: return as a list of paths, if False returns a dict with
            description per node
        :param extended_info: additionally include md5/size/mimetype per node
        :param content: additionally include file content (None for binaries)
        :returns: tuple of (dirs, files)
        """
        # NOTE(review): extended_info/content call _data.update(), which
        # presumes flat=False (dict form); with flat=True _data is a unicode
        # path string and .update() would raise — confirm callers never
        # combine flat=True with these flags.
        _files = list()
        _dirs = list()
        try:
            _repo = self._get_repo(repo_name)
            commit = _repo.scm_instance().get_commit(commit_id=commit_id)
            root_path = root_path.lstrip('/')
            for __, dirs, files in commit.walk(root_path):
                for f in files:
                    _content = None
                    _data = f.unicode_path
                    if not flat:
                        _data = {
                            "name": f.unicode_path,
                            "type": "file",
                        }
                    if extended_info:
                        _content = safe_str(f.content)
                        _data.update({
                            "md5": md5(_content),
                            "binary": f.is_binary,
                            "size": f.size,
                            "extension": f.extension,

                            "mimetype": f.mimetype,
                            "lines": f.lines()[0]
                        })
                    if content:
                        full_content = None
                        if not f.is_binary:
                            # in case we loaded the _content already
                            # re-use it, or load from f[ile]
                            full_content = _content or safe_str(f.content)

                        _data.update({
                            "content": full_content
                        })
                    _files.append(_data)
                for d in dirs:
                    _data = d.unicode_path
                    if not flat:
                        _data = {
                            "name": d.unicode_path,
                            "type": "dir",
                        }
                    if extended_info:
                        # directories have no file-level metadata
                        _data.update({
                            "md5": None,
                            "binary": None,
                            "size": None,
                            "extension": None,
                        })
                    if content:
                        _data.update({
                            "content": None
                        })
                    _dirs.append(_data)
        except RepositoryError:
            log.debug("Exception in get_nodes", exc_info=True)
            raise

        return _dirs, _files
557 557
    def create_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
        """
        Commits given multiple nodes into repo

        :param user: RhodeCode User object or user_id, the committer
        :param repo: RhodeCode Repository object
        :param message: commit message
        :param nodes: mapping {filename:{'content':content},...}
        :param parent_commit: parent commit, can be empty, then it's the
            initial commit
        :param author: author of commit, can be different than committer,
            only for git
        :param trigger_push_hook: trigger push hooks

        :returns: new committed commit
        """

        user = self._get_user(user)
        scm_instance = repo.scm_instance(cache=False)

        processed_nodes = []
        for f_path in nodes:
            # reject absolute / parent-escaping paths early
            f_path = self._sanitize_path(f_path)
            # NOTE(review): nodes is re-indexed with the sanitized path;
            # if sanitizing changed it (normpath), this raises KeyError —
            # confirm callers always pass already-normalized keys.
            content = nodes[f_path]['content']
            f_path = safe_str(f_path)
            # decoding here will force that we have proper encoded values
            # in any other case this will throw exceptions and deny commit
            if isinstance(content, (basestring,)):
                content = safe_str(content)
            elif isinstance(content, (file, cStringIO.OutputType,)):
                # file-like content is read fully into memory
                content = content.read()
            else:
                raise Exception('Content is of unrecognized type %s' % (
                    type(content)
                ))
            processed_nodes.append((f_path, content))

        message = safe_unicode(message)
        commiter = user.full_contact
        author = safe_unicode(author) if author else commiter

        imc = scm_instance.in_memory_commit

        if not parent_commit:
            parent_commit = EmptyCommit(alias=scm_instance.alias)

        if isinstance(parent_commit, EmptyCommit):
            # EmptyCommit means we're editing an empty repository
            parents = None
        else:
            parents = [parent_commit]
        # add multiple nodes
        for path, content in processed_nodes:
            imc.add(FileNode(path, content=content))
        # TODO: handle pre push scenario
        tip = imc.commit(message=message,
                         author=author,
                         parents=parents,
                         branch=parent_commit.branch)

        self.mark_for_invalidation(repo.repo_name)
        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local',
                repo_name=repo.repo_name, repo_alias=scm_instance.alias,
                commit_ids=[tip.raw_id])
        return tip
626 626
    def update_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
        """
        Commits add/delete/modify operations for multiple nodes into `repo`.

        :param user: RhodeCode User object or user_id, the committer
        :param repo: RhodeCode Repository object
        :param message: commit message
        :param nodes: mapping {filename: {'op': 'add'|'del'|'mod',
            'filename': new_name, 'content': content}, ...}
        :param parent_commit: parent commit, can be empty, then it's the
            initial commit
        :param author: author of commit, can be different than committer,
            only for git
        :param trigger_push_hook: trigger push hooks
        """
        user = self._get_user(user)
        scm_instance = repo.scm_instance(cache=False)

        message = safe_unicode(message)
        commiter = user.full_contact
        author = safe_unicode(author) if author else commiter

        imc = scm_instance.in_memory_commit

        if not parent_commit:
            parent_commit = EmptyCommit(alias=scm_instance.alias)

        if isinstance(parent_commit, EmptyCommit):
            # EmptyCommit means we're editing an empty repository
            parents = None
        else:
            parents = [parent_commit]

        # add multiple nodes
        for _filename, data in nodes.items():
            # new filename, can be renamed from the old one, also sanitize
            # the path for any hack around relative paths like ../../ etc.
            filename = self._sanitize_path(data['filename'])
            old_filename = self._sanitize_path(_filename)
            content = data['content']

            filenode = FileNode(old_filename, content=content)
            op = data['op']
            if op == 'add':
                imc.add(filenode)
            elif op == 'del':
                imc.remove(filenode)
            elif op == 'mod':
                if filename != old_filename:
                    # TODO: handle renames more efficient, needs vcs lib
                    # changes
                    imc.remove(filenode)
                    imc.add(FileNode(filename, content=content))
                else:
                    imc.change(filenode)

        try:
            # TODO: handle pre push scenario
            # commit changes
            tip = imc.commit(message=message,
                             author=author,
                             parents=parents,
                             branch=parent_commit.branch)
        except NodeNotChangedError:
            # a no-op modify is reported as-is to the caller
            raise
        except Exception as e:
            log.exception("Unexpected exception during call to imc.commit")
            raise IMCCommitError(str(e))
        finally:
            # always clear caches, if commit fails we want fresh object also
            self.mark_for_invalidation(repo.repo_name)

        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local',
                repo_name=repo.repo_name, repo_alias=scm_instance.alias,
                commit_ids=[tip.raw_id])
691 691
    def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
        """
        Deletes given multiple nodes from `repo`

        :param user: RhodeCode User object or user_id, the committer
        :param repo: RhodeCode Repository object
        :param message: commit message
        :param nodes: mapping {filename:{'content':content},...}
        :param parent_commit: parent commit, can be empty, then it's the
            initial commit
        :param author: author of commit, can be different than committer,
            only for git
        :param trigger_push_hook: trigger push hooks

        :returns: new commit after deletion
        """

        user = self._get_user(user)
        scm_instance = repo.scm_instance(cache=False)

        processed_nodes = []
        for f_path in nodes:
            f_path = self._sanitize_path(f_path)
            # content can be empty but for compatibility it allows same dicts
            # structure as add_nodes
            content = nodes[f_path].get('content')
            processed_nodes.append((f_path, content))

        message = safe_unicode(message)
        commiter = user.full_contact
        author = safe_unicode(author) if author else commiter

        imc = scm_instance.in_memory_commit

        if not parent_commit:
            parent_commit = EmptyCommit(alias=scm_instance.alias)

        if isinstance(parent_commit, EmptyCommit):
            # EmptyCommit means we're editing an empty repository
            parents = None
        else:
            parents = [parent_commit]
        # schedule removal of each node
        for path, content in processed_nodes:
            imc.remove(FileNode(path, content=content))

        # TODO: handle pre push scenario
        tip = imc.commit(message=message,
                         author=author,
                         parents=parents,
                         branch=parent_commit.branch)

        self.mark_for_invalidation(repo.repo_name)
        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local',
                repo_name=repo.repo_name, repo_alias=scm_instance.alias,
                commit_ids=[tip.raw_id])
        return tip
752 752
753 753 def strip(self, repo, commit_id, branch):
754 754 scm_instance = repo.scm_instance(cache=False)
755 755 scm_instance.config.clear_section('hooks')
756 756 scm_instance.strip(commit_id, branch)
757 757 self.mark_for_invalidation(repo.repo_name)
758 758
759 759 def get_unread_journal(self):
760 760 return self.sa.query(UserLog).count()
761 761
762 762 def get_repo_landing_revs(self, repo=None):
763 763 """
764 764 Generates select option with tags branches and bookmarks (for hg only)
765 765 grouped by type
766 766
767 767 :param repo:
768 768 """
769 769
770 770 hist_l = []
771 771 choices = []
772 772 repo = self._get_repo(repo)
773 773 hist_l.append(['rev:tip', _('latest tip')])
774 774 choices.append('rev:tip')
775 775 if not repo:
776 776 return choices, hist_l
777 777
778 778 repo = repo.scm_instance()
779 779
780 780 branches_group = (
781 781 [(u'branch:%s' % safe_unicode(b), safe_unicode(b))
782 782 for b in repo.branches],
783 783 _("Branches"))
784 784 hist_l.append(branches_group)
785 785 choices.extend([x[0] for x in branches_group[0]])
786 786
787 787 if repo.alias == 'hg':
788 788 bookmarks_group = (
789 789 [(u'book:%s' % safe_unicode(b), safe_unicode(b))
790 790 for b in repo.bookmarks],
791 791 _("Bookmarks"))
792 792 hist_l.append(bookmarks_group)
793 793 choices.extend([x[0] for x in bookmarks_group[0]])
794 794
795 795 tags_group = (
796 796 [(u'tag:%s' % safe_unicode(t), safe_unicode(t))
797 797 for t in repo.tags],
798 798 _("Tags"))
799 799 hist_l.append(tags_group)
800 800 choices.extend([x[0] for x in tags_group[0]])
801 801
802 802 return choices, hist_l
803 803
    def install_git_hook(self, repo, force_create=False):
        """
        Creates a rhodecode hook inside a git repository

        :param repo: Instance of VCS repo
        :param force_create: Create even if same name hook exists
        """

        # bare repos keep hooks at top level, working copies under .git/
        loc = os.path.join(repo.path, 'hooks')
        if not repo.bare:
            loc = os.path.join(repo.path, '.git', 'hooks')
        if not os.path.isdir(loc):
            os.makedirs(loc, mode=0777)

        tmpl_post = pkg_resources.resource_string(
            'rhodecode', '/'.join(
                ('config', 'hook_templates', 'git_post_receive.py.tmpl')))
        tmpl_pre = pkg_resources.resource_string(
            'rhodecode', '/'.join(
                ('config', 'hook_templates', 'git_pre_receive.py.tmpl')))

        for h_type, tmpl in [('pre', tmpl_pre), ('post', tmpl_post)]:
            _hook_file = os.path.join(loc, '%s-receive' % h_type)
            log.debug('Installing git hook in repo %s', repo)
            # presumably True only for hooks previously written by
            # rhodecode (helper defined elsewhere in this module)
            _rhodecode_hook = _check_rhodecode_hook(_hook_file)

            if _rhodecode_hook or force_create:
                log.debug('writing %s hook file !', h_type)
                try:
                    with open(_hook_file, 'wb') as f:
                        # interpolate version and python interpreter path
                        tmpl = tmpl.replace('_TMPL_', rhodecode.__version__)
                        tmpl = tmpl.replace('_ENV_', sys.executable)
                        f.write(tmpl)
                    os.chmod(_hook_file, 0755)
                except IOError:
                    log.exception('error writing hook file %s', _hook_file)
            else:
                log.debug('skipping writing hook file')
842 842
    def install_svn_hooks(self, repo, force_create=False):
        """
        Creates rhodecode hooks inside a svn repository

        :param repo: Instance of VCS repo
        :param force_create: Create even if same name hook exists
        """
        hooks_path = os.path.join(repo.path, 'hooks')
        if not os.path.isdir(hooks_path):
            os.makedirs(hooks_path)
        post_commit_tmpl = pkg_resources.resource_string(
            'rhodecode', '/'.join(
                ('config', 'hook_templates', 'svn_post_commit_hook.py.tmpl')))
        pre_commit_template = pkg_resources.resource_string(
            'rhodecode', '/'.join(
                ('config', 'hook_templates', 'svn_pre_commit_hook.py.tmpl')))
        templates = {
            'post-commit': post_commit_tmpl,
            'pre-commit': pre_commit_template
        }
        for filename in templates:
            _hook_file = os.path.join(hooks_path, filename)
            # presumably True only for hooks previously written by
            # rhodecode (helper defined elsewhere in this module)
            _rhodecode_hook = _check_rhodecode_hook(_hook_file)
            if _rhodecode_hook or force_create:
                log.debug('writing %s hook file !', filename)
                template = templates[filename]
                try:
                    with open(_hook_file, 'wb') as f:
                        # interpolate version and python interpreter path
                        template = template.replace(
                            '_TMPL_', rhodecode.__version__)
                        template = template.replace('_ENV_', sys.executable)
                        f.write(template)
                    os.chmod(_hook_file, 0755)
                except IOError:
                    log.exception('error writing hook file %s', filename)
            else:
                log.debug('skipping writing hook file')
880 880
881 881 def install_hooks(self, repo, repo_type):
882 882 if repo_type == 'git':
883 883 self.install_git_hook(repo)
884 884 elif repo_type == 'svn':
885 885 self.install_svn_hooks(repo)
886 886
    def get_server_info(self, environ=None):
        """
        Collect a snapshot of server/system information for the admin panel.

        :param environ: optional WSGI environ dict used to derive the server
            IP/port; defaults to an empty dict when not given.
        :return: dict with python/platform info, storage paths and usage,
            database type/version/migration state, VCS backend versions, and
            (when ``psutil`` is importable) live CPU/memory/disk/load stats.
            Metrics that cannot be collected are reported as the string
            ``'NOT AVAILABLE'`` or as zeroed dicts carrying an ``error`` key.
        """
        # local imports — presumably deferred to avoid import cycles at
        # module load time; TODO confirm
        import platform
        import rhodecode
        import pkg_resources
        from rhodecode.model.meta import Base as sql_base, Session
        from sqlalchemy.engine import url
        from rhodecode.lib.base import get_server_ip_addr, get_server_port
        from rhodecode.lib.vcs.backends.git import discover_git_version
        from rhodecode.model.gist import GIST_STORE_LOC

        try:
            # cygwin cannot have yet psutil support.
            import psutil
        except ImportError:
            psutil = None

        environ = environ or {}
        # placeholder values used whenever psutil is missing or a probe fails
        _NA = 'NOT AVAILABLE'
        _memory = _NA
        _uptime = _NA
        _boot_time = _NA
        _cpu = _NA
        _disk = dict(percent=0, used=0, total=0, error='')
        _load = {'1_min': _NA, '5_min': _NA, '15_min': _NA}

        # resolve the various storage locations from settings/config
        model = VcsSettingsModel()
        storage_path = model.get_repos_location()
        gist_storage_path = os.path.join(storage_path, GIST_STORE_LOC)
        archive_storage_path = rhodecode.CONFIG.get('archive_cache_dir', '')
        search_index_storage_path = rhodecode.CONFIG.get('search.location', '')

        if psutil:
            # disk storage
            try:
                _disk = dict(psutil.disk_usage(storage_path)._asdict())
            except Exception as e:
                log.exception('Failed to fetch disk info')
                _disk = {'percent': 0, 'used': 0, 'total': 0, 'error': str(e)}

            # memory
            _memory = dict(psutil.virtual_memory()._asdict())
            # 'percent2' is usage computed against total-free rather than
            # psutil's own 'percent' field
            _memory['percent2'] = psutil._common.usage_percent(
                (_memory['total'] - _memory['free']),
                _memory['total'], 1)

            # load averages
            # getloadavg is not available on all platforms (e.g. Windows)
            if hasattr(psutil.os, 'getloadavg'):
                _load = dict(zip(
                    ['1_min', '5_min', '15_min'], psutil.os.getloadavg()))
            _uptime = time.time() - psutil.boot_time()
            _boot_time = psutil.boot_time()
            # samples CPU usage over a 0.5s interval (blocking)
            _cpu = psutil.cpu_percent(0.5)

        # map of installed python packages -> versions
        mods = dict([(p.project_name, p.version)
                     for p in pkg_resources.working_set])

        def get_storage_size(storage_path):
            # sum the sizes of the regular files directly inside
            # storage_path (non-recursive); unreadable files are logged
            # and skipped
            sizes = []
            for file_ in os.listdir(storage_path):
                storage_file = os.path.join(storage_path, file_)
                if os.path.isfile(storage_file):
                    try:
                        sizes.append(os.path.getsize(storage_file))
                    except OSError:
                        log.exception('Failed to get size of storage file %s',
                                      storage_file)
                        pass

            return sum(sizes)

        # archive cache storage
        _disk_archive = {'percent': 0, 'used': 0, 'total': 0}
        try:
            archive_storage_path_exists = os.path.isdir(
                archive_storage_path)
            if archive_storage_path and archive_storage_path_exists:
                used = get_storage_size(archive_storage_path)
                _disk_archive.update({
                    'used': used,
                    'total': used,
                })
        except Exception as e:
            log.exception('failed to fetch archive cache storage')
            _disk_archive['error'] = str(e)

        # search index storage
        _disk_index = {'percent': 0, 'used': 0, 'total': 0}
        try:
            search_index_storage_path_exists = os.path.isdir(
                search_index_storage_path)
            if search_index_storage_path_exists:
                used = get_storage_size(search_index_storage_path)
                _disk_index.update({
                    'percent': 100,
                    'used': used,
                    'total': used,
                })
        except Exception as e:
            log.exception('failed to fetch search index storage')
            _disk_index['error'] = str(e)

        # gist storage
        _disk_gist = {'percent': 0, 'used': 0, 'total': 0, 'items': 0}
        try:
            items_count = 0
            used = 0
            # walk recursively; top-level directory count is the number of
            # stored gists, file sizes are summed across the whole tree
            for root, dirs, files in os.walk(safe_str(gist_storage_path)):
                if root == gist_storage_path:
                    items_count = len(dirs)

                for f in files:
                    try:
                        used += os.path.getsize(os.path.join(root, f))
                    except OSError:
                        pass
            _disk_gist.update({
                'percent': 100,
                'used': used,
                'total': used,
                'items': items_count
            })
        except Exception as e:
            log.exception('failed to fetch gist storage items')
            _disk_gist['error'] = str(e)

        # GIT info
        git_ver = discover_git_version()

        # SVN info
        # TODO: johbo: Add discover_svn_version to replace this code.
        try:
            import svn.core
            svn_ver = svn.core.SVN_VERSION
        except ImportError:
            svn_ver = None

        # DB stuff
        db_info = url.make_url(rhodecode.CONFIG['sqlalchemy.db1.url'])
        db_type = db_info.__to_string__()
        try:
            # query the live connection for the backend server version;
            # falls back to '?' when the engine/connection is unavailable
            engine = sql_base.metadata.bind
            db_server_info = engine.dialect._get_server_version_info(
                Session.connection(bind=engine))
            db_version = '%s %s' % (db_info.drivername,
                                    '.'.join(map(str, db_server_info)))
        except Exception:
            log.exception('failed to fetch db version')
            db_version = '%s %s' % (db_info.drivername, '?')

        # current schema migration version from the migrations table
        db_migrate = DbMigrateVersion.query().filter(
            DbMigrateVersion.repository_id == 'rhodecode_db_migrations').one()
        db_migrate_version = db_migrate.version

        info = {
            'py_version': ' '.join(platform._sys_version()),
            'py_path': sys.executable,
            'py_modules': sorted(mods.items(), key=lambda k: k[0].lower()),

            'platform': safe_unicode(platform.platform()),
            'storage': storage_path,
            'archive_storage': archive_storage_path,
            'index_storage': search_index_storage_path,
            'gist_storage': gist_storage_path,


            'db_type': db_type,
            'db_version': db_version,
            'db_migrate_version': db_migrate_version,

            'rhodecode_version': rhodecode.__version__,
            'rhodecode_config_ini': rhodecode.CONFIG.get('__file__'),
            'server_ip': '%s:%s' % (
                get_server_ip_addr(environ, log_errors=False),
                get_server_port(environ)
            ),
            'server_id': rhodecode.CONFIG.get('instance_id'),

            'git_version': safe_unicode(git_ver),
            'hg_version': mods.get('mercurial'),
            'svn_version': svn_ver,

            'uptime': _uptime,
            'boot_time': _boot_time,
            'load': _load,
            'cpu': _cpu,
            'memory': _memory,
            'disk': _disk,
            'disk_archive': _disk_archive,
            'disk_gist': _disk_gist,
            'disk_index': _disk_index,
        }
        return info
1079 1079
1080 1080
1081 1081 def _check_rhodecode_hook(hook_path):
1082 1082 """
1083 1083 Check if the hook was created by RhodeCode
1084 1084 """
1085 1085 if not os.path.exists(hook_path):
1086 1086 return True
1087 1087
1088 1088 log.debug('hook exists, checking if it is from rhodecode')
1089 1089 hook_content = _read_hook(hook_path)
1090 1090 matches = re.search(r'(?:RC_HOOK_VER)\s*=\s*(.*)', hook_content)
1091 1091 if matches:
1092 1092 try:
1093 1093 version = matches.groups()[0]
1094 1094 log.debug('got %s, it is rhodecode', version)
1095 1095 return True
1096 1096 except Exception:
1097 1097 log.exception("Exception while reading the hook version.")
1098 1098
1099 1099 return False
1100 1100
1101 1101
1102 1102 def _read_hook(hook_path):
1103 1103 with open(hook_path, 'rb') as f:
1104 1104 content = f.read()
1105 1105 return content
General Comments 0
You need to be logged in to leave comments. Login now