repo rescan should detach forks of zombie repos,...
marcink -
r3694:34093903 beta
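In short: when repo2db_mapper (rhodecode/lib/utils.py) runs a rescan with remove_obsolete=True and finds a database entry whose repository no longer exists on the filesystem (a "zombie" repo), it no longer deletes the row straight through the SQLAlchemy session. It now goes through RepoModel.delete() with forks='detach', so forks of the zombie are detached instead of breaking, and with the new fs_remove=False flag so nothing is touched on disk. A minimal sketch of the new removal branch, simplified from the diff below (session setup and error logging omitted):

    from rhodecode.model.repo import RepoModel

    for repo in sa.query(Repository).all():
        if repo.repo_name not in initial_repo_list.keys():
            removed.append(repo.repo_name)
            # detach any forks first, and skip the filesystem archival
            # since the repository directory is already gone
            RepoModel(sa).delete(repo, forks='detach', fs_remove=False)
            sa.commit()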
@@ -1,779 +1,779 b''
1 1 # -*- coding: utf-8 -*-
2 2 """
3 3 rhodecode.lib.utils
4 4 ~~~~~~~~~~~~~~~~~~~
5 5
6 6 Utilities library for RhodeCode
7 7
8 8 :created_on: Apr 18, 2010
9 9 :author: marcink
10 10 :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
11 11 :license: GPLv3, see COPYING for more details.
12 12 """
13 13 # This program is free software: you can redistribute it and/or modify
14 14 # it under the terms of the GNU General Public License as published by
15 15 # the Free Software Foundation, either version 3 of the License, or
16 16 # (at your option) any later version.
17 17 #
18 18 # This program is distributed in the hope that it will be useful,
19 19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 21 # GNU General Public License for more details.
22 22 #
23 23 # You should have received a copy of the GNU General Public License
24 24 # along with this program. If not, see <http://www.gnu.org/licenses/>.
25 25
26 26 import os
27 27 import re
28 28 import logging
29 29 import datetime
30 30 import traceback
31 31 import paste
32 32 import beaker
33 33 import tarfile
34 34 import shutil
35 35 import decorator
36 36 import warnings
37 37 from os.path import abspath
38 38 from os.path import dirname as dn, join as jn
39 39
40 40 from paste.script.command import Command, BadCommand
41 41
42 42 from mercurial import ui, config
43 43
44 44 from webhelpers.text import collapse, remove_formatting, strip_tags
45 45
46 46 from rhodecode.lib.vcs import get_backend
47 47 from rhodecode.lib.vcs.backends.base import BaseChangeset
48 48 from rhodecode.lib.vcs.utils.lazy import LazyProperty
49 49 from rhodecode.lib.vcs.utils.helpers import get_scm
50 50 from rhodecode.lib.vcs.exceptions import VCSError
51 51
52 52 from rhodecode.lib.caching_query import FromCache
53 53
54 54 from rhodecode.model import meta
55 55 from rhodecode.model.db import Repository, User, RhodeCodeUi, \
56 56 UserLog, RepoGroup, RhodeCodeSetting, CacheInvalidation
57 57 from rhodecode.model.meta import Session
58 58 from rhodecode.model.repos_group import ReposGroupModel
59 59 from rhodecode.lib.utils2 import safe_str, safe_unicode
60 60 from rhodecode.lib.vcs.utils.fakemod import create_module
61 61
62 62 log = logging.getLogger(__name__)
63 63
64 64 REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')
65 65
66 66
67 67 def recursive_replace(str_, replace=' '):
68 68 """
69 69 Recursive replace of given sign to just one instance
70 70
71 71 :param str_: given string
72 72 :param replace: char to find and replace multiple instances
73 73
74 74 Examples::
75 75 >>> recursive_replace("Mighty---Mighty-Bo--sstones",'-')
76 76 'Mighty-Mighty-Bo-sstones'
77 77 """
78 78
79 79 if str_.find(replace * 2) == -1:
80 80 return str_
81 81 else:
82 82 str_ = str_.replace(replace * 2, replace)
83 83 return recursive_replace(str_, replace)
84 84
85 85
86 86 def repo_name_slug(value):
87 87 """
88 88 Return a slug of the repository name.
89 89 This function is called on each creation/modification
90 90 of a repository to prevent bad names in repos
91 91 """
92 92
93 93 slug = remove_formatting(value)
94 94 slug = strip_tags(slug)
95 95
96 96 for c in """`?=[]\;'"<>,/~!@#$%^&*()+{}|: """:
97 97 slug = slug.replace(c, '-')
98 98 slug = recursive_replace(slug, '-')
99 99 slug = collapse(slug, '-')
100 100 return slug
101 101
102 102
103 103 def get_repo_slug(request):
104 104 _repo = request.environ['pylons.routes_dict'].get('repo_name')
105 105 if _repo:
106 106 _repo = _repo.rstrip('/')
107 107 return _repo
108 108
109 109
110 110 def get_repos_group_slug(request):
111 111 _group = request.environ['pylons.routes_dict'].get('group_name')
112 112 if _group:
113 113 _group = _group.rstrip('/')
114 114 return _group
115 115
116 116
117 117 def action_logger(user, action, repo, ipaddr='', sa=None, commit=False):
118 118 """
119 119 Action logger for various actions made by users
120 120
121 121 :param user: user that made this action, can be a unique username string or
122 122 object containing user_id attribute
123 123 :param action: action to log, should be one of the predefined unique actions for
124 124 easy translations
125 125 :param repo: string name of repository or object containing repo_id,
126 126 that action was made on
127 127 :param ipaddr: optional ip address from which the action was made
128 128 :param sa: optional sqlalchemy session
129 129
130 130 """
131 131
132 132 if not sa:
133 133 sa = meta.Session()
134 134
135 135 try:
136 136 if hasattr(user, 'user_id'):
137 137 user_obj = User.get(user.user_id)
138 138 elif isinstance(user, basestring):
139 139 user_obj = User.get_by_username(user)
140 140 else:
141 141 raise Exception('You have to provide a user object or a username')
142 142
143 143 if hasattr(repo, 'repo_id'):
144 144 repo_obj = Repository.get(repo.repo_id)
145 145 repo_name = repo_obj.repo_name
146 146 elif isinstance(repo, basestring):
147 147 repo_name = repo.lstrip('/')
148 148 repo_obj = Repository.get_by_repo_name(repo_name)
149 149 else:
150 150 repo_obj = None
151 151 repo_name = ''
152 152
153 153 user_log = UserLog()
154 154 user_log.user_id = user_obj.user_id
155 155 user_log.username = user_obj.username
156 156 user_log.action = safe_unicode(action)
157 157
158 158 user_log.repository = repo_obj
159 159 user_log.repository_name = repo_name
160 160
161 161 user_log.action_date = datetime.datetime.now()
162 162 user_log.user_ip = ipaddr
163 163 sa.add(user_log)
164 164
165 165 log.info('Logging action:%s on %s by user:%s ip:%s' %
166 166 (action, safe_unicode(repo), user_obj, ipaddr))
167 167 if commit:
168 168 sa.commit()
169 169 except Exception:
170 170 log.error(traceback.format_exc())
171 171 raise
172 172
173 173
174 174 def get_filesystem_repos(path, recursive=False, skip_removed_repos=True):
175 175 """
176 176 Scans given path for repos and returns (name, (type, path)) tuples
177 177
178 178 :param path: path to scan for repositories
179 179 :param recursive: recursive search and return names with subdirs in front
180 180 """
181 181
182 182 # remove ending slash for better results
183 183 path = path.rstrip(os.sep)
184 184 log.debug('now scanning in %s location recursive:%s...' % (path, recursive))
185 185
186 186 def _get_repos(p):
187 187 if not os.access(p, os.W_OK):
188 188 log.warn('ignoring repo path without write access: %s', p)
189 189 return
190 190 for dirpath in os.listdir(p):
191 191 if os.path.isfile(os.path.join(p, dirpath)):
192 192 continue
193 193 cur_path = os.path.join(p, dirpath)
194 194
195 195 # skip removed repos
196 196 if skip_removed_repos and REMOVED_REPO_PAT.match(dirpath):
197 197 continue
198 198
199 199 #skip .<something> dirs
200 200 if dirpath.startswith('.'):
201 201 continue
202 202
203 203 try:
204 204 scm_info = get_scm(cur_path)
205 205 yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
206 206 except VCSError:
207 207 if not recursive:
208 208 continue
209 209 #check if this dir contains other repos for recursive scan
210 210 rec_path = os.path.join(p, dirpath)
211 211 if os.path.isdir(rec_path):
212 212 for inner_scm in _get_repos(rec_path):
213 213 yield inner_scm
214 214
215 215 return _get_repos(path)
216 216
217 217
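For illustration, a hedged usage sketch for get_filesystem_repos: it yields (relative_name, (scm_type, scm_path)) pairs for every repository found under the given root; the root path here is made up:

    from rhodecode.lib.utils import get_filesystem_repos

    for name, (scm_type, scm_path) in get_filesystem_repos('/srv/repos', recursive=True):
        print name, scm_type, scm_path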
218 218 def is_valid_repo(repo_name, base_path, scm=None):
219 219 """
220 220 Returns True if given path is a valid repository, False otherwise.
221 221 If the scm param is given, also compare whether the detected scm is the
222 222 same as the expected one
223 223
224 224 :param repo_name:
225 225 :param base_path:
226 226 :param scm:
227 227
228 228 :return True: if given path is a valid repository
229 229 """
230 230 full_path = os.path.join(safe_str(base_path), safe_str(repo_name))
231 231
232 232 try:
233 233 scm_ = get_scm(full_path)
234 234 if scm:
235 235 return scm_[0] == scm
236 236 return True
237 237 except VCSError:
238 238 return False
239 239
240 240
241 241 def is_valid_repos_group(repos_group_name, base_path, skip_path_check=False):
242 242 """
243 243 Returns True if given path is a repository group, False otherwise
244 244
245 245 :param repos_group_name:
246 246 :param base_path:
247 247 """
248 248 full_path = os.path.join(safe_str(base_path), safe_str(repos_group_name))
249 249
250 250 # check if it's not a repo
251 251 if is_valid_repo(repos_group_name, base_path):
252 252 return False
253 253
254 254 try:
255 255 # we need to check bare git repos at higher level
256 256 # since we might match branches/hooks/info/objects or possibly
257 257 # other things inside a bare git repo
258 258 get_scm(os.path.dirname(full_path))
259 259 return False
260 260 except VCSError:
261 261 pass
262 262
263 263 # check if it's a valid path
264 264 if skip_path_check or os.path.isdir(full_path):
265 265 return True
266 266
267 267 return False
268 268
269 269
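A short, hedged example of how the two validity helpers above fit together; the base path and names are illustrative:

    from rhodecode.lib.utils import is_valid_repo, is_valid_repos_group

    base = '/srv/repos'
    if is_valid_repo('project/my-repo', base, scm='hg'):
        print 'existing Mercurial repository'
    if is_valid_repos_group('project', base):
        print 'plain directory usable as a repository group'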
270 270 def ask_ok(prompt, retries=4, complaint='Yes or no please!'):
271 271 while True:
272 272 ok = raw_input(prompt)
273 273 if ok in ('y', 'ye', 'yes'):
274 274 return True
275 275 if ok in ('n', 'no', 'nop', 'nope'):
276 276 return False
277 277 retries = retries - 1
278 278 if retries < 0:
279 279 raise IOError
280 280 print complaint
281 281
282 282 #propagated from mercurial documentation
283 283 ui_sections = ['alias', 'auth',
284 284 'decode/encode', 'defaults',
285 285 'diff', 'email',
286 286 'extensions', 'format',
287 287 'merge-patterns', 'merge-tools',
288 288 'hooks', 'http_proxy',
289 289 'smtp', 'patch',
290 290 'paths', 'profiling',
291 291 'server', 'trusted',
292 292 'ui', 'web', ]
293 293
294 294
295 295 def make_ui(read_from='file', path=None, checkpaths=True, clear_session=True):
296 296 """
297 297 A function that will read python rc files or database
298 298 and make a mercurial ui object from the read options
299 299
300 300 :param path: path to mercurial config file
301 301 :param checkpaths: check the path
302 302 :param read_from: read from 'file' or 'db'
303 303 """
304 304
305 305 baseui = ui.ui()
306 306
307 307 # clean the baseui object
308 308 baseui._ocfg = config.config()
309 309 baseui._ucfg = config.config()
310 310 baseui._tcfg = config.config()
311 311
312 312 if read_from == 'file':
313 313 if not os.path.isfile(path):
314 314 log.debug('hgrc file is not present at %s, skipping...' % path)
315 315 return False
316 316 log.debug('reading hgrc from %s' % path)
317 317 cfg = config.config()
318 318 cfg.read(path)
319 319 for section in ui_sections:
320 320 for k, v in cfg.items(section):
321 321 log.debug('settings ui from file: [%s] %s=%s' % (section, k, v))
322 322 baseui.setconfig(safe_str(section), safe_str(k), safe_str(v))
323 323
324 324 elif read_from == 'db':
325 325 sa = meta.Session()
326 326 ret = sa.query(RhodeCodeUi)\
327 327 .options(FromCache("sql_cache_short", "get_hg_ui_settings"))\
328 328 .all()
329 329
330 330 hg_ui = ret
331 331 for ui_ in hg_ui:
332 332 if ui_.ui_active:
333 333 log.debug('settings ui from db: [%s] %s=%s', ui_.ui_section,
334 334 ui_.ui_key, ui_.ui_value)
335 335 baseui.setconfig(safe_str(ui_.ui_section), safe_str(ui_.ui_key),
336 336 safe_str(ui_.ui_value))
337 337 if ui_.ui_key == 'push_ssl':
338 338 # force set push_ssl requirement to False, rhodecode
339 339 # handles that
340 340 baseui.setconfig(safe_str(ui_.ui_section), safe_str(ui_.ui_key),
341 341 False)
342 342 if clear_session:
343 343 meta.Session.remove()
344 344 return baseui
345 345
346 346
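A hedged sketch of calling make_ui, either from an hgrc-style file or from the RhodeCodeUi rows in the database (the file path is illustrative):

    # from a config file; returns False if the file is missing
    baseui = make_ui(read_from='file', path='/srv/rhodecode/hgrc')

    # from the database, keeping the SQLAlchemy session for the caller
    baseui = make_ui(read_from='db', clear_session=False)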
347 347 def set_rhodecode_config(config):
348 348 """
349 349 Updates pylons config with new settings from database
350 350
351 351 :param config:
352 352 """
353 353 hgsettings = RhodeCodeSetting.get_app_settings()
354 354
355 355 for k, v in hgsettings.items():
356 356 config[k] = v
357 357
358 358
359 359 def map_groups(path):
360 360 """
361 361 Given a full path to a repository, create all nested groups that this
362 362 repo is inside. This function creates parent-child relationships between
363 363 groups and creates default perms for all new groups.
364 364
365 365 :param path: full path to repository
366 366 """
367 367 sa = meta.Session()
368 368 groups = path.split(Repository.url_sep())
369 369 parent = None
370 370 group = None
371 371
372 372 # last element is repo in nested groups structure
373 373 groups = groups[:-1]
374 374 rgm = ReposGroupModel(sa)
375 375 for lvl, group_name in enumerate(groups):
376 376 group_name = '/'.join(groups[:lvl] + [group_name])
377 377 group = RepoGroup.get_by_group_name(group_name)
378 378 desc = '%s group' % group_name
379 379
380 380 # skip folders that are now removed repos
381 381 if REMOVED_REPO_PAT.match(group_name):
382 382 break
383 383
384 384 if group is None:
385 385 log.debug('creating group level: %s group_name: %s' % (lvl,
386 386 group_name))
387 387 group = RepoGroup(group_name, parent)
388 388 group.group_description = desc
389 389 sa.add(group)
390 390 rgm._create_default_perms(group)
391 391 sa.flush()
392 392 parent = group
393 393 return group
394 394
395 395
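The group-name derivation in map_groups is easy to misread, so here is the path-splitting part in isolation as plain Python (assuming '/' as Repository.url_sep(); this is an illustration, not the RhodeCode API):

    path = 'projects/backend/my-repo'          # a repo nested in two groups
    groups = path.split('/')[:-1]              # drop the repo itself -> ['projects', 'backend']
    names = ['/'.join(groups[:lvl] + [name]) for lvl, name in enumerate(groups)]
    # names == ['projects', 'projects/backend'] -- one RepoGroup is ensured per entry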
396 396 def repo2db_mapper(initial_repo_list, remove_obsolete=False,
397 397 install_git_hook=False):
398 398 """
399 399 maps all repos given in initial_repo_list; non-existing repositories
400 400 are created. If remove_obsolete is True it also checks for db entries
401 401 that are not in initial_repo_list and removes them.
402 402
403 403 :param initial_repo_list: list of repositories found by scanning methods
404 404 :param remove_obsolete: check for obsolete entries in database
405 405 :param install_git_hook: if this is True, also check and install githook
406 406 for a repo if missing
407 407 """
408 408 from rhodecode.model.repo import RepoModel
409 409 from rhodecode.model.scm import ScmModel
410 410 sa = meta.Session()
411 411 rm = RepoModel()
412 412 user = sa.query(User).filter(User.admin == True).first()
413 413 if user is None:
414 414 raise Exception('Missing administrative account!')
415 415 added = []
416 416
417 417 ##creation defaults
418 418 defs = RhodeCodeSetting.get_default_repo_settings(strip_prefix=True)
419 419 enable_statistics = defs.get('repo_enable_statistics')
420 420 enable_locking = defs.get('repo_enable_locking')
421 421 enable_downloads = defs.get('repo_enable_downloads')
422 422 private = defs.get('repo_private')
423 423
424 424 for name, repo in initial_repo_list.items():
425 425 group = map_groups(name)
426 426 db_repo = rm.get_by_repo_name(name)
427 427 # found repo that is on filesystem not in RhodeCode database
428 428 if not db_repo:
429 429 log.info('repository %s not found, creating now' % name)
430 430 added.append(name)
431 431 desc = (repo.description
432 432 if repo.description != 'unknown'
433 433 else '%s repository' % name)
434 434
435 435 new_repo = rm.create_repo(
436 436 repo_name=name,
437 437 repo_type=repo.alias,
438 438 description=desc,
439 439 repos_group=getattr(group, 'group_id', None),
440 440 owner=user,
441 441 just_db=True,
442 442 enable_locking=enable_locking,
443 443 enable_downloads=enable_downloads,
444 444 enable_statistics=enable_statistics,
445 445 private=private
446 446 )
447 447 # we just added that repo, so make sure it has the git hook
448 448 # installed
449 449 if new_repo.repo_type == 'git':
450 450 ScmModel().install_git_hook(new_repo.scm_instance)
451 451 new_repo.update_changeset_cache()
452 452 elif install_git_hook:
453 453 if db_repo.repo_type == 'git':
454 454 ScmModel().install_git_hook(db_repo.scm_instance)
455 455 # during startup install cache keys for all repositories in the
456 456 # system; this will register all repos and multiple instances
457 457 cache_key = CacheInvalidation._get_cache_key(name)
458 458 log.debug("Creating invalidation cache key for %s: %s", name, cache_key)
459 459 CacheInvalidation.invalidate(name)
460 460
461 461 sa.commit()
462 462 removed = []
463 463 if remove_obsolete:
464 464 # remove from database those repositories that are not in the filesystem
465 465 for repo in sa.query(Repository).all():
466 466 if repo.repo_name not in initial_repo_list.keys():
467 467 log.debug("Removing non-existing repository found in db `%s`" %
468 468 repo.repo_name)
469 469 try:
470 sa.delete(repo)
470 removed.append(repo.repo_name)
471 RepoModel(sa).delete(repo, forks='detach', fs_remove=False)
471 472 sa.commit()
472 removed.append(repo.repo_name)
473 473 except Exception:
474 474 #don't hold further removals on error
475 475 log.error(traceback.format_exc())
476 476 sa.rollback()
477 477 return added, removed
478 478
479 479
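A hedged usage sketch for repo2db_mapper as it would be called from a rescan; ScmModel().repo_scan() is assumed here to return the same name-to-repo mapping that get_filesystem_repos feeds into:

    from rhodecode.model.scm import ScmModel
    from rhodecode.lib.utils import repo2db_mapper

    repos = ScmModel().repo_scan()   # assumed: dict of {repo_name: vcs repo instance}
    added, removed = repo2db_mapper(repos, remove_obsolete=True,
                                    install_git_hook=True)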
480 480 # set cache regions for beaker so celery can utilise it
481 481 def add_cache(settings):
482 482 cache_settings = {'regions': None}
483 483 for key in settings.keys():
484 484 for prefix in ['beaker.cache.', 'cache.']:
485 485 if key.startswith(prefix):
486 486 name = key.split(prefix)[1].strip()
487 487 cache_settings[name] = settings[key].strip()
488 488 if cache_settings['regions']:
489 489 for region in cache_settings['regions'].split(','):
490 490 region = region.strip()
491 491 region_settings = {}
492 492 for key, value in cache_settings.items():
493 493 if key.startswith(region):
494 494 region_settings[key.split('.')[1]] = value
495 495 region_settings['expire'] = int(region_settings.get('expire',
496 496 60))
497 497 region_settings.setdefault('lock_dir',
498 498 cache_settings.get('lock_dir'))
499 499 region_settings.setdefault('data_dir',
500 500 cache_settings.get('data_dir'))
501 501
502 502 if 'type' not in region_settings:
503 503 region_settings['type'] = cache_settings.get('type',
504 504 'memory')
505 505 beaker.cache.cache_regions[region] = region_settings
506 506
507 507
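What add_cache expects, shown as an illustrative settings dict; the keys mirror the beaker.cache.* options of a paster .ini file and the values are made up:

    settings = {
        'beaker.cache.data_dir': '/srv/rhodecode/data/cache/data',
        'beaker.cache.lock_dir': '/srv/rhodecode/data/cache/lock',
        'beaker.cache.regions': 'short_term, long_term',
        'beaker.cache.short_term.type': 'memory',
        'beaker.cache.short_term.expire': '60',
        'beaker.cache.long_term.type': 'file',
        'beaker.cache.long_term.expire': '3600',
    }
    add_cache(settings)   # fills beaker.cache.cache_regions['short_term'] and ['long_term']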
508 508 def load_rcextensions(root_path):
509 509 import rhodecode
510 510 from rhodecode.config import conf
511 511
512 512 path = os.path.join(root_path, 'rcextensions', '__init__.py')
513 513 if os.path.isfile(path):
514 514 rcext = create_module('rc', path)
515 515 EXT = rhodecode.EXTENSIONS = rcext
516 516 log.debug('Found rcextensions now loading %s...' % rcext)
517 517
518 518 # Additional mappings that are not present in the pygments lexers
519 519 conf.LANGUAGES_EXTENSIONS_MAP.update(getattr(EXT, 'EXTRA_MAPPINGS', {}))
520 520
521 521 #OVERRIDE OUR EXTENSIONS FROM RC-EXTENSIONS (if present)
522 522
523 523 if getattr(EXT, 'INDEX_EXTENSIONS', []) != []:
524 524 log.debug('settings custom INDEX_EXTENSIONS')
525 525 conf.INDEX_EXTENSIONS = getattr(EXT, 'INDEX_EXTENSIONS', [])
526 526
527 527 #ADDITIONAL MAPPINGS
528 528 log.debug('adding extra into INDEX_EXTENSIONS')
529 529 conf.INDEX_EXTENSIONS.extend(getattr(EXT, 'EXTRA_INDEX_EXTENSIONS', []))
530 530
531 531 # auto check if the module is not missing any data, set to default if it is;
532 532 # this will help auto-update new features of the rcext module
533 533 from rhodecode.config import rcextensions
534 534 for k in dir(rcextensions):
535 535 if not k.startswith('_') and not hasattr(EXT, k):
536 536 setattr(EXT, k, getattr(rcextensions, k))
537 537
538 538
539 539 def get_custom_lexer(extension):
540 540 """
541 541 returns a custom lexer if it's defined in rcextensions module, or None
542 542 if there's no custom lexer defined
543 543 """
544 544 import rhodecode
545 545 from pygments import lexers
546 546 #check if we didn't define this extension as another lexer
547 547 if rhodecode.EXTENSIONS and extension in rhodecode.EXTENSIONS.EXTRA_LEXERS:
548 548 _lexer_name = rhodecode.EXTENSIONS.EXTRA_LEXERS[extension]
549 549 return lexers.get_lexer_by_name(_lexer_name)
550 550
551 551
552 552 #==============================================================================
553 553 # TEST FUNCTIONS AND CREATORS
554 554 #==============================================================================
555 555 def create_test_index(repo_location, config, full_index):
556 556 """
557 557 Makes default test index
558 558
559 559 :param config: test config
560 560 :param full_index:
561 561 """
562 562
563 563 from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon
564 564 from rhodecode.lib.pidlock import DaemonLock, LockHeld
565 565
566 566 repo_location = repo_location
567 567
568 568 index_location = os.path.join(config['app_conf']['index_dir'])
569 569 if not os.path.exists(index_location):
570 570 os.makedirs(index_location)
571 571
572 572 try:
573 573 l = DaemonLock(file_=jn(dn(index_location), 'make_index.lock'))
574 574 WhooshIndexingDaemon(index_location=index_location,
575 575 repo_location=repo_location)\
576 576 .run(full_index=full_index)
577 577 l.release()
578 578 except LockHeld:
579 579 pass
580 580
581 581
582 582 def create_test_env(repos_test_path, config):
583 583 """
584 584 Makes a fresh database and
585 585 installs test repositories into a tmp dir
586 586 """
587 587 from rhodecode.lib.db_manage import DbManage
588 588 from rhodecode.tests import HG_REPO, GIT_REPO, TESTS_TMP_PATH
589 589
590 590 # PART ONE create db
591 591 dbconf = config['sqlalchemy.db1.url']
592 592 log.debug('making test db %s' % dbconf)
593 593
594 594 # create test dir if it doesn't exist
595 595 if not os.path.isdir(repos_test_path):
596 596 log.debug('Creating testdir %s' % repos_test_path)
597 597 os.makedirs(repos_test_path)
598 598
599 599 dbmanage = DbManage(log_sql=True, dbconf=dbconf, root=config['here'],
600 600 tests=True)
601 601 dbmanage.create_tables(override=True)
602 602 dbmanage.create_settings(dbmanage.config_prompt(repos_test_path))
603 603 dbmanage.create_default_user()
604 604 dbmanage.admin_prompt()
605 605 dbmanage.create_permissions()
606 606 dbmanage.populate_default_permissions()
607 607 Session().commit()
608 608 # PART TWO make test repo
609 609 log.debug('making test vcs repositories')
610 610
611 611 idx_path = config['app_conf']['index_dir']
612 612 data_path = config['app_conf']['cache_dir']
613 613
614 614 #clean index and data
615 615 if idx_path and os.path.exists(idx_path):
616 616 log.debug('remove %s' % idx_path)
617 617 shutil.rmtree(idx_path)
618 618
619 619 if data_path and os.path.exists(data_path):
620 620 log.debug('remove %s' % data_path)
621 621 shutil.rmtree(data_path)
622 622
623 623 #CREATE DEFAULT TEST REPOS
624 624 cur_dir = dn(dn(abspath(__file__)))
625 625 tar = tarfile.open(jn(cur_dir, 'tests', "vcs_test_hg.tar.gz"))
626 626 tar.extractall(jn(TESTS_TMP_PATH, HG_REPO))
627 627 tar.close()
628 628
629 629 cur_dir = dn(dn(abspath(__file__)))
630 630 tar = tarfile.open(jn(cur_dir, 'tests', "vcs_test_git.tar.gz"))
631 631 tar.extractall(jn(TESTS_TMP_PATH, GIT_REPO))
632 632 tar.close()
633 633
634 634 #LOAD VCS test stuff
635 635 from rhodecode.tests.vcs import setup_package
636 636 setup_package()
637 637
638 638
639 639 #==============================================================================
640 640 # PASTER COMMANDS
641 641 #==============================================================================
642 642 class BasePasterCommand(Command):
643 643 """
644 644 Abstract Base Class for paster commands.
645 645
646 646 The celery commands are somewhat aggressive about loading
647 647 celery.conf, and since our module sets the `CELERY_LOADER`
648 648 environment variable to our loader, we have to bootstrap a bit and
649 649 make sure we've had a chance to load the pylons config off of the
650 650 command line, otherwise everything fails.
651 651 """
652 652 min_args = 1
653 653 min_args_error = "Please provide a paster config file as an argument."
654 654 takes_config_file = 1
655 655 requires_config_file = True
656 656
657 657 def notify_msg(self, msg, log=False):
658 658 """Make a notification to user, additionally if logger is passed
659 659 it logs this action using given logger
660 660
661 661 :param msg: message that will be printed to user
662 662 :param log: logging instance, to use to additionally log this message
663 663
664 664 """
665 665 if log and isinstance(log, logging.Logger):
666 666 log.info(msg)
667 667
668 668 def run(self, args):
669 669 """
670 670 Overrides Command.run
671 671
672 672 Checks for a config file argument and loads it.
673 673 """
674 674 if len(args) < self.min_args:
675 675 raise BadCommand(
676 676 self.min_args_error % {'min_args': self.min_args,
677 677 'actual_args': len(args)})
678 678
679 679 # Decrement because we're going to lob off the first argument.
680 680 # @@ This is hacky
681 681 self.min_args -= 1
682 682 self.bootstrap_config(args[0])
683 683 self.update_parser()
684 684 return super(BasePasterCommand, self).run(args[1:])
685 685
686 686 def update_parser(self):
687 687 """
688 688 Abstract method. Allows for the class's parser to be updated
689 689 before the superclass's `run` method is called. Necessary to
690 690 allow options/arguments to be passed through to the underlying
691 691 celery command.
692 692 """
693 693 raise NotImplementedError("Abstract Method.")
694 694
695 695 def bootstrap_config(self, conf):
696 696 """
697 697 Loads the pylons configuration.
698 698 """
699 699 from pylons import config as pylonsconfig
700 700
701 701 self.path_to_ini_file = os.path.realpath(conf)
702 702 conf = paste.deploy.appconfig('config:' + self.path_to_ini_file)
703 703 pylonsconfig.init_app(conf.global_conf, conf.local_conf)
704 704
705 705 def _init_session(self):
706 706 """
707 707 Inits SqlAlchemy Session
708 708 """
709 709 logging.config.fileConfig(self.path_to_ini_file)
710 710 from pylons import config
711 711 from rhodecode.model import init_model
712 712 from rhodecode.lib.utils2 import engine_from_config
713 713
714 714 #get to remove repos !!
715 715 add_cache(config)
716 716 engine = engine_from_config(config, 'sqlalchemy.db1.')
717 717 init_model(engine)
718 718
719 719
720 720 def check_git_version():
721 721 """
722 722 Checks what version of git is installed in system, and issues a warning
723 723 if it's too old for RhodeCode to properly work.
724 724 """
725 725 from rhodecode import BACKENDS
726 726 from rhodecode.lib.vcs.backends.git.repository import GitRepository
727 727 from distutils.version import StrictVersion
728 728
729 729 stdout, stderr = GitRepository._run_git_command('--version', _bare=True,
730 730 _safe=True)
731 731
732 732 ver = (stdout.split(' ')[-1] or '').strip() or '0.0.0'
733 733 if len(ver.split('.')) > 3:
734 734 #StrictVersion handles at most a 3-element version
735 735 ver = '.'.join(ver.split('.')[:3])
736 736 try:
737 737 _ver = StrictVersion(ver)
738 738 except Exception:
739 739 _ver = StrictVersion('0.0.0')
740 740 stderr = traceback.format_exc()
741 741
742 742 req_ver = '1.7.4'
743 743 to_old_git = False
744 744 if _ver < StrictVersion(req_ver):
745 745 to_old_git = True
746 746
747 747 if 'git' in BACKENDS:
748 748 log.debug('GIT version detected: %s' % stdout)
749 749 if stderr:
750 750 log.warning('Unable to detect git version, original error was: %r' % stderr)
751 751 elif to_old_git:
752 752 log.warning('RhodeCode detected git version %s, which is too old '
753 753 'for the system to function properly. Make sure '
754 754 'its version is at least %s' % (ver, req_ver))
755 755 return _ver
756 756
757 757
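The only subtle bit in check_git_version is trimming the version string so StrictVersion accepts it; in isolation (standard library only):

    from distutils.version import StrictVersion

    ver = '1.8.1.2'                          # e.g. taken from `git --version` output
    if len(ver.split('.')) > 3:
        ver = '.'.join(ver.split('.')[:3])   # StrictVersion handles at most three components
    assert StrictVersion(ver) >= StrictVersion('1.7.4')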
758 758 @decorator.decorator
759 759 def jsonify(func, *args, **kwargs):
760 760 """Action decorator that formats output for JSON
761 761
762 762 Given a function that will return content, this decorator will turn
763 763 the result into JSON, with a content-type of 'application/json' and
764 764 output it.
765 765
766 766 """
767 767 from pylons.decorators.util import get_pylons
768 768 from rhodecode.lib.ext_json import json
769 769 pylons = get_pylons(args)
770 770 pylons.response.headers['Content-Type'] = 'application/json; charset=utf-8'
771 771 data = func(*args, **kwargs)
772 772 if isinstance(data, (list, tuple)):
773 773 msg = "JSON responses with Array envelopes are susceptible to " \
774 774 "cross-site data leak attacks, see " \
775 775 "http://wiki.pylonshq.com/display/pylonsfaq/Warnings"
776 776 warnings.warn(msg, Warning, 2)
777 777 log.warning(msg)
778 778 log.debug("Returning JSON wrapped action output")
779 779 return json.dumps(data, encoding='utf-8')
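A hedged usage sketch for the jsonify decorator on a Pylons controller action; the controller, action and import of BaseController are illustrative, not taken from this diff:

    from rhodecode.lib.base import BaseController
    from rhodecode.lib.utils import jsonify

    class RepoStatsController(BaseController):

        @jsonify
        def repo_count(self):
            # the decorator serializes the return value to JSON and sets the
            # 'application/json; charset=utf-8' content-type on the response
            return {'count': 42}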
@@ -1,703 +1,707 b''
1 1 # -*- coding: utf-8 -*-
2 2 """
3 3 rhodecode.model.repo
4 4 ~~~~~~~~~~~~~~~~~~~~
5 5
6 6 Repository model for rhodecode
7 7
8 8 :created_on: Jun 5, 2010
9 9 :author: marcink
10 10 :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
11 11 :license: GPLv3, see COPYING for more details.
12 12 """
13 13 # This program is free software: you can redistribute it and/or modify
14 14 # it under the terms of the GNU General Public License as published by
15 15 # the Free Software Foundation, either version 3 of the License, or
16 16 # (at your option) any later version.
17 17 #
18 18 # This program is distributed in the hope that it will be useful,
19 19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 21 # GNU General Public License for more details.
22 22 #
23 23 # You should have received a copy of the GNU General Public License
24 24 # along with this program. If not, see <http://www.gnu.org/licenses/>.
25 25 from __future__ import with_statement
26 26 import os
27 27 import shutil
28 28 import logging
29 29 import traceback
30 30 from datetime import datetime
31 31
32 32 from rhodecode.lib.vcs.backends import get_backend
33 33 from rhodecode.lib.compat import json
34 34 from rhodecode.lib.utils2 import LazyProperty, safe_str, safe_unicode,\
35 35 remove_prefix, obfuscate_url_pw
36 36 from rhodecode.lib.caching_query import FromCache
37 37 from rhodecode.lib.hooks import log_create_repository, log_delete_repository
38 38
39 39 from rhodecode.model import BaseModel
40 40 from rhodecode.model.db import Repository, UserRepoToPerm, User, Permission, \
41 41 Statistics, UserGroup, UserGroupRepoToPerm, RhodeCodeUi, RepoGroup,\
42 42 RhodeCodeSetting, RepositoryField
43 43 from rhodecode.lib import helpers as h
44 44 from rhodecode.lib.auth import HasRepoPermissionAny
45 45 from rhodecode.lib.exceptions import AttachedForksError
46 46
47 47 log = logging.getLogger(__name__)
48 48
49 49
50 50 class RepoModel(BaseModel):
51 51
52 52 cls = Repository
53 53 URL_SEPARATOR = Repository.url_sep()
54 54
55 55 def __get_users_group(self, users_group):
56 56 return self._get_instance(UserGroup, users_group,
57 57 callback=UserGroup.get_by_group_name)
58 58
59 59 def _get_repos_group(self, repos_group):
60 60 return self._get_instance(RepoGroup, repos_group,
61 61 callback=RepoGroup.get_by_group_name)
62 62
63 63 @LazyProperty
64 64 def repos_path(self):
65 65 """
66 66 Gets the repositories root path from the database
67 67 """
68 68
69 69 q = self.sa.query(RhodeCodeUi).filter(RhodeCodeUi.ui_key == '/').one()
70 70 return q.ui_value
71 71
72 72 def get(self, repo_id, cache=False):
73 73 repo = self.sa.query(Repository)\
74 74 .filter(Repository.repo_id == repo_id)
75 75
76 76 if cache:
77 77 repo = repo.options(FromCache("sql_cache_short",
78 78 "get_repo_%s" % repo_id))
79 79 return repo.scalar()
80 80
81 81 def get_repo(self, repository):
82 82 return self._get_repo(repository)
83 83
84 84 def get_by_repo_name(self, repo_name, cache=False):
85 85 repo = self.sa.query(Repository)\
86 86 .filter(Repository.repo_name == repo_name)
87 87
88 88 if cache:
89 89 repo = repo.options(FromCache("sql_cache_short",
90 90 "get_repo_%s" % repo_name))
91 91 return repo.scalar()
92 92
93 93 def get_all_user_repos(self, user):
94 94 """
95 95 Gets all repositories to which the user has at least read access
96 96
97 97 :param user:
98 98 :type user:
99 99 """
100 100 from rhodecode.lib.auth import AuthUser
101 101 user = self._get_user(user)
102 102 repos = AuthUser(user_id=user.user_id).permissions['repositories']
103 103 access_check = lambda r: r[1] in ['repository.read',
104 104 'repository.write',
105 105 'repository.admin']
106 106 repos = [x[0] for x in filter(access_check, repos.items())]
107 107 return Repository.query().filter(Repository.repo_name.in_(repos))
108 108
109 109 def get_users_js(self):
110 110 users = self.sa.query(User).filter(User.active == True).all()
111 111 return json.dumps([
112 112 {
113 113 'id': u.user_id,
114 114 'fname': u.name,
115 115 'lname': u.lastname,
116 116 'nname': u.username,
117 117 'gravatar_lnk': h.gravatar_url(u.email, 14)
118 118 } for u in users]
119 119 )
120 120
121 121 def get_users_groups_js(self):
122 122 users_groups = self.sa.query(UserGroup)\
123 123 .filter(UserGroup.users_group_active == True).all()
124 124
125 125 return json.dumps([
126 126 {
127 127 'id': gr.users_group_id,
128 128 'grname': gr.users_group_name,
129 129 'grmembers': len(gr.members),
130 130 } for gr in users_groups]
131 131 )
132 132
133 133 @classmethod
134 134 def _render_datatable(cls, tmpl, *args, **kwargs):
135 135 import rhodecode
136 136 from pylons import tmpl_context as c
137 137 from pylons.i18n.translation import _
138 138
139 139 _tmpl_lookup = rhodecode.CONFIG['pylons.app_globals'].mako_lookup
140 140 template = _tmpl_lookup.get_template('data_table/_dt_elements.html')
141 141
142 142 tmpl = template.get_def(tmpl)
143 143 kwargs.update(dict(_=_, h=h, c=c))
144 144 return tmpl.render(*args, **kwargs)
145 145
146 146 @classmethod
147 147 def update_repoinfo(cls, repositories=None):
148 148 if not repositories:
149 149 repositories = Repository.getAll()
150 150 for repo in repositories:
151 151 repo.update_changeset_cache()
152 152
153 153 def get_repos_as_dict(self, repos_list=None, admin=False, perm_check=True,
154 154 super_user_actions=False):
155 155 _render = self._render_datatable
156 156
157 157 def quick_menu(repo_name):
158 158 return _render('quick_menu', repo_name)
159 159
160 160 def repo_lnk(name, rtype, private, fork_of):
161 161 return _render('repo_name', name, rtype, private, fork_of,
162 162 short_name=not admin, admin=False)
163 163
164 164 def last_change(last_change):
165 165 return _render("last_change", last_change)
166 166
167 167 def rss_lnk(repo_name):
168 168 return _render("rss", repo_name)
169 169
170 170 def atom_lnk(repo_name):
171 171 return _render("atom", repo_name)
172 172
173 173 def last_rev(repo_name, cs_cache):
174 174 return _render('revision', repo_name, cs_cache.get('revision'),
175 175 cs_cache.get('raw_id'), cs_cache.get('author'),
176 176 cs_cache.get('message'))
177 177
178 178 def desc(desc):
179 179 from pylons import tmpl_context as c
180 180 if c.visual.stylify_metatags:
181 181 return h.urlify_text(h.desc_stylize(h.truncate(desc, 60)))
182 182 else:
183 183 return h.urlify_text(h.truncate(desc, 60))
184 184
185 185 def repo_actions(repo_name):
186 186 return _render('repo_actions', repo_name, super_user_actions)
187 187
188 188 def owner_actions(user_id, username):
189 189 return _render('user_name', user_id, username)
190 190
191 191 repos_data = []
192 192 for repo in repos_list:
193 193 if perm_check:
194 194 # check permission at this level
195 195 if not HasRepoPermissionAny(
196 196 'repository.read', 'repository.write', 'repository.admin'
197 197 )(repo.repo_name, 'get_repos_as_dict check'):
198 198 continue
199 199 cs_cache = repo.changeset_cache
200 200 row = {
201 201 "menu": quick_menu(repo.repo_name),
202 202 "raw_name": repo.repo_name.lower(),
203 203 "name": repo_lnk(repo.repo_name, repo.repo_type,
204 204 repo.private, repo.fork),
205 205 "last_change": last_change(repo.last_db_change),
206 206 "last_changeset": last_rev(repo.repo_name, cs_cache),
207 207 "raw_tip": cs_cache.get('revision'),
208 208 "desc": desc(repo.description),
209 209 "owner": h.person(repo.user.username),
210 210 "rss": rss_lnk(repo.repo_name),
211 211 "atom": atom_lnk(repo.repo_name),
212 212
213 213 }
214 214 if admin:
215 215 row.update({
216 216 "action": repo_actions(repo.repo_name),
217 217 "owner": owner_actions(repo.user.user_id,
218 218 h.person(repo.user.username))
219 219 })
220 220 repos_data.append(row)
221 221
222 222 return {
223 223 "totalRecords": len(repos_list),
224 224 "startIndex": 0,
225 225 "sort": "name",
226 226 "dir": "asc",
227 227 "records": repos_data
228 228 }
229 229
230 230 def _get_defaults(self, repo_name):
231 231 """
232 232 Gets information about a repository, and returns a dict for
233 233 usage in forms
234 234
235 235 :param repo_name:
236 236 """
237 237
238 238 repo_info = Repository.get_by_repo_name(repo_name)
239 239
240 240 if repo_info is None:
241 241 return None
242 242
243 243 defaults = repo_info.get_dict()
244 244 group, repo_name, repo_name_full = repo_info.groups_and_repo
245 245 defaults['repo_name'] = repo_name
246 246 defaults['repo_group'] = getattr(group[-1] if group else None,
247 247 'group_id', None)
248 248
249 249 for strip, k in [(0, 'repo_type'), (1, 'repo_enable_downloads'),
250 250 (1, 'repo_description'), (1, 'repo_enable_locking'),
251 251 (1, 'repo_landing_rev'), (0, 'clone_uri'),
252 252 (1, 'repo_private'), (1, 'repo_enable_statistics')]:
253 253 attr = k
254 254 if strip:
255 255 attr = remove_prefix(k, 'repo_')
256 256
257 257 defaults[k] = defaults[attr]
258 258
259 259 # fill owner
260 260 if repo_info.user:
261 261 defaults.update({'user': repo_info.user.username})
262 262 else:
263 263 replacement_user = User.query().filter(User.admin ==
264 264 True).first().username
265 265 defaults.update({'user': replacement_user})
266 266
267 267 # fill repository users
268 268 for p in repo_info.repo_to_perm:
269 269 defaults.update({'u_perm_%s' % p.user.username:
270 270 p.permission.permission_name})
271 271
272 272 # fill repository groups
273 273 for p in repo_info.users_group_to_perm:
274 274 defaults.update({'g_perm_%s' % p.users_group.users_group_name:
275 275 p.permission.permission_name})
276 276
277 277 return defaults
278 278
279 279 def update(self, org_repo_name, **kwargs):
280 280 try:
281 281 cur_repo = self.get_by_repo_name(org_repo_name, cache=False)
282 282
283 283 if 'user' in kwargs:
284 284 cur_repo.user = User.get_by_username(kwargs['user'])
285 285
286 286 if 'repo_group' in kwargs:
287 287 cur_repo.group = RepoGroup.get(kwargs['repo_group'])
288 288
289 289 for strip, k in [(0, 'repo_type'), (1, 'repo_enable_downloads'),
290 290 (1, 'repo_description'), (1, 'repo_enable_locking'),
291 291 (1, 'repo_landing_rev'), (0, 'clone_uri'),
292 292 (1, 'repo_private'), (1, 'repo_enable_statistics')]:
293 293 if k in kwargs:
294 294 val = kwargs[k]
295 295 if strip:
296 296 k = remove_prefix(k, 'repo_')
297 297 setattr(cur_repo, k, val)
298 298
299 299 new_name = cur_repo.get_new_name(kwargs['repo_name'])
300 300 cur_repo.repo_name = new_name
301 301 #if private flag is set, reset default permission to NONE
302 302
303 303 if kwargs.get('repo_private'):
304 304 EMPTY_PERM = 'repository.none'
305 305 RepoModel().grant_user_permission(
306 306 repo=cur_repo, user='default', perm=EMPTY_PERM
307 307 )
308 308 #handle extra fields
309 309 for field in filter(lambda k: k.startswith(RepositoryField.PREFIX), kwargs):
310 310 k = RepositoryField.un_prefix_key(field)
311 311 ex_field = RepositoryField.get_by_key_name(key=k, repo=cur_repo)
312 312 if ex_field:
313 313 ex_field.field_value = kwargs[field]
314 314 self.sa.add(ex_field)
315 315 self.sa.add(cur_repo)
316 316
317 317 if org_repo_name != new_name:
318 318 # rename repository
319 319 self.__rename_repo(old=org_repo_name, new=new_name)
320 320
321 321 return cur_repo
322 322 except Exception:
323 323 log.error(traceback.format_exc())
324 324 raise
325 325
326 326 def create_repo(self, repo_name, repo_type, description, owner,
327 327 private=False, clone_uri=None, repos_group=None,
328 328 landing_rev='tip', just_db=False, fork_of=None,
329 329 copy_fork_permissions=False, enable_statistics=False,
330 330 enable_locking=False, enable_downloads=False):
331 331 """
332 332 Create repository
333 333
334 334 """
335 335 from rhodecode.model.scm import ScmModel
336 336
337 337 owner = self._get_user(owner)
338 338 fork_of = self._get_repo(fork_of)
339 339 repos_group = self._get_repos_group(repos_group)
340 340 try:
341 341
342 342 # repo name is just a name of repository
343 343 # while repo_name_full is a full qualified name that is combined
344 344 # with name and path of group
345 345 repo_name_full = repo_name
346 346 repo_name = repo_name.split(self.URL_SEPARATOR)[-1]
347 347
348 348 new_repo = Repository()
349 349 new_repo.enable_statistics = False
350 350 new_repo.repo_name = repo_name_full
351 351 new_repo.repo_type = repo_type
352 352 new_repo.user = owner
353 353 new_repo.group = repos_group
354 354 new_repo.description = description or repo_name
355 355 new_repo.private = private
356 356 new_repo.clone_uri = clone_uri
357 357 new_repo.landing_rev = landing_rev
358 358
359 359 new_repo.enable_statistics = enable_statistics
360 360 new_repo.enable_locking = enable_locking
361 361 new_repo.enable_downloads = enable_downloads
362 362
363 363 if repos_group:
364 364 new_repo.enable_locking = repos_group.enable_locking
365 365
366 366 if fork_of:
367 367 parent_repo = fork_of
368 368 new_repo.fork = parent_repo
369 369
370 370 self.sa.add(new_repo)
371 371
372 372 def _create_default_perms():
373 373 # create default permission
374 374 repo_to_perm = UserRepoToPerm()
375 375 default = 'repository.read'
376 376 for p in User.get_by_username('default').user_perms:
377 377 if p.permission.permission_name.startswith('repository.'):
378 378 default = p.permission.permission_name
379 379 break
380 380
381 381 default_perm = 'repository.none' if private else default
382 382
383 383 repo_to_perm.permission_id = self.sa.query(Permission)\
384 384 .filter(Permission.permission_name == default_perm)\
385 385 .one().permission_id
386 386
387 387 repo_to_perm.repository = new_repo
388 388 repo_to_perm.user_id = User.get_by_username('default').user_id
389 389
390 390 self.sa.add(repo_to_perm)
391 391
392 392 if fork_of:
393 393 if copy_fork_permissions:
394 394 repo = fork_of
395 395 user_perms = UserRepoToPerm.query()\
396 396 .filter(UserRepoToPerm.repository == repo).all()
397 397 group_perms = UserGroupRepoToPerm.query()\
398 398 .filter(UserGroupRepoToPerm.repository == repo).all()
399 399
400 400 for perm in user_perms:
401 401 UserRepoToPerm.create(perm.user, new_repo,
402 402 perm.permission)
403 403
404 404 for perm in group_perms:
405 405 UserGroupRepoToPerm.create(perm.users_group, new_repo,
406 406 perm.permission)
407 407 else:
408 408 _create_default_perms()
409 409 else:
410 410 _create_default_perms()
411 411
412 412 if not just_db:
413 413 self.__create_repo(repo_name, repo_type,
414 414 repos_group,
415 415 clone_uri)
416 416 log_create_repository(new_repo.get_dict(),
417 417 created_by=owner.username)
418 418
419 419 # now automatically start following this repository as owner
420 420 ScmModel(self.sa).toggle_following_repo(new_repo.repo_id,
421 421 owner.user_id)
422 422 return new_repo
423 423 except Exception:
424 424 log.error(traceback.format_exc())
425 425 raise
426 426
427 427 def create(self, form_data, cur_user, just_db=False, fork=None):
428 428 """
429 429 Backward compatibility function, just a wrapper on top of create_repo
430 430
431 431 :param form_data:
432 432 :param cur_user:
433 433 :param just_db:
434 434 :param fork:
435 435 """
436 436 owner = cur_user
437 437 repo_name = form_data['repo_name_full']
438 438 repo_type = form_data['repo_type']
439 439 description = form_data['repo_description']
440 440 private = form_data['repo_private']
441 441 clone_uri = form_data.get('clone_uri')
442 442 repos_group = form_data['repo_group']
443 443 landing_rev = form_data['repo_landing_rev']
444 444 copy_fork_permissions = form_data.get('copy_permissions')
445 445 fork_of = form_data.get('fork_parent_id')
446 446
447 447 ## repo creation defaults, private and repo_type are filled in form
448 448 defs = RhodeCodeSetting.get_default_repo_settings(strip_prefix=True)
449 449 enable_statistics = defs.get('repo_enable_statistics')
450 450 enable_locking = defs.get('repo_enable_locking')
451 451 enable_downloads = defs.get('repo_enable_downloads')
452 452
453 453 return self.create_repo(
454 454 repo_name, repo_type, description, owner, private, clone_uri,
455 455 repos_group, landing_rev, just_db, fork_of, copy_fork_permissions,
456 456 enable_statistics, enable_locking, enable_downloads
457 457 )
458 458
459 459 def create_fork(self, form_data, cur_user):
460 460 """
461 461 Simple wrapper into executing celery task for fork creation
462 462
463 463 :param form_data:
464 464 :param cur_user:
465 465 """
466 466 from rhodecode.lib.celerylib import tasks, run_task
467 467 run_task(tasks.create_repo_fork, form_data, cur_user)
468 468
469 def delete(self, repo, forks=None):
469 def delete(self, repo, forks=None, fs_remove=True):
470 470 """
471 471 Delete given repository, forks parameter defines what to do with
472 472 attached forks. Throws AttachedForksError if deleted repo has attached
473 473 forks
474 474
475 475 :param repo:
476 476 :param forks: str 'delete' or 'detach'
477 :param fs_remove: remove (archive) the repo from the filesystem
477 478 """
478 479 repo = self._get_repo(repo)
479 480 if repo:
480 481 if forks == 'detach':
481 482 for r in repo.forks:
482 483 r.fork = None
483 484 self.sa.add(r)
484 485 elif forks == 'delete':
485 486 for r in repo.forks:
486 487 self.delete(r, forks='delete')
487 488 elif [f for f in repo.forks]:
488 489 raise AttachedForksError()
489 490
490 491 old_repo_dict = repo.get_dict()
491 492 owner = repo.user
492 493 try:
493 494 self.sa.delete(repo)
495 if fs_remove:
494 496 self.__delete_repo(repo)
497 else:
498 log.debug('skipping removal from filesystem')
495 499 log_delete_repository(old_repo_dict,
496 500 deleted_by=owner.username)
497 501 except Exception:
498 502 log.error(traceback.format_exc())
499 503 raise
500 504
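Hedged examples of calling delete() with the new fs_remove parameter; the repo variables are placeholders, and the first form is what the rescan in rhodecode/lib/utils.py now uses for zombie repos:

    repo_model = RepoModel(sa)

    # db-only removal: detach forks, leave the filesystem alone (repo is already gone)
    repo_model.delete(zombie_repo, forks='detach', fs_remove=False)

    # full removal: delete forks recursively and archive the directory on disk
    repo_model.delete(some_repo, forks='delete')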
501 505 def grant_user_permission(self, repo, user, perm):
502 506 """
503 507 Grant permission for user on given repository, or update existing one
504 508 if found
505 509
506 510 :param repo: Instance of Repository, repository_id, or repository name
507 511 :param user: Instance of User, user_id or username
508 512 :param perm: Instance of Permission, or permission_name
509 513 """
510 514 user = self._get_user(user)
511 515 repo = self._get_repo(repo)
512 516 permission = self._get_perm(perm)
513 517
514 518 # check if we have that permission already
515 519 obj = self.sa.query(UserRepoToPerm)\
516 520 .filter(UserRepoToPerm.user == user)\
517 521 .filter(UserRepoToPerm.repository == repo)\
518 522 .scalar()
519 523 if obj is None:
520 524 # create new !
521 525 obj = UserRepoToPerm()
522 526 obj.repository = repo
523 527 obj.user = user
524 528 obj.permission = permission
525 529 self.sa.add(obj)
526 530 log.debug('Granted perm %s to %s on %s' % (perm, user, repo))
527 531
528 532 def revoke_user_permission(self, repo, user):
529 533 """
530 534 Revoke permission for user on given repository
531 535
532 536 :param repo: Instance of Repository, repository_id, or repository name
533 537 :param user: Instance of User, user_id or username
534 538 """
535 539
536 540 user = self._get_user(user)
537 541 repo = self._get_repo(repo)
538 542
539 543 obj = self.sa.query(UserRepoToPerm)\
540 544 .filter(UserRepoToPerm.repository == repo)\
541 545 .filter(UserRepoToPerm.user == user)\
542 546 .scalar()
543 547 if obj:
544 548 self.sa.delete(obj)
545 549 log.debug('Revoked perm on %s on %s' % (repo, user))
546 550
547 551 def grant_users_group_permission(self, repo, group_name, perm):
548 552 """
549 553 Grant permission for user group on given repository, or update
550 554 existing one if found
551 555
552 556 :param repo: Instance of Repository, repository_id, or repository name
553 557 :param group_name: Instance of UserGroup, users_group_id,
554 558 or user group name
555 559 :param perm: Instance of Permission, or permission_name
556 560 """
557 561 repo = self._get_repo(repo)
558 562 group_name = self.__get_users_group(group_name)
559 563 permission = self._get_perm(perm)
560 564
561 565 # check if we have that permission already
562 566 obj = self.sa.query(UserGroupRepoToPerm)\
563 567 .filter(UserGroupRepoToPerm.users_group == group_name)\
564 568 .filter(UserGroupRepoToPerm.repository == repo)\
565 569 .scalar()
566 570
567 571 if obj is None:
568 572 # create new
569 573 obj = UserGroupRepoToPerm()
570 574
571 575 obj.repository = repo
572 576 obj.users_group = group_name
573 577 obj.permission = permission
574 578 self.sa.add(obj)
575 579 log.debug('Granted perm %s to %s on %s' % (perm, group_name, repo))
576 580
577 581 def revoke_users_group_permission(self, repo, group_name):
578 582 """
579 583 Revoke permission for user group on given repository
580 584
581 585 :param repo: Instance of Repository, repository_id, or repository name
582 586 :param group_name: Instance of UserGroup, users_group_id,
583 587 or user group name
584 588 """
585 589 repo = self._get_repo(repo)
586 590 group_name = self.__get_users_group(group_name)
587 591
588 592 obj = self.sa.query(UserGroupRepoToPerm)\
589 593 .filter(UserGroupRepoToPerm.repository == repo)\
590 594 .filter(UserGroupRepoToPerm.users_group == group_name)\
591 595 .scalar()
592 596 if obj:
593 597 self.sa.delete(obj)
594 598 log.debug('Revoked perm to %s on %s' % (repo, group_name))
595 599
596 600 def delete_stats(self, repo_name):
597 601 """
598 602 removes stats for given repo
599 603
600 604 :param repo_name:
601 605 """
602 606 repo = self._get_repo(repo_name)
603 607 try:
604 608 obj = self.sa.query(Statistics)\
605 609 .filter(Statistics.repository == repo).scalar()
606 610 if obj:
607 611 self.sa.delete(obj)
608 612 except Exception:
609 613 log.error(traceback.format_exc())
610 614 raise
611 615
612 616 def __create_repo(self, repo_name, alias, parent, clone_uri=False):
613 617 """
614 618 makes a repository on the filesystem. It's group aware, meaning it'll
615 619 create the repository within a group and alter the paths according to
616 620 the group location
617 621
618 622 :param repo_name:
619 623 :param alias:
620 624 :param parent_id:
621 625 :param clone_uri:
622 626 """
623 627 from rhodecode.lib.utils import is_valid_repo, is_valid_repos_group
624 628 from rhodecode.model.scm import ScmModel
625 629
626 630 if parent:
627 631 new_parent_path = os.sep.join(parent.full_path_splitted)
628 632 else:
629 633 new_parent_path = ''
630 634
631 635 # we need to make it str for mercurial
632 636 repo_path = os.path.join(*map(lambda x: safe_str(x),
633 637 [self.repos_path, new_parent_path, repo_name]))
634 638
635 639 # check if this path is not a repository
636 640 if is_valid_repo(repo_path, self.repos_path):
637 641 raise Exception('This path %s is a valid repository' % repo_path)
638 642
639 643 # check if this path is a group
640 644 if is_valid_repos_group(repo_path, self.repos_path):
641 645 raise Exception('This path %s is a valid group' % repo_path)
642 646
643 647 log.info('creating repo %s in %s @ %s' % (
644 648 repo_name, safe_unicode(repo_path),
645 649 obfuscate_url_pw(clone_uri)
646 650 )
647 651 )
648 652 backend = get_backend(alias)
649 653 if alias == 'hg':
650 654 backend(repo_path, create=True, src_url=clone_uri)
651 655 elif alias == 'git':
652 656 r = backend(repo_path, create=True, src_url=clone_uri, bare=True)
653 657 # add rhodecode hook into this repo
654 658 ScmModel().install_git_hook(repo=r)
655 659 else:
656 660 raise Exception('Undefined alias %s' % alias)
657 661
658 662 def __rename_repo(self, old, new):
659 663 """
660 664 renames repository on filesystem
661 665
662 666 :param old: old name
663 667 :param new: new name
664 668 """
665 669 log.info('renaming repo from %s to %s' % (old, new))
666 670
667 671 old_path = os.path.join(self.repos_path, old)
668 672 new_path = os.path.join(self.repos_path, new)
669 673 if os.path.isdir(new_path):
670 674 raise Exception(
671 675 'Was trying to rename to already existing dir %s' % new_path
672 676 )
673 677 shutil.move(old_path, new_path)
674 678
675 679 def __delete_repo(self, repo):
676 680 """
677 681 removes repo from filesystem; the removal is actually made by
678 682 adding an rm__ prefix to the dir and renaming the internal .hg/.git dirs so this
679 683 repository is no longer valid for rhodecode. It can be undeleted later on
680 684 by reverting the renames on this repository
681 685
682 686 :param repo: repo object
683 687 """
684 688 rm_path = os.path.join(self.repos_path, repo.repo_name)
685 689 log.info("Removing %s" % (rm_path))
686 690 # disable hg/git internals so it doesn't get detected as a repo
687 691 alias = repo.repo_type
688 692
689 693 bare = getattr(repo.scm_instance, 'bare', False)
690 694
691 695 if not bare:
692 696 # skip this for bare git repos
693 697 shutil.move(os.path.join(rm_path, '.%s' % alias),
694 698 os.path.join(rm_path, 'rm__.%s' % alias))
695 699 # disable repo
696 700 _now = datetime.now()
697 701 _ms = str(_now.microsecond).rjust(6, '0')
698 702 _d = 'rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms),
699 703 repo.just_name)
700 704 if repo.group:
701 705 args = repo.group.full_path_splitted + [_d]
702 706 _d = os.path.join(*args)
703 707 shutil.move(rm_path, os.path.join(self.repos_path, _d))
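For reference, the archival directory name produced by __delete_repo, reproduced standalone (the repo name is illustrative); it has exactly the shape that REMOVED_REPO_PAT in rhodecode/lib/utils.py skips during rescans:

    from datetime import datetime

    _now = datetime.now()
    _ms = str(_now.microsecond).rjust(6, '0')
    archived = 'rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms), 'my-repo')
    # e.g. 'rm__20130405_101530_000123__my-repo'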