fixed Python 2.5 compat and a repo mapper (repo2db_mapper) issue
marcink
r2620:cd207411 beta
@@ -1,697 +1,697 b''
1 1 # -*- coding: utf-8 -*-
2 2 """
3 3 rhodecode.lib.utils
4 4 ~~~~~~~~~~~~~~~~~~~
5 5
6 6 Utilities library for RhodeCode
7 7
8 8 :created_on: Apr 18, 2010
9 9 :author: marcink
10 10 :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
11 11 :license: GPLv3, see COPYING for more details.
12 12 """
13 13 # This program is free software: you can redistribute it and/or modify
14 14 # it under the terms of the GNU General Public License as published by
15 15 # the Free Software Foundation, either version 3 of the License, or
16 16 # (at your option) any later version.
17 17 #
18 18 # This program is distributed in the hope that it will be useful,
19 19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 21 # GNU General Public License for more details.
22 22 #
23 23 # You should have received a copy of the GNU General Public License
24 24 # along with this program. If not, see <http://www.gnu.org/licenses/>.
25 25
26 26 import os
27 27 import re
28 28 import logging
29 29 import datetime
30 30 import traceback
31 31 import paste
32 32 import beaker
33 33 import tarfile
34 34 import shutil
35 35 from os.path import abspath
36 36 from os.path import dirname as dn, join as jn
37 37
38 38 from paste.script.command import Command, BadCommand
39 39
40 40 from mercurial import ui, config
41 41
42 42 from webhelpers.text import collapse, remove_formatting, strip_tags
43 43
44 44 from rhodecode.lib.vcs import get_backend
45 45 from rhodecode.lib.vcs.backends.base import BaseChangeset
46 46 from rhodecode.lib.vcs.utils.lazy import LazyProperty
47 47 from rhodecode.lib.vcs.utils.helpers import get_scm
48 48 from rhodecode.lib.vcs.exceptions import VCSError
49 49
50 50 from rhodecode.lib.caching_query import FromCache
51 51
52 52 from rhodecode.model import meta
53 53 from rhodecode.model.db import Repository, User, RhodeCodeUi, \
54 54 UserLog, RepoGroup, RhodeCodeSetting, CacheInvalidation
55 55 from rhodecode.model.meta import Session
56 56 from rhodecode.model.repos_group import ReposGroupModel
57 57 from rhodecode.lib.utils2 import safe_str, safe_unicode
58 58 from rhodecode.lib.vcs.utils.fakemod import create_module
59 59
60 60 log = logging.getLogger(__name__)
61 61
62 62 REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')
63 63
64 64
65 65 def recursive_replace(str_, replace=' '):
66 66 """
67 67 Recursively replace repeated occurrences of the given character with a single instance
68 68
69 69 :param str_: given string
70 70 :param replace: character whose repeated occurrences are collapsed
71 71
72 72 Examples::
73 73 >>> recursive_replace("Mighty---Mighty-Bo--sstones",'-')
74 74 'Mighty-Mighty-Bo-sstones'
75 75 """
76 76
77 77 if str_.find(replace * 2) == -1:
78 78 return str_
79 79 else:
80 80 str_ = str_.replace(replace * 2, replace)
81 81 return recursive_replace(str_, replace)
82 82
83 83
84 84 def repo_name_slug(value):
85 85 """
86 86 Return a slug of the repository name.
87 87 This function is called on each creation/modification
88 88 of a repository to prevent bad names.
89 89 """
90 90
91 91 slug = remove_formatting(value)
92 92 slug = strip_tags(slug)
93 93
94 94 for c in """=[]\;'"<>,/~!@#$%^&*()+{}|: """:
95 95 slug = slug.replace(c, '-')
96 96 slug = recursive_replace(slug, '-')
97 97 slug = collapse(slug, '-')
98 98 return slug
99 99
100 100
101 101 def get_repo_slug(request):
102 102 _repo = request.environ['pylons.routes_dict'].get('repo_name')
103 103 if _repo:
104 104 _repo = _repo.rstrip('/')
105 105 return _repo
106 106
107 107
108 108 def get_repos_group_slug(request):
109 109 _group = request.environ['pylons.routes_dict'].get('group_name')
110 110 if _group:
111 111 _group = _group.rstrip('/')
112 112 return _group
113 113
114 114
115 115 def action_logger(user, action, repo, ipaddr='', sa=None, commit=False):
116 116 """
117 117 Action logger for various actions made by users
118 118
119 119 :param user: user that made this action; can be a unique username string or
120 120 an object containing a user_id attribute
121 121 :param action: action to log, should be one of the predefined unique actions for
122 122 easy translations
123 123 :param repo: string name of the repository, or an object containing repo_id,
124 124 that the action was made on
125 125 :param ipaddr: optional IP address from which the action was made
126 126 :param sa: optional sqlalchemy session
127 127
128 128 """
129 129
130 130 if not sa:
131 131 sa = meta.Session()
132 132
133 133 try:
134 134 if hasattr(user, 'user_id'):
135 135 user_obj = user
136 136 elif isinstance(user, basestring):
137 137 user_obj = User.get_by_username(user)
138 138 else:
139 139 raise Exception('You have to provide user object or username')
140 140
141 141 if hasattr(repo, 'repo_id'):
142 142 repo_obj = Repository.get(repo.repo_id)
143 143 repo_name = repo_obj.repo_name
144 144 elif isinstance(repo, basestring):
145 145 repo_name = repo.lstrip('/')
146 146 repo_obj = Repository.get_by_repo_name(repo_name)
147 147 else:
148 148 repo_obj = None
149 149 repo_name = ''
150 150
151 151 user_log = UserLog()
152 152 user_log.user_id = user_obj.user_id
153 153 user_log.action = safe_unicode(action)
154 154
155 155 user_log.repository = repo_obj
156 156 user_log.repository_name = repo_name
157 157
158 158 user_log.action_date = datetime.datetime.now()
159 159 user_log.user_ip = ipaddr
160 160 sa.add(user_log)
161 161
162 162 log.info(
163 163 'Adding user %s, action %s on %s' % (user_obj, action,
164 164 safe_unicode(repo))
165 165 )
166 166 if commit:
167 167 sa.commit()
168 168 except:
169 169 log.error(traceback.format_exc())
170 170 raise
171 171
172 172
173 173 def get_repos(path, recursive=False):
174 174 """
175 175 Scans the given path for repos and returns (name, (type, path)) tuples
176 176
177 177 :param path: path to scan for repositories
178 178 :param recursive: recursive search; returned names include their subdirectory prefix
179 179 """
180 180
181 181 # remove ending slash for better results
182 182 path = path.rstrip(os.sep)
183 183
184 184 def _get_repos(p):
185 185 if not os.access(p, os.W_OK):
186 186 return
187 187 for dirpath in os.listdir(p):
188 188 if os.path.isfile(os.path.join(p, dirpath)):
189 189 continue
190 190 cur_path = os.path.join(p, dirpath)
191 191 try:
192 192 scm_info = get_scm(cur_path)
193 193 yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
194 194 except VCSError:
195 195 if not recursive:
196 196 continue
197 197 #check if this dir contains other repos for recursive scan
198 198 rec_path = os.path.join(p, dirpath)
199 199 if os.path.isdir(rec_path):
200 200 for inner_scm in _get_repos(rec_path):
201 201 yield inner_scm
202 202
203 203 return _get_repos(path)
204 204
205 205
206 206 def is_valid_repo(repo_name, base_path):
207 207 """
208 208 Returns True if the given path is a valid repository, False otherwise
209 209
210 210 :param repo_name:
211 211 :param base_path:
212 212
213 213 :return True: if given path is a valid repository
214 214 """
215 215 full_path = os.path.join(safe_str(base_path), safe_str(repo_name))
216 216
217 217 try:
218 218 get_scm(full_path)
219 219 return True
220 220 except VCSError:
221 221 return False
222 222
223 223
224 224 def is_valid_repos_group(repos_group_name, base_path):
225 225 """
226 226 Returns True if the given path is a repos group, False otherwise
227 227
228 228 :param repos_group_name:
229 229 :param base_path:
230 230 """
231 231 full_path = os.path.join(safe_str(base_path), safe_str(repos_group_name))
232 232
233 233 # check if it's not a repo
234 234 if is_valid_repo(repos_group_name, base_path):
235 235 return False
236 236
237 237 try:
238 238 # we need to check bare git repos at higher level
239 239 # since we might match branches/hooks/info/objects or possible
240 240 # other things inside bare git repo
241 241 get_scm(os.path.dirname(full_path))
242 242 return False
243 243 except VCSError:
244 244 pass
245 245
246 246 # check if it's a valid path
247 247 if os.path.isdir(full_path):
248 248 return True
249 249
250 250 return False
251 251
252 252
253 253 def ask_ok(prompt, retries=4, complaint='Yes or no, please!'):
254 254 while True:
255 255 ok = raw_input(prompt)
256 256 if ok in ('y', 'ye', 'yes'):
257 257 return True
258 258 if ok in ('n', 'no', 'nop', 'nope'):
259 259 return False
260 260 retries = retries - 1
261 261 if retries < 0:
262 262 raise IOError
263 263 print complaint
264 264
265 265 #propagated from mercurial documentation
266 266 ui_sections = ['alias', 'auth',
267 267 'decode/encode', 'defaults',
268 268 'diff', 'email',
269 269 'extensions', 'format',
270 270 'merge-patterns', 'merge-tools',
271 271 'hooks', 'http_proxy',
272 272 'smtp', 'patch',
273 273 'paths', 'profiling',
274 274 'server', 'trusted',
275 275 'ui', 'web', ]
276 276
277 277
278 278 def make_ui(read_from='file', path=None, checkpaths=True):
279 279 """
280 280 A function that will read python rc files or the database
281 281 and make a mercurial ui object from the read options
282 282
283 283 :param path: path to mercurial config file
284 284 :param checkpaths: check the path
285 285 :param read_from: read from 'file' or 'db'
286 286 """
287 287
288 288 baseui = ui.ui()
289 289
290 290 # clean the baseui object
291 291 baseui._ocfg = config.config()
292 292 baseui._ucfg = config.config()
293 293 baseui._tcfg = config.config()
294 294
295 295 if read_from == 'file':
296 296 if not os.path.isfile(path):
297 297 log.debug('hgrc file is not present at %s skipping...' % path)
298 298 return False
299 299 log.debug('reading hgrc from %s' % path)
300 300 cfg = config.config()
301 301 cfg.read(path)
302 302 for section in ui_sections:
303 303 for k, v in cfg.items(section):
304 304 log.debug('settings ui from file[%s]%s:%s' % (section, k, v))
305 305 baseui.setconfig(section, k, v)
306 306
307 307 elif read_from == 'db':
308 308 sa = meta.Session()
309 309 ret = sa.query(RhodeCodeUi)\
310 310 .options(FromCache("sql_cache_short", "get_hg_ui_settings"))\
311 311 .all()
312 312
313 313 hg_ui = ret
314 314 for ui_ in hg_ui:
315 315 if ui_.ui_active:
316 316 log.debug('settings ui from db[%s]%s:%s', ui_.ui_section,
317 317 ui_.ui_key, ui_.ui_value)
318 318 baseui.setconfig(ui_.ui_section, ui_.ui_key, ui_.ui_value)
319 319
320 320 meta.Session.remove()
321 321 return baseui
322 322
323 323
324 324 def set_rhodecode_config(config):
325 325 """
326 326 Updates pylons config with new settings from database
327 327
328 328 :param config:
329 329 """
330 330 hgsettings = RhodeCodeSetting.get_app_settings()
331 331
332 332 for k, v in hgsettings.items():
333 333 config[k] = v
334 334
335 335
336 336 def invalidate_cache(cache_key, *args):
337 337 """
338 338 Puts cache invalidation task into db for
339 339 further global cache invalidation
340 340 """
341 341
342 342 from rhodecode.model.scm import ScmModel
343 343
344 344 if cache_key.startswith('get_repo_cached_'):
345 345 name = cache_key.split('get_repo_cached_')[-1]
346 346 ScmModel().mark_for_invalidation(name)
347 347
348 348
349 349 class EmptyChangeset(BaseChangeset):
350 350 """
351 351 A dummy empty changeset. It's possible to pass a hash when creating
352 352 an EmptyChangeset
353 353 """
354 354
355 355 def __init__(self, cs='0' * 40, repo=None, requested_revision=None,
356 356 alias=None):
357 357 self._empty_cs = cs
358 358 self.revision = -1
359 359 self.message = ''
360 360 self.author = ''
361 361 self.date = ''
362 362 self.repository = repo
363 363 self.requested_revision = requested_revision
364 364 self.alias = alias
365 365
366 366 @LazyProperty
367 367 def raw_id(self):
368 368 """
369 369 Returns raw string identifying this changeset, useful for web
370 370 representation.
371 371 """
372 372
373 373 return self._empty_cs
374 374
375 375 @LazyProperty
376 376 def branch(self):
377 377 return get_backend(self.alias).DEFAULT_BRANCH_NAME
378 378
379 379 @LazyProperty
380 380 def short_id(self):
381 381 return self.raw_id[:12]
382 382
383 383 def get_file_changeset(self, path):
384 384 return self
385 385
386 386 def get_file_content(self, path):
387 387 return u''
388 388
389 389 def get_file_size(self, path):
390 390 return 0
391 391
392 392
393 393 def map_groups(path):
394 394 """
395 395 Given a full path to a repository, create all nested groups that this
396 396 repo is inside. This function creates parent-child relationships between
397 397 groups and creates default perms for all new groups.
398 398
399 399 :param path: full path to the repository
400 400 """
401 401 sa = meta.Session()
402 402 groups = path.split(Repository.url_sep())
403 403 parent = None
404 404 group = None
405 405
406 406 # last element is repo in nested groups structure
407 407 groups = groups[:-1]
408 408 rgm = ReposGroupModel(sa)
409 409 for lvl, group_name in enumerate(groups):
410 410 group_name = '/'.join(groups[:lvl] + [group_name])
411 411 group = RepoGroup.get_by_group_name(group_name)
412 412 desc = '%s group' % group_name
413 413
414 414 # skip folders that are now removed repos
415 415 if REMOVED_REPO_PAT.match(group_name):
416 416 break
417 417
418 418 if group is None:
419 419 log.debug('creating group level: %s group_name: %s' % (lvl,
420 420 group_name))
421 421 group = RepoGroup(group_name, parent)
422 422 group.group_description = desc
423 423 sa.add(group)
424 424 rgm._create_default_perms(group)
425 425 sa.flush()
426 426 parent = group
427 427 return group
428 428
429 429
430 430 def repo2db_mapper(initial_repo_list, remove_obsolete=False,
431 431 install_git_hook=False):
432 432 """
433 433 Maps all repos given in initial_repo_list; non-existing repositories
434 434 are created. If remove_obsolete is True it also checks for db entries
435 435 that are not in initial_repo_list and removes them.
436 436
437 437 :param initial_repo_list: list of repositories found by scanning methods
438 438 :param remove_obsolete: check for obsolete entries in database
439 439 :param install_git_hook: if this is True, also check and install the git hook
440 440 for a repo if missing
441 441 """
442 442 from rhodecode.model.repo import RepoModel
443 443 from rhodecode.model.scm import ScmModel
444 444 sa = meta.Session()
445 445 rm = RepoModel()
446 446 user = sa.query(User).filter(User.admin == True).first()
447 447 if user is None:
448 448 raise Exception('Missing administrative account !')
449 449 added = []
450 450
451 451 for name, repo in initial_repo_list.items():
452 452 group = map_groups(name)
453 repo = rm.get_by_repo_name(name)
454 if not repo:
453 db_repo = rm.get_by_repo_name(name)
454 if not db_repo:
455 455 log.info('repository %s not found creating now' % name)
456 456 added.append(name)
457 457 desc = (repo.description
458 458 if repo.description != 'unknown'
459 459 else '%s repository' % name)
460 460 rm.create_repo(
461 461 repo_name=name,
462 462 repo_type=repo.alias,
463 463 description=desc,
464 464 repos_group=getattr(group, 'group_id', None),
465 465 owner=user,
466 466 just_db=True
467 467 )
468 468 elif install_git_hook:
469 if repo.repo_type == 'git':
470 ScmModel().install_git_hook(repo.scm_instance)
469 if db_repo.repo_type == 'git':
470 ScmModel().install_git_hook(db_repo.scm_instance)
471 471 sa.commit()
472 472 removed = []
473 473 if remove_obsolete:
474 474 # remove from database those repositories that are not in the filesystem
475 475 for repo in sa.query(Repository).all():
476 476 if repo.repo_name not in initial_repo_list.keys():
477 477 log.debug("Removing non existing repository found in db %s" %
478 478 repo.repo_name)
479 479 try:
480 480 sa.delete(repo)
481 481 sa.commit()
482 482 removed.append(repo.repo_name)
483 483 except:
484 484 #don't hold further removals on error
485 485 log.error(traceback.format_exc())
486 486
487 487 # clear cache keys
488 488 log.debug("Clearing cache keys now...")
489 489 CacheInvalidation.clear_cache()
490 490 sa.commit()
491 491 return added, removed
492 492
493 493
494 494 # set cache regions for beaker so celery can utilise it
495 495 def add_cache(settings):
496 496 cache_settings = {'regions': None}
497 497 for key in settings.keys():
498 498 for prefix in ['beaker.cache.', 'cache.']:
499 499 if key.startswith(prefix):
500 500 name = key.split(prefix)[1].strip()
501 501 cache_settings[name] = settings[key].strip()
502 502 if cache_settings['regions']:
503 503 for region in cache_settings['regions'].split(','):
504 504 region = region.strip()
505 505 region_settings = {}
506 506 for key, value in cache_settings.items():
507 507 if key.startswith(region):
508 508 region_settings[key.split('.')[1]] = value
509 509 region_settings['expire'] = int(region_settings.get('expire',
510 510 60))
511 511 region_settings.setdefault('lock_dir',
512 512 cache_settings.get('lock_dir'))
513 513 region_settings.setdefault('data_dir',
514 514 cache_settings.get('data_dir'))
515 515
516 516 if 'type' not in region_settings:
517 517 region_settings['type'] = cache_settings.get('type',
518 518 'memory')
519 519 beaker.cache.cache_regions[region] = region_settings
520 520
521 521
522 522 def load_rcextensions(root_path):
523 523 import rhodecode
524 524 from rhodecode.config import conf
525 525
526 526 path = os.path.join(root_path, 'rcextensions', '__init__.py')
527 527 if os.path.isfile(path):
528 528 rcext = create_module('rc', path)
529 529 EXT = rhodecode.EXTENSIONS = rcext
530 530 log.debug('Found rcextensions now loading %s...' % rcext)
531 531
532 532 # Additional mappings that are not present in the pygments lexers
533 533 conf.LANGUAGES_EXTENSIONS_MAP.update(getattr(EXT, 'EXTRA_MAPPINGS', {}))
534 534
535 535 #OVERRIDE OUR EXTENSIONS FROM RC-EXTENSIONS (if present)
536 536
537 537 if getattr(EXT, 'INDEX_EXTENSIONS', []) != []:
538 538 log.debug('settings custom INDEX_EXTENSIONS')
539 539 conf.INDEX_EXTENSIONS = getattr(EXT, 'INDEX_EXTENSIONS', [])
540 540
541 541 #ADDITIONAL MAPPINGS
542 542 log.debug('adding extra into INDEX_EXTENSIONS')
543 543 conf.INDEX_EXTENSIONS.extend(getattr(EXT, 'EXTRA_INDEX_EXTENSIONS', []))
544 544
545 545
546 546 #==============================================================================
547 547 # TEST FUNCTIONS AND CREATORS
548 548 #==============================================================================
549 549 def create_test_index(repo_location, config, full_index):
550 550 """
551 551 Makes default test index
552 552
553 553 :param config: test config
554 554 :param full_index:
555 555 """
556 556
557 557 from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon
558 558 from rhodecode.lib.pidlock import DaemonLock, LockHeld
559 559
560 560 repo_location = repo_location
561 561
562 562 index_location = os.path.join(config['app_conf']['index_dir'])
563 563 if not os.path.exists(index_location):
564 564 os.makedirs(index_location)
565 565
566 566 try:
567 567 l = DaemonLock(file_=jn(dn(index_location), 'make_index.lock'))
568 568 WhooshIndexingDaemon(index_location=index_location,
569 569 repo_location=repo_location)\
570 570 .run(full_index=full_index)
571 571 l.release()
572 572 except LockHeld:
573 573 pass
574 574
575 575
576 576 def create_test_env(repos_test_path, config):
577 577 """
578 578 Makes a fresh database and
579 579 install test repository into tmp dir
580 580 """
581 581 from rhodecode.lib.db_manage import DbManage
582 582 from rhodecode.tests import HG_REPO, GIT_REPO, TESTS_TMP_PATH
583 583
584 584 # PART ONE create db
585 585 dbconf = config['sqlalchemy.db1.url']
586 586 log.debug('making test db %s' % dbconf)
587 587
588 588 # create test dir if it doesn't exist
589 589 if not os.path.isdir(repos_test_path):
590 590 log.debug('Creating testdir %s' % repos_test_path)
591 591 os.makedirs(repos_test_path)
592 592
593 593 dbmanage = DbManage(log_sql=True, dbconf=dbconf, root=config['here'],
594 594 tests=True)
595 595 dbmanage.create_tables(override=True)
596 596 dbmanage.create_settings(dbmanage.config_prompt(repos_test_path))
597 597 dbmanage.create_default_user()
598 598 dbmanage.admin_prompt()
599 599 dbmanage.create_permissions()
600 600 dbmanage.populate_default_permissions()
601 601 Session().commit()
602 602 # PART TWO make test repo
603 603 log.debug('making test vcs repositories')
604 604
605 605 idx_path = config['app_conf']['index_dir']
606 606 data_path = config['app_conf']['cache_dir']
607 607
608 608 #clean index and data
609 609 if idx_path and os.path.exists(idx_path):
610 610 log.debug('remove %s' % idx_path)
611 611 shutil.rmtree(idx_path)
612 612
613 613 if data_path and os.path.exists(data_path):
614 614 log.debug('remove %s' % data_path)
615 615 shutil.rmtree(data_path)
616 616
617 617 #CREATE DEFAULT TEST REPOS
618 618 cur_dir = dn(dn(abspath(__file__)))
619 619 tar = tarfile.open(jn(cur_dir, 'tests', "vcs_test_hg.tar.gz"))
620 620 tar.extractall(jn(TESTS_TMP_PATH, HG_REPO))
621 621 tar.close()
622 622
623 623 cur_dir = dn(dn(abspath(__file__)))
624 624 tar = tarfile.open(jn(cur_dir, 'tests', "vcs_test_git.tar.gz"))
625 625 tar.extractall(jn(TESTS_TMP_PATH, GIT_REPO))
626 626 tar.close()
627 627
628 628 #LOAD VCS test stuff
629 629 from rhodecode.tests.vcs import setup_package
630 630 setup_package()
631 631
632 632
633 633 #==============================================================================
634 634 # PASTER COMMANDS
635 635 #==============================================================================
636 636 class BasePasterCommand(Command):
637 637 """
638 638 Abstract Base Class for paster commands.
639 639
640 640 The celery commands are somewhat aggressive about loading
641 641 celery.conf, and since our module sets the `CELERY_LOADER`
642 642 environment variable to our loader, we have to bootstrap a bit and
643 643 make sure we've had a chance to load the pylons config off of the
644 644 command line, otherwise everything fails.
645 645 """
646 646 min_args = 1
647 647 min_args_error = "Please provide a paster config file as an argument."
648 648 takes_config_file = 1
649 649 requires_config_file = True
650 650
651 651 def notify_msg(self, msg, log=False):
652 652 """Make a notification to user, additionally if logger is passed
653 653 it logs this action using given logger
654 654
655 655 :param msg: message that will be printed to user
656 656 :param log: logging instance, to use to additionally log this message
657 657
658 658 """
659 659 if log and isinstance(log, logging):
660 660 log(msg)
661 661
662 662 def run(self, args):
663 663 """
664 664 Overrides Command.run
665 665
666 666 Checks for a config file argument and loads it.
667 667 """
668 668 if len(args) < self.min_args:
669 669 raise BadCommand(
670 670 self.min_args_error % {'min_args': self.min_args,
671 671 'actual_args': len(args)})
672 672
673 673 # Decrement because we're going to lob off the first argument.
674 674 # @@ This is hacky
675 675 self.min_args -= 1
676 676 self.bootstrap_config(args[0])
677 677 self.update_parser()
678 678 return super(BasePasterCommand, self).run(args[1:])
679 679
680 680 def update_parser(self):
681 681 """
682 682 Abstract method. Allows for the class's parser to be updated
683 683 before the superclass's `run` method is called. Necessary to
684 684 allow options/arguments to be passed through to the underlying
685 685 celery command.
686 686 """
687 687 raise NotImplementedError("Abstract Method.")
688 688
689 689 def bootstrap_config(self, conf):
690 690 """
691 691 Loads the pylons configuration.
692 692 """
693 693 from pylons import config as pylonsconfig
694 694
695 695 self.path_to_ini_file = os.path.realpath(conf)
696 696 conf = paste.deploy.appconfig('config:' + self.path_to_ini_file)
697 697 pylonsconfig.init_app(conf.global_conf, conf.local_conf)
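
The hunk above, in repo2db_mapper, is the "repo mapper issue" named in the commit message: the loop variable repo holds the scanned filesystem repository, but the database lookup rebound it (repo = rm.get_by_repo_name(name)), so in the "not found" branch the later repo.description and repo.alias reads hit None instead of the scanned object. Renaming the lookup result to db_repo keeps the two apart. Below is a minimal, self-contained sketch of that pattern; ScannedRepo and existing_db_repos are hypothetical stand-ins, not the real RepoModel/Repository classes.

    class ScannedRepo(object):
        """Hypothetical stand-in for the object produced by the filesystem scan."""
        def __init__(self, description, alias):
            self.description = description
            self.alias = alias

    existing_db_repos = {}                      # pretend database: nothing created yet
    initial_repo_list = {'new-repo': ScannedRepo('a fresh repo', 'hg')}

    for name, repo in initial_repo_list.items():
        # The old code rebound 'repo' to the DB lookup result, so when the repository
        # was missing (None) the repo.description / repo.alias reads below would fail.
        db_repo = existing_db_repos.get(name)   # the fix keeps the lookup separate
        if not db_repo:
            # 'repo' still refers to the scanned filesystem object, as intended
            print('creating %s: %s (%s)' % (name, repo.description, repo.alias))
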
@@ -1,597 +1,598 b''
1 1 # -*- coding: utf-8 -*-
2 2 """
3 3 rhodecode.model.scm
4 4 ~~~~~~~~~~~~~~~~~~~
5 5
6 6 Scm model for RhodeCode
7 7
8 8 :created_on: Apr 9, 2010
9 9 :author: marcink
10 10 :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
11 11 :license: GPLv3, see COPYING for more details.
12 12 """
13 13 # This program is free software: you can redistribute it and/or modify
14 14 # it under the terms of the GNU General Public License as published by
15 15 # the Free Software Foundation, either version 3 of the License, or
16 16 # (at your option) any later version.
17 17 #
18 18 # This program is distributed in the hope that it will be useful,
19 19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 21 # GNU General Public License for more details.
22 22 #
23 23 # You should have received a copy of the GNU General Public License
24 24 # along with this program. If not, see <http://www.gnu.org/licenses/>.
25 from __future__ import with_statement
25 26 import os
26 27 import re
27 28 import time
28 29 import traceback
29 30 import logging
30 31 import cStringIO
31 32 import pkg_resources
32 33 from os.path import dirname as dn, join as jn
33 34
34 35 from sqlalchemy import func
35 36 from pylons.i18n.translation import _
36 37
37 38 import rhodecode
38 39 from rhodecode.lib.vcs import get_backend
39 40 from rhodecode.lib.vcs.exceptions import RepositoryError
40 41 from rhodecode.lib.vcs.utils.lazy import LazyProperty
41 42 from rhodecode.lib.vcs.nodes import FileNode
42 43
43 44 from rhodecode import BACKENDS
44 45 from rhodecode.lib import helpers as h
45 46 from rhodecode.lib.utils2 import safe_str, safe_unicode
46 47 from rhodecode.lib.auth import HasRepoPermissionAny, HasReposGroupPermissionAny
47 48 from rhodecode.lib.utils import get_repos as get_filesystem_repos, make_ui, \
48 49 action_logger, EmptyChangeset, REMOVED_REPO_PAT
49 50 from rhodecode.model import BaseModel
50 51 from rhodecode.model.db import Repository, RhodeCodeUi, CacheInvalidation, \
51 52 UserFollowing, UserLog, User, RepoGroup, PullRequest
52 53
53 54 log = logging.getLogger(__name__)
54 55
55 56
56 57 class UserTemp(object):
57 58 def __init__(self, user_id):
58 59 self.user_id = user_id
59 60
60 61 def __repr__(self):
61 62 return "<%s('id:%s')>" % (self.__class__.__name__, self.user_id)
62 63
63 64
64 65 class RepoTemp(object):
65 66 def __init__(self, repo_id):
66 67 self.repo_id = repo_id
67 68
68 69 def __repr__(self):
69 70 return "<%s('id:%s')>" % (self.__class__.__name__, self.repo_id)
70 71
71 72
72 73 class CachedRepoList(object):
73 74 """
74 75 Cached repo list; uses an in-memory cache after initialization, which
75 76 is super fast
76 77 """
77 78
78 79 def __init__(self, db_repo_list, repos_path, order_by=None):
79 80 self.db_repo_list = db_repo_list
80 81 self.repos_path = repos_path
81 82 self.order_by = order_by
82 83 self.reversed = (order_by or '').startswith('-')
83 84
84 85 def __len__(self):
85 86 return len(self.db_repo_list)
86 87
87 88 def __repr__(self):
88 89 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
89 90
90 91 def __iter__(self):
91 92 # pre-populated cache_map to save executing select statements
92 93 # for each repo
93 94 cache_map = CacheInvalidation.get_cache_map()
94 95
95 96 for dbr in self.db_repo_list:
96 97 scmr = dbr.scm_instance_cached(cache_map)
97 98 # check permission at this level
98 99 if not HasRepoPermissionAny(
99 100 'repository.read', 'repository.write', 'repository.admin'
100 101 )(dbr.repo_name, 'get repo check'):
101 102 continue
102 103
103 104 if scmr is None:
104 105 log.error(
105 106 '%s this repository is present in database but it '
106 107 'cannot be created as an scm instance' % dbr.repo_name
107 108 )
108 109 continue
109 110
110 111 last_change = scmr.last_change
111 112 tip = h.get_changeset_safe(scmr, 'tip')
112 113
113 114 tmp_d = {}
114 115 tmp_d['name'] = dbr.repo_name
115 116 tmp_d['name_sort'] = tmp_d['name'].lower()
116 117 tmp_d['description'] = dbr.description
117 118 tmp_d['description_sort'] = tmp_d['description'].lower()
118 119 tmp_d['last_change'] = last_change
119 120 tmp_d['last_change_sort'] = time.mktime(last_change.timetuple())
120 121 tmp_d['tip'] = tip.raw_id
121 122 tmp_d['tip_sort'] = tip.revision
122 123 tmp_d['rev'] = tip.revision
123 124 tmp_d['contact'] = dbr.user.full_contact
124 125 tmp_d['contact_sort'] = tmp_d['contact']
125 126 tmp_d['owner_sort'] = tmp_d['contact']
126 127 tmp_d['repo_archives'] = list(scmr._get_archives())
127 128 tmp_d['last_msg'] = tip.message
128 129 tmp_d['author'] = tip.author
129 130 tmp_d['dbrepo'] = dbr.get_dict()
130 131 tmp_d['dbrepo_fork'] = dbr.fork.get_dict() if dbr.fork else {}
131 132 yield tmp_d
132 133
133 134
134 135 class SimpleCachedRepoList(CachedRepoList):
135 136 """
136 137 Lighter version of CachedRepoList without the scm initialisation
137 138 """
138 139
139 140 def __iter__(self):
140 141 for dbr in self.db_repo_list:
141 142 # check permission at this level
142 143 if not HasRepoPermissionAny(
143 144 'repository.read', 'repository.write', 'repository.admin'
144 145 )(dbr.repo_name, 'get repo check'):
145 146 continue
146 147
147 148 tmp_d = {}
148 149 tmp_d['name'] = dbr.repo_name
149 150 tmp_d['name_sort'] = tmp_d['name'].lower()
150 151 tmp_d['description'] = dbr.description
151 152 tmp_d['description_sort'] = tmp_d['description'].lower()
152 153 tmp_d['dbrepo'] = dbr.get_dict()
153 154 tmp_d['dbrepo_fork'] = dbr.fork.get_dict() if dbr.fork else {}
154 155 yield tmp_d
155 156
156 157
157 158 class GroupList(object):
158 159
159 160 def __init__(self, db_repo_group_list):
160 161 self.db_repo_group_list = db_repo_group_list
161 162
162 163 def __len__(self):
163 164 return len(self.db_repo_group_list)
164 165
165 166 def __repr__(self):
166 167 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
167 168
168 169 def __iter__(self):
169 170 for dbgr in self.db_repo_group_list:
170 171 # check permission at this level
171 172 if not HasReposGroupPermissionAny(
172 173 'group.read', 'group.write', 'group.admin'
173 174 )(dbgr.group_name, 'get group repo check'):
174 175 continue
175 176
176 177 yield dbgr
177 178
178 179
179 180 class ScmModel(BaseModel):
180 181 """
181 182 Generic Scm Model
182 183 """
183 184
184 185 def __get_repo(self, instance):
185 186 cls = Repository
186 187 if isinstance(instance, cls):
187 188 return instance
188 189 elif isinstance(instance, int) or str(instance).isdigit():
189 190 return cls.get(instance)
190 191 elif isinstance(instance, basestring):
191 192 return cls.get_by_repo_name(instance)
192 193 elif instance:
193 194 raise Exception('given object must be int, basestr or Instance'
194 195 ' of %s got %s' % (type(cls), type(instance)))
195 196
196 197 @LazyProperty
197 198 def repos_path(self):
198 199 """
199 200 Gets the repositories root path from the database
200 201 """
201 202
202 203 q = self.sa.query(RhodeCodeUi).filter(RhodeCodeUi.ui_key == '/').one()
203 204
204 205 return q.ui_value
205 206
206 207 def repo_scan(self, repos_path=None):
207 208 """
208 209 Listing of repositories in the given path. This path should not be a
209 210 repository itself. Returns a dictionary of repository objects
210 211
211 212 :param repos_path: path to directory containing repositories
212 213 """
213 214
214 215 if repos_path is None:
215 216 repos_path = self.repos_path
216 217
217 218 log.info('scanning for repositories in %s' % repos_path)
218 219
219 220 baseui = make_ui('db')
220 221 repos = {}
221 222
222 223 for name, path in get_filesystem_repos(repos_path, recursive=True):
223 224 # skip removed repos
224 225 if REMOVED_REPO_PAT.match(name):
225 226 continue
226 227
227 228 # name needs to be decomposed and put back together using the /
228 229 # since this is the internal storage separator for rhodecode
229 230 name = Repository.url_sep().join(name.split(os.sep))
230 231
231 232 try:
232 233 if name in repos:
233 234 raise RepositoryError('Duplicate repository name %s '
234 235 'found in %s' % (name, path))
235 236 else:
236 237
237 238 klass = get_backend(path[0])
238 239
239 240 if path[0] == 'hg' and path[0] in BACKENDS.keys():
240 241 repos[name] = klass(safe_str(path[1]), baseui=baseui)
241 242
242 243 if path[0] == 'git' and path[0] in BACKENDS.keys():
243 244 repos[name] = klass(path[1])
244 245 except OSError:
245 246 continue
246 247
247 248 return repos
248 249
249 250 def get_repos(self, all_repos=None, sort_key=None, simple=False):
250 251 """
251 252 Get all repos from the db and for each repo create its
252 253 backend instance and fill that backend with information from the database
253 254
254 255 :param all_repos: list of repository names as strings
255 256 pass a specific repositories list, good for filtering
256 257
257 258 :param sort_key: initial sorting of repos
258 259 :param simple: use SimpleCachedList - one without the SCM info
259 260 """
260 261 if all_repos is None:
261 262 all_repos = self.sa.query(Repository)\
262 263 .filter(Repository.group_id == None)\
263 264 .order_by(func.lower(Repository.repo_name)).all()
264 265 if simple:
265 266 repo_iter = SimpleCachedRepoList(all_repos,
266 267 repos_path=self.repos_path,
267 268 order_by=sort_key)
268 269 else:
269 270 repo_iter = CachedRepoList(all_repos,
270 271 repos_path=self.repos_path,
271 272 order_by=sort_key)
272 273
273 274 return repo_iter
274 275
275 276 def get_repos_groups(self, all_groups=None):
276 277 if all_groups is None:
277 278 all_groups = RepoGroup.query()\
278 279 .filter(RepoGroup.group_parent_id == None).all()
279 280 group_iter = GroupList(all_groups)
280 281
281 282 return group_iter
282 283
283 284 def mark_for_invalidation(self, repo_name):
284 285 """
285 286 Puts cache invalidation task into db for
286 287 further global cache invalidation
287 288
288 289 :param repo_name: repo for which invalidation should take place
289 290 """
290 291 CacheInvalidation.set_invalidate(repo_name)
291 292
292 293 def toggle_following_repo(self, follow_repo_id, user_id):
293 294
294 295 f = self.sa.query(UserFollowing)\
295 296 .filter(UserFollowing.follows_repo_id == follow_repo_id)\
296 297 .filter(UserFollowing.user_id == user_id).scalar()
297 298
298 299 if f is not None:
299 300 try:
300 301 self.sa.delete(f)
301 302 action_logger(UserTemp(user_id),
302 303 'stopped_following_repo',
303 304 RepoTemp(follow_repo_id))
304 305 return
305 306 except:
306 307 log.error(traceback.format_exc())
307 308 raise
308 309
309 310 try:
310 311 f = UserFollowing()
311 312 f.user_id = user_id
312 313 f.follows_repo_id = follow_repo_id
313 314 self.sa.add(f)
314 315
315 316 action_logger(UserTemp(user_id),
316 317 'started_following_repo',
317 318 RepoTemp(follow_repo_id))
318 319 except:
319 320 log.error(traceback.format_exc())
320 321 raise
321 322
322 323 def toggle_following_user(self, follow_user_id, user_id):
323 324 f = self.sa.query(UserFollowing)\
324 325 .filter(UserFollowing.follows_user_id == follow_user_id)\
325 326 .filter(UserFollowing.user_id == user_id).scalar()
326 327
327 328 if f is not None:
328 329 try:
329 330 self.sa.delete(f)
330 331 return
331 332 except:
332 333 log.error(traceback.format_exc())
333 334 raise
334 335
335 336 try:
336 337 f = UserFollowing()
337 338 f.user_id = user_id
338 339 f.follows_user_id = follow_user_id
339 340 self.sa.add(f)
340 341 except:
341 342 log.error(traceback.format_exc())
342 343 raise
343 344
344 345 def is_following_repo(self, repo_name, user_id, cache=False):
345 346 r = self.sa.query(Repository)\
346 347 .filter(Repository.repo_name == repo_name).scalar()
347 348
348 349 f = self.sa.query(UserFollowing)\
349 350 .filter(UserFollowing.follows_repository == r)\
350 351 .filter(UserFollowing.user_id == user_id).scalar()
351 352
352 353 return f is not None
353 354
354 355 def is_following_user(self, username, user_id, cache=False):
355 356 u = User.get_by_username(username)
356 357
357 358 f = self.sa.query(UserFollowing)\
358 359 .filter(UserFollowing.follows_user == u)\
359 360 .filter(UserFollowing.user_id == user_id).scalar()
360 361
361 362 return f is not None
362 363
363 364 def get_followers(self, repo):
364 365 repo = self._get_repo(repo)
365 366
366 367 return self.sa.query(UserFollowing)\
367 368 .filter(UserFollowing.follows_repository == repo).count()
368 369
369 370 def get_forks(self, repo):
370 371 repo = self._get_repo(repo)
371 372 return self.sa.query(Repository)\
372 373 .filter(Repository.fork == repo).count()
373 374
374 375 def get_pull_requests(self, repo):
375 376 repo = self._get_repo(repo)
376 377 return self.sa.query(PullRequest)\
377 378 .filter(PullRequest.other_repo == repo).count()
378 379
379 380 def mark_as_fork(self, repo, fork, user):
380 381 repo = self.__get_repo(repo)
381 382 fork = self.__get_repo(fork)
382 383 repo.fork = fork
383 384 self.sa.add(repo)
384 385 return repo
385 386
386 387 def pull_changes(self, repo, username):
387 388 dbrepo = self.__get_repo(repo)
388 389 clone_uri = dbrepo.clone_uri
389 390 if not clone_uri:
390 391 raise Exception("This repository doesn't have a clone uri")
391 392
392 393 repo = dbrepo.scm_instance
393 394 try:
394 395 extras = {
395 396 'ip': '',
396 397 'username': username,
397 398 'action': 'push_remote',
398 399 'repository': dbrepo.repo_name,
399 400 'scm': repo.alias,
400 401 }
401 402 Repository.inject_ui(repo, extras=extras)
402 403
403 404 if repo.alias == 'git':
404 405 repo.fetch(clone_uri)
405 406 else:
406 407 repo.pull(clone_uri)
407 408 self.mark_for_invalidation(dbrepo.repo_name)
408 409 except:
409 410 log.error(traceback.format_exc())
410 411 raise
411 412
412 413 def commit_change(self, repo, repo_name, cs, user, author, message,
413 414 content, f_path):
414 415
415 416 if repo.alias == 'hg':
416 417 from rhodecode.lib.vcs.backends.hg import \
417 418 MercurialInMemoryChangeset as IMC
418 419 elif repo.alias == 'git':
419 420 from rhodecode.lib.vcs.backends.git import \
420 421 GitInMemoryChangeset as IMC
421 422
422 423 # decoding here will force that we have proper encoded values
423 424 # in any other case this will throw exceptions and deny commit
424 425 content = safe_str(content)
425 426 path = safe_str(f_path)
426 427 # message and author needs to be unicode
427 428 # proper backend should then translate that into required type
428 429 message = safe_unicode(message)
429 430 author = safe_unicode(author)
430 431 m = IMC(repo)
431 432 m.change(FileNode(path, content))
432 433 tip = m.commit(message=message,
433 434 author=author,
434 435 parents=[cs], branch=cs.branch)
435 436
436 437 new_cs = tip.short_id
437 438 action = 'push_local:%s' % new_cs
438 439
439 440 action_logger(user, action, repo_name)
440 441
441 442 self.mark_for_invalidation(repo_name)
442 443
443 444 def create_node(self, repo, repo_name, cs, user, author, message, content,
444 445 f_path):
445 446 if repo.alias == 'hg':
446 447 from rhodecode.lib.vcs.backends.hg import MercurialInMemoryChangeset as IMC
447 448 elif repo.alias == 'git':
448 449 from rhodecode.lib.vcs.backends.git import GitInMemoryChangeset as IMC
449 450 # decoding here will force that we have proper encoded values
450 451 # in any other case this will throw exceptions and deny commit
451 452
452 453 if isinstance(content, (basestring,)):
453 454 content = safe_str(content)
454 455 elif isinstance(content, (file, cStringIO.OutputType,)):
455 456 content = content.read()
456 457 else:
457 458 raise Exception('Content is of unrecognized type %s' % (
458 459 type(content)
459 460 ))
460 461
461 462 message = safe_unicode(message)
462 463 author = safe_unicode(author)
463 464 path = safe_str(f_path)
464 465 m = IMC(repo)
465 466
466 467 if isinstance(cs, EmptyChangeset):
467 468 # EmptyChangeset means we're editing an empty repository
468 469 parents = None
469 470 else:
470 471 parents = [cs]
471 472
472 473 m.add(FileNode(path, content=content))
473 474 tip = m.commit(message=message,
474 475 author=author,
475 476 parents=parents, branch=cs.branch)
476 477 new_cs = tip.short_id
477 478 action = 'push_local:%s' % new_cs
478 479
479 480 action_logger(user, action, repo_name)
480 481
481 482 self.mark_for_invalidation(repo_name)
482 483
483 484 def get_nodes(self, repo_name, revision, root_path='/', flat=True):
484 485 """
485 486 recursive walk in the root dir; returns a set of all paths in that dir
486 487 based on repository walk function
487 488
488 489 :param repo_name: name of repository
489 490 :param revision: revision for which to list nodes
490 491 :param root_path: root path to list
491 492 :param flat: return as a list; if False returns a dict with description
492 493
493 494 """
494 495 _files = list()
495 496 _dirs = list()
496 497 try:
497 498 _repo = self.__get_repo(repo_name)
498 499 changeset = _repo.scm_instance.get_changeset(revision)
499 500 root_path = root_path.lstrip('/')
500 501 for topnode, dirs, files in changeset.walk(root_path):
501 502 for f in files:
502 503 _files.append(f.path if flat else {"name": f.path,
503 504 "type": "file"})
504 505 for d in dirs:
505 506 _dirs.append(d.path if flat else {"name": d.path,
506 507 "type": "dir"})
507 508 except RepositoryError:
508 509 log.debug(traceback.format_exc())
509 510 raise
510 511
511 512 return _dirs, _files
512 513
513 514 def get_unread_journal(self):
514 515 return self.sa.query(UserLog).count()
515 516
516 517 def get_repo_landing_revs(self, repo=None):
517 518 """
518 519 Generates select options with tags, branches and bookmarks (for hg only)
519 520 grouped by type
520 521
521 522 :param repo:
522 523 :type repo:
523 524 """
524 525
525 526 hist_l = []
526 527 choices = []
527 528 repo = self.__get_repo(repo)
528 529 hist_l.append(['tip', _('latest tip')])
529 530 choices.append('tip')
530 531 if not repo:
531 532 return choices, hist_l
532 533
533 534 repo = repo.scm_instance
534 535
535 536 branches_group = ([(k, k) for k, v in
536 537 repo.branches.iteritems()], _("Branches"))
537 538 hist_l.append(branches_group)
538 539 choices.extend([x[0] for x in branches_group[0]])
539 540
540 541 if repo.alias == 'hg':
541 542 bookmarks_group = ([(k, k) for k, v in
542 543 repo.bookmarks.iteritems()], _("Bookmarks"))
543 544 hist_l.append(bookmarks_group)
544 545 choices.extend([x[0] for x in bookmarks_group[0]])
545 546
546 547 tags_group = ([(k, k) for k, v in
547 548 repo.tags.iteritems()], _("Tags"))
548 549 hist_l.append(tags_group)
549 550 choices.extend([x[0] for x in tags_group[0]])
550 551
551 552 return choices, hist_l
552 553
553 554 def install_git_hook(self, repo, force_create=False):
554 555 """
555 556 Creates a rhodecode hook inside a git repository
556 557
557 558 :param repo: Instance of VCS repo
558 559 :param force_create: Create even if a hook with the same name exists
559 560 """
560 561
561 562 loc = jn(repo.path, 'hooks')
562 563 if not repo.bare:
563 564 loc = jn(repo.path, '.git', 'hooks')
564 565 if not os.path.isdir(loc):
565 566 os.makedirs(loc)
566 567
567 568 tmpl = pkg_resources.resource_string(
568 569 'rhodecode', jn('config', 'post_receive_tmpl.py')
569 570 )
570 571
571 572 _hook_file = jn(loc, 'post-receive')
572 573 _rhodecode_hook = False
573 574 log.debug('Installing git hook in repo %s' % repo)
574 575 if os.path.exists(_hook_file):
575 576 # let's take a look at this hook, maybe it's rhodecode ?
576 577 log.debug('hook exists, checking if it is from rhodecode')
577 578 _HOOK_VER_PAT = re.compile(r'^RC_HOOK_VER')
578 579 with open(_hook_file, 'rb') as f:
579 580 data = f.read()
580 581 matches = re.compile(r'(?:%s)\s*=\s*(.*)'
581 582 % 'RC_HOOK_VER').search(data)
582 583 if matches:
583 584 try:
584 585 ver = matches.groups()[0]
585 586 log.debug('got %s it is rhodecode' % (ver))
586 587 _rhodecode_hook = True
587 588 except:
588 589 log.error(traceback.format_exc())
589 590
590 591 if _rhodecode_hook or force_create:
591 592 log.debug('writing hook file !')
592 593 with open(_hook_file, 'wb') as f:
593 594 tmpl = tmpl.replace('_TMPL_', rhodecode.__version__)
594 595 f.write(tmpl)
595 596 os.chmod(_hook_file, 0755)
596 597 else:
597 598 log.debug('skipping writing hook file')
\ No newline at end of file
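
The only change in this second file is the from __future__ import with_statement line added at the top, which is the "python2.5 compat" half of the commit: install_git_hook uses with open(...) blocks, and on Python 2.5 the with statement is only available behind that future import (it became a regular keyword in Python 2.6, where the import is a harmless no-op). A minimal sketch of the pattern, using a hypothetical file path for illustration:

    from __future__ import with_statement  # required on Python 2.5, no-op on 2.6+

    def read_hook(path='post-receive'):     # hypothetical path, for illustration only
        # Without the future import, Python 2.5 rejects this 'with' block with a
        # SyntaxError at parse time, before any code in the module can run.
        with open(path, 'rb') as f:
            return f.read()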