repo scan should skip directories starting with '.'...
marcink -
r3228:ba2e2514 beta
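
The scanner change in the diff below adds two guards before any VCS detection is attempted: directory entries whose name starts with '.' are skipped, and entries matching the removed-repo pattern are skipped unless skip_removed_repos is turned off. A minimal standalone sketch of that filter (the function name scan_candidates and the /srv/repos path are illustrative, not part of the patch):

    import os
    import re

    # same pattern the patch uses to recognize repositories scheduled for removal
    REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')

    def scan_candidates(path, skip_removed_repos=True):
        """Yield directory names under `path` that are worth probing for a repo."""
        for name in os.listdir(path):
            full = os.path.join(path, name)
            if os.path.isfile(full):
                continue                   # only directories can hold repositories
            if skip_removed_repos and REMOVED_REPO_PAT.match(name):
                continue                   # repo renamed on deletion, ignore it
            if name.startswith('.'):
                continue                   # .ssh, .config and similar dot dirs
            yield name

    # scan_candidates('/srv/repos') would drop '.cache' and
    # 'rm__20121201_120000_000001__oldrepo' but keep 'project-a'
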
@@ -1,754 +1,767 b''
1 1 # -*- coding: utf-8 -*-
2 2 """
3 3 rhodecode.lib.utils
4 4 ~~~~~~~~~~~~~~~~~~~
5 5
6 6 Utilities library for RhodeCode
7 7
8 8 :created_on: Apr 18, 2010
9 9 :author: marcink
10 10 :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
11 11 :license: GPLv3, see COPYING for more details.
12 12 """
13 13 # This program is free software: you can redistribute it and/or modify
14 14 # it under the terms of the GNU General Public License as published by
15 15 # the Free Software Foundation, either version 3 of the License, or
16 16 # (at your option) any later version.
17 17 #
18 18 # This program is distributed in the hope that it will be useful,
19 19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 21 # GNU General Public License for more details.
22 22 #
23 23 # You should have received a copy of the GNU General Public License
24 24 # along with this program. If not, see <http://www.gnu.org/licenses/>.
25 25
26 26 import os
27 27 import re
28 28 import logging
29 29 import datetime
30 30 import traceback
31 31 import paste
32 32 import beaker
33 33 import tarfile
34 34 import shutil
35 35 import decorator
36 36 import warnings
37 37 from os.path import abspath
38 38 from os.path import dirname as dn, join as jn
39 39
40 40 from paste.script.command import Command, BadCommand
41 41
42 42 from mercurial import ui, config
43 43
44 44 from webhelpers.text import collapse, remove_formatting, strip_tags
45 45
46 46 from rhodecode.lib.vcs import get_backend
47 47 from rhodecode.lib.vcs.backends.base import BaseChangeset
48 48 from rhodecode.lib.vcs.utils.lazy import LazyProperty
49 49 from rhodecode.lib.vcs.utils.helpers import get_scm
50 50 from rhodecode.lib.vcs.exceptions import VCSError
51 51
52 52 from rhodecode.lib.caching_query import FromCache
53 53
54 54 from rhodecode.model import meta
55 55 from rhodecode.model.db import Repository, User, RhodeCodeUi, \
56 56 UserLog, RepoGroup, RhodeCodeSetting, CacheInvalidation
57 57 from rhodecode.model.meta import Session
58 58 from rhodecode.model.repos_group import ReposGroupModel
59 59 from rhodecode.lib.utils2 import safe_str, safe_unicode
60 60 from rhodecode.lib.vcs.utils.fakemod import create_module
61 61
62 62 log = logging.getLogger(__name__)
63 63
64 64 REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')
65 65
66 66
67 67 def recursive_replace(str_, replace=' '):
68 68 """
69 69 Recursive replace of given sign to just one instance
70 70
71 71 :param str_: given string
72 72 :param replace: char to find and replace multiple instances
73 73
74 74 Examples::
75 75 >>> recursive_replace("Mighty---Mighty-Bo--sstones",'-')
76 76 'Mighty-Mighty-Bo-sstones'
77 77 """
78 78
79 79 if str_.find(replace * 2) == -1:
80 80 return str_
81 81 else:
82 82 str_ = str_.replace(replace * 2, replace)
83 83 return recursive_replace(str_, replace)
84 84
85 85
86 86 def repo_name_slug(value):
87 87 """
88 88 Return slug of name of repository
89 89 This function is called on each creation/modification
90 90 of repository to prevent bad names in repo
91 91 """
92 92
93 93 slug = remove_formatting(value)
94 94 slug = strip_tags(slug)
95 95
96 96 for c in """`?=[]\;'"<>,/~!@#$%^&*()+{}|: """:
97 97 slug = slug.replace(c, '-')
98 98 slug = recursive_replace(slug, '-')
99 99 slug = collapse(slug, '-')
100 100 return slug
101 101
102 102
103 103 def get_repo_slug(request):
104 104 _repo = request.environ['pylons.routes_dict'].get('repo_name')
105 105 if _repo:
106 106 _repo = _repo.rstrip('/')
107 107 return _repo
108 108
109 109
110 110 def get_repos_group_slug(request):
111 111 _group = request.environ['pylons.routes_dict'].get('group_name')
112 112 if _group:
113 113 _group = _group.rstrip('/')
114 114 return _group
115 115
116 116
117 117 def action_logger(user, action, repo, ipaddr='', sa=None, commit=False):
118 118 """
119 119 Action logger for various actions made by users
120 120
121 121 :param user: user that made this action, can be a unique username string or
122 122 object containing user_id attribute
123 123 :param action: action to log, should be one of predefined unique actions for
124 124 easy translations
125 125 :param repo: string name of repository or object containing repo_id,
126 126 that action was made on
127 127 :param ipaddr: optional ip address from which the action was made
128 128 :param sa: optional sqlalchemy session
129 129
130 130 """
131 131
132 132 if not sa:
133 133 sa = meta.Session()
134 134
135 135 try:
136 136 if hasattr(user, 'user_id'):
137 137 user_obj = User.get(user.user_id)
138 138 elif isinstance(user, basestring):
139 139 user_obj = User.get_by_username(user)
140 140 else:
141 141 raise Exception('You have to provide a user object or a username')
142 142
143 143 if hasattr(repo, 'repo_id'):
144 144 repo_obj = Repository.get(repo.repo_id)
145 145 repo_name = repo_obj.repo_name
146 146 elif isinstance(repo, basestring):
147 147 repo_name = repo.lstrip('/')
148 148 repo_obj = Repository.get_by_repo_name(repo_name)
149 149 else:
150 150 repo_obj = None
151 151 repo_name = ''
152 152
153 153 user_log = UserLog()
154 154 user_log.user_id = user_obj.user_id
155 155 user_log.username = user_obj.username
156 156 user_log.action = safe_unicode(action)
157 157
158 158 user_log.repository = repo_obj
159 159 user_log.repository_name = repo_name
160 160
161 161 user_log.action_date = datetime.datetime.now()
162 162 user_log.user_ip = ipaddr
163 163 sa.add(user_log)
164 164
165 165 log.info('Logging action %s on %s by %s' %
166 166 (action, safe_unicode(repo), user_obj))
167 167 if commit:
168 168 sa.commit()
169 169 except:
170 170 log.error(traceback.format_exc())
171 171 raise
172 172
173 173
174 def get_repos(path, recursive=False):
174 def get_repos(path, recursive=False, skip_removed_repos=True):
175 175 """
176 176 Scans given path for repos and returns (name,(type,path)) tuples
177 177
178 178 :param path: path to scan for repositories
179 179 :param recursive: recursive search and return names with subdirs in front
180 180 """
181 181
182 182 # remove ending slash for better results
183 183 path = path.rstrip(os.sep)
184 log.debug('now scanning in %s location recursive:%s...' % (path, recursive))
184 185
185 186 def _get_repos(p):
186 187 if not os.access(p, os.W_OK):
187 188 return
188 189 for dirpath in os.listdir(p):
189 190 if os.path.isfile(os.path.join(p, dirpath)):
190 191 continue
191 192 cur_path = os.path.join(p, dirpath)
193
194 # skip removed repos
195 if skip_removed_repos and REMOVED_REPO_PAT.match(dirpath):
196 continue
197
198 #skip .<something> dirs
199 if dirpath.startswith('.'):
200 continue
201
192 202 try:
193 203 scm_info = get_scm(cur_path)
194 204 yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
195 205 except VCSError:
196 206 if not recursive:
197 207 continue
198 208 #check if this dir contains other repos for recursive scan
199 209 rec_path = os.path.join(p, dirpath)
200 210 if os.path.isdir(rec_path):
201 211 for inner_scm in _get_repos(rec_path):
202 212 yield inner_scm
203 213
204 214 return _get_repos(path)
205 215
216 #alias for backward compat
217 get_filesystem_repos = get_repos
218
206 219
207 220 def is_valid_repo(repo_name, base_path, scm=None):
208 221 """
209 222 Returns True if the given path is a valid repository, False otherwise.
210 223 If the scm param is given, also check whether the detected scm matches
211 224 the expected scm parameter
212 225
213 226 :param repo_name:
214 227 :param base_path:
215 228 :param scm:
216 229
217 230 :return True: if given path is a valid repository
218 231 """
219 232 full_path = os.path.join(safe_str(base_path), safe_str(repo_name))
220 233
221 234 try:
222 235 scm_ = get_scm(full_path)
223 236 if scm:
224 237 return scm_[0] == scm
225 238 return True
226 239 except VCSError:
227 240 return False
228 241
229 242
230 243 def is_valid_repos_group(repos_group_name, base_path):
231 244 """
232 245 Returns True if given path is a repos group False otherwise
233 246
234 247 :param repo_name:
235 248 :param base_path:
236 249 """
237 250 full_path = os.path.join(safe_str(base_path), safe_str(repos_group_name))
238 251
239 252 # check if it's not a repo
240 253 if is_valid_repo(repos_group_name, base_path):
241 254 return False
242 255
243 256 try:
244 257 # we need to check bare git repos at higher level
245 258 # since we might match branches/hooks/info/objects or possible
246 259 # other things inside bare git repo
247 260 get_scm(os.path.dirname(full_path))
248 261 return False
249 262 except VCSError:
250 263 pass
251 264
252 265 # check if it's a valid path
253 266 if os.path.isdir(full_path):
254 267 return True
255 268
256 269 return False
257 270
258 271
259 272 def ask_ok(prompt, retries=4, complaint='Yes or no please!'):
260 273 while True:
261 274 ok = raw_input(prompt)
262 275 if ok in ('y', 'ye', 'yes'):
263 276 return True
264 277 if ok in ('n', 'no', 'nop', 'nope'):
265 278 return False
266 279 retries = retries - 1
267 280 if retries < 0:
268 281 raise IOError
269 282 print complaint
270 283
271 284 #propagated from mercurial documentation
272 285 ui_sections = ['alias', 'auth',
273 286 'decode/encode', 'defaults',
274 287 'diff', 'email',
275 288 'extensions', 'format',
276 289 'merge-patterns', 'merge-tools',
277 290 'hooks', 'http_proxy',
278 291 'smtp', 'patch',
279 292 'paths', 'profiling',
280 293 'server', 'trusted',
281 294 'ui', 'web', ]
282 295
283 296
284 297 def make_ui(read_from='file', path=None, checkpaths=True, clear_session=True):
285 298 """
286 299 A function that will read python rc files or database
287 300 and make a mercurial ui object from the read options
288 301
289 302 :param path: path to mercurial config file
290 303 :param checkpaths: check the path
291 304 :param read_from: read from 'file' or 'db'
292 305 """
293 306
294 307 baseui = ui.ui()
295 308
296 309 # clean the baseui object
297 310 baseui._ocfg = config.config()
298 311 baseui._ucfg = config.config()
299 312 baseui._tcfg = config.config()
300 313
301 314 if read_from == 'file':
302 315 if not os.path.isfile(path):
303 316 log.debug('hgrc file is not present at %s, skipping...' % path)
304 317 return False
305 318 log.debug('reading hgrc from %s' % path)
306 319 cfg = config.config()
307 320 cfg.read(path)
308 321 for section in ui_sections:
309 322 for k, v in cfg.items(section):
310 323 log.debug('settings ui from file: [%s] %s=%s' % (section, k, v))
311 324 baseui.setconfig(safe_str(section), safe_str(k), safe_str(v))
312 325
313 326 elif read_from == 'db':
314 327 sa = meta.Session()
315 328 ret = sa.query(RhodeCodeUi)\
316 329 .options(FromCache("sql_cache_short", "get_hg_ui_settings"))\
317 330 .all()
318 331
319 332 hg_ui = ret
320 333 for ui_ in hg_ui:
321 334 if ui_.ui_active:
322 335 log.debug('settings ui from db: [%s] %s=%s', ui_.ui_section,
323 336 ui_.ui_key, ui_.ui_value)
324 337 baseui.setconfig(safe_str(ui_.ui_section), safe_str(ui_.ui_key),
325 338 safe_str(ui_.ui_value))
326 339 if ui_.ui_key == 'push_ssl':
327 340 # force set push_ssl requirement to False, rhodecode
328 341 # handles that
329 342 baseui.setconfig(safe_str(ui_.ui_section), safe_str(ui_.ui_key),
330 343 False)
331 344 if clear_session:
332 345 meta.Session.remove()
333 346 return baseui
334 347
335 348
336 349 def set_rhodecode_config(config):
337 350 """
338 351 Updates pylons config with new settings from database
339 352
340 353 :param config:
341 354 """
342 355 hgsettings = RhodeCodeSetting.get_app_settings()
343 356
344 357 for k, v in hgsettings.items():
345 358 config[k] = v
346 359
347 360
348 361 def invalidate_cache(cache_key, *args):
349 362 """
350 363 Puts cache invalidation task into db for
351 364 further global cache invalidation
352 365 """
353 366
354 367 from rhodecode.model.scm import ScmModel
355 368
356 369 if cache_key.startswith('get_repo_cached_'):
357 370 name = cache_key.split('get_repo_cached_')[-1]
358 371 ScmModel().mark_for_invalidation(name)
359 372
360 373
361 374 def map_groups(path):
362 375 """
363 376 Given a full path to a repository, create all nested groups that this
364 377 repo is inside. This function creates parent-child relationships between
365 378 groups and creates default perms for all new groups.
366 379
367 380 :param paths: full path to repository
368 381 """
369 382 sa = meta.Session()
370 383 groups = path.split(Repository.url_sep())
371 384 parent = None
372 385 group = None
373 386
374 387 # last element is repo in nested groups structure
375 388 groups = groups[:-1]
376 389 rgm = ReposGroupModel(sa)
377 390 for lvl, group_name in enumerate(groups):
378 391 group_name = '/'.join(groups[:lvl] + [group_name])
379 392 group = RepoGroup.get_by_group_name(group_name)
380 393 desc = '%s group' % group_name
381 394
382 395 # skip folders that are now removed repos
383 396 if REMOVED_REPO_PAT.match(group_name):
384 397 break
385 398
386 399 if group is None:
387 400 log.debug('creating group level: %s group_name: %s' % (lvl,
388 401 group_name))
389 402 group = RepoGroup(group_name, parent)
390 403 group.group_description = desc
391 404 sa.add(group)
392 405 rgm._create_default_perms(group)
393 406 sa.flush()
394 407 parent = group
395 408 return group
396 409
397 410
398 411 def repo2db_mapper(initial_repo_list, remove_obsolete=False,
399 412 install_git_hook=False):
400 413 """
401 414 maps all repos given in initial_repo_list, non-existing repositories
402 415 are created, if remove_obsolete is True it also checks for db entries
403 416 that are not in initial_repo_list and removes them.
404 417
405 418 :param initial_repo_list: list of repositories found by scanning methods
406 419 :param remove_obsolete: check for obsolete entries in database
407 420 :param install_git_hook: if this is True, also check and install githook
408 421 for a repo if missing
409 422 """
410 423 from rhodecode.model.repo import RepoModel
411 424 from rhodecode.model.scm import ScmModel
412 425 sa = meta.Session()
413 426 rm = RepoModel()
414 427 user = sa.query(User).filter(User.admin == True).first()
415 428 if user is None:
416 429 raise Exception('Missing administrative account!')
417 430 added = []
418 431
419 432 # # clear cache keys
420 433 # log.debug("Clearing cache keys now...")
421 434 # CacheInvalidation.clear_cache()
422 435 # sa.commit()
423 436
424 437 ##creation defaults
425 438 defs = RhodeCodeSetting.get_default_repo_settings(strip_prefix=True)
426 439 enable_statistics = defs.get('repo_enable_statistics')
427 440 enable_locking = defs.get('repo_enable_locking')
428 441 enable_downloads = defs.get('repo_enable_downloads')
429 442 private = defs.get('repo_private')
430 443
431 444 for name, repo in initial_repo_list.items():
432 445 group = map_groups(name)
433 446 db_repo = rm.get_by_repo_name(name)
434 447 # found repo that is on filesystem not in RhodeCode database
435 448 if not db_repo:
436 449 log.info('repository %s not found, creating now' % name)
437 450 added.append(name)
438 451 desc = (repo.description
439 452 if repo.description != 'unknown'
440 453 else '%s repository' % name)
441 454
442 455 new_repo = rm.create_repo(
443 456 repo_name=name,
444 457 repo_type=repo.alias,
445 458 description=desc,
446 459 repos_group=getattr(group, 'group_id', None),
447 460 owner=user,
448 461 just_db=True,
449 462 enable_locking=enable_locking,
450 463 enable_downloads=enable_downloads,
451 464 enable_statistics=enable_statistics,
452 465 private=private
453 466 )
454 467 # we added that repo just now, and make sure it has githook
455 468 # installed
456 469 if new_repo.repo_type == 'git':
457 470 ScmModel().install_git_hook(new_repo.scm_instance)
458 471 new_repo.update_changeset_cache()
459 472 elif install_git_hook:
460 473 if db_repo.repo_type == 'git':
461 474 ScmModel().install_git_hook(db_repo.scm_instance)
462 475 # during starting install all cache keys for all repositories in the
463 476 # system, this will register all repos and multiple instances
464 477 key, _prefix, _org_key = CacheInvalidation._get_key(name)
465 478 CacheInvalidation.invalidate(name)
466 479 log.debug("Creating a cache key for %s, instance_id %s"
467 480 % (name, _prefix or 'unknown'))
468 481
469 482 sa.commit()
470 483 removed = []
471 484 if remove_obsolete:
472 485 # remove from database those repositories that are not in the filesystem
473 486 for repo in sa.query(Repository).all():
474 487 if repo.repo_name not in initial_repo_list.keys():
475 488 log.debug("Removing non-existing repository found in db `%s`" %
476 489 repo.repo_name)
477 490 try:
478 491 sa.delete(repo)
479 492 sa.commit()
480 493 removed.append(repo.repo_name)
481 494 except:
482 495 #don't hold further removals on error
483 496 log.error(traceback.format_exc())
484 497 sa.rollback()
485 498
486 499 return added, removed
487 500
488 501
489 502 # set cache regions for beaker so celery can utilise it
490 503 def add_cache(settings):
491 504 cache_settings = {'regions': None}
492 505 for key in settings.keys():
493 506 for prefix in ['beaker.cache.', 'cache.']:
494 507 if key.startswith(prefix):
495 508 name = key.split(prefix)[1].strip()
496 509 cache_settings[name] = settings[key].strip()
497 510 if cache_settings['regions']:
498 511 for region in cache_settings['regions'].split(','):
499 512 region = region.strip()
500 513 region_settings = {}
501 514 for key, value in cache_settings.items():
502 515 if key.startswith(region):
503 516 region_settings[key.split('.')[1]] = value
504 517 region_settings['expire'] = int(region_settings.get('expire',
505 518 60))
506 519 region_settings.setdefault('lock_dir',
507 520 cache_settings.get('lock_dir'))
508 521 region_settings.setdefault('data_dir',
509 522 cache_settings.get('data_dir'))
510 523
511 524 if 'type' not in region_settings:
512 525 region_settings['type'] = cache_settings.get('type',
513 526 'memory')
514 527 beaker.cache.cache_regions[region] = region_settings
515 528
516 529
517 530 def load_rcextensions(root_path):
518 531 import rhodecode
519 532 from rhodecode.config import conf
520 533
521 534 path = os.path.join(root_path, 'rcextensions', '__init__.py')
522 535 if os.path.isfile(path):
523 536 rcext = create_module('rc', path)
524 537 EXT = rhodecode.EXTENSIONS = rcext
525 538 log.debug('Found rcextensions now loading %s...' % rcext)
526 539
527 540 # Additional mappings that are not present in the pygments lexers
528 541 conf.LANGUAGES_EXTENSIONS_MAP.update(getattr(EXT, 'EXTRA_MAPPINGS', {}))
529 542
530 543 #OVERRIDE OUR EXTENSIONS FROM RC-EXTENSIONS (if present)
531 544
532 545 if getattr(EXT, 'INDEX_EXTENSIONS', []) != []:
533 546 log.debug('settings custom INDEX_EXTENSIONS')
534 547 conf.INDEX_EXTENSIONS = getattr(EXT, 'INDEX_EXTENSIONS', [])
535 548
536 549 #ADDITIONAL MAPPINGS
537 550 log.debug('adding extra into INDEX_EXTENSIONS')
538 551 conf.INDEX_EXTENSIONS.extend(getattr(EXT, 'EXTRA_INDEX_EXTENSIONS', []))
539 552
540 553
541 554 #==============================================================================
542 555 # TEST FUNCTIONS AND CREATORS
543 556 #==============================================================================
544 557 def create_test_index(repo_location, config, full_index):
545 558 """
546 559 Makes default test index
547 560
548 561 :param config: test config
549 562 :param full_index:
550 563 """
551 564
552 565 from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon
553 566 from rhodecode.lib.pidlock import DaemonLock, LockHeld
554 567
555 568 repo_location = repo_location
556 569
557 570 index_location = os.path.join(config['app_conf']['index_dir'])
558 571 if not os.path.exists(index_location):
559 572 os.makedirs(index_location)
560 573
561 574 try:
562 575 l = DaemonLock(file_=jn(dn(index_location), 'make_index.lock'))
563 576 WhooshIndexingDaemon(index_location=index_location,
564 577 repo_location=repo_location)\
565 578 .run(full_index=full_index)
566 579 l.release()
567 580 except LockHeld:
568 581 pass
569 582
570 583
571 584 def create_test_env(repos_test_path, config):
572 585 """
573 586 Makes a fresh database and
574 587 install test repository into tmp dir
575 588 """
576 589 from rhodecode.lib.db_manage import DbManage
577 590 from rhodecode.tests import HG_REPO, GIT_REPO, TESTS_TMP_PATH
578 591
579 592 # PART ONE create db
580 593 dbconf = config['sqlalchemy.db1.url']
581 594 log.debug('making test db %s' % dbconf)
582 595
583 596 # create test dir if it doesn't exist
584 597 if not os.path.isdir(repos_test_path):
585 598 log.debug('Creating testdir %s' % repos_test_path)
586 599 os.makedirs(repos_test_path)
587 600
588 601 dbmanage = DbManage(log_sql=True, dbconf=dbconf, root=config['here'],
589 602 tests=True)
590 603 dbmanage.create_tables(override=True)
591 604 dbmanage.create_settings(dbmanage.config_prompt(repos_test_path))
592 605 dbmanage.create_default_user()
593 606 dbmanage.admin_prompt()
594 607 dbmanage.create_permissions()
595 608 dbmanage.populate_default_permissions()
596 609 Session().commit()
597 610 # PART TWO make test repo
598 611 log.debug('making test vcs repositories')
599 612
600 613 idx_path = config['app_conf']['index_dir']
601 614 data_path = config['app_conf']['cache_dir']
602 615
603 616 #clean index and data
604 617 if idx_path and os.path.exists(idx_path):
605 618 log.debug('remove %s' % idx_path)
606 619 shutil.rmtree(idx_path)
607 620
608 621 if data_path and os.path.exists(data_path):
609 622 log.debug('remove %s' % data_path)
610 623 shutil.rmtree(data_path)
611 624
612 625 #CREATE DEFAULT TEST REPOS
613 626 cur_dir = dn(dn(abspath(__file__)))
614 627 tar = tarfile.open(jn(cur_dir, 'tests', "vcs_test_hg.tar.gz"))
615 628 tar.extractall(jn(TESTS_TMP_PATH, HG_REPO))
616 629 tar.close()
617 630
618 631 cur_dir = dn(dn(abspath(__file__)))
619 632 tar = tarfile.open(jn(cur_dir, 'tests', "vcs_test_git.tar.gz"))
620 633 tar.extractall(jn(TESTS_TMP_PATH, GIT_REPO))
621 634 tar.close()
622 635
623 636 #LOAD VCS test stuff
624 637 from rhodecode.tests.vcs import setup_package
625 638 setup_package()
626 639
627 640
628 641 #==============================================================================
629 642 # PASTER COMMANDS
630 643 #==============================================================================
631 644 class BasePasterCommand(Command):
632 645 """
633 646 Abstract Base Class for paster commands.
634 647
635 648 The celery commands are somewhat aggressive about loading
636 649 celery.conf, and since our module sets the `CELERY_LOADER`
637 650 environment variable to our loader, we have to bootstrap a bit and
638 651 make sure we've had a chance to load the pylons config off of the
639 652 command line, otherwise everything fails.
640 653 """
641 654 min_args = 1
642 655 min_args_error = "Please provide a paster config file as an argument."
643 656 takes_config_file = 1
644 657 requires_config_file = True
645 658
646 659 def notify_msg(self, msg, log=False):
647 660 """Make a notification to user, additionally if logger is passed
648 661 it logs this action using given logger
649 662
650 663 :param msg: message that will be printed to user
651 664 :param log: logging instance, to use to additionally log this message
652 665
653 666 """
654 667 if log and isinstance(log, logging):
655 668 log(msg)
656 669
657 670 def run(self, args):
658 671 """
659 672 Overrides Command.run
660 673
661 674 Checks for a config file argument and loads it.
662 675 """
663 676 if len(args) < self.min_args:
664 677 raise BadCommand(
665 678 self.min_args_error % {'min_args': self.min_args,
666 679 'actual_args': len(args)})
667 680
668 681 # Decrement because we're going to lob off the first argument.
669 682 # @@ This is hacky
670 683 self.min_args -= 1
671 684 self.bootstrap_config(args[0])
672 685 self.update_parser()
673 686 return super(BasePasterCommand, self).run(args[1:])
674 687
675 688 def update_parser(self):
676 689 """
677 690 Abstract method. Allows for the class's parser to be updated
678 691 before the superclass's `run` method is called. Necessary to
679 692 allow options/arguments to be passed through to the underlying
680 693 celery command.
681 694 """
682 695 raise NotImplementedError("Abstract Method.")
683 696
684 697 def bootstrap_config(self, conf):
685 698 """
686 699 Loads the pylons configuration.
687 700 """
688 701 from pylons import config as pylonsconfig
689 702
690 703 self.path_to_ini_file = os.path.realpath(conf)
691 704 conf = paste.deploy.appconfig('config:' + self.path_to_ini_file)
692 705 pylonsconfig.init_app(conf.global_conf, conf.local_conf)
693 706
694 707
695 708 def check_git_version():
696 709 """
697 710 Checks what version of git is installed in system, and issues a warning
698 711 if it's too old for RhodeCode to properly work.
699 712 """
700 713 import subprocess
701 714 from distutils.version import StrictVersion
702 715 from rhodecode import BACKENDS
703 716
704 717 p = subprocess.Popen('git --version', shell=True,
705 718 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
706 719 stdout, stderr = p.communicate()
707 720 ver = (stdout.split(' ')[-1] or '').strip() or '0.0.0'
708 721 if len(ver.split('.')) > 3:
709 722 #StrictVersion needs to be only 3 element type
710 723 ver = '.'.join(ver.split('.')[:3])
711 724 try:
712 725 _ver = StrictVersion(ver)
713 726 except:
714 727 _ver = StrictVersion('0.0.0')
715 728 stderr = traceback.format_exc()
716 729
717 730 req_ver = '1.7.4'
718 731 to_old_git = False
719 732 if _ver < StrictVersion(req_ver):
720 733 to_old_git = True
721 734
722 735 if 'git' in BACKENDS:
723 736 log.debug('GIT version detected: %s' % stdout)
724 737 if stderr:
725 738 log.warning('Unable to detect git version, original error was: %r' % stderr)
726 739 elif to_old_git:
727 740 log.warning('RhodeCode detected git version %s, which is too old '
728 741 'for the system to function properly. Make sure '
729 742 'its version is at least %s' % (ver, req_ver))
730 743 return _ver
731 744
732 745
733 746 @decorator.decorator
734 747 def jsonify(func, *args, **kwargs):
735 748 """Action decorator that formats output for JSON
736 749
737 750 Given a function that will return content, this decorator will turn
738 751 the result into JSON, with a content-type of 'application/json' and
739 752 output it.
740 753
741 754 """
742 755 from pylons.decorators.util import get_pylons
743 756 from rhodecode.lib.ext_json import json
744 757 pylons = get_pylons(args)
745 758 pylons.response.headers['Content-Type'] = 'application/json; charset=utf-8'
746 759 data = func(*args, **kwargs)
747 760 if isinstance(data, (list, tuple)):
748 761 msg = "JSON responses with Array envelopes are susceptible to " \
749 762 "cross-site data leak attacks, see " \
750 763 "http://wiki.pylonshq.com/display/pylonsfaq/Warnings"
751 764 warnings.warn(msg, Warning, 2)
752 765 log.warning(msg)
753 766 log.debug("Returning JSON wrapped action output")
754 767 return json.dumps(data, encoding='utf-8')
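
For context on the rm__ names skipped above: the REMOVED_REPO_PAT regex accepts a rm__ prefix followed by what looks like a date, a time and a microsecond field, then the original name. A hedged sketch of producing and recognizing such a name (removed_repo_name is a hypothetical helper written to satisfy the regex; it is not taken from this changeset):

    import re
    from datetime import datetime

    REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')

    def removed_repo_name(repo_name, now=None):
        # assumed layout: rm__<YYYYMMDD>_<HHMMSS>_<microseconds>__<name>,
        # i.e. exactly what the pattern above matches
        now = now or datetime.now()
        return 'rm__%s__%s' % (now.strftime('%Y%m%d_%H%M%S_%f'), repo_name)

    name = removed_repo_name('oldrepo')
    assert REMOVED_REPO_PAT.match(name)        # the scanner now skips this entry
    assert not REMOVED_REPO_PAT.match('oldrepo')
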
@@ -1,240 +1,241 b''
1 1 """
2 2 Utilities aimed to help achieve mostly basic tasks.
3 3 """
4 4 from __future__ import division
5 5
6 6 import re
7 7 import time
8 8 import datetime
9 9 import os.path
10 10 from subprocess import Popen, PIPE
11 11 from rhodecode.lib.vcs.exceptions import VCSError
12 12 from rhodecode.lib.vcs.exceptions import RepositoryError
13 13 from rhodecode.lib.vcs.utils.paths import abspath
14 14
15 15 ALIASES = ['hg', 'git']
16 16
17 17
18 18 def get_scm(path, search_up=False, explicit_alias=None):
19 19 """
20 20 Returns one of alias from ``ALIASES`` (in order of precedence same as
21 21 shortcuts given in ``ALIASES``) and top working dir path for the given
22 22 argument. If no scm-specific directory is found or more than one scm is
23 23 found at that directory, ``VCSError`` is raised.
24 24
25 25 :param search_up: if set to ``True``, this function would try to
26 26 move up to parent directory every time no scm is recognized for the
27 27 currently checked path. Default: ``False``.
28 28 :param explicit_alias: can be one of available backend aliases, when given
29 29 it will return given explicit alias in repositories under more than one
30 30 version control, if explicit_alias is different than found it will raise
31 31 VCSError
32 32 """
33 33 if not os.path.isdir(path):
34 34 raise VCSError("Given path %s is not a directory" % path)
35 35
36 36 def get_scms(path):
37 37 return [(scm, path) for scm in get_scms_for_path(path)]
38 38
39 39 found_scms = get_scms(path)
40 40 while not found_scms and search_up:
41 41 newpath = abspath(path, '..')
42 42 if newpath == path:
43 43 break
44 44 path = newpath
45 45 found_scms = get_scms(path)
46 46
47 47 if len(found_scms) > 1:
48 48 for scm in found_scms:
49 49 if scm[0] == explicit_alias:
50 50 return scm
51 51 raise VCSError('More than one [%s] scm found at given path %s'
52 52 % (','.join((x[0] for x in found_scms)), path))
53 53
54 54 if len(found_scms) is 0:
55 55 raise VCSError('No scm found at given path %s' % path)
56 56
57 57 return found_scms[0]
58 58
59 59
60 60 def get_scms_for_path(path):
61 61 """
62 62 Returns all scms found at the given path. If no scm is recognized,
63 63 an empty list is returned.
64 64
65 65 :param path: path to directory which should be checked. May be callable.
66 66
67 67 :raises VCSError: if given ``path`` is not a directory
68 68 """
69 69 from rhodecode.lib.vcs.backends import get_backend
70 70 if hasattr(path, '__call__'):
71 71 path = path()
72 72 if not os.path.isdir(path):
73 73 raise VCSError("Given path %r is not a directory" % path)
74 74
75 75 result = []
76 76 for key in ALIASES:
77 77 dirname = os.path.join(path, '.' + key)
78 78 if os.path.isdir(dirname):
79 79 result.append(key)
80 80 continue
81 81 dirname = os.path.join(path, 'rm__.' + key)
82 82 if os.path.isdir(dirname):
83 return [None]
83 return result
84 84 # We still need to check if it's not bare repository as
85 85 # bare repos don't have working directories
86 86 try:
87 87 get_backend(key)(path)
88 88 result.append(key)
89 89 continue
90 90 except RepositoryError:
91 91 # Wrong backend
92 92 pass
93 93 except VCSError:
94 94 # No backend at all
95 95 pass
96 96 return result
97 97
98 98
99 99 def run_command(cmd, *args):
100 100 """
101 101 Runs command on the system with given ``args``.
102 102 """
103 103 command = ' '.join((cmd,) + args)
104 104 p = Popen(command, shell=True, stdout=PIPE, stderr=PIPE)
105 105 stdout, stderr = p.communicate()
106 106 return p.returncode, stdout, stderr
107 107
108 108
109 109 def get_highlighted_code(name, code, type='terminal'):
110 110 """
111 111 If pygments are available on the system
112 112 then returned output is colored. Otherwise
113 113 unchanged content is returned.
114 114 """
115 115 import logging
116 116 try:
117 117 import pygments
118 118 pygments
119 119 except ImportError:
120 120 return code
121 121 from pygments import highlight
122 122 from pygments.lexers import guess_lexer_for_filename, ClassNotFound
123 123 from pygments.formatters import TerminalFormatter
124 124
125 125 try:
126 126 lexer = guess_lexer_for_filename(name, code)
127 127 formatter = TerminalFormatter()
128 128 content = highlight(code, lexer, formatter)
129 129 except ClassNotFound:
130 130 logging.debug("Couldn't guess Lexer, will not use pygments.")
131 131 content = code
132 132 return content
133 133
134
134 135 def parse_changesets(text):
135 136 """
136 137 Returns dictionary with *start*, *main* and *end* ids.
137 138
138 139 Examples::
139 140
140 141 >>> parse_changesets('aaabbb')
141 142 {'start': None, 'main': 'aaabbb', 'end': None}
142 143 >>> parse_changesets('aaabbb..cccddd')
143 144 {'start': 'aaabbb', 'main': None, 'end': 'cccddd'}
144 145
145 146 """
146 147 text = text.strip()
147 148 CID_RE = r'[a-zA-Z0-9]+'
148 149 if not '..' in text:
149 150 m = re.match(r'^(?P<cid>%s)$' % CID_RE, text)
150 151 if m:
151 152 return {
152 153 'start': None,
153 154 'main': text,
154 155 'end': None,
155 156 }
156 157 else:
157 158 RE = r'^(?P<start>%s)?\.{2,3}(?P<end>%s)?$' % (CID_RE, CID_RE)
158 159 m = re.match(RE, text)
159 160 if m:
160 161 result = m.groupdict()
161 162 result['main'] = None
162 163 return result
163 164 raise ValueError("IDs not recognized")
164 165
165 166 def parse_datetime(text):
166 167 """
167 168 Parses given text and returns ``datetime.datetime`` instance or raises
168 169 ``ValueError``.
169 170
170 171 :param text: string of desired date/datetime or something more verbose,
171 172 like *yesterday*, *2weeks 3days*, etc.
172 173 """
173 174
174 175 text = text.strip().lower()
175 176
176 177 INPUT_FORMATS = (
177 178 '%Y-%m-%d %H:%M:%S',
178 179 '%Y-%m-%d %H:%M',
179 180 '%Y-%m-%d',
180 181 '%m/%d/%Y %H:%M:%S',
181 182 '%m/%d/%Y %H:%M',
182 183 '%m/%d/%Y',
183 184 '%m/%d/%y %H:%M:%S',
184 185 '%m/%d/%y %H:%M',
185 186 '%m/%d/%y',
186 187 )
187 188 for format in INPUT_FORMATS:
188 189 try:
189 190 return datetime.datetime(*time.strptime(text, format)[:6])
190 191 except ValueError:
191 192 pass
192 193
193 194 # Try descriptive texts
194 195 if text == 'tomorrow':
195 196 future = datetime.datetime.now() + datetime.timedelta(days=1)
196 197 args = future.timetuple()[:3] + (23, 59, 59)
197 198 return datetime.datetime(*args)
198 199 elif text == 'today':
199 200 return datetime.datetime(*datetime.datetime.today().timetuple()[:3])
200 201 elif text == 'now':
201 202 return datetime.datetime.now()
202 203 elif text == 'yesterday':
203 204 past = datetime.datetime.now() - datetime.timedelta(days=1)
204 205 return datetime.datetime(*past.timetuple()[:3])
205 206 else:
206 207 days = 0
207 208 matched = re.match(
208 209 r'^((?P<weeks>\d+) ?w(eeks?)?)? ?((?P<days>\d+) ?d(ays?)?)?$', text)
209 210 if matched:
210 211 groupdict = matched.groupdict()
211 212 if groupdict['days']:
212 213 days += int(matched.groupdict()['days'])
213 214 if groupdict['weeks']:
214 215 days += int(matched.groupdict()['weeks']) * 7
215 216 past = datetime.datetime.now() - datetime.timedelta(days=days)
216 217 return datetime.datetime(*past.timetuple()[:3])
217 218
218 219 raise ValueError('Wrong date: "%s"' % text)
219 220
220 221
221 222 def get_dict_for_attrs(obj, attrs):
222 223 """
223 224 Returns dictionary for each attribute from given ``obj``.
224 225 """
225 226 data = {}
226 227 for attr in attrs:
227 228 data[attr] = getattr(obj, attr)
228 229 return data
229 230
230 231
231 232 def get_total_seconds(timedelta):
232 233 """
233 234 Backported for Python 2.5.
234 235
235 236 See http://docs.python.org/library/datetime.html.
236 237 """
237 238 return ((timedelta.microseconds + (
238 239 timedelta.seconds +
239 240 timedelta.days * 24 * 60 * 60
240 241 ) * 10**6) / 10**6)
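
The one-line fix in get_scms_for_path above returns the aliases collected so far instead of [None] when a renamed control directory (rm__.hg or rm__.git) is found, so get_scm and its callers never see None inside the result list. A condensed sketch of just the control-directory checks, with the fallback probing of bare repositories through the backends left out:

    import os

    ALIASES = ['hg', 'git']

    def detect_scms(path):
        """Return scm aliases detected under `path` from their control dirs."""
        found = []
        for key in ALIASES:
            if os.path.isdir(os.path.join(path, '.' + key)):
                # ordinary working-dir repository
                found.append(key)
            elif os.path.isdir(os.path.join(path, 'rm__.' + key)):
                # control dir was renamed on removal: stop and report only
                # what has been found so far (used to be the bogus [None])
                return found
        return found
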
@@ -1,635 +1,631 b''
1 1 # -*- coding: utf-8 -*-
2 2 """
3 3 rhodecode.model.scm
4 4 ~~~~~~~~~~~~~~~~~~~
5 5
6 6 Scm model for RhodeCode
7 7
8 8 :created_on: Apr 9, 2010
9 9 :author: marcink
10 10 :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
11 11 :license: GPLv3, see COPYING for more details.
12 12 """
13 13 # This program is free software: you can redistribute it and/or modify
14 14 # it under the terms of the GNU General Public License as published by
15 15 # the Free Software Foundation, either version 3 of the License, or
16 16 # (at your option) any later version.
17 17 #
18 18 # This program is distributed in the hope that it will be useful,
19 19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 21 # GNU General Public License for more details.
22 22 #
23 23 # You should have received a copy of the GNU General Public License
24 24 # along with this program. If not, see <http://www.gnu.org/licenses/>.
25 25 from __future__ import with_statement
26 26 import os
27 27 import re
28 28 import time
29 29 import traceback
30 30 import logging
31 31 import cStringIO
32 32 import pkg_resources
33 33 from os.path import dirname as dn, join as jn
34 34
35 35 from sqlalchemy import func
36 36 from pylons.i18n.translation import _
37 37
38 38 import rhodecode
39 39 from rhodecode.lib.vcs import get_backend
40 40 from rhodecode.lib.vcs.exceptions import RepositoryError
41 41 from rhodecode.lib.vcs.utils.lazy import LazyProperty
42 42 from rhodecode.lib.vcs.nodes import FileNode
43 43 from rhodecode.lib.vcs.backends.base import EmptyChangeset
44 44
45 45 from rhodecode import BACKENDS
46 46 from rhodecode.lib import helpers as h
47 47 from rhodecode.lib.utils2 import safe_str, safe_unicode
48 48 from rhodecode.lib.auth import HasRepoPermissionAny, HasReposGroupPermissionAny
49 from rhodecode.lib.utils import get_repos as get_filesystem_repos, make_ui, \
49 from rhodecode.lib.utils import get_filesystem_repos, make_ui, \
50 50 action_logger, REMOVED_REPO_PAT
51 51 from rhodecode.model import BaseModel
52 52 from rhodecode.model.db import Repository, RhodeCodeUi, CacheInvalidation, \
53 53 UserFollowing, UserLog, User, RepoGroup, PullRequest
54 54
55 55 log = logging.getLogger(__name__)
56 56
57 57
58 58 class UserTemp(object):
59 59 def __init__(self, user_id):
60 60 self.user_id = user_id
61 61
62 62 def __repr__(self):
63 63 return "<%s('id:%s')>" % (self.__class__.__name__, self.user_id)
64 64
65 65
66 66 class RepoTemp(object):
67 67 def __init__(self, repo_id):
68 68 self.repo_id = repo_id
69 69
70 70 def __repr__(self):
71 71 return "<%s('id:%s')>" % (self.__class__.__name__, self.repo_id)
72 72
73 73
74 74 class CachedRepoList(object):
75 75 """
76 76 Cached repo list, uses in-memory cache after initialization, that is
77 77 super fast
78 78 """
79 79
80 80 def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
81 81 self.db_repo_list = db_repo_list
82 82 self.repos_path = repos_path
83 83 self.order_by = order_by
84 84 self.reversed = (order_by or '').startswith('-')
85 85 if not perm_set:
86 86 perm_set = ['repository.read', 'repository.write',
87 87 'repository.admin']
88 88 self.perm_set = perm_set
89 89
90 90 def __len__(self):
91 91 return len(self.db_repo_list)
92 92
93 93 def __repr__(self):
94 94 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
95 95
96 96 def __iter__(self):
97 97 # pre-populated cache_map to save executing select statements
98 98 # for each repo
99 99 cache_map = CacheInvalidation.get_cache_map()
100 100
101 101 for dbr in self.db_repo_list:
102 102 scmr = dbr.scm_instance_cached(cache_map)
103 103 # check permission at this level
104 104 if not HasRepoPermissionAny(
105 105 *self.perm_set
106 106 )(dbr.repo_name, 'get repo check'):
107 107 continue
108 108
109 109 if scmr is None:
110 110 log.error(
111 111 '%s this repository is present in database but it '
112 112 'cannot be created as an scm instance' % dbr.repo_name
113 113 )
114 114 continue
115 115
116 116 last_change = scmr.last_change
117 117 tip = h.get_changeset_safe(scmr, 'tip')
118 118
119 119 tmp_d = {}
120 120 tmp_d['name'] = dbr.repo_name
121 121 tmp_d['name_sort'] = tmp_d['name'].lower()
122 122 tmp_d['raw_name'] = tmp_d['name'].lower()
123 123 tmp_d['description'] = dbr.description
124 124 tmp_d['description_sort'] = tmp_d['description'].lower()
125 125 tmp_d['last_change'] = last_change
126 126 tmp_d['last_change_sort'] = time.mktime(last_change.timetuple())
127 127 tmp_d['tip'] = tip.raw_id
128 128 tmp_d['tip_sort'] = tip.revision
129 129 tmp_d['rev'] = tip.revision
130 130 tmp_d['contact'] = dbr.user.full_contact
131 131 tmp_d['contact_sort'] = tmp_d['contact']
132 132 tmp_d['owner_sort'] = tmp_d['contact']
133 133 tmp_d['repo_archives'] = list(scmr._get_archives())
134 134 tmp_d['last_msg'] = tip.message
135 135 tmp_d['author'] = tip.author
136 136 tmp_d['dbrepo'] = dbr.get_dict()
137 137 tmp_d['dbrepo_fork'] = dbr.fork.get_dict() if dbr.fork else {}
138 138 yield tmp_d
139 139
140 140
141 141 class SimpleCachedRepoList(CachedRepoList):
142 142 """
143 143 Lighter version of CachedRepoList without the scm initialisation
144 144 """
145 145
146 146 def __iter__(self):
147 147 for dbr in self.db_repo_list:
148 148 # check permission at this level
149 149 if not HasRepoPermissionAny(
150 150 *self.perm_set
151 151 )(dbr.repo_name, 'get repo check'):
152 152 continue
153 153
154 154 tmp_d = {}
155 155 tmp_d['name'] = dbr.repo_name
156 156 tmp_d['name_sort'] = tmp_d['name'].lower()
157 157 tmp_d['raw_name'] = tmp_d['name'].lower()
158 158 tmp_d['description'] = dbr.description
159 159 tmp_d['description_sort'] = tmp_d['description'].lower()
160 160 tmp_d['dbrepo'] = dbr.get_dict()
161 161 tmp_d['dbrepo_fork'] = dbr.fork.get_dict() if dbr.fork else {}
162 162 yield tmp_d
163 163
164 164
165 165 class GroupList(object):
166 166
167 167 def __init__(self, db_repo_group_list, perm_set=None):
168 168 """
169 169 Creates iterator from given list of group objects, additionally
170 170 checking permission for them from perm_set var
171 171
172 172 :param db_repo_group_list:
173 173 :param perm_set: list of permissions to check
174 174 """
175 175 self.db_repo_group_list = db_repo_group_list
176 176 if not perm_set:
177 177 perm_set = ['group.read', 'group.write', 'group.admin']
178 178 self.perm_set = perm_set
179 179
180 180 def __len__(self):
181 181 return len(self.db_repo_group_list)
182 182
183 183 def __repr__(self):
184 184 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
185 185
186 186 def __iter__(self):
187 187 for dbgr in self.db_repo_group_list:
188 188 # check permission at this level
189 189 if not HasReposGroupPermissionAny(
190 190 *self.perm_set
191 191 )(dbgr.group_name, 'get group repo check'):
192 192 continue
193 193
194 194 yield dbgr
195 195
196 196
197 197 class ScmModel(BaseModel):
198 198 """
199 199 Generic Scm Model
200 200 """
201 201
202 202 def __get_repo(self, instance):
203 203 cls = Repository
204 204 if isinstance(instance, cls):
205 205 return instance
206 206 elif isinstance(instance, int) or safe_str(instance).isdigit():
207 207 return cls.get(instance)
208 208 elif isinstance(instance, basestring):
209 209 return cls.get_by_repo_name(instance)
210 210 elif instance:
211 211 raise Exception('given object must be int, basestr or Instance'
212 212 ' of %s got %s' % (type(cls), type(instance)))
213 213
214 214 @LazyProperty
215 215 def repos_path(self):
216 216 """
217 217 Gets the repositories root path from the database
218 218 """
219 219
220 220 q = self.sa.query(RhodeCodeUi).filter(RhodeCodeUi.ui_key == '/').one()
221 221
222 222 return q.ui_value
223 223
224 224 def repo_scan(self, repos_path=None):
225 225 """
226 226 Listing of repositories in given path. This path should not be a
227 227 repository itself. Return a dictionary of repository objects
228 228
229 229 :param repos_path: path to directory containing repositories
230 230 """
231 231
232 232 if repos_path is None:
233 233 repos_path = self.repos_path
234 234
235 235 log.info('scanning for repositories in %s' % repos_path)
236 236
237 237 baseui = make_ui('db')
238 238 repos = {}
239 239
240 240 for name, path in get_filesystem_repos(repos_path, recursive=True):
241 # skip removed repos
242 if REMOVED_REPO_PAT.match(name) or path[0] is None:
243 continue
244
245 241 # name needs to be decomposed and put back together using the /
246 242 # since this is internal storage separator for rhodecode
247 243 name = Repository.normalize_repo_name(name)
248 244
249 245 try:
250 246 if name in repos:
251 247 raise RepositoryError('Duplicate repository name %s '
252 248 'found in %s' % (name, path))
253 249 else:
254 250
255 251 klass = get_backend(path[0])
256 252
257 253 if path[0] == 'hg' and path[0] in BACKENDS.keys():
258 254 repos[name] = klass(safe_str(path[1]), baseui=baseui)
259 255
260 256 if path[0] == 'git' and path[0] in BACKENDS.keys():
261 257 repos[name] = klass(path[1])
262 258 except OSError:
263 259 continue
264
260 log.debug('found %s paths with repositories' % (len(repos)))
265 261 return repos
266 262
267 263 def get_repos(self, all_repos=None, sort_key=None, simple=False):
268 264 """
269 265 Get all repos from db and for each repo create it's
270 266 backend instance and fill that backed with information from database
271 267
272 268 :param all_repos: list of repository names as strings
273 269 give specific repositories list, good for filtering
274 270
275 271 :param sort_key: initial sorting of repos
276 272 :param simple: use SimpleCachedList - one without the SCM info
277 273 """
278 274 if all_repos is None:
279 275 all_repos = self.sa.query(Repository)\
280 276 .filter(Repository.group_id == None)\
281 277 .order_by(func.lower(Repository.repo_name)).all()
282 278 if simple:
283 279 repo_iter = SimpleCachedRepoList(all_repos,
284 280 repos_path=self.repos_path,
285 281 order_by=sort_key)
286 282 else:
287 283 repo_iter = CachedRepoList(all_repos,
288 284 repos_path=self.repos_path,
289 285 order_by=sort_key)
290 286
291 287 return repo_iter
292 288
293 289 def get_repos_groups(self, all_groups=None):
294 290 if all_groups is None:
295 291 all_groups = RepoGroup.query()\
296 292 .filter(RepoGroup.group_parent_id == None).all()
297 293 group_iter = GroupList(all_groups)
298 294
299 295 return group_iter
300 296
301 297 def mark_for_invalidation(self, repo_name):
302 298 """
303 299 Puts cache invalidation task into db for
304 300 further global cache invalidation
305 301
306 302 :param repo_name: this repo that should invalidation take place
307 303 """
308 304 CacheInvalidation.set_invalidate(repo_name=repo_name)
309 305 repo = Repository.get_by_repo_name(repo_name)
310 306 if repo:
311 307 repo.update_changeset_cache()
312 308
313 309 def toggle_following_repo(self, follow_repo_id, user_id):
314 310
315 311 f = self.sa.query(UserFollowing)\
316 312 .filter(UserFollowing.follows_repo_id == follow_repo_id)\
317 313 .filter(UserFollowing.user_id == user_id).scalar()
318 314
319 315 if f is not None:
320 316 try:
321 317 self.sa.delete(f)
322 318 action_logger(UserTemp(user_id),
323 319 'stopped_following_repo',
324 320 RepoTemp(follow_repo_id))
325 321 return
326 322 except:
327 323 log.error(traceback.format_exc())
328 324 raise
329 325
330 326 try:
331 327 f = UserFollowing()
332 328 f.user_id = user_id
333 329 f.follows_repo_id = follow_repo_id
334 330 self.sa.add(f)
335 331
336 332 action_logger(UserTemp(user_id),
337 333 'started_following_repo',
338 334 RepoTemp(follow_repo_id))
339 335 except:
340 336 log.error(traceback.format_exc())
341 337 raise
342 338
343 339 def toggle_following_user(self, follow_user_id, user_id):
344 340 f = self.sa.query(UserFollowing)\
345 341 .filter(UserFollowing.follows_user_id == follow_user_id)\
346 342 .filter(UserFollowing.user_id == user_id).scalar()
347 343
348 344 if f is not None:
349 345 try:
350 346 self.sa.delete(f)
351 347 return
352 348 except:
353 349 log.error(traceback.format_exc())
354 350 raise
355 351
356 352 try:
357 353 f = UserFollowing()
358 354 f.user_id = user_id
359 355 f.follows_user_id = follow_user_id
360 356 self.sa.add(f)
361 357 except:
362 358 log.error(traceback.format_exc())
363 359 raise
364 360
365 361 def is_following_repo(self, repo_name, user_id, cache=False):
366 362 r = self.sa.query(Repository)\
367 363 .filter(Repository.repo_name == repo_name).scalar()
368 364
369 365 f = self.sa.query(UserFollowing)\
370 366 .filter(UserFollowing.follows_repository == r)\
371 367 .filter(UserFollowing.user_id == user_id).scalar()
372 368
373 369 return f is not None
374 370
375 371 def is_following_user(self, username, user_id, cache=False):
376 372 u = User.get_by_username(username)
377 373
378 374 f = self.sa.query(UserFollowing)\
379 375 .filter(UserFollowing.follows_user == u)\
380 376 .filter(UserFollowing.user_id == user_id).scalar()
381 377
382 378 return f is not None
383 379
384 380 def get_followers(self, repo):
385 381 repo = self._get_repo(repo)
386 382
387 383 return self.sa.query(UserFollowing)\
388 384 .filter(UserFollowing.follows_repository == repo).count()
389 385
390 386 def get_forks(self, repo):
391 387 repo = self._get_repo(repo)
392 388 return self.sa.query(Repository)\
393 389 .filter(Repository.fork == repo).count()
394 390
395 391 def get_pull_requests(self, repo):
396 392 repo = self._get_repo(repo)
397 393 return self.sa.query(PullRequest)\
398 394 .filter(PullRequest.other_repo == repo).count()
399 395
400 396 def mark_as_fork(self, repo, fork, user):
401 397 repo = self.__get_repo(repo)
402 398 fork = self.__get_repo(fork)
403 399 if fork and repo.repo_id == fork.repo_id:
404 400 raise Exception("Cannot set repository as fork of itself")
405 401 repo.fork = fork
406 402 self.sa.add(repo)
407 403 return repo
408 404
409 405 def pull_changes(self, repo, username):
410 406 dbrepo = self.__get_repo(repo)
411 407 clone_uri = dbrepo.clone_uri
412 408 if not clone_uri:
413 409 raise Exception("This repository doesn't have a clone uri")
414 410
415 411 repo = dbrepo.scm_instance
416 412 from rhodecode import CONFIG
417 413 try:
418 414 extras = {
419 415 'ip': '',
420 416 'username': username,
421 417 'action': 'push_remote',
422 418 'repository': dbrepo.repo_name,
423 419 'scm': repo.alias,
424 420 'config': CONFIG['__file__'],
425 421 'make_lock': None,
426 422 'locked_by': [None, None]
427 423 }
428 424
429 425 Repository.inject_ui(repo, extras=extras)
430 426
431 427 if repo.alias == 'git':
432 428 repo.fetch(clone_uri)
433 429 else:
434 430 repo.pull(clone_uri)
435 431 self.mark_for_invalidation(dbrepo.repo_name)
436 432 except:
437 433 log.error(traceback.format_exc())
438 434 raise
439 435
440 436 def commit_change(self, repo, repo_name, cs, user, author, message,
441 437 content, f_path):
442 438 """
443 439 Commits changes
444 440
445 441 :param repo: SCM instance
446 442
447 443 """
448 444
449 445 if repo.alias == 'hg':
450 446 from rhodecode.lib.vcs.backends.hg import \
451 447 MercurialInMemoryChangeset as IMC
452 448 elif repo.alias == 'git':
453 449 from rhodecode.lib.vcs.backends.git import \
454 450 GitInMemoryChangeset as IMC
455 451
456 452 # decoding here will force that we have proper encoded values
457 453 # in any other case this will throw exceptions and deny commit
458 454 content = safe_str(content)
459 455 path = safe_str(f_path)
460 456 # message and author needs to be unicode
461 457 # proper backend should then translate that into required type
462 458 message = safe_unicode(message)
463 459 author = safe_unicode(author)
464 460 m = IMC(repo)
465 461 m.change(FileNode(path, content))
466 462 tip = m.commit(message=message,
467 463 author=author,
468 464 parents=[cs], branch=cs.branch)
469 465
470 466 action = 'push_local:%s' % tip.raw_id
471 467 action_logger(user, action, repo_name)
472 468 self.mark_for_invalidation(repo_name)
473 469 return tip
474 470
475 471 def create_node(self, repo, repo_name, cs, user, author, message, content,
476 472 f_path):
477 473 if repo.alias == 'hg':
478 474 from rhodecode.lib.vcs.backends.hg import MercurialInMemoryChangeset as IMC
479 475 elif repo.alias == 'git':
480 476 from rhodecode.lib.vcs.backends.git import GitInMemoryChangeset as IMC
481 477 # decoding here will force that we have proper encoded values
482 478 # in any other case this will throw exceptions and deny commit
483 479
484 480 if isinstance(content, (basestring,)):
485 481 content = safe_str(content)
486 482 elif isinstance(content, (file, cStringIO.OutputType,)):
487 483 content = content.read()
488 484 else:
489 485 raise Exception('Content is of unrecognized type %s' % (
490 486 type(content)
491 487 ))
492 488
493 489 message = safe_unicode(message)
494 490 author = safe_unicode(author)
495 491 path = safe_str(f_path)
496 492 m = IMC(repo)
497 493
498 494 if isinstance(cs, EmptyChangeset):
499 495 # EmptyChangeset means we're editing an empty repository
500 496 parents = None
501 497 else:
502 498 parents = [cs]
503 499
504 500 m.add(FileNode(path, content=content))
505 501 tip = m.commit(message=message,
506 502 author=author,
507 503 parents=parents, branch=cs.branch)
508 504
509 505 action = 'push_local:%s' % tip.raw_id
510 506 action_logger(user, action, repo_name)
511 507 self.mark_for_invalidation(repo_name)
512 508 return tip
513 509
514 510 def get_nodes(self, repo_name, revision, root_path='/', flat=True):
515 511 """
516 512 recursive walk in root dir and return a set of all paths in that dir
517 513 based on repository walk function
518 514
519 515 :param repo_name: name of repository
520 516 :param revision: revision for which to list nodes
521 517 :param root_path: root path to list
522 518 :param flat: return as a list, if False returns a dict with description
523 519
524 520 """
525 521 _files = list()
526 522 _dirs = list()
527 523 try:
528 524 _repo = self.__get_repo(repo_name)
529 525 changeset = _repo.scm_instance.get_changeset(revision)
530 526 root_path = root_path.lstrip('/')
531 527 for topnode, dirs, files in changeset.walk(root_path):
532 528 for f in files:
533 529 _files.append(f.path if flat else {"name": f.path,
534 530 "type": "file"})
535 531 for d in dirs:
536 532 _dirs.append(d.path if flat else {"name": d.path,
537 533 "type": "dir"})
538 534 except RepositoryError:
539 535 log.debug(traceback.format_exc())
540 536 raise
541 537
542 538 return _dirs, _files
543 539
544 540 def get_unread_journal(self):
545 541 return self.sa.query(UserLog).count()
546 542
547 543 def get_repo_landing_revs(self, repo=None):
548 544 """
549 545 Generates select option with tags branches and bookmarks (for hg only)
550 546 grouped by type
551 547
552 548 :param repo:
553 549 :type repo:
554 550 """
555 551
556 552 hist_l = []
557 553 choices = []
558 554 repo = self.__get_repo(repo)
559 555 hist_l.append(['tip', _('latest tip')])
560 556 choices.append('tip')
561 557 if not repo:
562 558 return choices, hist_l
563 559
564 560 repo = repo.scm_instance
565 561
566 562 branches_group = ([(k, k) for k, v in
567 563 repo.branches.iteritems()], _("Branches"))
568 564 hist_l.append(branches_group)
569 565 choices.extend([x[0] for x in branches_group[0]])
570 566
571 567 if repo.alias == 'hg':
572 568 bookmarks_group = ([(k, k) for k, v in
573 569 repo.bookmarks.iteritems()], _("Bookmarks"))
574 570 hist_l.append(bookmarks_group)
575 571 choices.extend([x[0] for x in bookmarks_group[0]])
576 572
577 573 tags_group = ([(k, k) for k, v in
578 574 repo.tags.iteritems()], _("Tags"))
579 575 hist_l.append(tags_group)
580 576 choices.extend([x[0] for x in tags_group[0]])
581 577
582 578 return choices, hist_l
583 579
584 580 def install_git_hook(self, repo, force_create=False):
585 581 """
586 582 Creates a rhodecode hook inside a git repository
587 583
588 584 :param repo: Instance of VCS repo
589 585 :param force_create: Create even if same name hook exists
590 586 """
591 587
592 588 loc = jn(repo.path, 'hooks')
593 589 if not repo.bare:
594 590 loc = jn(repo.path, '.git', 'hooks')
595 591 if not os.path.isdir(loc):
596 592 os.makedirs(loc)
597 593
598 594 tmpl_post = pkg_resources.resource_string(
599 595 'rhodecode', jn('config', 'post_receive_tmpl.py')
600 596 )
601 597 tmpl_pre = pkg_resources.resource_string(
602 598 'rhodecode', jn('config', 'pre_receive_tmpl.py')
603 599 )
604 600
605 601 for h_type, tmpl in [('pre', tmpl_pre), ('post', tmpl_post)]:
606 602 _hook_file = jn(loc, '%s-receive' % h_type)
607 603 _rhodecode_hook = False
608 604 log.debug('Installing git hook in repo %s' % repo)
609 605 if os.path.exists(_hook_file):
610 606 # let's take a look at this hook, maybe it's rhodecode ?
611 607 log.debug('hook exists, checking if it is from rhodecode')
612 608 _HOOK_VER_PAT = re.compile(r'^RC_HOOK_VER')
613 609 with open(_hook_file, 'rb') as f:
614 610 data = f.read()
615 611 matches = re.compile(r'(?:%s)\s*=\s*(.*)'
616 612 % 'RC_HOOK_VER').search(data)
617 613 if matches:
618 614 try:
619 615 ver = matches.groups()[0]
620 616 log.debug('got %s it is rhodecode' % (ver))
621 617 _rhodecode_hook = True
622 618 except:
623 619 log.error(traceback.format_exc())
624 620 else:
625 621 # there is no hook in this dir, so we want to create one
626 622 _rhodecode_hook = True
627 623
628 624 if _rhodecode_hook or force_create:
629 625 log.debug('writing %s hook file !' % h_type)
630 626 with open(_hook_file, 'wb') as f:
631 627 tmpl = tmpl.replace('_TMPL_', rhodecode.__version__)
632 628 f.write(tmpl)
633 629 os.chmod(_hook_file, 0755)
634 630 else:
635 631 log.debug('skipping writing hook file')
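
Because get_filesystem_repos now filters out dot directories and removed repositories itself, the repo_scan method in this file dropped its own REMOVED_REPO_PAT / path[0] is None guard and only normalizes names and instantiates backends. A rough outline of that consumer loop, written against the scanner's (name, (alias, path)) output so it stays self-contained (the backends argument stands in for RhodeCode's real backend lookup):

    import os

    def build_repo_map(scan_results, backends):
        """Turn pre-filtered scanner output into a {name: repo_instance} dict."""
        repos = {}
        for name, (alias, path) in scan_results:
            name = name.replace(os.sep, '/')   # internal storage separator
            if name in repos:
                raise ValueError('Duplicate repository name %s found in %s'
                                 % (name, path))
            if alias not in backends:
                continue                       # backend disabled or unknown
            try:
                repos[name] = backends[alias](path)
            except OSError:
                continue                       # unreadable path, skip it
        return repos

    # build_repo_map([('group/repo1', ('hg', '/srv/repos/group/repo1'))],
    #                {'hg': str, 'git': str})
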