pytest: Create test repositories from rc_testdata package.
Martin Bornhold -
r213:8a01e4c9 default
@@ -1,982 +1,982 @@
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Utilities library for RhodeCode
23 23 """
24 24
25 25 import datetime
26 26 import decorator
27 27 import json
28 28 import logging
29 29 import os
30 30 import re
31 31 import shutil
32 32 import tempfile
33 33 import traceback
34 34 import tarfile
35 35 import warnings
36 36 from os.path import abspath
37 37 from os.path import dirname as dn, join as jn
38 38
39 39 import paste
40 40 import pkg_resources
41 41 from paste.script.command import Command, BadCommand
42 42 from webhelpers.text import collapse, remove_formatting, strip_tags
43 43 from mako import exceptions
44 44
45 45 from rhodecode.lib.fakemod import create_module
46 46 from rhodecode.lib.vcs.backends.base import Config
47 47 from rhodecode.lib.vcs.exceptions import VCSError
48 48 from rhodecode.lib.vcs.utils.helpers import get_scm, get_scm_backend
49 49 from rhodecode.lib.utils2 import (
50 50 safe_str, safe_unicode, get_current_rhodecode_user, md5)
51 51 from rhodecode.model import meta
52 52 from rhodecode.model.db import (
53 53 Repository, User, RhodeCodeUi, UserLog, RepoGroup, UserGroup)
54 54 from rhodecode.model.meta import Session
55 55 from rhodecode.model.repo_group import RepoGroupModel
56 56 from rhodecode.model.settings import VcsSettingsModel, SettingsModel
57 57
58 58 log = logging.getLogger(__name__)
59 59
60 60 REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')
61 61
62 62 _license_cache = None
63 63
64 64
65 65 def recursive_replace(str_, replace=' '):
66 66 """
67 67 Recursively replace repeated occurrences of the given character with a single one
68 68
69 69 :param str_: given string
70 70 :param replace: character whose consecutive repetitions should be collapsed
71 71
72 72 Examples::
73 73 >>> recursive_replace("Mighty---Mighty-Bo--sstones",'-')
74 74 'Mighty-Mighty-Bo-sstones'
75 75 """
76 76
77 77 if str_.find(replace * 2) == -1:
78 78 return str_
79 79 else:
80 80 str_ = str_.replace(replace * 2, replace)
81 81 return recursive_replace(str_, replace)
82 82
83 83
84 84 def repo_name_slug(value):
85 85 """
86 86 Return a slug of the repository name.
87 87 This function is called on each creation/modification
88 88 of a repository to prevent bad names.
89 89 """
90 90
91 91 slug = remove_formatting(value)
92 92 slug = strip_tags(slug)
93 93
94 94 for c in """`?=[]\;'"<>,/~!@#$%^&*()+{}|: """:
95 95 slug = slug.replace(c, '-')
96 96 slug = recursive_replace(slug, '-')
97 97 slug = collapse(slug, '-')
98 98 return slug
99 99
100 100
101 101 #==============================================================================
102 102 # PERM DECORATOR HELPERS FOR EXTRACTING NAMES FOR PERM CHECKS
103 103 #==============================================================================
104 104 def get_repo_slug(request):
105 105 _repo = request.environ['pylons.routes_dict'].get('repo_name')
106 106 if _repo:
107 107 _repo = _repo.rstrip('/')
108 108 return _repo
109 109
110 110
111 111 def get_repo_group_slug(request):
112 112 _group = request.environ['pylons.routes_dict'].get('group_name')
113 113 if _group:
114 114 _group = _group.rstrip('/')
115 115 return _group
116 116
117 117
118 118 def get_user_group_slug(request):
119 119 _group = request.environ['pylons.routes_dict'].get('user_group_id')
120 120 try:
121 121 _group = UserGroup.get(_group)
122 122 if _group:
123 123 _group = _group.users_group_name
124 124 except Exception:
125 125 log.debug(traceback.format_exc())
126 126 #catch all failures here
127 127 pass
128 128
129 129 return _group
130 130
131 131
132 132 def action_logger(user, action, repo, ipaddr='', sa=None, commit=False):
133 133 """
134 134 Action logger for various actions made by users
135 135
136 136 :param user: user that made this action, can be a unique username string or
137 137 object containing user_id attribute
138 138 :param action: action to log, should be one of the predefined unique actions for
139 139 easy translations
140 140 :param repo: string name of the repository, or object containing repo_id,
141 141 that the action was made on
142 142 :param ipaddr: optional IP address from which the action was made
143 143 :param sa: optional sqlalchemy session
144 144
145 145 """
146 146
147 147 if not sa:
148 148 sa = meta.Session()
149 149 # if we don't get explicit IP address try to get one from registered user
150 150 # in tmpl context var
151 151 if not ipaddr:
152 152 ipaddr = getattr(get_current_rhodecode_user(), 'ip_addr', '')
153 153
154 154 try:
155 155 if getattr(user, 'user_id', None):
156 156 user_obj = User.get(user.user_id)
157 157 elif isinstance(user, basestring):
158 158 user_obj = User.get_by_username(user)
159 159 else:
160 160 raise Exception('You have to provide a user object or a username')
161 161
162 162 if getattr(repo, 'repo_id', None):
163 163 repo_obj = Repository.get(repo.repo_id)
164 164 repo_name = repo_obj.repo_name
165 165 elif isinstance(repo, basestring):
166 166 repo_name = repo.lstrip('/')
167 167 repo_obj = Repository.get_by_repo_name(repo_name)
168 168 else:
169 169 repo_obj = None
170 170 repo_name = ''
171 171
172 172 user_log = UserLog()
173 173 user_log.user_id = user_obj.user_id
174 174 user_log.username = user_obj.username
175 175 action = safe_unicode(action)
176 176 user_log.action = action[:1200000]
177 177
178 178 user_log.repository = repo_obj
179 179 user_log.repository_name = repo_name
180 180
181 181 user_log.action_date = datetime.datetime.now()
182 182 user_log.user_ip = ipaddr
183 183 sa.add(user_log)
184 184
185 185 log.info('Logging action:`%s` on repo:`%s` by user:%s ip:%s',
186 186 action, safe_unicode(repo), user_obj, ipaddr)
187 187 if commit:
188 188 sa.commit()
189 189 except Exception:
190 190 log.error(traceback.format_exc())
191 191 raise
192 192
193 193
194 194 def get_filesystem_repos(path, recursive=False, skip_removed_repos=True):
195 195 """
196 196 Scans the given path for repos and returns (name, (type, path)) tuples
197 197
198 198 :param path: path to scan for repositories
199 199 :param recursive: recursive search and return names with subdirs in front
200 200 """
201 201
202 202 # remove ending slash for better results
203 203 path = path.rstrip(os.sep)
204 204 log.debug('now scanning in %s location recursive:%s...', path, recursive)
205 205
206 206 def _get_repos(p):
207 207 dirpaths = _get_dirpaths(p)
208 208 if not _is_dir_writable(p):
209 209 log.warning('repo path without write access: %s', p)
210 210
211 211 for dirpath in dirpaths:
212 212 if os.path.isfile(os.path.join(p, dirpath)):
213 213 continue
214 214 cur_path = os.path.join(p, dirpath)
215 215
216 216 # skip removed repos
217 217 if skip_removed_repos and REMOVED_REPO_PAT.match(dirpath):
218 218 continue
219 219
220 220 # skip .<something> dirs
221 221 if dirpath.startswith('.'):
222 222 continue
223 223
224 224 try:
225 225 scm_info = get_scm(cur_path)
226 226 yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
227 227 except VCSError:
228 228 if not recursive:
229 229 continue
230 230 # check if this dir contains other repos for recursive scan
231 231 rec_path = os.path.join(p, dirpath)
232 232 if os.path.isdir(rec_path):
233 233 for inner_scm in _get_repos(rec_path):
234 234 yield inner_scm
235 235
236 236 return _get_repos(path)
237 237
238 238
239 239 def _get_dirpaths(p):
240 240 try:
241 241 # OS-independent way of checking if we have at least read-only
242 242 # access or not.
243 243 dirpaths = os.listdir(p)
244 244 except OSError:
245 245 log.warning('ignoring repo path without read access: %s', p)
246 246 return []
247 247
248 248 # os.listdir has a quirk: if a unicode path is passed into it, then it tries to
249 249 # decode paths and suddenly returns unicode objects itself. The items it
250 250 # cannot decode are returned as strings and cause issues.
251 251 #
252 252 # Those paths are ignored here until a solid solution for path handling has
253 253 # been built.
254 254 expected_type = type(p)
255 255
256 256 def _has_correct_type(item):
257 257 if type(item) is not expected_type:
258 258 log.error(
259 259 u"Ignoring path %s since it cannot be decoded into unicode.",
260 260 # Using "repr" to make sure that we see the byte value in case
261 261 # of a support request.
262 262 repr(item))
263 263 return False
264 264 return True
265 265
266 266 dirpaths = [item for item in dirpaths if _has_correct_type(item)]
267 267
268 268 return dirpaths
269 269
270 270
271 271 def _is_dir_writable(path):
272 272 """
273 273 Probe if `path` is writable.
274 274
275 275 Due to trouble on Cygwin / Windows, this is actually probing if it is
276 276 possible to create a file inside of `path`, stat does not produce reliable
277 277 results in this case.
278 278 """
279 279 try:
280 280 with tempfile.TemporaryFile(dir=path):
281 281 pass
282 282 except OSError:
283 283 return False
284 284 return True
285 285
286 286
287 287 def is_valid_repo(repo_name, base_path, expect_scm=None, explicit_scm=None):
288 288 """
289 289 Returns True if the given path is a valid repository, False otherwise.
290 290 If the expect_scm param is given, also compare whether the detected scm is
291 291 the same as the expected one. If explicit_scm is given, don't try to
292 292 detect the scm, just use the given one to check if the repo is valid.
293 293
294 294 :param repo_name:
295 295 :param base_path:
296 296 :param expect_scm:
297 297 :param explicit_scm:
298 298
299 299 :return True: if given path is a valid repository
300 300 """
301 301 full_path = os.path.join(safe_str(base_path), safe_str(repo_name))
302 302 log.debug('Checking if `%s` is a valid path for repository', repo_name)
303 303
304 304 try:
305 305 if explicit_scm:
306 306 detected_scms = [get_scm_backend(explicit_scm)]
307 307 else:
308 308 detected_scms = get_scm(full_path)
309 309
310 310 if expect_scm:
311 311 return detected_scms[0] == expect_scm
312 312 log.debug('path: %s is a vcs object:%s', full_path, detected_scms)
313 313 return True
314 314 except VCSError:
315 315 log.debug('path: %s is not a valid repo !', full_path)
316 316 return False
317 317
318 318
319 319 def is_valid_repo_group(repo_group_name, base_path, skip_path_check=False):
320 320 """
321 321 Returns True if given path is a repository group, False otherwise
322 322
323 323 :param repo_group_name:
324 324 :param base_path:
325 325 """
326 326 full_path = os.path.join(safe_str(base_path), safe_str(repo_group_name))
327 327 log.debug('Checking if `%s` is a valid path for repository group',
328 328 repo_group_name)
329 329
330 330 # check if it's not a repo
331 331 if is_valid_repo(repo_group_name, base_path):
332 332 log.debug('Repo called %s exists, it is not a valid '
333 333 'repo group' % repo_group_name)
334 334 return False
335 335
336 336 try:
337 337 # we need to check bare git repos at higher level
338 338 # since we might match branches/hooks/info/objects or possible
339 339 # other things inside bare git repo
340 340 scm_ = get_scm(os.path.dirname(full_path))
341 341 log.debug('path: %s is a vcs object:%s, not valid '
342 342 'repo group' % (full_path, scm_))
343 343 return False
344 344 except VCSError:
345 345 pass
346 346
347 347 # check if it's a valid path
348 348 if skip_path_check or os.path.isdir(full_path):
349 349 log.debug('path: %s is a valid repo group !', full_path)
350 350 return True
351 351
352 352 log.debug('path: %s is not a valid repo group !', full_path)
353 353 return False
354 354
355 355
356 356 def ask_ok(prompt, retries=4, complaint='Yes or no please!'):
357 357 while True:
358 358 ok = raw_input(prompt)
359 359 if ok in ('y', 'ye', 'yes'):
360 360 return True
361 361 if ok in ('n', 'no', 'nop', 'nope'):
362 362 return False
363 363 retries = retries - 1
364 364 if retries < 0:
365 365 raise IOError
366 366 print complaint
367 367
368 368 # propagated from mercurial documentation
369 369 ui_sections = [
370 370 'alias', 'auth',
371 371 'decode/encode', 'defaults',
372 372 'diff', 'email',
373 373 'extensions', 'format',
374 374 'merge-patterns', 'merge-tools',
375 375 'hooks', 'http_proxy',
376 376 'smtp', 'patch',
377 377 'paths', 'profiling',
378 378 'server', 'trusted',
379 379 'ui', 'web', ]
380 380
381 381
382 382 def config_data_from_db(clear_session=True, repo=None):
383 383 """
384 384 Read the configuration data from the database and return configuration
385 385 tuples.
386 386 """
387 387 config = []
388 388
389 389 sa = meta.Session()
390 390 settings_model = VcsSettingsModel(repo=repo, sa=sa)
391 391
392 392 ui_settings = settings_model.get_ui_settings()
393 393
394 394 for setting in ui_settings:
395 395 if setting.active:
396 396 log.debug(
397 397 'settings ui from db: [%s] %s=%s',
398 398 setting.section, setting.key, setting.value)
399 399 config.append((
400 400 safe_str(setting.section), safe_str(setting.key),
401 401 safe_str(setting.value)))
402 402 if setting.key == 'push_ssl':
403 403 # force set push_ssl requirement to False, rhodecode
404 404 # handles that
405 405 config.append((
406 406 safe_str(setting.section), safe_str(setting.key), False))
407 407 if clear_session:
408 408 meta.Session.remove()
409 409
410 410 # TODO: mikhail: probably it makes no sense to re-read hooks information.
411 411 # It's already there and activated/deactivated
412 412 skip_entries = []
413 413 enabled_hook_classes = get_enabled_hook_classes(ui_settings)
414 414 if 'pull' not in enabled_hook_classes:
415 415 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PULL))
416 416 if 'push' not in enabled_hook_classes:
417 417 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PUSH))
418 418
419 419 config = [entry for entry in config if entry[:2] not in skip_entries]
420 420
421 421 return config
422 422
423 423
424 424 def make_db_config(clear_session=True, repo=None):
425 425 """
426 426 Create a :class:`Config` instance based on the values in the database.
427 427 """
428 428 config = Config()
429 429 config_data = config_data_from_db(clear_session=clear_session, repo=repo)
430 430 for section, option, value in config_data:
431 431 config.set(section, option, value)
432 432 return config
433 433
434 434
435 435 def get_enabled_hook_classes(ui_settings):
436 436 """
437 437 Return the enabled hook classes.
438 438
439 439 :param ui_settings: List of ui_settings as returned
440 440 by :meth:`VcsSettingsModel.get_ui_settings`
441 441
442 442 :return: a list with the enabled hook classes. The order is not guaranteed.
443 443 :rtype: list
444 444 """
445 445 enabled_hooks = []
446 446 active_hook_keys = [
447 447 key for section, key, value, active in ui_settings
448 448 if section == 'hooks' and active]
449 449
450 450 hook_names = {
451 451 RhodeCodeUi.HOOK_PUSH: 'push',
452 452 RhodeCodeUi.HOOK_PULL: 'pull',
453 453 RhodeCodeUi.HOOK_REPO_SIZE: 'repo_size'
454 454 }
455 455
456 456 for key in active_hook_keys:
457 457 hook = hook_names.get(key)
458 458 if hook:
459 459 enabled_hooks.append(hook)
460 460
461 461 return enabled_hooks
462 462
463 463
464 464 def set_rhodecode_config(config):
465 465 """
466 466 Updates pylons config with new settings from database
467 467
468 468 :param config:
469 469 """
470 470 app_settings = SettingsModel().get_all_settings()
471 471
472 472 for k, v in app_settings.items():
473 473 config[k] = v
474 474
475 475
476 476 def map_groups(path):
477 477 """
478 478 Given a full path to a repository, create all nested groups that this
479 479 repo is inside. This function creates parent-child relationships between
480 480 groups and creates default perms for all new groups.
481 481
482 482 :param path: full path to the repository
483 483 """
484 484 sa = meta.Session()
485 485 groups = path.split(Repository.NAME_SEP)
486 486 parent = None
487 487 group = None
488 488
489 489 # last element is repo in nested groups structure
490 490 groups = groups[:-1]
491 491 rgm = RepoGroupModel(sa)
492 492 owner = User.get_first_admin()
493 493 for lvl, group_name in enumerate(groups):
494 494 group_name = '/'.join(groups[:lvl] + [group_name])
495 495 group = RepoGroup.get_by_group_name(group_name)
496 496 desc = '%s group' % group_name
497 497
498 498 # skip folders that are now removed repos
499 499 if REMOVED_REPO_PAT.match(group_name):
500 500 break
501 501
502 502 if group is None:
503 503 log.debug('creating group level: %s group_name: %s',
504 504 lvl, group_name)
505 505 group = RepoGroup(group_name, parent)
506 506 group.group_description = desc
507 507 group.user = owner
508 508 sa.add(group)
509 509 perm_obj = rgm._create_default_perms(group)
510 510 sa.add(perm_obj)
511 511 sa.flush()
512 512
513 513 parent = group
514 514 return group
515 515
516 516
517 517 def repo2db_mapper(initial_repo_list, remove_obsolete=False):
518 518 """
519 519 Maps all repos given in initial_repo_list; non-existing repositories
520 520 are created. If remove_obsolete is True, it also checks for db entries
521 521 that are not in initial_repo_list and removes them.
522 522
523 523 :param initial_repo_list: list of repositories found by scanning methods
524 524 :param remove_obsolete: check for obsolete entries in database
525 525 """
526 526 from rhodecode.model.repo import RepoModel
527 527 from rhodecode.model.scm import ScmModel
528 528 sa = meta.Session()
529 529 repo_model = RepoModel()
530 530 user = User.get_first_admin()
531 531 added = []
532 532
533 533 # creation defaults
534 534 defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
535 535 enable_statistics = defs.get('repo_enable_statistics')
536 536 enable_locking = defs.get('repo_enable_locking')
537 537 enable_downloads = defs.get('repo_enable_downloads')
538 538 private = defs.get('repo_private')
539 539
540 540 for name, repo in initial_repo_list.items():
541 541 group = map_groups(name)
542 542 unicode_name = safe_unicode(name)
543 543 db_repo = repo_model.get_by_repo_name(unicode_name)
544 544 # found repo that is on filesystem not in RhodeCode database
545 545 if not db_repo:
546 546 log.info('repository %s not found, creating now', name)
547 547 added.append(name)
548 548 desc = (repo.description
549 549 if repo.description != 'unknown'
550 550 else '%s repository' % name)
551 551
552 552 db_repo = repo_model._create_repo(
553 553 repo_name=name,
554 554 repo_type=repo.alias,
555 555 description=desc,
556 556 repo_group=getattr(group, 'group_id', None),
557 557 owner=user,
558 558 enable_locking=enable_locking,
559 559 enable_downloads=enable_downloads,
560 560 enable_statistics=enable_statistics,
561 561 private=private,
562 562 state=Repository.STATE_CREATED
563 563 )
564 564 sa.commit()
565 565 # we added that repo just now, so make sure the server info is updated
566 566 if db_repo.repo_type == 'git':
567 567 git_repo = db_repo.scm_instance()
568 568 # update repository server-info
569 569 log.debug('Running update server info')
570 570 git_repo._update_server_info()
571 571
572 572 db_repo.update_commit_cache()
573 573
574 574 config = db_repo._config
575 575 config.set('extensions', 'largefiles', '')
576 576 ScmModel().install_hooks(
577 577 db_repo.scm_instance(config=config),
578 578 repo_type=db_repo.repo_type)
579 579
580 580 removed = []
581 581 if remove_obsolete:
582 582 # remove from database those repositories that are not in the filesystem
583 583 for repo in sa.query(Repository).all():
584 584 if repo.repo_name not in initial_repo_list.keys():
585 585 log.debug("Removing non-existing repository found in db `%s`",
586 586 repo.repo_name)
587 587 try:
588 588 RepoModel(sa).delete(repo, forks='detach', fs_remove=False)
589 589 sa.commit()
590 590 removed.append(repo.repo_name)
591 591 except Exception:
592 592 # don't hold further removals on error
593 593 log.error(traceback.format_exc())
594 594 sa.rollback()
595 595
596 596 def splitter(full_repo_name):
597 597 _parts = full_repo_name.rsplit(RepoGroup.url_sep(), 1)
598 598 gr_name = None
599 599 if len(_parts) == 2:
600 600 gr_name = _parts[0]
601 601 return gr_name
602 602
603 603 initial_repo_group_list = [splitter(x) for x in
604 604 initial_repo_list.keys() if splitter(x)]
605 605
606 606 # remove from the database those repository groups that are not on the
607 607 # filesystem; due to parent-child relationships we need to delete them
608 608 # in a specific order, most nested first
609 609 all_groups = [x.group_name for x in sa.query(RepoGroup).all()]
610 610 nested_sort = lambda gr: len(gr.split('/'))
611 611 for group_name in sorted(all_groups, key=nested_sort, reverse=True):
612 612 if group_name not in initial_repo_group_list:
613 613 repo_group = RepoGroup.get_by_group_name(group_name)
614 614 if (repo_group.children.all() or
615 615 not RepoGroupModel().check_exist_filesystem(
616 616 group_name=group_name, exc_on_failure=False)):
617 617 continue
618 618
619 619 log.info(
620 620 'Removing non-existing repository group found in db `%s`',
621 621 group_name)
622 622 try:
623 623 RepoGroupModel(sa).delete(group_name, fs_remove=False)
624 624 sa.commit()
625 625 removed.append(group_name)
626 626 except Exception:
627 627 # don't hold further removals on error
628 628 log.exception(
629 629 'Unable to remove repository group `%s`',
630 630 group_name)
631 631 sa.rollback()
632 632 raise
633 633
634 634 return added, removed
635 635
636 636
637 637 def get_default_cache_settings(settings):
638 638 cache_settings = {}
639 639 for key in settings.keys():
640 640 for prefix in ['beaker.cache.', 'cache.']:
641 641 if key.startswith(prefix):
642 642 name = key.split(prefix)[1].strip()
643 643 cache_settings[name] = settings[key].strip()
644 644 return cache_settings
645 645
646 646
647 647 # set cache regions for beaker so celery can utilise it
648 648 def add_cache(settings):
649 649 from rhodecode.lib import caches
650 650 cache_settings = {'regions': None}
651 651 # main cache settings used as default ...
652 652 cache_settings.update(get_default_cache_settings(settings))
653 653
654 654 if cache_settings['regions']:
655 655 for region in cache_settings['regions'].split(','):
656 656 region = region.strip()
657 657 region_settings = {}
658 658 for key, value in cache_settings.items():
659 659 if key.startswith(region):
660 660 region_settings[key.split('.')[1]] = value
661 661
662 662 caches.configure_cache_region(
663 663 region, region_settings, cache_settings)
664 664
665 665
666 666 def load_rcextensions(root_path):
667 667 import rhodecode
668 668 from rhodecode.config import conf
669 669
670 670 path = os.path.join(root_path, 'rcextensions', '__init__.py')
671 671 if os.path.isfile(path):
672 672 rcext = create_module('rc', path)
673 673 EXT = rhodecode.EXTENSIONS = rcext
674 674 log.debug('Found rcextensions now loading %s...', rcext)
675 675
676 676 # Additional mappings that are not present in the pygments lexers
677 677 conf.LANGUAGES_EXTENSIONS_MAP.update(getattr(EXT, 'EXTRA_MAPPINGS', {}))
678 678
679 679 # auto-check if the module is not missing any data, set to default if it is;
680 680 # this will help auto-update new features of the rcext module
681 681 #from rhodecode.config import rcextensions
682 682 #for k in dir(rcextensions):
683 683 # if not k.startswith('_') and not hasattr(EXT, k):
684 684 # setattr(EXT, k, getattr(rcextensions, k))
685 685
686 686
687 687 def get_custom_lexer(extension):
688 688 """
689 689 returns a custom lexer if it is defined in rcextensions module, or None
690 690 if there's no custom lexer defined
691 691 """
692 692 import rhodecode
693 693 from pygments import lexers
694 694 # check if we didn't define this extension as another lexer
695 695 extensions = rhodecode.EXTENSIONS and getattr(rhodecode.EXTENSIONS, 'EXTRA_LEXERS', None)
696 696 if extensions and extension in rhodecode.EXTENSIONS.EXTRA_LEXERS:
697 697 _lexer_name = rhodecode.EXTENSIONS.EXTRA_LEXERS[extension]
698 698 return lexers.get_lexer_by_name(_lexer_name)
699 699
700 700
701 701 #==============================================================================
702 702 # TEST FUNCTIONS AND CREATORS
703 703 #==============================================================================
704 704 def create_test_index(repo_location, config, full_index):
705 705 """
706 706 Makes default test index
707 707
708 708 :param config: test config
709 709 :param full_index:
710 710 # start test server:
711 711 rcserver --with-vcsserver test.ini
712 712
713 713 # build index and store it in /tmp/rc/index:
714 714 rhodecode-index --force --api-host=http://vps1.dev:5000 --api-key=xxx --engine-location=/tmp/rc/index
715 715
716 716 # package and move new packages
717 717 tar -zcvf vcs_search_index.tar.gz -C /tmp/rc index
718 718 mv vcs_search_index.tar.gz rhodecode/tests/fixtures/
719 719
720 720 """
721 721 import rc_testdata
722 722
723 723 rc_testdata.extract_search_index(
724 724 'vcs_search_index', os.path.dirname(config['search.location']))
725 725
726 726
727 727 def create_test_env(repos_test_path, config):
728 728 """
729 Makes a fresh database and
730 installs test repository into tmp dir
729 Makes a fresh database.
731 730 """
732 731 from rhodecode.lib.db_manage import DbManage
733 from rhodecode.tests import HG_REPO, GIT_REPO, SVN_REPO, TESTS_TMP_PATH
734 732
735 733 # PART ONE create db
736 734 dbconf = config['sqlalchemy.db1.url']
737 735 log.debug('making test db %s', dbconf)
738 736
739 737 # create test dir if it doesn't exist
740 738 if not os.path.isdir(repos_test_path):
741 739 log.debug('Creating testdir %s', repos_test_path)
742 740 os.makedirs(repos_test_path)
743 741
744 742 dbmanage = DbManage(log_sql=False, dbconf=dbconf, root=config['here'],
745 743 tests=True, cli_args={'force_ask': True})
746 744 dbmanage.create_tables(override=True)
747 745 dbmanage.set_db_version()
748 746 # for tests dynamically set new root paths based on generated content
749 747 dbmanage.create_settings(dbmanage.config_prompt(repos_test_path))
750 748 dbmanage.create_default_user()
751 749 dbmanage.create_test_admin_and_users()
752 750 dbmanage.create_permissions()
753 751 dbmanage.populate_default_permissions()
754 752 Session().commit()
755 # PART TWO make test repo
753
754 create_test_repositories(repos_test_path, config)
755
756
757 def create_test_repositories(path, config):
758 """
759 Creates test repositories in the temporary directory. Repositories are
760 extracted from archives within the rc_testdata package.
761 """
762 import rc_testdata
763 from rhodecode.tests import HG_REPO, GIT_REPO, SVN_REPO, TESTS_TMP_PATH
764
756 765 log.debug('making test vcs repositories')
757 766
758 767 idx_path = config['search.location']
759 768 data_path = config['cache_dir']
760 769
761 770 # clean index and data
762 771 if idx_path and os.path.exists(idx_path):
763 772 log.debug('remove %s', idx_path)
764 773 shutil.rmtree(idx_path)
765 774
766 775 if data_path and os.path.exists(data_path):
767 776 log.debug('remove %s', data_path)
768 777 shutil.rmtree(data_path)
769 778
770 # CREATE DEFAULT TEST REPOS
771 cur_dir = dn(dn(abspath(__file__)))
772 with tarfile.open(jn(cur_dir, 'tests', 'fixtures',
773 'vcs_test_hg.tar.gz')) as tar:
774 tar.extractall(jn(TESTS_TMP_PATH, HG_REPO))
775
776 cur_dir = dn(dn(abspath(__file__)))
777 with tarfile.open(jn(cur_dir, 'tests', 'fixtures',
778 'vcs_test_git.tar.gz')) as tar:
779 tar.extractall(jn(TESTS_TMP_PATH, GIT_REPO))
779 rc_testdata.extract_hg_dump('vcs_test_hg', jn(TESTS_TMP_PATH, HG_REPO))
780 rc_testdata.extract_git_dump('vcs_test_git', jn(TESTS_TMP_PATH, GIT_REPO))
780 781
781 782 # Note: Subversion is in the process of being integrated with the system;
782 783 # until we have a properly packed version of the test svn repository, this
783 784 # tries to copy over the repo from a package "rc_testdata"
784 import rc_testdata
785 785 svn_repo_path = rc_testdata.get_svn_repo_archive()
786 786 with tarfile.open(svn_repo_path) as tar:
787 787 tar.extractall(jn(TESTS_TMP_PATH, SVN_REPO))
788 788
789 789
790 790 #==============================================================================
791 791 # PASTER COMMANDS
792 792 #==============================================================================
793 793 class BasePasterCommand(Command):
794 794 """
795 795 Abstract Base Class for paster commands.
796 796
797 797 The celery commands are somewhat aggressive about loading
798 798 celery.conf, and since our module sets the `CELERY_LOADER`
799 799 environment variable to our loader, we have to bootstrap a bit and
800 800 make sure we've had a chance to load the pylons config off of the
801 801 command line, otherwise everything fails.
802 802 """
803 803 min_args = 1
804 804 min_args_error = "Please provide a paster config file as an argument."
805 805 takes_config_file = 1
806 806 requires_config_file = True
807 807
808 808 def notify_msg(self, msg, log=False):
809 809 """Make a notification to user, additionally if logger is passed
810 810 it logs this action using given logger
811 811
812 812 :param msg: message that will be printed to user
813 813 :param log: logging instance, to use to additionally log this message
814 814
815 815 """
816 816 if log and isinstance(log, logging.Logger):
817 817 log.info(msg)
818 818
819 819 def run(self, args):
820 820 """
821 821 Overrides Command.run
822 822
823 823 Checks for a config file argument and loads it.
824 824 """
825 825 if len(args) < self.min_args:
826 826 raise BadCommand(
827 827 self.min_args_error % {'min_args': self.min_args,
828 828 'actual_args': len(args)})
829 829
830 830 # Decrement because we're going to lob off the first argument.
831 831 # @@ This is hacky
832 832 self.min_args -= 1
833 833 self.bootstrap_config(args[0])
834 834 self.update_parser()
835 835 return super(BasePasterCommand, self).run(args[1:])
836 836
837 837 def update_parser(self):
838 838 """
839 839 Abstract method. Allows for the class' parser to be updated
840 840 before the superclass' `run` method is called. Necessary to
841 841 allow options/arguments to be passed through to the underlying
842 842 celery command.
843 843 """
844 844 raise NotImplementedError("Abstract Method.")
845 845
846 846 def bootstrap_config(self, conf):
847 847 """
848 848 Loads the pylons configuration.
849 849 """
850 850 from pylons import config as pylonsconfig
851 851
852 852 self.path_to_ini_file = os.path.realpath(conf)
853 853 conf = paste.deploy.appconfig('config:' + self.path_to_ini_file)
854 854 pylonsconfig.init_app(conf.global_conf, conf.local_conf)
855 855
856 856 def _init_session(self):
857 857 """
858 858 Inits SqlAlchemy Session
859 859 """
860 860 logging.config.fileConfig(self.path_to_ini_file)
861 861 from pylons import config
862 862 from rhodecode.config.utils import initialize_database
863 863
864 864 # get to remove repos !!
865 865 add_cache(config)
866 866 initialize_database(config)
867 867
868 868
869 869 @decorator.decorator
870 870 def jsonify(func, *args, **kwargs):
871 871 """Action decorator that formats output for JSON
872 872
873 873 Given a function that will return content, this decorator will turn
874 874 the result into JSON, with a content-type of 'application/json' and
875 875 output it.
876 876
877 877 """
878 878 from pylons.decorators.util import get_pylons
879 879 from rhodecode.lib.ext_json import json
880 880 pylons = get_pylons(args)
881 881 pylons.response.headers['Content-Type'] = 'application/json; charset=utf-8'
882 882 data = func(*args, **kwargs)
883 883 if isinstance(data, (list, tuple)):
884 884 msg = "JSON responses with Array envelopes are susceptible to " \
885 885 "cross-site data leak attacks, see " \
886 886 "http://wiki.pylonshq.com/display/pylonsfaq/Warnings"
887 887 warnings.warn(msg, Warning, 2)
888 888 log.warning(msg)
889 889 log.debug("Returning JSON wrapped action output")
890 890 return json.dumps(data, encoding='utf-8')
891 891
892 892
893 893 class PartialRenderer(object):
894 894 """
895 895 Partial renderer used to render chunks of html used in datagrids.
896 896 Use like::
897 897
898 898 _render = PartialRenderer('data_table/_dt_elements.html')
899 899 _render('quick_menu', args, kwargs)
900 900 PartialRenderer.h,
901 901 c,
902 902 _,
903 903 ungettext
904 904 are the template helpers initialized inside and can be re-used later
905 905
906 906 :param tmpl_name: template path relative to the /templates/ dir
907 907 """
908 908
909 909 def __init__(self, tmpl_name):
910 910 import rhodecode
911 911 from pylons import request, tmpl_context as c
912 912 from pylons.i18n.translation import _, ungettext
913 913 from rhodecode.lib import helpers as h
914 914
915 915 self.tmpl_name = tmpl_name
916 916 self.rhodecode = rhodecode
917 917 self.c = c
918 918 self._ = _
919 919 self.ungettext = ungettext
920 920 self.h = h
921 921 self.request = request
922 922
923 923 def _mako_lookup(self):
924 924 _tmpl_lookup = self.rhodecode.CONFIG['pylons.app_globals'].mako_lookup
925 925 return _tmpl_lookup.get_template(self.tmpl_name)
926 926
927 927 def _update_kwargs_for_render(self, kwargs):
928 928 """
929 929 Inject params required for Mako rendering
930 930 """
931 931 _kwargs = {
932 932 '_': self._,
933 933 'h': self.h,
934 934 'c': self.c,
935 935 'request': self.request,
936 936 'ungettext': self.ungettext,
937 937 }
938 938 _kwargs.update(kwargs)
939 939 return _kwargs
940 940
941 941 def _render_with_exc(self, render_func, args, kwargs):
942 942 try:
943 943 return render_func.render(*args, **kwargs)
944 944 except:
945 945 log.error(exceptions.text_error_template().render())
946 946 raise
947 947
948 948 def _get_template(self, template_obj, def_name):
949 949 if def_name:
950 950 tmpl = template_obj.get_def(def_name)
951 951 else:
952 952 tmpl = template_obj
953 953 return tmpl
954 954
955 955 def render(self, def_name, *args, **kwargs):
956 956 lookup_obj = self._mako_lookup()
957 957 tmpl = self._get_template(lookup_obj, def_name=def_name)
958 958 kwargs = self._update_kwargs_for_render(kwargs)
959 959 return self._render_with_exc(tmpl, args, kwargs)
960 960
961 961 def __call__(self, tmpl, *args, **kwargs):
962 962 return self.render(tmpl, *args, **kwargs)
963 963
964 964
965 965 def password_changed(auth_user, session):
966 966 if auth_user.username == User.DEFAULT_USER:
967 967 return False
968 968 password_hash = md5(auth_user.password) if auth_user.password else None
969 969 rhodecode_user = session.get('rhodecode_user', {})
970 970 session_password_hash = rhodecode_user.get('password', '')
971 971 return password_hash != session_password_hash
972 972
973 973
974 974 def read_opensource_licenses():
975 975 global _license_cache
976 976
977 977 if not _license_cache:
978 978 licenses = pkg_resources.resource_string(
979 979 'rhodecode', 'config/licenses.json')
980 980 _license_cache = json.loads(licenses)
981 981
982 982 return _license_cache
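For context, a minimal sketch of how the test-environment helpers above might be driven directly, assuming this module is importable as rhodecode.lib.utils; the scratch directory and the hand-built config dict are illustrative assumptions, while create_test_env, create_test_repositories and the config keys they read ('sqlalchemy.db1.url', 'here', 'search.location', 'cache_dir') come from the code above::

    import os

    from rhodecode.lib.utils import create_test_env

    # hypothetical scratch location for the test database and repositories
    repos_path = '/tmp/rc-test-env'
    config = {
        'sqlalchemy.db1.url': 'sqlite:///%s/test.db' % repos_path,
        'here': repos_path,
        # both locations get wiped before the rc_testdata archives are extracted
        'search.location': os.path.join(repos_path, 'index'),
        'cache_dir': os.path.join(repos_path, 'data'),
    }
    # creates the test database (users, permissions, settings) and then
    # extracts the hg/git/svn test repositories from the rc_testdata package
    create_test_env(repos_path, config)

In a real test run these values come from the test ini file rather than a hand-built dict; the snippet only illustrates the call order.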