pytest: Update docstrings to reflect changes.
Martin Bornhold -
r216:3ec05fb7 default
@@ -1,984 +1,971 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Utilities library for RhodeCode
23 23 """
24 24
25 25 import datetime
26 26 import decorator
27 27 import json
28 28 import logging
29 29 import os
30 30 import re
31 31 import shutil
32 32 import tempfile
33 33 import traceback
34 34 import tarfile
35 35 import warnings
36 36 from os.path import abspath
37 37 from os.path import dirname as dn, join as jn
38 38
39 39 import paste
40 40 import pkg_resources
41 41 from paste.script.command import Command, BadCommand
42 42 from webhelpers.text import collapse, remove_formatting, strip_tags
43 43 from mako import exceptions
44 44
45 45 from rhodecode.lib.fakemod import create_module
46 46 from rhodecode.lib.vcs.backends.base import Config
47 47 from rhodecode.lib.vcs.exceptions import VCSError
48 48 from rhodecode.lib.vcs.utils.helpers import get_scm, get_scm_backend
49 49 from rhodecode.lib.utils2 import (
50 50 safe_str, safe_unicode, get_current_rhodecode_user, md5)
51 51 from rhodecode.model import meta
52 52 from rhodecode.model.db import (
53 53 Repository, User, RhodeCodeUi, UserLog, RepoGroup, UserGroup)
54 54 from rhodecode.model.meta import Session
55 55 from rhodecode.model.repo_group import RepoGroupModel
56 56 from rhodecode.model.settings import VcsSettingsModel, SettingsModel
57 57
58 58 log = logging.getLogger(__name__)
59 59
60 60 REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')
61 61
62 62 _license_cache = None
63 63
64 64
65 65 def recursive_replace(str_, replace=' '):
66 66 """
67 67 Recursively replace repeated occurrences of the given character with a single one
68 68
69 69 :param str_: string to process
70 70 :param replace: character whose repeated occurrences are collapsed
71 71
72 72 Examples::
73 73 >>> recursive_replace("Mighty---Mighty-Bo--sstones",'-')
74 74 'Mighty-Mighty-Bo-sstones'
75 75 """
76 76
77 77 if str_.find(replace * 2) == -1:
78 78 return str_
79 79 else:
80 80 str_ = str_.replace(replace * 2, replace)
81 81 return recursive_replace(str_, replace)
82 82
83 83
84 84 def repo_name_slug(value):
85 85 """
86 86 Return a slug of the repository name.
87 87 This function is called on each repository creation/modification
88 88 to prevent invalid repository names.
89 89 """
90 90
91 91 slug = remove_formatting(value)
92 92 slug = strip_tags(slug)
93 93
94 94 for c in """`?=[]\;'"<>,/~!@#$%^&*()+{}|: """:
95 95 slug = slug.replace(c, '-')
96 96 slug = recursive_replace(slug, '-')
97 97 slug = collapse(slug, '-')
98 98 return slug
99 99
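# Illustrative usage sketch (hypothetical input; the exact result depends on
# the webhelpers text helpers): formatting and tags are stripped, forbidden
# characters become '-', and repeated dashes are collapsed, so roughly:
#
#   repo_name_slug('my repo!! 2')   # -> 'my-repo-2'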
100 100
101 101 #==============================================================================
102 102 # PERM DECORATOR HELPERS FOR EXTRACTING NAMES FOR PERM CHECKS
103 103 #==============================================================================
104 104 def get_repo_slug(request):
105 105 _repo = request.environ['pylons.routes_dict'].get('repo_name')
106 106 if _repo:
107 107 _repo = _repo.rstrip('/')
108 108 return _repo
109 109
110 110
111 111 def get_repo_group_slug(request):
112 112 _group = request.environ['pylons.routes_dict'].get('group_name')
113 113 if _group:
114 114 _group = _group.rstrip('/')
115 115 return _group
116 116
117 117
118 118 def get_user_group_slug(request):
119 119 _group = request.environ['pylons.routes_dict'].get('user_group_id')
120 120 try:
121 121 _group = UserGroup.get(_group)
122 122 if _group:
123 123 _group = _group.users_group_name
124 124 except Exception:
125 125 log.debug(traceback.format_exc())
126 126 # catch all failures here
127 127 pass
128 128
129 129 return _group
130 130
131 131
132 132 def action_logger(user, action, repo, ipaddr='', sa=None, commit=False):
133 133 """
134 134 Action logger for various actions made by users
135 135
136 136 :param user: user that made this action, can be a unique username string or
137 137 object containing user_id attribute
138 138 :param action: action to log, should be one of the predefined unique actions
139 139 for easy translation
140 140 :param repo: string name of the repository, or object containing repo_id,
141 141 that the action was made on
142 142 :param ipaddr: optional IP address from which the action was made
143 143 :param sa: optional sqlalchemy session
144 144
145 145 """
146 146
147 147 if not sa:
148 148 sa = meta.Session()
149 149 # if we don't get an explicit IP address, try to get one from the
150 150 # registered user in the tmpl context var
151 151 if not ipaddr:
152 152 ipaddr = getattr(get_current_rhodecode_user(), 'ip_addr', '')
153 153
154 154 try:
155 155 if getattr(user, 'user_id', None):
156 156 user_obj = User.get(user.user_id)
157 157 elif isinstance(user, basestring):
158 158 user_obj = User.get_by_username(user)
159 159 else:
160 160 raise Exception('You have to provide a user object or a username')
161 161
162 162 if getattr(repo, 'repo_id', None):
163 163 repo_obj = Repository.get(repo.repo_id)
164 164 repo_name = repo_obj.repo_name
165 165 elif isinstance(repo, basestring):
166 166 repo_name = repo.lstrip('/')
167 167 repo_obj = Repository.get_by_repo_name(repo_name)
168 168 else:
169 169 repo_obj = None
170 170 repo_name = ''
171 171
172 172 user_log = UserLog()
173 173 user_log.user_id = user_obj.user_id
174 174 user_log.username = user_obj.username
175 175 action = safe_unicode(action)
176 176 user_log.action = action[:1200000]
177 177
178 178 user_log.repository = repo_obj
179 179 user_log.repository_name = repo_name
180 180
181 181 user_log.action_date = datetime.datetime.now()
182 182 user_log.user_ip = ipaddr
183 183 sa.add(user_log)
184 184
185 185 log.info('Logging action:`%s` on repo:`%s` by user:%s ip:%s',
186 186 action, safe_unicode(repo), user_obj, ipaddr)
187 187 if commit:
188 188 sa.commit()
189 189 except Exception:
190 190 log.error(traceback.format_exc())
191 191 raise
192 192
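# Illustrative usage sketch (username, action and repository name are
# hypothetical values): a typical call as it might appear in a controller.
# With commit=True the created UserLog row is committed immediately instead
# of waiting for the surrounding transaction.
#
#   action_logger(user='admin', action='user_created_repo',
#                 repo='my-group/my-repo', ipaddr='127.0.0.1', commit=True)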
193 193
194 194 def get_filesystem_repos(path, recursive=False, skip_removed_repos=True):
195 195 """
196 196 Scans the given path for repos and returns (name, (type, path)) tuples
197 197
198 198 :param path: path to scan for repositories
199 199 :param recursive: recursive search and return names with subdirs in front
200 200 """
201 201
202 202 # remove ending slash for better results
203 203 path = path.rstrip(os.sep)
204 204 log.debug('now scanning in %s location recursive:%s...', path, recursive)
205 205
206 206 def _get_repos(p):
207 207 dirpaths = _get_dirpaths(p)
208 208 if not _is_dir_writable(p):
209 209 log.warning('repo path without write access: %s', p)
210 210
211 211 for dirpath in dirpaths:
212 212 if os.path.isfile(os.path.join(p, dirpath)):
213 213 continue
214 214 cur_path = os.path.join(p, dirpath)
215 215
216 216 # skip removed repos
217 217 if skip_removed_repos and REMOVED_REPO_PAT.match(dirpath):
218 218 continue
219 219
220 220 # skip .<something> dirs
221 221 if dirpath.startswith('.'):
222 222 continue
223 223
224 224 try:
225 225 scm_info = get_scm(cur_path)
226 226 yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
227 227 except VCSError:
228 228 if not recursive:
229 229 continue
230 230 # check if this dir contains other repos for a recursive scan
231 231 rec_path = os.path.join(p, dirpath)
232 232 if os.path.isdir(rec_path):
233 233 for inner_scm in _get_repos(rec_path):
234 234 yield inner_scm
235 235
236 236 return _get_repos(path)
237 237
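# Illustrative usage sketch (the scan path is hypothetical): the function
# returns a generator of (name, (type, path)) tuples, so it is typically
# consumed like this:
#
#   for repo_name, (repo_type, repo_path) in get_filesystem_repos(
#           '/srv/repos', recursive=True):
#       log.debug('found %s repo %s at %s', repo_type, repo_name, repo_path)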
238 238
239 239 def _get_dirpaths(p):
240 240 try:
241 241 # OS-independent way of checking whether we have at least read-only
242 242 # access.
243 243 dirpaths = os.listdir(p)
244 244 except OSError:
245 245 log.warning('ignoring repo path without read access: %s', p)
246 246 return []
247 247
248 248 # os.listdir has a quirk: if a unicode path is passed into it, then it tries to
249 249 # decode the entries and suddenly returns unicode objects itself. The items it
250 250 # cannot decode are returned as byte strings and cause issues.
251 251 #
252 252 # Those paths are ignored here until a solid solution for path handling has
253 253 # been built.
254 254 expected_type = type(p)
255 255
256 256 def _has_correct_type(item):
257 257 if type(item) is not expected_type:
258 258 log.error(
259 259 u"Ignoring path %s since it cannot be decoded into unicode.",
260 260 # Using "repr" to make sure that we see the byte value in case
261 261 # of support.
262 262 repr(item))
263 263 return False
264 264 return True
265 265
266 266 dirpaths = [item for item in dirpaths if _has_correct_type(item)]
267 267
268 268 return dirpaths
269 269
270 270
271 271 def _is_dir_writable(path):
272 272 """
273 273 Probe if `path` is writable.
274 274
275 275 Due to trouble on Cygwin / Windows, this is actually probing if it is
276 276 possible to create a file inside of `path`; stat does not produce reliable
277 277 results in this case.
278 278 """
279 279 try:
280 280 with tempfile.TemporaryFile(dir=path):
281 281 pass
282 282 except OSError:
283 283 return False
284 284 return True
285 285
286 286
287 287 def is_valid_repo(repo_name, base_path, expect_scm=None, explicit_scm=None):
288 288 """
289 289 Returns True if the given path is a valid repository, False otherwise.
290 290 If the expect_scm param is also given, compare whether the detected scm is
291 291 the same as the expected one. If explicit_scm is given, don't try to
292 292 detect the scm, just use the given one to check if the repo is valid.
293 293
294 294 :param repo_name:
295 295 :param base_path:
296 296 :param expect_scm:
297 297 :param explicit_scm:
298 298
299 299 :return True: if given path is a valid repository
300 300 """
301 301 full_path = os.path.join(safe_str(base_path), safe_str(repo_name))
302 302 log.debug('Checking if `%s` is a valid path for repository', repo_name)
303 303
304 304 try:
305 305 if explicit_scm:
306 306 detected_scms = [get_scm_backend(explicit_scm)]
307 307 else:
308 308 detected_scms = get_scm(full_path)
309 309
310 310 if expect_scm:
311 311 return detected_scms[0] == expect_scm
312 312 log.debug('path: %s is a vcs object:%s', full_path, detected_scms)
313 313 return True
314 314 except VCSError:
315 315 log.debug('path: %s is not a valid repo !', full_path)
316 316 return False
317 317
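# Illustrative usage sketch (hypothetical repo name and base path): plain
# detection versus checking against an expected backend.
#
#   is_valid_repo('my-repo', '/srv/repos')                    # True or False
#   is_valid_repo('my-repo', '/srv/repos', expect_scm='git')  # True only for git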
318 318
319 319 def is_valid_repo_group(repo_group_name, base_path, skip_path_check=False):
320 320 """
321 321 Returns True if given path is a repository group, False otherwise
322 322
323 323 :param repo_group_name:
324 324 :param base_path:
325 325 """
326 326 full_path = os.path.join(safe_str(base_path), safe_str(repo_group_name))
327 327 log.debug('Checking if `%s` is a valid path for repository group',
328 328 repo_group_name)
329 329
330 330 # check if it's not a repo
331 331 if is_valid_repo(repo_group_name, base_path):
332 332 log.debug('Repo called %s exists, it is not a valid '
333 333 'repo group' % repo_group_name)
334 334 return False
335 335
336 336 try:
337 337 # we need to check bare git repos at higher level
338 338 # since we might match branches/hooks/info/objects or possibly
339 339 # other things inside a bare git repo
340 340 scm_ = get_scm(os.path.dirname(full_path))
341 341 log.debug('path: %s is a vcs object:%s, not a valid '
342 342 'repo group' % (full_path, scm_))
343 343 return False
344 344 except VCSError:
345 345 pass
346 346
347 347 # check if it's a valid path
348 348 if skip_path_check or os.path.isdir(full_path):
349 349 log.debug('path: %s is a valid repo group !', full_path)
350 350 return True
351 351
352 352 log.debug('path: %s is not a valid repo group !', full_path)
353 353 return False
354 354
355 355
356 356 def ask_ok(prompt, retries=4, complaint='Yes or no please!'):
357 357 while True:
358 358 ok = raw_input(prompt)
359 359 if ok in ('y', 'ye', 'yes'):
360 360 return True
361 361 if ok in ('n', 'no', 'nop', 'nope'):
362 362 return False
363 363 retries = retries - 1
364 364 if retries < 0:
365 365 raise IOError
366 366 print complaint
367 367
368 368 # propagated from mercurial documentation
369 369 ui_sections = [
370 370 'alias', 'auth',
371 371 'decode/encode', 'defaults',
372 372 'diff', 'email',
373 373 'extensions', 'format',
374 374 'merge-patterns', 'merge-tools',
375 375 'hooks', 'http_proxy',
376 376 'smtp', 'patch',
377 377 'paths', 'profiling',
378 378 'server', 'trusted',
379 379 'ui', 'web', ]
380 380
381 381
382 382 def config_data_from_db(clear_session=True, repo=None):
383 383 """
384 384 Read the configuration data from the database and return configuration
385 385 tuples.
386 386 """
387 387 config = []
388 388
389 389 sa = meta.Session()
390 390 settings_model = VcsSettingsModel(repo=repo, sa=sa)
391 391
392 392 ui_settings = settings_model.get_ui_settings()
393 393
394 394 for setting in ui_settings:
395 395 if setting.active:
396 396 log.debug(
397 397 'settings ui from db: [%s] %s=%s',
398 398 setting.section, setting.key, setting.value)
399 399 config.append((
400 400 safe_str(setting.section), safe_str(setting.key),
401 401 safe_str(setting.value)))
402 402 if setting.key == 'push_ssl':
403 403 # force set push_ssl requirement to False, rhodecode
404 404 # handles that
405 405 config.append((
406 406 safe_str(setting.section), safe_str(setting.key), False))
407 407 if clear_session:
408 408 meta.Session.remove()
409 409
410 410 # TODO: mikhail: probably it makes no sense to re-read hooks information.
411 411 # It's already there and activated/deactivated
412 412 skip_entries = []
413 413 enabled_hook_classes = get_enabled_hook_classes(ui_settings)
414 414 if 'pull' not in enabled_hook_classes:
415 415 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PULL))
416 416 if 'push' not in enabled_hook_classes:
417 417 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PUSH))
418 418
419 419 config = [entry for entry in config if entry[:2] not in skip_entries]
420 420
421 421 return config
422 422
423 423
424 424 def make_db_config(clear_session=True, repo=None):
425 425 """
426 426 Create a :class:`Config` instance based on the values in the database.
427 427 """
428 428 config = Config()
429 429 config_data = config_data_from_db(clear_session=clear_session, repo=repo)
430 430 for section, option, value in config_data:
431 431 config.set(section, option, value)
432 432 return config
433 433
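# Illustrative usage sketch (the repo name is hypothetical): build a vcs
# Config object for one repository from the database-backed ui settings; the
# result can then be passed to scm_instance(config=config), as done in
# repo2db_mapper() below.
#
#   config = make_db_config(repo='my-group/my-repo')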
434 434
435 435 def get_enabled_hook_classes(ui_settings):
436 436 """
437 437 Return the enabled hook classes.
438 438
439 439 :param ui_settings: List of ui_settings as returned
440 440 by :meth:`VcsSettingsModel.get_ui_settings`
441 441
442 442 :return: a list with the enabled hook classes. The order is not guaranteed.
443 443 :rtype: list
444 444 """
445 445 enabled_hooks = []
446 446 active_hook_keys = [
447 447 key for section, key, value, active in ui_settings
448 448 if section == 'hooks' and active]
449 449
450 450 hook_names = {
451 451 RhodeCodeUi.HOOK_PUSH: 'push',
452 452 RhodeCodeUi.HOOK_PULL: 'pull',
453 453 RhodeCodeUi.HOOK_REPO_SIZE: 'repo_size'
454 454 }
455 455
456 456 for key in active_hook_keys:
457 457 hook = hook_names.get(key)
458 458 if hook:
459 459 enabled_hooks.append(hook)
460 460
461 461 return enabled_hooks
462 462
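# Illustrative sketch (hypothetical settings values): ui_settings rows unpack
# as (section, key, value, active), so with the data below only the push hook
# would be reported as enabled.
#
#   ui_settings = [
#       ('hooks', RhodeCodeUi.HOOK_PUSH, 'python:...', True),
#       ('hooks', RhodeCodeUi.HOOK_REPO_SIZE, 'python:...', False),
#   ]
#   get_enabled_hook_classes(ui_settings)  # -> ['push']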
463 463
464 464 def set_rhodecode_config(config):
465 465 """
466 466 Updates pylons config with new settings from database
467 467
468 468 :param config:
469 469 """
470 470 app_settings = SettingsModel().get_all_settings()
471 471
472 472 for k, v in app_settings.items():
473 473 config[k] = v
474 474
475 475
476 476 def map_groups(path):
477 477 """
478 478 Given a full path to a repository, create all nested groups that this
479 479 repo is inside. This function creates parent-child relationships between
480 480 groups and creates default perms for all new groups.
481 481
482 482 :param path: full path to the repository
483 483 """
484 484 sa = meta.Session()
485 485 groups = path.split(Repository.NAME_SEP)
486 486 parent = None
487 487 group = None
488 488
489 489 # last element is repo in nested groups structure
490 490 groups = groups[:-1]
491 491 rgm = RepoGroupModel(sa)
492 492 owner = User.get_first_admin()
493 493 for lvl, group_name in enumerate(groups):
494 494 group_name = '/'.join(groups[:lvl] + [group_name])
495 495 group = RepoGroup.get_by_group_name(group_name)
496 496 desc = '%s group' % group_name
497 497
498 498 # skip folders that are now removed repos
499 499 if REMOVED_REPO_PAT.match(group_name):
500 500 break
501 501
502 502 if group is None:
503 503 log.debug('creating group level: %s group_name: %s',
504 504 lvl, group_name)
505 505 group = RepoGroup(group_name, parent)
506 506 group.group_description = desc
507 507 group.user = owner
508 508 sa.add(group)
509 509 perm_obj = rgm._create_default_perms(group)
510 510 sa.add(perm_obj)
511 511 sa.flush()
512 512
513 513 parent = group
514 514 return group
515 515
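# Illustrative usage sketch (hypothetical nested repository path): the missing
# groups are created top-down and the deepest one is returned, here the group
# 'company/projects'.
#
#   group = map_groups('company/projects/my-repo')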
516 516
517 517 def repo2db_mapper(initial_repo_list, remove_obsolete=False):
518 518 """
519 519 Maps all repos given in initial_repo_list; non-existing repositories
520 520 are created. If remove_obsolete is True, it also checks for db entries
521 521 that are not in initial_repo_list and removes them.
522 522
523 523 :param initial_repo_list: list of repositories found by scanning methods
524 524 :param remove_obsolete: check for obsolete entries in database
525 525 """
526 526 from rhodecode.model.repo import RepoModel
527 527 from rhodecode.model.scm import ScmModel
528 528 sa = meta.Session()
529 529 repo_model = RepoModel()
530 530 user = User.get_first_admin()
531 531 added = []
532 532
533 533 # creation defaults
534 534 defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
535 535 enable_statistics = defs.get('repo_enable_statistics')
536 536 enable_locking = defs.get('repo_enable_locking')
537 537 enable_downloads = defs.get('repo_enable_downloads')
538 538 private = defs.get('repo_private')
539 539
540 540 for name, repo in initial_repo_list.items():
541 541 group = map_groups(name)
542 542 unicode_name = safe_unicode(name)
543 543 db_repo = repo_model.get_by_repo_name(unicode_name)
544 544 # found a repo that is on the filesystem but not in the RhodeCode database
545 545 if not db_repo:
546 546 log.info('repository %s not found, creating now', name)
547 547 added.append(name)
548 548 desc = (repo.description
549 549 if repo.description != 'unknown'
550 550 else '%s repository' % name)
551 551
552 552 db_repo = repo_model._create_repo(
553 553 repo_name=name,
554 554 repo_type=repo.alias,
555 555 description=desc,
556 556 repo_group=getattr(group, 'group_id', None),
557 557 owner=user,
558 558 enable_locking=enable_locking,
559 559 enable_downloads=enable_downloads,
560 560 enable_statistics=enable_statistics,
561 561 private=private,
562 562 state=Repository.STATE_CREATED
563 563 )
564 564 sa.commit()
565 565 # we just added that repo, so make sure the server info is updated
566 566 if db_repo.repo_type == 'git':
567 567 git_repo = db_repo.scm_instance()
568 568 # update repository server-info
569 569 log.debug('Running update server info')
570 570 git_repo._update_server_info()
571 571
572 572 db_repo.update_commit_cache()
573 573
574 574 config = db_repo._config
575 575 config.set('extensions', 'largefiles', '')
576 576 ScmModel().install_hooks(
577 577 db_repo.scm_instance(config=config),
578 578 repo_type=db_repo.repo_type)
579 579
580 580 removed = []
581 581 if remove_obsolete:
582 582 # remove from database those repositories that are not in the filesystem
583 583 for repo in sa.query(Repository).all():
584 584 if repo.repo_name not in initial_repo_list.keys():
585 585 log.debug("Removing non-existing repository found in db `%s`",
586 586 repo.repo_name)
587 587 try:
588 588 RepoModel(sa).delete(repo, forks='detach', fs_remove=False)
589 589 sa.commit()
590 590 removed.append(repo.repo_name)
591 591 except Exception:
592 592 # don't hold further removals on error
593 593 log.error(traceback.format_exc())
594 594 sa.rollback()
595 595
596 596 def splitter(full_repo_name):
597 597 _parts = full_repo_name.rsplit(RepoGroup.url_sep(), 1)
598 598 gr_name = None
599 599 if len(_parts) == 2:
600 600 gr_name = _parts[0]
601 601 return gr_name
602 602
603 603 initial_repo_group_list = [splitter(x) for x in
604 604 initial_repo_list.keys() if splitter(x)]
605 605
606 606 # remove from the database those repository groups that are not on the
607 607 # filesystem. Due to parent-child relationships we need to delete them
608 608 # in a specific order, most nested first
609 609 all_groups = [x.group_name for x in sa.query(RepoGroup).all()]
610 610 nested_sort = lambda gr: len(gr.split('/'))
611 611 for group_name in sorted(all_groups, key=nested_sort, reverse=True):
612 612 if group_name not in initial_repo_group_list:
613 613 repo_group = RepoGroup.get_by_group_name(group_name)
614 614 if (repo_group.children.all() or
615 615 not RepoGroupModel().check_exist_filesystem(
616 616 group_name=group_name, exc_on_failure=False)):
617 617 continue
618 618
619 619 log.info(
620 620 'Removing non-existing repository group found in db `%s`',
621 621 group_name)
622 622 try:
623 623 RepoGroupModel(sa).delete(group_name, fs_remove=False)
624 624 sa.commit()
625 625 removed.append(group_name)
626 626 except Exception:
627 627 # don't hold further removals on error
628 628 log.exception(
629 629 'Unable to remove repository group `%s`',
630 630 group_name)
631 631 sa.rollback()
632 632 raise
633 633
634 634 return added, removed
635 635
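# Illustrative usage sketch: initial_repo_list is expected to map repository
# names to scm instances (objects exposing .alias and .description, as used
# above); the call returns which names were added to and removed from the
# database.
#
#   added, removed = repo2db_mapper(initial_repo_list, remove_obsolete=True)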
636 636
637 637 def get_default_cache_settings(settings):
638 638 cache_settings = {}
639 639 for key in settings.keys():
640 640 for prefix in ['beaker.cache.', 'cache.']:
641 641 if key.startswith(prefix):
642 642 name = key.split(prefix)[1].strip()
643 643 cache_settings[name] = settings[key].strip()
644 644 return cache_settings
645 645
646 646
647 647 # set cache regions for beaker so celery can utilise it
648 648 def add_cache(settings):
649 649 from rhodecode.lib import caches
650 650 cache_settings = {'regions': None}
651 651 # main cache settings used as default ...
652 652 cache_settings.update(get_default_cache_settings(settings))
653 653
654 654 if cache_settings['regions']:
655 655 for region in cache_settings['regions'].split(','):
656 656 region = region.strip()
657 657 region_settings = {}
658 658 for key, value in cache_settings.items():
659 659 if key.startswith(region):
660 660 region_settings[key.split('.')[1]] = value
661 661
662 662 caches.configure_cache_region(
663 663 region, region_settings, cache_settings)
664 664
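# Illustrative sketch (hypothetical ini keys): get_default_cache_settings()
# strips the 'beaker.cache.' / 'cache.' prefixes, so a settings dict like
#
#   {'beaker.cache.regions': 'short_term',
#    'beaker.cache.short_term.expire': '60'}
#
# becomes {'regions': 'short_term', 'short_term.expire': '60'}, which
# add_cache() then splits per region before configuring the beaker caches.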
665 665
666 666 def load_rcextensions(root_path):
667 667 import rhodecode
668 668 from rhodecode.config import conf
669 669
670 670 path = os.path.join(root_path, 'rcextensions', '__init__.py')
671 671 if os.path.isfile(path):
672 672 rcext = create_module('rc', path)
673 673 EXT = rhodecode.EXTENSIONS = rcext
674 674 log.debug('Found rcextensions now loading %s...', rcext)
675 675
676 676 # Additional mappings that are not present in the pygments lexers
677 677 conf.LANGUAGES_EXTENSIONS_MAP.update(getattr(EXT, 'EXTRA_MAPPINGS', {}))
678 678
679 679 # auto-check if the module is not missing any data; set it to the default if it is.
680 680 # This will help auto-update new features of the rcext module.
681 681 #from rhodecode.config import rcextensions
682 682 #for k in dir(rcextensions):
683 683 # if not k.startswith('_') and not hasattr(EXT, k):
684 684 # setattr(EXT, k, getattr(rcextensions, k))
685 685
686 686
687 687 def get_custom_lexer(extension):
688 688 """
689 689 returns a custom lexer if it is defined in rcextensions module, or None
690 690 if there's no custom lexer defined
691 691 """
692 692 import rhodecode
693 693 from pygments import lexers
694 694 # check if we didn't define this extension as another lexer
695 695 extensions = rhodecode.EXTENSIONS and getattr(rhodecode.EXTENSIONS, 'EXTRA_LEXERS', None)
696 696 if extensions and extension in rhodecode.EXTENSIONS.EXTRA_LEXERS:
697 697 _lexer_name = rhodecode.EXTENSIONS.EXTRA_LEXERS[extension]
698 698 return lexers.get_lexer_by_name(_lexer_name)
699 699
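# Illustrative sketch (hypothetical mapping): with an rcextensions module that
# defines EXTRA_LEXERS = {'xjm': 'javascript'}, the call below would return
# the pygments JavaScript lexer; for unmapped extensions it returns None.
#
#   lexer = get_custom_lexer('xjm')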
700 700
701 701 #==============================================================================
702 702 # TEST FUNCTIONS AND CREATORS
703 703 #==============================================================================
704 704 def create_test_index(repo_location, config):
705 705 """
706 Makes default test index
707
708 :param config: test config
709 :param full_index:
710 # start test server:
711 rcserver --with-vcsserver test.ini
712
713 # build index and store it in /tmp/rc/index:
714 rhodecode-index --force --api-host=http://vps1.dev:5000 --api-key=xxx --engine-location=/tmp/rc/index
715
716 # package and move new packages
717 tar -zcvf vcs_search_index.tar.gz -C /tmp/rc index
718 mv vcs_search_index.tar.gz rhodecode/tests/fixtures/
719
706 Makes default test index.
720 707 """
721 708 import rc_testdata
722 709
723 710 rc_testdata.extract_search_index(
724 711 'vcs_search_index', os.path.dirname(config['search.location']))
725 712
726 713
727 714 def create_test_directory(test_path):
728 715 """
729 create test dir if it doesn't exist
716 Create test directory if it doesn't exist.
730 717 """
731 718 if not os.path.isdir(test_path):
732 719 log.debug('Creating testdir %s', test_path)
733 720 os.makedirs(test_path)
734 721
735 722
736 723 def create_test_database(test_path, config):
737 724 """
738 725 Makes a fresh database.
739 726 """
740 727 from rhodecode.lib.db_manage import DbManage
741 728
742 729 # PART ONE create db
743 730 dbconf = config['sqlalchemy.db1.url']
744 731 log.debug('making test db %s', dbconf)
745 732
746 733 dbmanage = DbManage(log_sql=False, dbconf=dbconf, root=config['here'],
747 734 tests=True, cli_args={'force_ask': True})
748 735 dbmanage.create_tables(override=True)
749 736 dbmanage.set_db_version()
750 737 # for tests dynamically set new root paths based on generated content
751 738 dbmanage.create_settings(dbmanage.config_prompt(test_path))
752 739 dbmanage.create_default_user()
753 740 dbmanage.create_test_admin_and_users()
754 741 dbmanage.create_permissions()
755 742 dbmanage.populate_default_permissions()
756 743 Session().commit()
757 744
758 745
759 746 def create_test_repositories(test_path, config):
760 747 """
761 748 Creates test repositories in the temporary directory. Repositories are
762 749 extracted from archives within the rc_testdata package.
763 750 """
764 751 import rc_testdata
765 752 from rhodecode.tests import HG_REPO, GIT_REPO, SVN_REPO
766 753
767 754 log.debug('making test vcs repositories')
768 755
769 756 idx_path = config['search.location']
770 757 data_path = config['cache_dir']
771 758
772 759 # clean index and data
773 760 if idx_path and os.path.exists(idx_path):
774 761 log.debug('remove %s', idx_path)
775 762 shutil.rmtree(idx_path)
776 763
777 764 if data_path and os.path.exists(data_path):
778 765 log.debug('remove %s', data_path)
779 766 shutil.rmtree(data_path)
780 767
781 768 rc_testdata.extract_hg_dump('vcs_test_hg', jn(test_path, HG_REPO))
782 769 rc_testdata.extract_git_dump('vcs_test_git', jn(test_path, GIT_REPO))
783 770
784 771 # Note: Subversion is in the process of being integrated with the system;
785 772 # until we have a properly packed version of the test svn repository, this
786 773 # tries to copy over the repo from the package "rc_testdata"
787 774 svn_repo_path = rc_testdata.get_svn_repo_archive()
788 775 with tarfile.open(svn_repo_path) as tar:
789 776 tar.extractall(jn(test_path, SVN_REPO))
790 777
791 778
792 779 #==============================================================================
793 780 # PASTER COMMANDS
794 781 #==============================================================================
795 782 class BasePasterCommand(Command):
796 783 """
797 784 Abstract Base Class for paster commands.
798 785
799 786 The celery commands are somewhat aggressive about loading
800 787 celery.conf, and since our module sets the `CELERY_LOADER`
801 788 environment variable to our loader, we have to bootstrap a bit and
802 789 make sure we've had a chance to load the pylons config off of the
803 790 command line, otherwise everything fails.
804 791 """
805 792 min_args = 1
806 793 min_args_error = "Please provide a paster config file as an argument."
807 794 takes_config_file = 1
808 795 requires_config_file = True
809 796
810 797 def notify_msg(self, msg, log=False):
811 798 """Make a notification to user, additionally if logger is passed
812 799 it logs this action using given logger
813 800
814 801 :param msg: message that will be printed to user
815 802 :param log: logging instance, to use to additionally log this message
816 803
817 804 """
818 805 if log and isinstance(log, logging.Logger):
819 806 log.info(msg)
820 807
821 808 def run(self, args):
822 809 """
823 810 Overrides Command.run
824 811
825 812 Checks for a config file argument and loads it.
826 813 """
827 814 if len(args) < self.min_args:
828 815 raise BadCommand(
829 816 self.min_args_error % {'min_args': self.min_args,
830 817 'actual_args': len(args)})
831 818
832 819 # Decrement because we're going to lop off the first argument.
833 820 # @@ This is hacky
834 821 self.min_args -= 1
835 822 self.bootstrap_config(args[0])
836 823 self.update_parser()
837 824 return super(BasePasterCommand, self).run(args[1:])
838 825
839 826 def update_parser(self):
840 827 """
841 828 Abstract method. Allows for the class' parser to be updated
842 829 before the superclass' `run` method is called. Necessary to
843 830 allow options/arguments to be passed through to the underlying
844 831 celery command.
845 832 """
846 833 raise NotImplementedError("Abstract Method.")
847 834
848 835 def bootstrap_config(self, conf):
849 836 """
850 837 Loads the pylons configuration.
851 838 """
852 839 from pylons import config as pylonsconfig
853 840
854 841 self.path_to_ini_file = os.path.realpath(conf)
855 842 conf = paste.deploy.appconfig('config:' + self.path_to_ini_file)
856 843 pylonsconfig.init_app(conf.global_conf, conf.local_conf)
857 844
858 845 def _init_session(self):
859 846 """
860 847 Initializes the SQLAlchemy Session
861 848 """
862 849 logging.config.fileConfig(self.path_to_ini_file)
863 850 from pylons import config
864 851 from rhodecode.config.utils import initialize_database
865 852
866 853 # get to remove repos !!
867 854 add_cache(config)
868 855 initialize_database(config)
869 856
870 857
871 858 @decorator.decorator
872 859 def jsonify(func, *args, **kwargs):
873 860 """Action decorator that formats output for JSON
874 861
875 862 Given a function that will return content, this decorator will turn
876 863 the result into JSON, with a content-type of 'application/json' and
877 864 output it.
878 865
879 866 """
880 867 from pylons.decorators.util import get_pylons
881 868 from rhodecode.lib.ext_json import json
882 869 pylons = get_pylons(args)
883 870 pylons.response.headers['Content-Type'] = 'application/json; charset=utf-8'
884 871 data = func(*args, **kwargs)
885 872 if isinstance(data, (list, tuple)):
886 873 msg = "JSON responses with Array envelopes are susceptible to " \
887 874 "cross-site data leak attacks, see " \
888 875 "http://wiki.pylonshq.com/display/pylonsfaq/Warnings"
889 876 warnings.warn(msg, Warning, 2)
890 877 log.warning(msg)
891 878 log.debug("Returning JSON wrapped action output")
892 879 return json.dumps(data, encoding='utf-8')
893 880
894 881
895 882 class PartialRenderer(object):
896 883 """
897 884 Partial renderer used to render chunks of html used in datagrids.
898 885 Use it like::
899 886
900 887 _render = PartialRenderer('data_table/_dt_elements.html')
901 888 _render('quick_menu', args, kwargs)
902 889 PartialRenderer.h,
903 890 c,
904 891 _,
905 892 ungettext
906 893 are the template helpers initialized inside and can be re-used later
907 894
908 895 :param tmpl_name: template path relative to the /templates/ dir
909 896 """
910 897
911 898 def __init__(self, tmpl_name):
912 899 import rhodecode
913 900 from pylons import request, tmpl_context as c
914 901 from pylons.i18n.translation import _, ungettext
915 902 from rhodecode.lib import helpers as h
916 903
917 904 self.tmpl_name = tmpl_name
918 905 self.rhodecode = rhodecode
919 906 self.c = c
920 907 self._ = _
921 908 self.ungettext = ungettext
922 909 self.h = h
923 910 self.request = request
924 911
925 912 def _mako_lookup(self):
926 913 _tmpl_lookup = self.rhodecode.CONFIG['pylons.app_globals'].mako_lookup
927 914 return _tmpl_lookup.get_template(self.tmpl_name)
928 915
929 916 def _update_kwargs_for_render(self, kwargs):
930 917 """
931 918 Inject params required for Mako rendering
932 919 """
933 920 _kwargs = {
934 921 '_': self._,
935 922 'h': self.h,
936 923 'c': self.c,
937 924 'request': self.request,
938 925 'ungettext': self.ungettext,
939 926 }
940 927 _kwargs.update(kwargs)
941 928 return _kwargs
942 929
943 930 def _render_with_exc(self, render_func, args, kwargs):
944 931 try:
945 932 return render_func.render(*args, **kwargs)
946 933 except:
947 934 log.error(exceptions.text_error_template().render())
948 935 raise
949 936
950 937 def _get_template(self, template_obj, def_name):
951 938 if def_name:
952 939 tmpl = template_obj.get_def(def_name)
953 940 else:
954 941 tmpl = template_obj
955 942 return tmpl
956 943
957 944 def render(self, def_name, *args, **kwargs):
958 945 lookup_obj = self._mako_lookup()
959 946 tmpl = self._get_template(lookup_obj, def_name=def_name)
960 947 kwargs = self._update_kwargs_for_render(kwargs)
961 948 return self._render_with_exc(tmpl, args, kwargs)
962 949
963 950 def __call__(self, tmpl, *args, **kwargs):
964 951 return self.render(tmpl, *args, **kwargs)
965 952
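# Illustrative usage sketch (the def name and its arguments are hypothetical;
# the template path is the one from the class docstring above): render a
# single Mako def from a shared datagrid template.
#
#   _render = PartialRenderer('data_table/_dt_elements.html')
#   html_chunk = _render('quick_menu', repo_name='my-repo')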
966 953
967 954 def password_changed(auth_user, session):
968 955 if auth_user.username == User.DEFAULT_USER:
969 956 return False
970 957 password_hash = md5(auth_user.password) if auth_user.password else None
971 958 rhodecode_user = session.get('rhodecode_user', {})
972 959 session_password_hash = rhodecode_user.get('password', '')
973 960 return password_hash != session_password_hash
974 961
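# Illustrative usage sketch: password_changed() compares the md5 of the
# currently loaded user password with the hash stored in the session, so a
# per-request check could use it roughly like this (session handling is
# hypothetical):
#
#   if password_changed(auth_user, session):
#       # invalidate the session and force the user to log in again
#       ...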
975 962
976 963 def read_opensource_licenses():
977 964 global _license_cache
978 965
979 966 if not _license_cache:
980 967 licenses = pkg_resources.resource_string(
981 968 'rhodecode', 'config/licenses.json')
982 969 _license_cache = json.loads(licenses)
983 970
984 971 return _license_cache