logging: added additional log info to vcs detection util.
marcink -
r1315:72279f5c default
@@ -1,1019 +1,1020 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Utilities library for RhodeCode
23 23 """
24 24
25 25 import datetime
26 26 import decorator
27 27 import json
28 28 import logging
29 29 import os
30 30 import re
31 31 import shutil
32 32 import tempfile
33 33 import traceback
34 34 import tarfile
35 35 import warnings
36 36 import hashlib
37 37 from os.path import join as jn
38 38
39 39 import paste
40 40 import pkg_resources
41 41 from paste.script.command import Command, BadCommand
42 42 from webhelpers.text import collapse, remove_formatting, strip_tags
43 43 from mako import exceptions
44 44 from pyramid.threadlocal import get_current_registry
45 45
46 46 from rhodecode.lib.fakemod import create_module
47 47 from rhodecode.lib.vcs.backends.base import Config
48 48 from rhodecode.lib.vcs.exceptions import VCSError
49 49 from rhodecode.lib.vcs.utils.helpers import get_scm, get_scm_backend
50 50 from rhodecode.lib.utils2 import (
51 51 safe_str, safe_unicode, get_current_rhodecode_user, md5)
52 52 from rhodecode.model import meta
53 53 from rhodecode.model.db import (
54 54 Repository, User, RhodeCodeUi, UserLog, RepoGroup, UserGroup)
55 55 from rhodecode.model.meta import Session
56 56
57 57
58 58 log = logging.getLogger(__name__)
59 59
60 60 REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')
61 61
62 62 # String which contains characters that are not allowed in slug names for
63 63 # repositories or repository groups. It is properly escaped to use it in
64 64 # regular expressions.
65 65 SLUG_BAD_CHARS = re.escape('`?=[]\;\'"<>,/~!@#$%^&*()+{}|:')
66 66
67 67 # Regex that matches forbidden characters in repo/group slugs.
68 68 SLUG_BAD_CHAR_RE = re.compile('[{}]'.format(SLUG_BAD_CHARS))
69 69
70 70 # Regex that matches allowed characters in repo/group slugs.
71 71 SLUG_GOOD_CHAR_RE = re.compile('[^{}]'.format(SLUG_BAD_CHARS))
72 72
73 73 # Regex that matches whole repo/group slugs.
74 74 SLUG_RE = re.compile('[^{}]+'.format(SLUG_BAD_CHARS))
75 75
76 76 _license_cache = None
77 77
78 78
79 79 def repo_name_slug(value):
80 80 """
81 81 Return a slug of the repository name.
82 82 This function is called on each creation/modification
83 83 of a repository to prevent bad names.
84 84 """
85 85 replacement_char = '-'
86 86
87 87 slug = remove_formatting(value)
88 88 slug = SLUG_BAD_CHAR_RE.sub('', slug)
89 89 slug = re.sub('[\s]+', '-', slug)
90 90 slug = collapse(slug, replacement_char)
91 91 return slug
92 92
93 93
94 94 #==============================================================================
95 95 # PERM DECORATOR HELPERS FOR EXTRACTING NAMES FOR PERM CHECKS
96 96 #==============================================================================
97 97 def get_repo_slug(request):
98 98 _repo = request.environ['pylons.routes_dict'].get('repo_name')
99 99 if _repo:
100 100 _repo = _repo.rstrip('/')
101 101 return _repo
102 102
103 103
104 104 def get_repo_group_slug(request):
105 105 _group = request.environ['pylons.routes_dict'].get('group_name')
106 106 if _group:
107 107 _group = _group.rstrip('/')
108 108 return _group
109 109
110 110
111 111 def get_user_group_slug(request):
112 112 _group = request.environ['pylons.routes_dict'].get('user_group_id')
113 113 try:
114 114 _group = UserGroup.get(_group)
115 115 if _group:
116 116 _group = _group.users_group_name
117 117 except Exception:
118 118 log.debug(traceback.format_exc())
119 119 #catch all failures here
120 120 pass
121 121
122 122 return _group
123 123
124 124
125 125 def action_logger(user, action, repo, ipaddr='', sa=None, commit=False):
126 126 """
127 127 Action logger for various actions made by users
128 128
129 129 :param user: user that made this action, can be a unique username string or
130 130 object containing user_id attribute
131 131 :param action: action to log, should be one of the predefined unique actions for
132 132 easy translations
133 133 :param repo: string name of repository or object containing repo_id,
134 134 that action was made on
135 135 :param ipaddr: optional IP address from which the action was made
136 136 :param sa: optional sqlalchemy session
137 137
138 138 """
139 139
140 140 if not sa:
141 141 sa = meta.Session()
142 142 # if we don't get explicit IP address try to get one from registered user
143 143 # in tmpl context var
144 144 if not ipaddr:
145 145 ipaddr = getattr(get_current_rhodecode_user(), 'ip_addr', '')
146 146
147 147 try:
148 148 if getattr(user, 'user_id', None):
149 149 user_obj = User.get(user.user_id)
150 150 elif isinstance(user, basestring):
151 151 user_obj = User.get_by_username(user)
152 152 else:
153 153 raise Exception('You have to provide a user object or a username')
154 154
155 155 if getattr(repo, 'repo_id', None):
156 156 repo_obj = Repository.get(repo.repo_id)
157 157 repo_name = repo_obj.repo_name
158 158 elif isinstance(repo, basestring):
159 159 repo_name = repo.lstrip('/')
160 160 repo_obj = Repository.get_by_repo_name(repo_name)
161 161 else:
162 162 repo_obj = None
163 163 repo_name = ''
164 164
165 165 user_log = UserLog()
166 166 user_log.user_id = user_obj.user_id
167 167 user_log.username = user_obj.username
168 168 action = safe_unicode(action)
169 169 user_log.action = action[:1200000]
170 170
171 171 user_log.repository = repo_obj
172 172 user_log.repository_name = repo_name
173 173
174 174 user_log.action_date = datetime.datetime.now()
175 175 user_log.user_ip = ipaddr
176 176 sa.add(user_log)
177 177
178 178 log.info('Logging action:`%s` on repo:`%s` by user:%s ip:%s',
179 179 action, safe_unicode(repo), user_obj, ipaddr)
180 180 if commit:
181 181 sa.commit()
182 182 except Exception:
183 183 log.error(traceback.format_exc())
184 184 raise
185 185
186 186
187 187 def get_filesystem_repos(path, recursive=False, skip_removed_repos=True):
188 188 """
189 189 Scans the given path for repos and returns (name, (type, path)) tuples
190 190
191 191 :param path: path to scan for repositories
192 192 :param recursive: recursive search and return names with subdirs in front
193 193 """
194 194
195 195 # remove ending slash for better results
196 196 path = path.rstrip(os.sep)
197 197 log.debug('now scanning in %s location recursive:%s...', path, recursive)
198 198
199 199 def _get_repos(p):
200 200 dirpaths = _get_dirpaths(p)
201 201 if not _is_dir_writable(p):
202 202 log.warning('repo path without write access: %s', p)
203 203
204 204 for dirpath in dirpaths:
205 205 if os.path.isfile(os.path.join(p, dirpath)):
206 206 continue
207 207 cur_path = os.path.join(p, dirpath)
208 208
209 209 # skip removed repos
210 210 if skip_removed_repos and REMOVED_REPO_PAT.match(dirpath):
211 211 continue
212 212
213 213 # skip .<something> dirs
214 214 if dirpath.startswith('.'):
215 215 continue
216 216
217 217 try:
218 218 scm_info = get_scm(cur_path)
219 219 yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
220 220 except VCSError:
221 221 if not recursive:
222 222 continue
223 223 # check if this dir contains other repos for recursive scan
224 224 rec_path = os.path.join(p, dirpath)
225 225 if os.path.isdir(rec_path):
226 226 for inner_scm in _get_repos(rec_path):
227 227 yield inner_scm
228 228
229 229 return _get_repos(path)
230 230
231 231
232 232 def _get_dirpaths(p):
233 233 try:
234 234 # OS-independent way of checking if we have at least read-only
235 235 # access or not.
236 236 dirpaths = os.listdir(p)
237 237 except OSError:
238 238 log.warning('ignoring repo path without read access: %s', p)
239 239 return []
240 240
241 241 # os.listdir has a quirk: if a unicode path is passed in, it tries to
242 242 # decode the paths and suddenly returns unicode objects itself. The items it
243 243 # cannot decode are returned as strings and cause issues.
244 244 #
245 245 # Those paths are ignored here until a solid solution for path handling has
246 246 # been built.
247 247 expected_type = type(p)
248 248
249 249 def _has_correct_type(item):
250 250 if type(item) is not expected_type:
251 251 log.error(
252 252 u"Ignoring path %s since it cannot be decoded into unicode.",
253 253 # Using "repr" so that the byte value stays visible in case of
254 254 # support/debugging requests.
255 255 repr(item))
256 256 return False
257 257 return True
258 258
259 259 dirpaths = [item for item in dirpaths if _has_correct_type(item)]
260 260
261 261 return dirpaths
262 262
263 263
264 264 def _is_dir_writable(path):
265 265 """
266 266 Probe if `path` is writable.
267 267
268 268 Due to trouble on Cygwin / Windows, this is actually probing if it is
269 269 possible to create a file inside of `path`, stat does not produce reliable
270 270 results in this case.
271 271 """
272 272 try:
273 273 with tempfile.TemporaryFile(dir=path):
274 274 pass
275 275 except OSError:
276 276 return False
277 277 return True
278 278
279 279
280 280 def is_valid_repo(repo_name, base_path, expect_scm=None, explicit_scm=None):
281 281 """
282 282 Returns True if the given path is a valid repository, False otherwise.
283 283 If expect_scm is given, also compare whether the detected scm matches
284 284 the expected one. If explicit_scm is given, don't try to detect the
285 285 scm, just use the given one to check if the repo is valid.
286 286
287 287 :param repo_name:
288 288 :param base_path:
289 289 :param expect_scm:
290 290 :param explicit_scm:
291 291
292 292 :return True: if given path is a valid repository
293 293 """
294 294 full_path = os.path.join(safe_str(base_path), safe_str(repo_name))
295 log.debug('Checking if `%s` is a valid path for repository', repo_name)
295 log.debug('Checking if `%s` is a valid path for repository. '
296 'Explicit type: %s', repo_name, explicit_scm)
296 297
297 298 try:
298 299 if explicit_scm:
299 300 detected_scms = [get_scm_backend(explicit_scm)]
300 301 else:
301 302 detected_scms = get_scm(full_path)
302 303
303 304 if expect_scm:
304 305 return detected_scms[0] == expect_scm
305 306 log.debug('path: %s is a vcs object:%s', full_path, detected_scms)
306 307 return True
307 308 except VCSError:
308 309 log.debug('path: %s is not a valid repo !', full_path)
309 310 return False
310 311
311 312
312 313 def is_valid_repo_group(repo_group_name, base_path, skip_path_check=False):
313 314 """
314 315 Returns True if given path is a repository group, False otherwise
315 316
316 317 :param repo_group_name:
317 318 :param base_path:
318 319 """
319 320 full_path = os.path.join(safe_str(base_path), safe_str(repo_group_name))
320 321 log.debug('Checking if `%s` is a valid path for repository group',
321 322 repo_group_name)
322 323
323 324 # check if it's not a repo
324 325 if is_valid_repo(repo_group_name, base_path):
326 327 log.debug('Repo called %s exists, it is not a valid '
326 327 'repo group' % repo_group_name)
327 328 return False
328 329
329 330 try:
330 331 # we need to check bare git repos at higher level
331 332 # since we might match branches/hooks/info/objects or possible
332 333 # other things inside bare git repo
333 334 scm_ = get_scm(os.path.dirname(full_path))
335 336 log.debug('path: %s is a vcs object:%s, not a valid '
335 336 'repo group' % (full_path, scm_))
336 337 return False
337 338 except VCSError:
338 339 pass
339 340
340 341 # check if it's a valid path
341 342 if skip_path_check or os.path.isdir(full_path):
342 343 log.debug('path: %s is a valid repo group !', full_path)
343 344 return True
344 345
345 346 log.debug('path: %s is not a valid repo group !', full_path)
346 347 return False
347 348
348 349
349 350 def ask_ok(prompt, retries=4, complaint='[y]es or [n]o please!'):
350 351 while True:
351 352 ok = raw_input(prompt)
352 353 if ok.lower() in ('y', 'ye', 'yes'):
353 354 return True
354 355 if ok.lower() in ('n', 'no', 'nop', 'nope'):
355 356 return False
356 357 retries = retries - 1
357 358 if retries < 0:
358 359 raise IOError
359 360 print(complaint)
360 361
361 362 # propagated from mercurial documentation
362 363 ui_sections = [
363 364 'alias', 'auth',
364 365 'decode/encode', 'defaults',
365 366 'diff', 'email',
366 367 'extensions', 'format',
367 368 'merge-patterns', 'merge-tools',
368 369 'hooks', 'http_proxy',
369 370 'smtp', 'patch',
370 371 'paths', 'profiling',
371 372 'server', 'trusted',
372 373 'ui', 'web', ]
373 374
374 375
375 376 def config_data_from_db(clear_session=True, repo=None):
376 377 """
377 378 Read the configuration data from the database and return configuration
378 379 tuples.
379 380 """
380 381 from rhodecode.model.settings import VcsSettingsModel
381 382
382 383 config = []
383 384
384 385 sa = meta.Session()
385 386 settings_model = VcsSettingsModel(repo=repo, sa=sa)
386 387
387 388 ui_settings = settings_model.get_ui_settings()
388 389
389 390 for setting in ui_settings:
390 391 if setting.active:
391 392 log.debug(
392 393 'settings ui from db: [%s] %s=%s',
393 394 setting.section, setting.key, setting.value)
394 395 config.append((
395 396 safe_str(setting.section), safe_str(setting.key),
396 397 safe_str(setting.value)))
397 398 if setting.key == 'push_ssl':
398 399 # force set push_ssl requirement to False, rhodecode
399 400 # handles that
400 401 config.append((
401 402 safe_str(setting.section), safe_str(setting.key), False))
402 403 if clear_session:
403 404 meta.Session.remove()
404 405
405 406 # TODO: mikhail: probably it makes no sense to re-read hooks information.
406 407 # It's already there and activated/deactivated
407 408 skip_entries = []
408 409 enabled_hook_classes = get_enabled_hook_classes(ui_settings)
409 410 if 'pull' not in enabled_hook_classes:
410 411 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PULL))
411 412 if 'push' not in enabled_hook_classes:
412 413 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PUSH))
413 414
414 415 config = [entry for entry in config if entry[:2] not in skip_entries]
415 416
416 417 return config
417 418
418 419
419 420 def make_db_config(clear_session=True, repo=None):
420 421 """
421 422 Create a :class:`Config` instance based on the values in the database.
422 423 """
423 424 config = Config()
424 425 config_data = config_data_from_db(clear_session=clear_session, repo=repo)
425 426 for section, option, value in config_data:
426 427 config.set(section, option, value)
427 428 return config
428 429
429 430
430 431 def get_enabled_hook_classes(ui_settings):
431 432 """
432 433 Return the enabled hook classes.
433 434
434 435 :param ui_settings: List of ui_settings as returned
435 436 by :meth:`VcsSettingsModel.get_ui_settings`
436 437
437 438 :return: a list with the enabled hook classes. The order is not guaranteed.
438 439 :rtype: list
439 440 """
440 441 enabled_hooks = []
441 442 active_hook_keys = [
442 443 key for section, key, value, active in ui_settings
443 444 if section == 'hooks' and active]
444 445
445 446 hook_names = {
446 447 RhodeCodeUi.HOOK_PUSH: 'push',
447 448 RhodeCodeUi.HOOK_PULL: 'pull',
448 449 RhodeCodeUi.HOOK_REPO_SIZE: 'repo_size'
449 450 }
450 451
451 452 for key in active_hook_keys:
452 453 hook = hook_names.get(key)
453 454 if hook:
454 455 enabled_hooks.append(hook)
455 456
456 457 return enabled_hooks
457 458
458 459
459 460 def set_rhodecode_config(config):
460 461 """
461 462 Updates pylons config with new settings from database
462 463
463 464 :param config:
464 465 """
465 466 from rhodecode.model.settings import SettingsModel
466 467 app_settings = SettingsModel().get_all_settings()
467 468
468 469 for k, v in app_settings.items():
469 470 config[k] = v
470 471
471 472
472 473 def get_rhodecode_realm():
473 474 """
474 475 Return the rhodecode realm from database.
475 476 """
476 477 from rhodecode.model.settings import SettingsModel
477 478 realm = SettingsModel().get_setting_by_name('realm')
478 479 return safe_str(realm.app_settings_value)
479 480
480 481
481 482 def get_rhodecode_base_path():
482 483 """
483 484 Returns the base path. The base path is the filesystem path which points
484 485 to the repository store.
485 486 """
486 487 from rhodecode.model.settings import SettingsModel
487 488 paths_ui = SettingsModel().get_ui_by_section_and_key('paths', '/')
488 489 return safe_str(paths_ui.ui_value)
489 490
490 491
491 492 def map_groups(path):
492 493 """
493 494 Given a full path to a repository, create all nested groups that this
494 495 repo is inside. This function creates parent-child relationships between
495 496 groups and creates default perms for all new groups.
496 497
497 498 :param path: full path to the repository
498 499 """
499 500 from rhodecode.model.repo_group import RepoGroupModel
500 501 sa = meta.Session()
501 502 groups = path.split(Repository.NAME_SEP)
502 503 parent = None
503 504 group = None
504 505
505 506 # last element is repo in nested groups structure
506 507 groups = groups[:-1]
507 508 rgm = RepoGroupModel(sa)
508 509 owner = User.get_first_super_admin()
509 510 for lvl, group_name in enumerate(groups):
510 511 group_name = '/'.join(groups[:lvl] + [group_name])
511 512 group = RepoGroup.get_by_group_name(group_name)
512 513 desc = '%s group' % group_name
513 514
514 515 # skip folders that are now removed repos
515 516 if REMOVED_REPO_PAT.match(group_name):
516 517 break
517 518
518 519 if group is None:
519 520 log.debug('creating group level: %s group_name: %s',
520 521 lvl, group_name)
521 522 group = RepoGroup(group_name, parent)
522 523 group.group_description = desc
523 524 group.user = owner
524 525 sa.add(group)
525 526 perm_obj = rgm._create_default_perms(group)
526 527 sa.add(perm_obj)
527 528 sa.flush()
528 529
529 530 parent = group
530 531 return group
531 532
532 533
533 534 def repo2db_mapper(initial_repo_list, remove_obsolete=False):
534 535 """
535 536 Maps all repos given in initial_repo_list; non-existing repositories
536 537 are created. If remove_obsolete is True, it also checks for db entries
537 538 that are not in initial_repo_list and removes them.
538 539
539 540 :param initial_repo_list: list of repositories found by scanning methods
540 541 :param remove_obsolete: check for obsolete entries in database
541 542 """
542 543 from rhodecode.model.repo import RepoModel
543 544 from rhodecode.model.scm import ScmModel
544 545 from rhodecode.model.repo_group import RepoGroupModel
545 546 from rhodecode.model.settings import SettingsModel
546 547
547 548 sa = meta.Session()
548 549 repo_model = RepoModel()
549 550 user = User.get_first_super_admin()
550 551 added = []
551 552
552 553 # creation defaults
553 554 defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
554 555 enable_statistics = defs.get('repo_enable_statistics')
555 556 enable_locking = defs.get('repo_enable_locking')
556 557 enable_downloads = defs.get('repo_enable_downloads')
557 558 private = defs.get('repo_private')
558 559
559 560 for name, repo in initial_repo_list.items():
560 561 group = map_groups(name)
561 562 unicode_name = safe_unicode(name)
562 563 db_repo = repo_model.get_by_repo_name(unicode_name)
563 564 # found a repo that is on the filesystem but not in the RhodeCode database
564 565 if not db_repo:
565 566 log.info('repository %s not found, creating now', name)
566 567 added.append(name)
567 568 desc = (repo.description
568 569 if repo.description != 'unknown'
569 570 else '%s repository' % name)
570 571
571 572 db_repo = repo_model._create_repo(
572 573 repo_name=name,
573 574 repo_type=repo.alias,
574 575 description=desc,
575 576 repo_group=getattr(group, 'group_id', None),
576 577 owner=user,
577 578 enable_locking=enable_locking,
578 579 enable_downloads=enable_downloads,
579 580 enable_statistics=enable_statistics,
580 581 private=private,
581 582 state=Repository.STATE_CREATED
582 583 )
583 584 sa.commit()
584 585 # we just added that repo, so make sure the server info is updated
585 586 if db_repo.repo_type == 'git':
586 587 git_repo = db_repo.scm_instance()
587 588 # update repository server-info
588 589 log.debug('Running update server info')
589 590 git_repo._update_server_info()
590 591
591 592 db_repo.update_commit_cache()
592 593
593 594 config = db_repo._config
594 595 config.set('extensions', 'largefiles', '')
595 596 ScmModel().install_hooks(
596 597 db_repo.scm_instance(config=config),
597 598 repo_type=db_repo.repo_type)
598 599
599 600 removed = []
600 601 if remove_obsolete:
601 602 # remove from database those repositories that are not in the filesystem
602 603 for repo in sa.query(Repository).all():
603 604 if repo.repo_name not in initial_repo_list.keys():
604 605 log.debug("Removing non-existing repository found in db `%s`",
605 606 repo.repo_name)
606 607 try:
607 608 RepoModel(sa).delete(repo, forks='detach', fs_remove=False)
608 609 sa.commit()
609 610 removed.append(repo.repo_name)
610 611 except Exception:
611 612 # don't hold further removals on error
612 613 log.error(traceback.format_exc())
613 614 sa.rollback()
614 615
615 616 def splitter(full_repo_name):
616 617 _parts = full_repo_name.rsplit(RepoGroup.url_sep(), 1)
617 618 gr_name = None
618 619 if len(_parts) == 2:
619 620 gr_name = _parts[0]
620 621 return gr_name
621 622
622 623 initial_repo_group_list = [splitter(x) for x in
623 624 initial_repo_list.keys() if splitter(x)]
624 625
625 626 # remove from the database those repository groups that are not on the
626 627 # filesystem; due to parent/child relationships we need to delete them
627 628 # in a specific order, most nested first
628 629 all_groups = [x.group_name for x in sa.query(RepoGroup).all()]
629 630 nested_sort = lambda gr: len(gr.split('/'))
630 631 for group_name in sorted(all_groups, key=nested_sort, reverse=True):
631 632 if group_name not in initial_repo_group_list:
632 633 repo_group = RepoGroup.get_by_group_name(group_name)
633 634 if (repo_group.children.all() or
634 635 not RepoGroupModel().check_exist_filesystem(
635 636 group_name=group_name, exc_on_failure=False)):
636 637 continue
637 638
638 639 log.info(
639 640 'Removing non-existing repository group found in db `%s`',
640 641 group_name)
641 642 try:
642 643 RepoGroupModel(sa).delete(group_name, fs_remove=False)
643 644 sa.commit()
644 645 removed.append(group_name)
645 646 except Exception:
646 647 # don't hold further removals on error
647 648 log.exception(
648 649 'Unable to remove repository group `%s`',
649 650 group_name)
650 651 sa.rollback()
651 652 raise
652 653
653 654 return added, removed
654 655
655 656
656 657 def get_default_cache_settings(settings):
657 658 cache_settings = {}
658 659 for key in settings.keys():
659 660 for prefix in ['beaker.cache.', 'cache.']:
660 661 if key.startswith(prefix):
661 662 name = key.split(prefix)[1].strip()
662 663 cache_settings[name] = settings[key].strip()
663 664 return cache_settings
664 665
665 666
666 667 # set cache regions for beaker so celery can utilise it
667 668 def add_cache(settings):
668 669 from rhodecode.lib import caches
669 670 cache_settings = {'regions': None}
670 671 # main cache settings used as default ...
671 672 cache_settings.update(get_default_cache_settings(settings))
672 673
673 674 if cache_settings['regions']:
674 675 for region in cache_settings['regions'].split(','):
675 676 region = region.strip()
676 677 region_settings = {}
677 678 for key, value in cache_settings.items():
678 679 if key.startswith(region):
679 680 region_settings[key.split('.')[1]] = value
680 681
681 682 caches.configure_cache_region(
682 683 region, region_settings, cache_settings)
683 684
684 685
685 686 def load_rcextensions(root_path):
686 687 import rhodecode
687 688 from rhodecode.config import conf
688 689
689 690 path = os.path.join(root_path, 'rcextensions', '__init__.py')
690 691 if os.path.isfile(path):
691 692 rcext = create_module('rc', path)
692 693 EXT = rhodecode.EXTENSIONS = rcext
693 694 log.debug('Found rcextensions now loading %s...', rcext)
694 695
695 696 # Additional mappings that are not present in the pygments lexers
696 697 conf.LANGUAGES_EXTENSIONS_MAP.update(getattr(EXT, 'EXTRA_MAPPINGS', {}))
697 698
698 699 # auto-check if the module is not missing any data, set to default if it is;
699 700 # this will help auto-update new features of the rcext module
700 701 #from rhodecode.config import rcextensions
701 702 #for k in dir(rcextensions):
702 703 # if not k.startswith('_') and not hasattr(EXT, k):
703 704 # setattr(EXT, k, getattr(rcextensions, k))
704 705
705 706
706 707 def get_custom_lexer(extension):
707 708 """
708 709 returns a custom lexer if it is defined in rcextensions module, or None
709 710 if there's no custom lexer defined
710 711 """
711 712 import rhodecode
712 713 from pygments import lexers
713 714 # check if we didn't define this extension as another lexer
714 715 extensions = rhodecode.EXTENSIONS and getattr(rhodecode.EXTENSIONS, 'EXTRA_LEXERS', None)
715 716 if extensions and extension in rhodecode.EXTENSIONS.EXTRA_LEXERS:
716 717 _lexer_name = rhodecode.EXTENSIONS.EXTRA_LEXERS[extension]
717 718 return lexers.get_lexer_by_name(_lexer_name)
718 719
719 720
720 721 #==============================================================================
721 722 # TEST FUNCTIONS AND CREATORS
722 723 #==============================================================================
723 724 def create_test_index(repo_location, config):
724 725 """
725 726 Makes default test index.
726 727 """
727 728 import rc_testdata
728 729
729 730 rc_testdata.extract_search_index(
730 731 'vcs_search_index', os.path.dirname(config['search.location']))
731 732
732 733
733 734 def create_test_directory(test_path):
734 735 """
735 736 Create test directory if it doesn't exist.
736 737 """
737 738 if not os.path.isdir(test_path):
738 739 log.debug('Creating testdir %s', test_path)
739 740 os.makedirs(test_path)
740 741
741 742
742 743 def create_test_database(test_path, config):
743 744 """
744 745 Makes a fresh database.
745 746 """
746 747 from rhodecode.lib.db_manage import DbManage
747 748
748 749 # PART ONE create db
749 750 dbconf = config['sqlalchemy.db1.url']
750 751 log.debug('making test db %s', dbconf)
751 752
752 753 dbmanage = DbManage(log_sql=False, dbconf=dbconf, root=config['here'],
753 754 tests=True, cli_args={'force_ask': True})
754 755 dbmanage.create_tables(override=True)
755 756 dbmanage.set_db_version()
756 757 # for tests dynamically set new root paths based on generated content
757 758 dbmanage.create_settings(dbmanage.config_prompt(test_path))
758 759 dbmanage.create_default_user()
759 760 dbmanage.create_test_admin_and_users()
760 761 dbmanage.create_permissions()
761 762 dbmanage.populate_default_permissions()
762 763 Session().commit()
763 764
764 765
765 766 def create_test_repositories(test_path, config):
766 767 """
767 768 Creates test repositories in the temporary directory. Repositories are
768 769 extracted from archives within the rc_testdata package.
769 770 """
770 771 import rc_testdata
771 772 from rhodecode.tests import HG_REPO, GIT_REPO, SVN_REPO
772 773
773 774 log.debug('making test vcs repositories')
774 775
775 776 idx_path = config['search.location']
776 777 data_path = config['cache_dir']
777 778
778 779 # clean index and data
779 780 if idx_path and os.path.exists(idx_path):
780 781 log.debug('remove %s', idx_path)
781 782 shutil.rmtree(idx_path)
782 783
783 784 if data_path and os.path.exists(data_path):
784 785 log.debug('remove %s', data_path)
785 786 shutil.rmtree(data_path)
786 787
787 788 rc_testdata.extract_hg_dump('vcs_test_hg', jn(test_path, HG_REPO))
788 789 rc_testdata.extract_git_dump('vcs_test_git', jn(test_path, GIT_REPO))
789 790
790 791 # Note: Subversion is in the process of being integrated with the system,
791 792 # until we have a properly packed version of the test svn repository, this
792 793 # tries to copy over the repo from a package "rc_testdata"
793 794 svn_repo_path = rc_testdata.get_svn_repo_archive()
794 795 with tarfile.open(svn_repo_path) as tar:
795 796 tar.extractall(jn(test_path, SVN_REPO))
796 797
797 798
798 799 #==============================================================================
799 800 # PASTER COMMANDS
800 801 #==============================================================================
801 802 class BasePasterCommand(Command):
802 803 """
803 804 Abstract Base Class for paster commands.
804 805
805 806 The celery commands are somewhat aggressive about loading
806 807 celery.conf, and since our module sets the `CELERY_LOADER`
807 808 environment variable to our loader, we have to bootstrap a bit and
808 809 make sure we've had a chance to load the pylons config off of the
809 810 command line, otherwise everything fails.
810 811 """
811 812 min_args = 1
812 813 min_args_error = "Please provide a paster config file as an argument."
813 814 takes_config_file = 1
814 815 requires_config_file = True
815 816
816 817 def notify_msg(self, msg, log=False):
817 818 """Notify the user with a message; additionally, if a logger is passed,
818 819 log this action using the given logger
819 820
820 821 :param msg: message that will be printed to user
821 822 :param log: logging instance, to use to additionally log this message
822 823
823 824 """
824 825 if log and isinstance(log, logging.Logger):
825 826 log.info(msg)
826 827
827 828 def run(self, args):
828 829 """
829 830 Overrides Command.run
830 831
831 832 Checks for a config file argument and loads it.
832 833 """
833 834 if len(args) < self.min_args:
834 835 raise BadCommand(
835 836 self.min_args_error % {'min_args': self.min_args,
836 837 'actual_args': len(args)})
837 838
838 839 # Decrement because we're going to lob off the first argument.
839 840 # @@ This is hacky
840 841 self.min_args -= 1
841 842 self.bootstrap_config(args[0])
842 843 self.update_parser()
843 844 return super(BasePasterCommand, self).run(args[1:])
844 845
845 846 def update_parser(self):
846 847 """
847 848 Abstract method. Allows for the class' parser to be updated
848 849 before the superclass' `run` method is called. Necessary to
849 850 allow options/arguments to be passed through to the underlying
850 851 celery command.
851 852 """
852 853 raise NotImplementedError("Abstract Method.")
853 854
854 855 def bootstrap_config(self, conf):
855 856 """
856 857 Loads the pylons configuration.
857 858 """
858 859 from pylons import config as pylonsconfig
859 860
860 861 self.path_to_ini_file = os.path.realpath(conf)
861 862 conf = paste.deploy.appconfig('config:' + self.path_to_ini_file)
862 863 pylonsconfig.init_app(conf.global_conf, conf.local_conf)
863 864
864 865 def _init_session(self):
865 866 """
866 867 Inits SqlAlchemy Session
867 868 """
868 869 logging.config.fileConfig(self.path_to_ini_file)
869 870 from pylons import config
870 871 from rhodecode.config.utils import initialize_database
871 872
872 873 # get to remove repos !!
873 874 add_cache(config)
874 875 initialize_database(config)
875 876
876 877
877 878 @decorator.decorator
878 879 def jsonify(func, *args, **kwargs):
879 880 """Action decorator that formats output for JSON
880 881
881 882 Given a function that will return content, this decorator will turn
882 883 the result into JSON, with a content-type of 'application/json' and
883 884 output it.
884 885
885 886 """
886 887 from pylons.decorators.util import get_pylons
887 888 from rhodecode.lib.ext_json import json
888 889 pylons = get_pylons(args)
889 890 pylons.response.headers['Content-Type'] = 'application/json; charset=utf-8'
890 891 data = func(*args, **kwargs)
891 892 if isinstance(data, (list, tuple)):
892 893 msg = "JSON responses with Array envelopes are susceptible to " \
893 894 "cross-site data leak attacks, see " \
894 895 "http://wiki.pylonshq.com/display/pylonsfaq/Warnings"
895 896 warnings.warn(msg, Warning, 2)
896 897 log.warning(msg)
897 898 log.debug("Returning JSON wrapped action output")
898 899 return json.dumps(data, encoding='utf-8')
899 900
900 901
901 902 class PartialRenderer(object):
902 903 """
903 904 Partial renderer used to render chunks of html used in datagrids
904 905 use like::
905 906
906 907 _render = PartialRenderer('data_table/_dt_elements.mako')
907 908 _render('quick_menu', args, kwargs)
908 909 PartialRenderer.h,
909 910 c,
910 911 _,
911 912 ungettext
912 913 are the template helpers initialized inside and can be re-used later
913 914
914 915 :param tmpl_name: template path relative to the /templates/ dir
915 916 """
916 917
917 918 def __init__(self, tmpl_name):
918 919 import rhodecode
919 920 from pylons import request, tmpl_context as c
920 921 from pylons.i18n.translation import _, ungettext
921 922 from rhodecode.lib import helpers as h
922 923
923 924 self.tmpl_name = tmpl_name
924 925 self.rhodecode = rhodecode
925 926 self.c = c
926 927 self._ = _
927 928 self.ungettext = ungettext
928 929 self.h = h
929 930 self.request = request
930 931
931 932 def _mako_lookup(self):
932 933 _tmpl_lookup = self.rhodecode.CONFIG['pylons.app_globals'].mako_lookup
933 934 return _tmpl_lookup.get_template(self.tmpl_name)
934 935
935 936 def _update_kwargs_for_render(self, kwargs):
936 937 """
937 938 Inject params required for Mako rendering
938 939 """
939 940 _kwargs = {
940 941 '_': self._,
941 942 'h': self.h,
942 943 'c': self.c,
943 944 'request': self.request,
944 945 'ungettext': self.ungettext,
945 946 }
946 947 _kwargs.update(kwargs)
947 948 return _kwargs
948 949
949 950 def _render_with_exc(self, render_func, args, kwargs):
950 951 try:
951 952 return render_func.render(*args, **kwargs)
952 953 except:
953 954 log.error(exceptions.text_error_template().render())
954 955 raise
955 956
956 957 def _get_template(self, template_obj, def_name):
957 958 if def_name:
958 959 tmpl = template_obj.get_def(def_name)
959 960 else:
960 961 tmpl = template_obj
961 962 return tmpl
962 963
963 964 def render(self, def_name, *args, **kwargs):
964 965 lookup_obj = self._mako_lookup()
965 966 tmpl = self._get_template(lookup_obj, def_name=def_name)
966 967 kwargs = self._update_kwargs_for_render(kwargs)
967 968 return self._render_with_exc(tmpl, args, kwargs)
968 969
969 970 def __call__(self, tmpl, *args, **kwargs):
970 971 return self.render(tmpl, *args, **kwargs)
971 972
972 973
973 974 def password_changed(auth_user, session):
974 975 # Never report password change in case of default user or anonymous user.
975 976 if auth_user.username == User.DEFAULT_USER or auth_user.user_id is None:
976 977 return False
977 978
978 979 password_hash = md5(auth_user.password) if auth_user.password else None
979 980 rhodecode_user = session.get('rhodecode_user', {})
980 981 session_password_hash = rhodecode_user.get('password', '')
981 982 return password_hash != session_password_hash
982 983
983 984
984 985 def read_opensource_licenses():
985 986 global _license_cache
986 987
987 988 if not _license_cache:
988 989 licenses = pkg_resources.resource_string(
989 990 'rhodecode', 'config/licenses.json')
990 991 _license_cache = json.loads(licenses)
991 992
992 993 return _license_cache
993 994
994 995
995 996 def get_registry(request):
996 997 """
997 998 Utility to get the pyramid registry from a request. During migration to
998 999 pyramid we sometimes want to use the pyramid registry from pylons context.
999 1000 Therefore this utility returns `request.registry` for pyramid requests and
1000 1001 uses `get_current_registry()` for pylons requests.
1001 1002 """
1002 1003 try:
1003 1004 return request.registry
1004 1005 except AttributeError:
1005 1006 return get_current_registry()
1006 1007
1007 1008
1008 1009 def generate_platform_uuid():
1009 1010 """
1010 1011 Generates a platform UUID based on its name
1011 1012 """
1012 1013 import platform
1013 1014
1014 1015 try:
1015 1016 uuid_list = [platform.platform()]
1016 1017 return hashlib.sha256(':'.join(uuid_list)).hexdigest()
1017 1018 except Exception as e:
1018 1019 log.error('Failed to generate host uuid: %s' % e)
1019 1020 return 'UNDEFINED'
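
For reference, the change in this commit adds the explicit type to the debug line emitted by is_valid_repo(). Below is a minimal usage sketch of that function, assuming an installed and configured RhodeCode environment; the import path rhodecode.lib.utils is assumed, and the repository name and base path are hypothetical examples.

import logging

from rhodecode.lib.utils import is_valid_repo

# Enable DEBUG output so the detection log touched by this commit is visible, e.g.
#   Checking if `my-repo` is a valid path for repository. Explicit type: git
logging.basicConfig(level=logging.DEBUG)

# With explicit_scm set, scm detection is skipped and the given backend is used;
# the updated log line records which explicit type was requested.
is_valid_repo('my-repo', '/srv/repos', explicit_scm='git')

# Without explicit_scm the type is detected from the path on disk; the same
# log line then reports "Explicit type: None".
is_valid_repo('my-repo', '/srv/repos')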