spelling: depending
timeless@gmail.com
r5797:784d28c9 default
@@ -1,875 +1,875 b''
1 1 # -*- coding: utf-8 -*-
2 2 # This program is free software: you can redistribute it and/or modify
3 3 # it under the terms of the GNU General Public License as published by
4 4 # the Free Software Foundation, either version 3 of the License, or
5 5 # (at your option) any later version.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 """
15 15 kallithea.lib.utils
16 16 ~~~~~~~~~~~~~~~~~~~
17 17
18 18 Utilities library for Kallithea
19 19
20 20 This file was forked by the Kallithea project in July 2014.
21 21 Original author and date, and relevant copyright and licensing information is below:
22 22 :created_on: Apr 18, 2010
23 23 :author: marcink
24 24 :copyright: (c) 2013 RhodeCode GmbH, and others.
25 25 :license: GPLv3, see LICENSE.md for more details.
26 26 """
27 27
28 28 import os
29 29 import re
30 30 import logging
31 31 import datetime
32 32 import traceback
33 33 import paste
34 34 import beaker
35 35 import tarfile
36 36 import shutil
37 37 import decorator
38 38 import warnings
39 39 from os.path import abspath
40 40 from os.path import dirname as dn, join as jn
41 41
42 42 from paste.script.command import Command, BadCommand
43 43
44 44 from webhelpers.text import collapse, remove_formatting, strip_tags
45 45 from beaker.cache import _cache_decorate
46 46
47 47 from kallithea import BRAND
48 48
49 49 from kallithea.lib.vcs.utils.hgcompat import ui, config
50 50 from kallithea.lib.vcs.utils.helpers import get_scm
51 51 from kallithea.lib.vcs.exceptions import VCSError
52 52
53 53 from kallithea.model import meta
54 54 from kallithea.model.db import Repository, User, Ui, \
55 55 UserLog, RepoGroup, Setting, UserGroup
56 56 from kallithea.model.meta import Session
57 57 from kallithea.model.repo_group import RepoGroupModel
58 58 from kallithea.lib.utils2 import safe_str, safe_unicode, get_current_authuser
59 59 from kallithea.lib.vcs.utils.fakemod import create_module
60 60
61 61 log = logging.getLogger(__name__)
62 62
63 63 REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}_.*')
64 64
65 65
66 66 def recursive_replace(str_, replace=' '):
67 67 """
68 68 Recursively collapse repeated occurrences of the given character to a single instance
69 69
70 70 :param str_: given string
71 71 :param replace: char to find and replace multiple instances
72 72
73 73 Examples::
74 74 >>> recursive_replace("Mighty---Mighty-Bo--sstones",'-')
75 75 'Mighty-Mighty-Bo-sstones'
76 76 """
77 77
78 78 if str_.find(replace * 2) == -1:
79 79 return str_
80 80 else:
81 81 str_ = str_.replace(replace * 2, replace)
82 82 return recursive_replace(str_, replace)
83 83
84 84
85 85 def repo_name_slug(value):
86 86 """
87 87 Return a slug of the repository name.
88 88 This function is called on each repository creation/modification
89 89 to prevent invalid repository names.
90 90 """
91 91
92 92 slug = remove_formatting(value)
93 93 slug = strip_tags(slug)
94 94
95 95 for c in """`?=[]\;'"<>,/~!@#$%^&*()+{}|: """:
96 96 slug = slug.replace(c, '-')
97 97 slug = recursive_replace(slug, '-')
98 98 slug = collapse(slug, '-')
99 99 return slug
100 100
101 101
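A minimal sketch of how the two helpers above behave; the first call repeats the doctest, the second input name is made up and its exact output depends on webhelpers' collapse behaviour::

    recursive_replace("Mighty---Mighty-Bo--sstones", '-')  # 'Mighty-Mighty-Bo-sstones'
    repo_name_slug(u"My [new] repo!")                       # something like u'My-new-repo-'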
102 102 #==============================================================================
103 103 # PERM DECORATOR HELPERS FOR EXTRACTING NAMES FOR PERM CHECKS
104 104 #==============================================================================
105 105 def get_repo_slug(request):
106 106 _repo = request.environ['pylons.routes_dict'].get('repo_name')
107 107 if _repo:
108 108 _repo = _repo.rstrip('/')
109 109 return _repo
110 110
111 111
112 112 def get_repo_group_slug(request):
113 113 _group = request.environ['pylons.routes_dict'].get('group_name')
114 114 if _group:
115 115 _group = _group.rstrip('/')
116 116 return _group
117 117
118 118
119 119 def get_user_group_slug(request):
120 120 _group = request.environ['pylons.routes_dict'].get('id')
121 121 _group = UserGroup.get(_group)
122 122 if _group:
123 123 return _group.users_group_name
124 124 return None
125 125
126 126
127 127 def _extract_id_from_repo_name(repo_name):
128 128 if repo_name.startswith('/'):
129 129 repo_name = repo_name.lstrip('/')
130 130 by_id_match = re.match(r'^_(\d{1,})', repo_name)
131 131 if by_id_match:
132 132 return by_id_match.groups()[0]
133 133
134 134
135 135 def get_repo_by_id(repo_name):
136 136 """
137 137 Extracts repo_name from special by-id urls. Example url is _11/repo_name
138 138
139 139 :param repo_name:
140 140 :return: repo_name if matched else None
141 141 """
142 142 _repo_id = _extract_id_from_repo_name(repo_name)
143 143 if _repo_id:
144 144 from kallithea.model.db import Repository
145 145 repo = Repository.get(_repo_id)
146 146 if repo:
147 147 # TODO: return repo instead of reponame? or would that be a layering violation?
148 148 return repo.repo_name
149 149 return None
150 150
151 151
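A small illustration of the by-id URL convention handled above; the repository id is made up::

    _extract_id_from_repo_name('/_11/some/repo')   # '11'
    _extract_id_from_repo_name('plain/repo')       # None
    # get_repo_by_id('_11/anything') then resolves Repository.get('11') and
    # returns its repo_name, or None when no such repository exists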
152 152 def action_logger(user, action, repo, ipaddr='', sa=None, commit=False):
153 153 """
154 154 Action logger for various actions made by users
155 155
156 156 :param user: user that made this action, can be a unique username string or
157 157 object containing user_id attribute
158 158 :param action: action to log, should be one of the predefined unique actions for
159 159 easy translations
160 160 :param repo: string name of repository or object containing repo_id,
161 161 that action was made on
162 162 :param ipaddr: optional IP address from which the action was made
163 163 :param sa: optional sqlalchemy session
164 164
165 165 """
166 166
167 167 if not sa:
168 168 sa = meta.Session()
169 169 # if we don't get explicit IP address try to get one from registered user
170 170 # in tmpl context var
171 171 if not ipaddr:
172 172 ipaddr = getattr(get_current_authuser(), 'ip_addr', '')
173 173
174 174 if getattr(user, 'user_id', None):
175 175 user_obj = User.get(user.user_id)
176 176 elif isinstance(user, basestring):
177 177 user_obj = User.get_by_username(user)
178 178 else:
179 179 raise Exception('You have to provide a user object or a username')
180 180
181 181 if getattr(repo, 'repo_id', None):
182 182 repo_obj = Repository.get(repo.repo_id)
183 183 repo_name = repo_obj.repo_name
184 184 elif isinstance(repo, basestring):
185 185 repo_name = repo.lstrip('/')
186 186 repo_obj = Repository.get_by_repo_name(repo_name)
187 187 else:
188 188 repo_obj = None
189 189 repo_name = u''
190 190
191 191 user_log = UserLog()
192 192 user_log.user_id = user_obj.user_id
193 193 user_log.username = user_obj.username
194 194 user_log.action = safe_unicode(action)
195 195
196 196 user_log.repository = repo_obj
197 197 user_log.repository_name = repo_name
198 198
199 199 user_log.action_date = datetime.datetime.now()
200 200 user_log.user_ip = ipaddr
201 201 sa.add(user_log)
202 202
203 203 log.info('Logging action:%s on %s by user:%s ip:%s',
204 204 action, safe_unicode(repo), user_obj, ipaddr)
205 205 if commit:
206 206 sa.commit()
207 207
208 208
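A hedged usage sketch; the username, action string and repository name below are placeholders rather than the predefined action constants::

    action_logger(u'john', 'user_did_something', u'group/my-repo',
                  ipaddr='10.0.0.1', commit=True)
    # with commit=False (the default) the UserLog row is only added to the
    # session and the caller is expected to commit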
209 209 def get_filesystem_repos(path, recursive=False, skip_removed_repos=True):
210 210 """
211 211 Scans the given path for repos and returns (name, (type, path)) tuples
212 212
213 213 :param path: path to scan for repositories
214 214 :param recursive: recursive search and return names with subdirs in front
215 215 """
216 216
217 217 # remove ending slash for better results
218 218 path = path.rstrip(os.sep)
219 219 log.debug('now scanning in %s location recursive:%s...', path, recursive)
220 220
221 221 def _get_repos(p):
222 222 if not os.access(p, os.R_OK) or not os.access(p, os.X_OK):
223 223 log.warning('ignoring repo path without access: %s', p)
224 224 return
225 225 if not os.access(p, os.W_OK):
226 226 log.warning('repo path without write access: %s', p)
227 227 for dirpath in os.listdir(p):
228 228 if os.path.isfile(os.path.join(p, dirpath)):
229 229 continue
230 230 cur_path = os.path.join(p, dirpath)
231 231
232 232 # skip removed repos
233 233 if skip_removed_repos and REMOVED_REPO_PAT.match(dirpath):
234 234 continue
235 235
236 236 #skip .<something> dirs
237 237 if dirpath.startswith('.'):
238 238 continue
239 239
240 240 try:
241 241 scm_info = get_scm(cur_path)
242 242 yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
243 243 except VCSError:
244 244 if not recursive:
245 245 continue
246 246 #check if this dir contains other repos for recursive scan
247 247 rec_path = os.path.join(p, dirpath)
248 248 if not os.path.islink(rec_path) and os.path.isdir(rec_path):
249 249 for inner_scm in _get_repos(rec_path):
250 250 yield inner_scm
251 251
252 252 return _get_repos(path)
253 253
254 254
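For illustration, scanning a hypothetical base directory and printing what was found::

    for name, (scm_type, repo_path) in get_filesystem_repos('/srv/repos',
                                                            recursive=True):
        print('%s is a %s repository at %s' % (name, scm_type, repo_path))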
255 255 def is_valid_repo(repo_name, base_path, scm=None):
256 256 """
257 257 Returns True if the given path is a valid repository, False otherwise.
258 258 If the scm param is given, also check that the repository's scm type
259 259 matches the expected one.
260 260
261 261 :param repo_name:
262 262 :param base_path:
263 263 :param scm:
264 264
265 265 :return True: if given path is a valid repository
266 266 """
267 267 full_path = os.path.join(safe_str(base_path), safe_str(repo_name))
268 268
269 269 try:
270 270 scm_ = get_scm(full_path)
271 271 if scm:
272 272 return scm_[0] == scm
273 273 return True
274 274 except VCSError:
275 275 return False
276 276
277 277
278 278 def is_valid_repo_group(repo_group_name, base_path, skip_path_check=False):
279 279 """
280 280 Returns True if the given path is a repository group, False otherwise
281 281
282 282 :param repo_group_name:
283 283 :param base_path:
284 284 """
285 285 full_path = os.path.join(safe_str(base_path), safe_str(repo_group_name))
286 286
287 287 # check if it's not a repo
288 288 if is_valid_repo(repo_group_name, base_path):
289 289 return False
290 290
291 291 try:
292 292 # we need to check bare git repos at higher level
293 293 # since we might match branches/hooks/info/objects or possible
294 294 # other things inside bare git repo
295 295 get_scm(os.path.dirname(full_path))
296 296 return False
297 297 except VCSError:
298 298 pass
299 299
300 300 # check if it's a valid path
301 301 if skip_path_check or os.path.isdir(full_path):
302 302 return True
303 303
304 304 return False
305 305
306 306
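A sketch of how the two checks above are typically combined; the paths are hypothetical::

    if is_valid_repo('projects/web', '/srv/repos', scm='hg'):
        pass  # a Mercurial repository lives at /srv/repos/projects/web
    elif is_valid_repo_group('projects/web', '/srv/repos'):
        pass  # only a directory grouping other repositories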
307 307 def ask_ok(prompt, retries=4, complaint='Yes or no please!'):
308 308 while True:
309 309 ok = raw_input(prompt)
310 310 if ok in ('y', 'ye', 'yes'):
311 311 return True
312 312 if ok in ('n', 'no', 'nop', 'nope'):
313 313 return False
314 314 retries = retries - 1
315 315 if retries < 0:
316 316 raise IOError
317 317 print complaint
318 318
319 319 #propagated from mercurial documentation
320 320 ui_sections = ['alias', 'auth',
321 321 'decode/encode', 'defaults',
322 322 'diff', 'email',
323 323 'extensions', 'format',
324 324 'merge-patterns', 'merge-tools',
325 325 'hooks', 'http_proxy',
326 326 'smtp', 'patch',
327 327 'paths', 'profiling',
328 328 'server', 'trusted',
329 329 'ui', 'web', ]
330 330
331 331
332 332 def make_ui(read_from='file', path=None, checkpaths=True, clear_session=True):
333 333 """
334 334 A function that will read hgrc-style config files or the database
335 335 and make a mercurial ui object from the options read
336 336
337 337 :param path: path to mercurial config file
338 338 :param checkpaths: check the path
339 339 :param read_from: read from 'file' or 'db'
340 340 """
341 341
342 342 baseui = ui.ui()
343 343
344 344 # clean the baseui object
345 345 baseui._ocfg = config.config()
346 346 baseui._ucfg = config.config()
347 347 baseui._tcfg = config.config()
348 348
349 349 if read_from == 'file':
350 350 if not os.path.isfile(path):
351 351 log.debug('hgrc file is not present at %s, skipping...', path)
352 352 return False
353 353 log.debug('reading hgrc from %s', path)
354 354 cfg = config.config()
355 355 cfg.read(path)
356 356 for section in ui_sections:
357 357 for k, v in cfg.items(section):
358 358 log.debug('settings ui from file: [%s] %s=%s', section, k, v)
359 359 baseui.setconfig(safe_str(section), safe_str(k), safe_str(v))
360 360
361 361 elif read_from == 'db':
362 362 sa = meta.Session()
363 363 ret = sa.query(Ui).all()
364 364
365 365 hg_ui = ret
366 366 for ui_ in hg_ui:
367 367 if ui_.ui_active:
368 368 ui_val = safe_str(ui_.ui_value)
369 369 if ui_.ui_section == 'hooks' and BRAND != 'kallithea' and ui_val.startswith('python:' + BRAND + '.lib.hooks.'):
370 370 ui_val = ui_val.replace('python:' + BRAND + '.lib.hooks.', 'python:kallithea.lib.hooks.')
371 371 log.debug('settings ui from db: [%s] %s=%s', ui_.ui_section,
372 372 ui_.ui_key, ui_val)
373 373 baseui.setconfig(safe_str(ui_.ui_section), safe_str(ui_.ui_key),
374 374 ui_val)
375 375 if ui_.ui_key == 'push_ssl':
376 376 # force set push_ssl requirement to False, kallithea
377 377 # handles that
378 378 baseui.setconfig(safe_str(ui_.ui_section), safe_str(ui_.ui_key),
379 379 False)
380 380 if clear_session:
381 381 meta.Session.remove()
382 382
383 383 # prevent interactive questions for ssh password / passphrase
384 384 ssh = baseui.config('ui', 'ssh', default='ssh')
385 385 baseui.setconfig('ui', 'ssh', '%s -oBatchMode=yes -oIdentitiesOnly=yes' % ssh)
386 386
387 387 return baseui
388 388
389 389
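A short sketch of the two read modes; the hgrc path is hypothetical, and the 'file' variant returns False when the file does not exist::

    baseui = make_ui(read_from='db')                            # settings from the Ui table
    fileui = make_ui(read_from='file', path='/srv/repos/hgrc')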
390 390 def set_app_settings(config):
391 391 """
392 392 Updates pylons config with new settings from database
393 393
394 394 :param config:
395 395 """
396 396 hgsettings = Setting.get_app_settings()
397 397
398 398 for k, v in hgsettings.items():
399 399 config[k] = v
400 400
401 401
402 402 def set_vcs_config(config):
403 403 """
404 404 Patch VCS config with some Kallithea specific stuff
405 405
406 406 :param config: kallithea.CONFIG
407 407 """
408 408 from kallithea.lib.vcs import conf
409 409 from kallithea.lib.utils2 import aslist
410 410 conf.settings.BACKENDS = {
411 411 'hg': 'kallithea.lib.vcs.backends.hg.MercurialRepository',
412 412 'git': 'kallithea.lib.vcs.backends.git.GitRepository',
413 413 }
414 414
415 415 conf.settings.GIT_EXECUTABLE_PATH = config.get('git_path', 'git')
416 416 conf.settings.GIT_REV_FILTER = config.get('git_rev_filter', '--all').strip()
417 417 conf.settings.DEFAULT_ENCODINGS = aslist(config.get('default_encoding',
418 418 'utf8'), sep=',')
419 419
420 420
421 421 def set_indexer_config(config):
422 422 """
423 423 Update Whoosh index mapping
424 424
425 425 :param config: kallithea.CONFIG
426 426 """
427 427 from kallithea.config import conf
428 428
429 429 log.debug('adding extra into INDEX_EXTENSIONS')
430 430 conf.INDEX_EXTENSIONS.extend(re.split('\s+', config.get('index.extensions', '')))
431 431
432 432 log.debug('adding extra into INDEX_FILENAMES')
433 433 conf.INDEX_FILENAMES.extend(re.split('\s+', config.get('index.filenames', '')))
434 434
435 435
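A sketch of the configuration keys the two setters above consume; the values are illustrative, not shipped defaults::

    cfg = {
        'git_path': '/usr/bin/git',
        'git_rev_filter': '--branches --tags',
        'default_encoding': 'utf8,latin1',
        'index.extensions': 'ini cfg',
        'index.filenames': 'Dockerfile Makefile',
    }
    set_vcs_config(cfg)       # fills kallithea.lib.vcs.conf.settings
    set_indexer_config(cfg)   # extends the Whoosh INDEX_* lists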
436 436 def map_groups(path):
437 437 """
438 438 Given a full path to a repository, create all nested groups that this
439 439 repo is inside. This function creates parent-child relationships between
440 440 groups and creates default perms for all new groups.
441 441
442 442 :param path: full path to the repository
443 443 """
444 444 sa = meta.Session()
445 445 groups = path.split(Repository.url_sep())
446 446 parent = None
447 447 group = None
448 448
449 449 # last element is repo in nested groups structure
450 450 groups = groups[:-1]
451 451 rgm = RepoGroupModel(sa)
452 452 owner = User.get_first_admin()
453 453 for lvl, group_name in enumerate(groups):
454 454 group_name = u'/'.join(groups[:lvl] + [group_name])
455 455 group = RepoGroup.get_by_group_name(group_name)
456 456 desc = '%s group' % group_name
457 457
458 458 # skip folders that are now removed repos
459 459 if REMOVED_REPO_PAT.match(group_name):
460 460 break
461 461
462 462 if group is None:
463 463 log.debug('creating group level: %s group_name: %s',
464 464 lvl, group_name)
465 465 group = RepoGroup(group_name, parent)
466 466 group.group_description = desc
467 467 group.user = owner
468 468 sa.add(group)
469 469 perm_obj = rgm._create_default_perms(group)
470 470 sa.add(perm_obj)
471 471 sa.flush()
472 472
473 473 parent = group
474 474 return group
475 475
476 476
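For example, mapping a nested repository path creates any missing parent groups and returns the innermost one; the path is made up::

    group = map_groups(u'projects/web/my-repo')
    # ensures RepoGroup u'projects' and u'projects/web' exist (with default
    # perms) and returns the latter; the last path element is the repo itself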
477 477 def repo2db_mapper(initial_repo_list, remove_obsolete=False,
478 478 install_git_hooks=False, user=None, overwrite_git_hooks=False):
479 479 """
480 480 maps all repos given in initial_repo_list; non-existing repositories
481 481 are created. If remove_obsolete is True it also checks for db entries
482 482 that are not in initial_repo_list and removes them.
483 483
484 484 :param initial_repo_list: list of repositories found by scanning methods
485 485 :param remove_obsolete: check for obsolete entries in database
486 486 :param install_git_hooks: if this is True, also check and install git hook
487 487 for a repo if missing
488 488 :param overwrite_git_hooks: if this is True, overwrite any existing git hooks
489 489 that may be encountered (even if user-deployed)
490 490 """
491 491 from kallithea.model.repo import RepoModel
492 492 from kallithea.model.scm import ScmModel
493 493 sa = meta.Session()
494 494 repo_model = RepoModel()
495 495 if user is None:
496 496 user = User.get_first_admin()
497 497 added = []
498 498
499 499 ##creation defaults
500 500 defs = Setting.get_default_repo_settings(strip_prefix=True)
501 501 enable_statistics = defs.get('repo_enable_statistics')
502 502 enable_locking = defs.get('repo_enable_locking')
503 503 enable_downloads = defs.get('repo_enable_downloads')
504 504 private = defs.get('repo_private')
505 505
506 506 for name, repo in initial_repo_list.items():
507 507 group = map_groups(name)
508 508 unicode_name = safe_unicode(name)
509 509 db_repo = repo_model.get_by_repo_name(unicode_name)
510 510 # found repo that is on filesystem not in Kallithea database
511 511 if not db_repo:
512 512 log.info('repository %s not found, creating now', name)
513 513 added.append(name)
514 514 desc = (repo.description
515 515 if repo.description != 'unknown'
516 516 else '%s repository' % name)
517 517
518 518 new_repo = repo_model._create_repo(
519 519 repo_name=name,
520 520 repo_type=repo.alias,
521 521 description=desc,
522 522 repo_group=getattr(group, 'group_id', None),
523 523 owner=user,
524 524 enable_locking=enable_locking,
525 525 enable_downloads=enable_downloads,
526 526 enable_statistics=enable_statistics,
527 527 private=private,
528 528 state=Repository.STATE_CREATED
529 529 )
530 530 sa.commit()
531 531 # we added that repo just now, and make sure it has a git hook
532 532 # installed, and update the server info
533 533 if new_repo.repo_type == 'git':
534 534 git_repo = new_repo.scm_instance
535 535 ScmModel().install_git_hooks(git_repo)
536 536 # update repository server-info
537 537 log.debug('Running update server info')
538 538 git_repo._update_server_info()
539 539 new_repo.update_changeset_cache()
540 540 elif install_git_hooks:
541 541 if db_repo.repo_type == 'git':
542 542 ScmModel().install_git_hooks(db_repo.scm_instance, force_create=overwrite_git_hooks)
543 543
544 544 removed = []
545 545 # remove from database those repositories that are not in the filesystem
546 546 unicode_initial_repo_list = set(safe_unicode(name) for name in initial_repo_list)
547 547 for repo in sa.query(Repository).all():
548 548 if repo.repo_name not in unicode_initial_repo_list:
549 549 if remove_obsolete:
550 550 log.debug("Removing non-existing repository found in db `%s`",
551 551 repo.repo_name)
552 552 try:
553 553 RepoModel(sa).delete(repo, forks='detach', fs_remove=False)
554 554 sa.commit()
555 555 except Exception:
556 556 #don't hold further removals on error
557 557 log.error(traceback.format_exc())
558 558 sa.rollback()
559 559 removed.append(repo.repo_name)
560 560 return added, removed
561 561
562 562
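A hedged sketch of how the mapper is typically driven; it assumes initial_repo_list comes from ScmModel().repo_scan(), which maps repository names to vcs repository instances::

    from kallithea.model.scm import ScmModel
    repos = ScmModel().repo_scan()          # {name: scm repo instance}
    added, removed = repo2db_mapper(repos, remove_obsolete=True,
                                    install_git_hooks=True)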
563 563 # set cache regions for beaker so celery can utilise it
564 564 def add_cache(settings):
565 565 cache_settings = {'regions': None}
566 566 for key in settings.keys():
567 567 for prefix in ['beaker.cache.', 'cache.']:
568 568 if key.startswith(prefix):
569 569 name = key.split(prefix)[1].strip()
570 570 cache_settings[name] = settings[key].strip()
571 571 if cache_settings['regions']:
572 572 for region in cache_settings['regions'].split(','):
573 573 region = region.strip()
574 574 region_settings = {}
575 575 for key, value in cache_settings.items():
576 576 if key.startswith(region):
577 577 region_settings[key.split('.')[1]] = value
578 578 region_settings['expire'] = int(region_settings.get('expire',
579 579 60))
580 580 region_settings.setdefault('lock_dir',
581 581 cache_settings.get('lock_dir'))
582 582 region_settings.setdefault('data_dir',
583 583 cache_settings.get('data_dir'))
584 584
585 585 if 'type' not in region_settings:
586 586 region_settings['type'] = cache_settings.get('type',
587 587 'memory')
588 588 beaker.cache.cache_regions[region] = region_settings
589 589
590 590
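A sketch of the flat .ini-style settings add_cache expects; the region names, lifetimes and paths are illustrative::

    add_cache({
        'beaker.cache.data_dir': '/tmp/cache/data',    # hypothetical paths
        'beaker.cache.lock_dir': '/tmp/cache/lock',
        'beaker.cache.regions': 'short_term,long_term',
        'beaker.cache.short_term.type': 'memory',
        'beaker.cache.short_term.expire': '60',
        'beaker.cache.long_term.type': 'memory',
        'beaker.cache.long_term.expire': '36000',
    })
    # afterwards beaker.cache.cache_regions contains a 'short_term' and a
    # 'long_term' region usable from both the web app and celery tasks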
591 591 def load_rcextensions(root_path):
592 592 import kallithea
593 593 from kallithea.config import conf
594 594
595 595 path = os.path.join(root_path, 'rcextensions', '__init__.py')
596 596 if os.path.isfile(path):
597 597 rcext = create_module('rc', path)
598 598 EXT = kallithea.EXTENSIONS = rcext
599 599 log.debug('Found rcextensions now loading %s...', rcext)
600 600
601 601 # Additional mappings that are not present in the pygments lexers
602 602 conf.LANGUAGES_EXTENSIONS_MAP.update(getattr(EXT, 'EXTRA_MAPPINGS', {}))
603 603
604 604 #OVERRIDE OUR EXTENSIONS FROM RC-EXTENSIONS (if present)
605 605
606 606 if getattr(EXT, 'INDEX_EXTENSIONS', []):
607 607 log.debug('settings custom INDEX_EXTENSIONS')
608 608 conf.INDEX_EXTENSIONS = getattr(EXT, 'INDEX_EXTENSIONS', [])
609 609
610 610 #ADDITIONAL MAPPINGS
611 611 log.debug('adding extra into INDEX_EXTENSIONS')
612 612 conf.INDEX_EXTENSIONS.extend(getattr(EXT, 'EXTRA_INDEX_EXTENSIONS', []))
613 613
614 614 # auto check if the module is not missing any data, set to default if it is
615 615 # this will help autoupdate new features of the rcext module
616 616 #from kallithea.config import rcextensions
617 617 #for k in dir(rcextensions):
618 618 # if not k.startswith('_') and not hasattr(EXT, k):
619 619 # setattr(EXT, k, getattr(rcextensions, k))
620 620
621 621
622 622 def get_custom_lexer(extension):
623 623 """
624 624 returns a custom lexer if it's defined in rcextensions module, or None
625 625 if there's no custom lexer defined
626 626 """
627 627 import kallithea
628 628 from pygments import lexers
629 629 #check if this extension is mapped to another lexer
630 630 if kallithea.EXTENSIONS and extension in kallithea.EXTENSIONS.EXTRA_LEXERS:
631 631 _lexer_name = kallithea.EXTENSIONS.EXTRA_LEXERS[extension]
632 632 return lexers.get_lexer_by_name(_lexer_name)
633 633
634 634
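A hedged sketch of what an rcextensions/__init__.py might define for the hooks read above; the values are illustrative (EXTRA_MAPPINGS, also consulted above, must match the structure of conf.LANGUAGES_EXTENSIONS_MAP)::

    # rcextensions/__init__.py, loaded via create_module above
    EXTRA_LEXERS = {'cfg': 'ini'}      # extension -> pygments lexer name
    INDEX_EXTENSIONS = []              # replace the default whoosh list...
    EXTRA_INDEX_EXTENSIONS = ['j2']    # ...or just extend it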
635 635 #==============================================================================
636 636 # TEST FUNCTIONS AND CREATORS
637 637 #==============================================================================
638 638 def create_test_index(repo_location, config, full_index):
639 639 """
640 640 Makes default test index
641 641
642 642 :param config: test config
643 643 :param full_index:
644 644 """
645 645
646 646 from kallithea.lib.indexers.daemon import WhooshIndexingDaemon
647 647 from kallithea.lib.pidlock import DaemonLock, LockHeld
648 648
649 649 repo_location = repo_location
650 650
651 651 index_location = os.path.join(config['app_conf']['index_dir'])
652 652 if not os.path.exists(index_location):
653 653 os.makedirs(index_location)
654 654
655 655 try:
656 656 l = DaemonLock(file_=jn(dn(index_location), 'make_index.lock'))
657 657 WhooshIndexingDaemon(index_location=index_location,
658 658 repo_location=repo_location) \
659 659 .run(full_index=full_index)
660 660 l.release()
661 661 except LockHeld:
662 662 pass
663 663
664 664
665 665 def create_test_env(repos_test_path, config):
666 666 """
667 667 Makes a fresh database and
668 668 installs test repositories into a tmp dir
669 669 """
670 670 from kallithea.lib.db_manage import DbManage
671 671 from kallithea.tests import HG_REPO, GIT_REPO, TESTS_TMP_PATH
672 672
673 673 # PART ONE create db
674 674 dbconf = config['sqlalchemy.db1.url']
675 675 log.debug('making test db %s', dbconf)
676 676
677 677 # create test dir if it doesn't exist
678 678 if not os.path.isdir(repos_test_path):
679 679 log.debug('Creating testdir %s', repos_test_path)
680 680 os.makedirs(repos_test_path)
681 681
682 682 dbmanage = DbManage(log_sql=True, dbconf=dbconf, root=config['here'],
683 683 tests=True)
684 684 dbmanage.create_tables(override=True)
685 685 # for tests dynamically set new root paths based on generated content
686 686 dbmanage.create_settings(dbmanage.config_prompt(repos_test_path))
687 687 dbmanage.create_default_user()
688 688 dbmanage.admin_prompt()
689 689 dbmanage.create_permissions()
690 690 dbmanage.populate_default_permissions()
691 691 Session().commit()
692 692 # PART TWO make test repo
693 693 log.debug('making test vcs repositories')
694 694
695 695 idx_path = config['app_conf']['index_dir']
696 696 data_path = config['app_conf']['cache_dir']
697 697
698 698 #clean index and data
699 699 if idx_path and os.path.exists(idx_path):
700 700 log.debug('remove %s', idx_path)
701 701 shutil.rmtree(idx_path)
702 702
703 703 if data_path and os.path.exists(data_path):
704 704 log.debug('remove %s', data_path)
705 705 shutil.rmtree(data_path)
706 706
707 707 #CREATE DEFAULT TEST REPOS
708 708 cur_dir = dn(dn(abspath(__file__)))
709 709 tar = tarfile.open(jn(cur_dir, 'tests', 'fixtures', "vcs_test_hg.tar.gz"))
710 710 tar.extractall(jn(TESTS_TMP_PATH, HG_REPO))
711 711 tar.close()
712 712
713 713 cur_dir = dn(dn(abspath(__file__)))
714 714 tar = tarfile.open(jn(cur_dir, 'tests', 'fixtures', "vcs_test_git.tar.gz"))
715 715 tar.extractall(jn(TESTS_TMP_PATH, GIT_REPO))
716 716 tar.close()
717 717
718 718 #LOAD VCS test stuff
719 719 from kallithea.tests.vcs import setup_package
720 720 setup_package()
721 721
722 722
723 723 #==============================================================================
724 724 # PASTER COMMANDS
725 725 #==============================================================================
726 726 class BasePasterCommand(Command):
727 727 """
728 728 Abstract Base Class for paster commands.
729 729
730 730 The celery commands are somewhat aggressive about loading
731 731 celery.conf, and since our module sets the `CELERY_LOADER`
732 732 environment variable to our loader, we have to bootstrap a bit and
733 733 make sure we've had a chance to load the pylons config off of the
734 734 command line, otherwise everything fails.
735 735 """
736 736 min_args = 1
737 737 min_args_error = "Please provide a paster config file as an argument."
738 738 takes_config_file = 1
739 739 requires_config_file = True
740 740
741 741 def run(self, args):
742 742 """
743 743 Overrides Command.run
744 744
745 745 Checks for a config file argument and loads it.
746 746 """
747 747 if len(args) < self.min_args:
748 748 raise BadCommand(
749 749 self.min_args_error % {'min_args': self.min_args,
750 750 'actual_args': len(args)})
751 751
752 752 # Decrement because we're going to lob off the first argument.
753 753 # @@ This is hacky
754 754 self.min_args -= 1
755 755 self.bootstrap_config(args[0])
756 756 self.update_parser()
757 757 return super(BasePasterCommand, self).run(args[1:])
758 758
759 759 def update_parser(self):
760 760 """
761 761 Abstract method. Allows for the class's parser to be updated
762 762 before the superclass's `run` method is called. Necessary to
763 763 allow options/arguments to be passed through to the underlying
764 764 celery command.
765 765 """
766 766 raise NotImplementedError("Abstract Method.")
767 767
768 768 def bootstrap_config(self, conf):
769 769 """
770 770 Loads the pylons configuration.
771 771 """
772 772 from pylons import config as pylonsconfig
773 773
774 774 self.path_to_ini_file = os.path.realpath(conf)
775 775 conf = paste.deploy.appconfig('config:' + self.path_to_ini_file)
776 776 pylonsconfig.init_app(conf.global_conf, conf.local_conf)
777 777
778 778 def _init_session(self):
779 779 """
780 780 Inits SqlAlchemy Session
781 781 """
782 782 logging.config.fileConfig(self.path_to_ini_file)
783 783
784 784 from pylons import config
785 785 from kallithea.model import init_model
786 786 from kallithea.lib.utils2 import engine_from_config
787 787 add_cache(config)
788 788 engine = engine_from_config(config, 'sqlalchemy.db1.')
789 789 init_model(engine)
790 790
791 791
792 792 def check_git_version():
793 793 """
794 794 Checks what version of git is installed on the system, and issues a warning
795 795 if it's too old for Kallithea to work properly.
796 796 """
797 797 from kallithea import BACKENDS
798 798 from kallithea.lib.vcs.backends.git.repository import GitRepository
799 799 from kallithea.lib.vcs.conf import settings
800 800 from distutils.version import StrictVersion
801 801
802 802 if 'git' not in BACKENDS:
803 803 return None
804 804
805 805 stdout, stderr = GitRepository._run_git_command(['--version'], _bare=True,
806 806 _safe=True)
807 807
808 808 m = re.search("\d+.\d+.\d+", stdout)
809 809 if m:
810 810 ver = StrictVersion(m.group(0))
811 811 else:
812 812 ver = StrictVersion('0.0.0')
813 813
814 814 req_ver = StrictVersion('1.7.4')
815 815
816 816 log.debug('Git executable: "%s" version %s detected: %s',
817 817 settings.GIT_EXECUTABLE_PATH, ver, stdout)
818 818 if stderr:
819 819 log.warning('Error detecting git version: %r', stderr)
820 820 elif ver < req_ver:
821 821 log.warning('Kallithea detected git version %s, which is too old '
822 822 'for the system to function properly. '
823 823 'Please upgrade to version %s or later.' % (ver, req_ver))
824 824 return ver
825 825
826 826
827 827 @decorator.decorator
828 828 def jsonify(func, *args, **kwargs):
829 829 """Action decorator that formats output for JSON
830 830
831 831 Given a function that will return content, this decorator will turn
832 832 the result into JSON, with a content-type of 'application/json' and
833 833 output it.
834 834
835 835 """
836 836 from pylons.decorators.util import get_pylons
837 837 from kallithea.lib.compat import json
838 838 pylons = get_pylons(args)
839 839 pylons.response.headers['Content-Type'] = 'application/json; charset=utf-8'
840 840 data = func(*args, **kwargs)
841 841 if isinstance(data, (list, tuple)):
842 842 msg = "JSON responses with Array envelopes are susceptible to " \
843 843 "cross-site data leak attacks, see " \
844 844 "http://wiki.pylonshq.com/display/pylonsfaq/Warnings"
845 845 warnings.warn(msg, Warning, 2)
846 846 log.warning(msg)
847 847 log.debug("Returning JSON wrapped action output")
848 848 return json.dumps(data, encoding='utf-8')
849 849
850 850
851 851 def conditional_cache(region, prefix, condition, func):
852 852 """
853 853
853 853 Conditional caching function, used like::
855 855 def _c(arg):
856 856 #heavy computation function
857 857 return data
858 858
859 # denpending from condition the compute is wrapped in cache or not
859 # depending from condition the compute is wrapped in cache or not
860 860 compute = conditional_cache('short_term', 'cache_desc', condition=True, func=func)
861 861 return compute(arg)
862 862
863 863 :param region: name of cache region
864 864 :param prefix: cache region prefix
865 865 :param condition: if True, wrap the call in the cache and return cached data
866 866 :param func: wrapped heavy function to compute
867 867
868 868 """
869 869 wrapped = func
870 870 if condition:
871 871 log.debug('conditional_cache: True, wrapping call of '
872 872 'func: %s into %s region cache' % (region, func))
873 873 wrapped = _cache_decorate((prefix,), None, None, region)(func)
874 874
875 875 return wrapped
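A sketch mirroring the docstring above; the region must already be configured (for example through add_cache) and the names are illustrative::

    def _heavy(arg):
        return arg * 2                  # stand-in for an expensive computation

    compute = conditional_cache('short_term', 'repo_stats',
                                condition=True, func=_heavy)
    result = compute(21)                # cached in the 'short_term' region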