spelling: depending
timeless@gmail.com
r5797:784d28c9 default
@@ -1,875 +1,875 @@
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2 # This program is free software: you can redistribute it and/or modify
2 # This program is free software: you can redistribute it and/or modify
3 # it under the terms of the GNU General Public License as published by
3 # it under the terms of the GNU General Public License as published by
4 # the Free Software Foundation, either version 3 of the License, or
4 # the Free Software Foundation, either version 3 of the License, or
5 # (at your option) any later version.
5 # (at your option) any later version.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU General Public License
12 # You should have received a copy of the GNU General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 """
14 """
15 kallithea.lib.utils
15 kallithea.lib.utils
16 ~~~~~~~~~~~~~~~~~~~
16 ~~~~~~~~~~~~~~~~~~~
17
17
18 Utilities library for Kallithea
18 Utilities library for Kallithea
19
19
20 This file was forked by the Kallithea project in July 2014.
20 This file was forked by the Kallithea project in July 2014.
21 Original author and date, and relevant copyright and licensing information is below:
21 Original author and date, and relevant copyright and licensing information is below:
22 :created_on: Apr 18, 2010
22 :created_on: Apr 18, 2010
23 :author: marcink
23 :author: marcink
24 :copyright: (c) 2013 RhodeCode GmbH, and others.
24 :copyright: (c) 2013 RhodeCode GmbH, and others.
25 :license: GPLv3, see LICENSE.md for more details.
25 :license: GPLv3, see LICENSE.md for more details.
26 """
26 """
27
27
28 import os
28 import os
29 import re
29 import re
30 import logging
30 import logging
31 import datetime
31 import datetime
32 import traceback
32 import traceback
33 import paste
33 import paste
34 import beaker
34 import beaker
35 import tarfile
35 import tarfile
36 import shutil
36 import shutil
37 import decorator
37 import decorator
38 import warnings
38 import warnings
39 from os.path import abspath
39 from os.path import abspath
40 from os.path import dirname as dn, join as jn
40 from os.path import dirname as dn, join as jn
41
41
42 from paste.script.command import Command, BadCommand
42 from paste.script.command import Command, BadCommand
43
43
44 from webhelpers.text import collapse, remove_formatting, strip_tags
44 from webhelpers.text import collapse, remove_formatting, strip_tags
45 from beaker.cache import _cache_decorate
45 from beaker.cache import _cache_decorate
46
46
47 from kallithea import BRAND
47 from kallithea import BRAND
48
48
49 from kallithea.lib.vcs.utils.hgcompat import ui, config
49 from kallithea.lib.vcs.utils.hgcompat import ui, config
50 from kallithea.lib.vcs.utils.helpers import get_scm
50 from kallithea.lib.vcs.utils.helpers import get_scm
51 from kallithea.lib.vcs.exceptions import VCSError
51 from kallithea.lib.vcs.exceptions import VCSError
52
52
53 from kallithea.model import meta
53 from kallithea.model import meta
54 from kallithea.model.db import Repository, User, Ui, \
54 from kallithea.model.db import Repository, User, Ui, \
55 UserLog, RepoGroup, Setting, UserGroup
55 UserLog, RepoGroup, Setting, UserGroup
56 from kallithea.model.meta import Session
56 from kallithea.model.meta import Session
57 from kallithea.model.repo_group import RepoGroupModel
57 from kallithea.model.repo_group import RepoGroupModel
58 from kallithea.lib.utils2 import safe_str, safe_unicode, get_current_authuser
58 from kallithea.lib.utils2 import safe_str, safe_unicode, get_current_authuser
59 from kallithea.lib.vcs.utils.fakemod import create_module
59 from kallithea.lib.vcs.utils.fakemod import create_module
60
60
61 log = logging.getLogger(__name__)
61 log = logging.getLogger(__name__)
62
62
63 REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}_.*')
63 REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}_.*')
64
64
65
65
66 def recursive_replace(str_, replace=' '):
66 def recursive_replace(str_, replace=' '):
67 """
67 """
68 Recursively collapse repeated occurrences of the given character into one
68 Recursively collapse repeated occurrences of the given character into one
69
69
70 :param str_: given string
70 :param str_: given string
71 :param replace: char to find and replace multiple instances
71 :param replace: char to find and replace multiple instances
72
72
73 Examples::
73 Examples::
74 >>> recursive_replace("Mighty---Mighty-Bo--sstones",'-')
74 >>> recursive_replace("Mighty---Mighty-Bo--sstones",'-')
75 'Mighty-Mighty-Bo-sstones'
75 'Mighty-Mighty-Bo-sstones'
76 """
76 """
77
77
78 if str_.find(replace * 2) == -1:
78 if str_.find(replace * 2) == -1:
79 return str_
79 return str_
80 else:
80 else:
81 str_ = str_.replace(replace * 2, replace)
81 str_ = str_.replace(replace * 2, replace)
82 return recursive_replace(str_, replace)
82 return recursive_replace(str_, replace)
83
83
84
84
85 def repo_name_slug(value):
85 def repo_name_slug(value):
86 """
86 """
87 Return slug of name of repository
87 Return slug of name of repository
88 This function is called on each creation/modification
88 This function is called on each creation/modification
89 of repository to prevent bad names in repo
89 of repository to prevent bad names in repo
90 """
90 """
91
91
92 slug = remove_formatting(value)
92 slug = remove_formatting(value)
93 slug = strip_tags(slug)
93 slug = strip_tags(slug)
94
94
95 for c in """`?=[]\;'"<>,/~!@#$%^&*()+{}|: """:
95 for c in """`?=[]\;'"<>,/~!@#$%^&*()+{}|: """:
96 slug = slug.replace(c, '-')
96 slug = slug.replace(c, '-')
97 slug = recursive_replace(slug, '-')
97 slug = recursive_replace(slug, '-')
98 slug = collapse(slug, '-')
98 slug = collapse(slug, '-')
99 return slug
99 return slug
100
100
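# A minimal usage sketch, assuming a Kallithea install on the import path;
# the printed slug is indicative (punctuation and whitespace collapse to
# single dashes), not an asserted exact value.
from kallithea.lib.utils import repo_name_slug

proposed_name = u'My Repo!! (2014 fork)'   # hypothetical user input
print('slug: %s' % repo_name_slug(proposed_name))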
101
101
102 #==============================================================================
102 #==============================================================================
103 # PERM DECORATOR HELPERS FOR EXTRACTING NAMES FOR PERM CHECKS
103 # PERM DECORATOR HELPERS FOR EXTRACTING NAMES FOR PERM CHECKS
104 #==============================================================================
104 #==============================================================================
105 def get_repo_slug(request):
105 def get_repo_slug(request):
106 _repo = request.environ['pylons.routes_dict'].get('repo_name')
106 _repo = request.environ['pylons.routes_dict'].get('repo_name')
107 if _repo:
107 if _repo:
108 _repo = _repo.rstrip('/')
108 _repo = _repo.rstrip('/')
109 return _repo
109 return _repo
110
110
111
111
112 def get_repo_group_slug(request):
112 def get_repo_group_slug(request):
113 _group = request.environ['pylons.routes_dict'].get('group_name')
113 _group = request.environ['pylons.routes_dict'].get('group_name')
114 if _group:
114 if _group:
115 _group = _group.rstrip('/')
115 _group = _group.rstrip('/')
116 return _group
116 return _group
117
117
118
118
119 def get_user_group_slug(request):
119 def get_user_group_slug(request):
120 _group = request.environ['pylons.routes_dict'].get('id')
120 _group = request.environ['pylons.routes_dict'].get('id')
121 _group = UserGroup.get(_group)
121 _group = UserGroup.get(_group)
122 if _group:
122 if _group:
123 return _group.users_group_name
123 return _group.users_group_name
124 return None
124 return None
125
125
126
126
127 def _extract_id_from_repo_name(repo_name):
127 def _extract_id_from_repo_name(repo_name):
128 if repo_name.startswith('/'):
128 if repo_name.startswith('/'):
129 repo_name = repo_name.lstrip('/')
129 repo_name = repo_name.lstrip('/')
130 by_id_match = re.match(r'^_(\d{1,})', repo_name)
130 by_id_match = re.match(r'^_(\d{1,})', repo_name)
131 if by_id_match:
131 if by_id_match:
132 return by_id_match.groups()[0]
132 return by_id_match.groups()[0]
133
133
134
134
135 def get_repo_by_id(repo_name):
135 def get_repo_by_id(repo_name):
136 """
136 """
137 Extracts repo_name by id from special urls. Example url is _11/repo_name
137 Extracts repo_name by id from special urls. Example url is _11/repo_name
138
138
139 :param repo_name:
139 :param repo_name:
140 :return: repo_name if matched else None
140 :return: repo_name if matched else None
141 """
141 """
142 _repo_id = _extract_id_from_repo_name(repo_name)
142 _repo_id = _extract_id_from_repo_name(repo_name)
143 if _repo_id:
143 if _repo_id:
144 from kallithea.model.db import Repository
144 from kallithea.model.db import Repository
145 repo = Repository.get(_repo_id)
145 repo = Repository.get(_repo_id)
146 if repo:
146 if repo:
147 # TODO: return repo instead of reponame? or would that be a layering violation?
147 # TODO: return repo instead of reponame? or would that be a layering violation?
148 return repo.repo_name
148 return repo.repo_name
149 return None
149 return None
150
150
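# A minimal usage sketch, assuming an initialized database session; URLs of
# the form _<id>/... (the docstring's _11/repo_name example) resolve back to
# the stored repository name, or None if the id is unknown.
from kallithea.lib.utils import get_repo_by_id

name = get_repo_by_id('_11/repo_name')   # id 11 is hypothetical
print(name if name is not None else 'no repository with that id')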
151
151
152 def action_logger(user, action, repo, ipaddr='', sa=None, commit=False):
152 def action_logger(user, action, repo, ipaddr='', sa=None, commit=False):
153 """
153 """
154 Action logger for various actions made by users
154 Action logger for various actions made by users
155
155
156 :param user: user that made this action, can be a unique username string or
156 :param user: user that made this action, can be a unique username string or
157 object containing user_id attribute
157 object containing user_id attribute
158 :param action: action to log, should be one of the predefined unique actions for
158 :param action: action to log, should be one of the predefined unique actions for
159 easy translations
159 easy translations
160 :param repo: string name of repository or object containing repo_id,
160 :param repo: string name of repository or object containing repo_id,
161 that action was made on
161 that action was made on
162 :param ipaddr: optional IP address from which the action was made
162 :param ipaddr: optional IP address from which the action was made
163 :param sa: optional sqlalchemy session
163 :param sa: optional sqlalchemy session
164
164
165 """
165 """
166
166
167 if not sa:
167 if not sa:
168 sa = meta.Session()
168 sa = meta.Session()
169 # if we don't get explicit IP address try to get one from registered user
169 # if we don't get explicit IP address try to get one from registered user
170 # in tmpl context var
170 # in tmpl context var
171 if not ipaddr:
171 if not ipaddr:
172 ipaddr = getattr(get_current_authuser(), 'ip_addr', '')
172 ipaddr = getattr(get_current_authuser(), 'ip_addr', '')
173
173
174 if getattr(user, 'user_id', None):
174 if getattr(user, 'user_id', None):
175 user_obj = User.get(user.user_id)
175 user_obj = User.get(user.user_id)
176 elif isinstance(user, basestring):
176 elif isinstance(user, basestring):
177 user_obj = User.get_by_username(user)
177 user_obj = User.get_by_username(user)
178 else:
178 else:
179 raise Exception('You have to provide a user object or a username')
179 raise Exception('You have to provide a user object or a username')
180
180
181 if getattr(repo, 'repo_id', None):
181 if getattr(repo, 'repo_id', None):
182 repo_obj = Repository.get(repo.repo_id)
182 repo_obj = Repository.get(repo.repo_id)
183 repo_name = repo_obj.repo_name
183 repo_name = repo_obj.repo_name
184 elif isinstance(repo, basestring):
184 elif isinstance(repo, basestring):
185 repo_name = repo.lstrip('/')
185 repo_name = repo.lstrip('/')
186 repo_obj = Repository.get_by_repo_name(repo_name)
186 repo_obj = Repository.get_by_repo_name(repo_name)
187 else:
187 else:
188 repo_obj = None
188 repo_obj = None
189 repo_name = u''
189 repo_name = u''
190
190
191 user_log = UserLog()
191 user_log = UserLog()
192 user_log.user_id = user_obj.user_id
192 user_log.user_id = user_obj.user_id
193 user_log.username = user_obj.username
193 user_log.username = user_obj.username
194 user_log.action = safe_unicode(action)
194 user_log.action = safe_unicode(action)
195
195
196 user_log.repository = repo_obj
196 user_log.repository = repo_obj
197 user_log.repository_name = repo_name
197 user_log.repository_name = repo_name
198
198
199 user_log.action_date = datetime.datetime.now()
199 user_log.action_date = datetime.datetime.now()
200 user_log.user_ip = ipaddr
200 user_log.user_ip = ipaddr
201 sa.add(user_log)
201 sa.add(user_log)
202
202
203 log.info('Logging action:%s on %s by user:%s ip:%s',
203 log.info('Logging action:%s on %s by user:%s ip:%s',
204 action, safe_unicode(repo), user_obj, ipaddr)
204 action, safe_unicode(repo), user_obj, ipaddr)
205 if commit:
205 if commit:
206 sa.commit()
206 sa.commit()
207
207
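# A minimal usage sketch, assuming an active SQLAlchemy session and that the
# named user and repository already exist in the database; 'push' stands in
# for one of the predefined action strings.
from kallithea.lib.utils import action_logger

action_logger(user=u'admin', action='push', repo=u'group/myrepo',
              ipaddr='127.0.0.1', commit=True)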
208
208
209 def get_filesystem_repos(path, recursive=False, skip_removed_repos=True):
209 def get_filesystem_repos(path, recursive=False, skip_removed_repos=True):
210 """
210 """
211 Scans the given path for repos and returns (name, (type, path)) tuples
211 Scans the given path for repos and returns (name, (type, path)) tuples
212
212
213 :param path: path to scan for repositories
213 :param path: path to scan for repositories
214 :param recursive: recursive search and return names with subdirs in front
214 :param recursive: recursive search and return names with subdirs in front
215 """
215 """
216
216
217 # remove ending slash for better results
217 # remove ending slash for better results
218 path = path.rstrip(os.sep)
218 path = path.rstrip(os.sep)
219 log.debug('now scanning in %s location recursive:%s...', path, recursive)
219 log.debug('now scanning in %s location recursive:%s...', path, recursive)
220
220
221 def _get_repos(p):
221 def _get_repos(p):
222 if not os.access(p, os.R_OK) or not os.access(p, os.X_OK):
222 if not os.access(p, os.R_OK) or not os.access(p, os.X_OK):
223 log.warning('ignoring repo path without access: %s', p)
223 log.warning('ignoring repo path without access: %s', p)
224 return
224 return
225 if not os.access(p, os.W_OK):
225 if not os.access(p, os.W_OK):
226 log.warning('repo path without write access: %s', p)
226 log.warning('repo path without write access: %s', p)
227 for dirpath in os.listdir(p):
227 for dirpath in os.listdir(p):
228 if os.path.isfile(os.path.join(p, dirpath)):
228 if os.path.isfile(os.path.join(p, dirpath)):
229 continue
229 continue
230 cur_path = os.path.join(p, dirpath)
230 cur_path = os.path.join(p, dirpath)
231
231
232 # skip removed repos
232 # skip removed repos
233 if skip_removed_repos and REMOVED_REPO_PAT.match(dirpath):
233 if skip_removed_repos and REMOVED_REPO_PAT.match(dirpath):
234 continue
234 continue
235
235
236 #skip .<somethin> dirs
236 #skip .<somethin> dirs
237 if dirpath.startswith('.'):
237 if dirpath.startswith('.'):
238 continue
238 continue
239
239
240 try:
240 try:
241 scm_info = get_scm(cur_path)
241 scm_info = get_scm(cur_path)
242 yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
242 yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
243 except VCSError:
243 except VCSError:
244 if not recursive:
244 if not recursive:
245 continue
245 continue
246 #check if this dir contains other repos for recursive scan
246 #check if this dir contains other repos for recursive scan
247 rec_path = os.path.join(p, dirpath)
247 rec_path = os.path.join(p, dirpath)
248 if not os.path.islink(rec_path) and os.path.isdir(rec_path):
248 if not os.path.islink(rec_path) and os.path.isdir(rec_path):
249 for inner_scm in _get_repos(rec_path):
249 for inner_scm in _get_repos(rec_path):
250 yield inner_scm
250 yield inner_scm
251
251
252 return _get_repos(path)
252 return _get_repos(path)
253
253
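# A minimal usage sketch; '/srv/repos' is a hypothetical repositories root.
# Each yielded item is (relative_name, (scm_type, absolute_path)) as the
# docstring above describes.
from kallithea.lib.utils import get_filesystem_repos

for name, (scm_type, repo_path) in get_filesystem_repos('/srv/repos', recursive=True):
    print('%s repo %s at %s' % (scm_type, name, repo_path))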
254
254
255 def is_valid_repo(repo_name, base_path, scm=None):
255 def is_valid_repo(repo_name, base_path, scm=None):
256 """
256 """
257 Returns True if the given path is a valid repository, False otherwise.
257 Returns True if the given path is a valid repository, False otherwise.
258 If the scm param is given, also check whether the repository's scm type
258 If the scm param is given, also check whether the repository's scm type
259 matches the expected one
259 matches the expected one
260
260
261 :param repo_name:
261 :param repo_name:
262 :param base_path:
262 :param base_path:
263 :param scm:
263 :param scm:
264
264
265 :return True: if given path is a valid repository
265 :return True: if given path is a valid repository
266 """
266 """
267 full_path = os.path.join(safe_str(base_path), safe_str(repo_name))
267 full_path = os.path.join(safe_str(base_path), safe_str(repo_name))
268
268
269 try:
269 try:
270 scm_ = get_scm(full_path)
270 scm_ = get_scm(full_path)
271 if scm:
271 if scm:
272 return scm_[0] == scm
272 return scm_[0] == scm
273 return True
273 return True
274 except VCSError:
274 except VCSError:
275 return False
275 return False
276
276
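# A minimal usage sketch; the base path and repository name are hypothetical.
from kallithea.lib.utils import is_valid_repo

base = '/srv/repos'
print(is_valid_repo('myrepo', base))             # True for any supported VCS
print(is_valid_repo('myrepo', base, scm='git'))  # True only if it is a git repository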
277
277
278 def is_valid_repo_group(repo_group_name, base_path, skip_path_check=False):
278 def is_valid_repo_group(repo_group_name, base_path, skip_path_check=False):
279 """
279 """
280 Returns True if the given path is a repository group, False otherwise
280 Returns True if the given path is a repository group, False otherwise
281
281
282 :param repo_name:
282 :param repo_name:
283 :param base_path:
283 :param base_path:
284 """
284 """
285 full_path = os.path.join(safe_str(base_path), safe_str(repo_group_name))
285 full_path = os.path.join(safe_str(base_path), safe_str(repo_group_name))
286
286
287 # check if it's not a repo
287 # check if it's not a repo
288 if is_valid_repo(repo_group_name, base_path):
288 if is_valid_repo(repo_group_name, base_path):
289 return False
289 return False
290
290
291 try:
291 try:
292 # we need to check bare git repos at higher level
292 # we need to check bare git repos at higher level
293 # since we might match branches/hooks/info/objects or possible
293 # since we might match branches/hooks/info/objects or possible
294 # other things inside bare git repo
294 # other things inside bare git repo
295 get_scm(os.path.dirname(full_path))
295 get_scm(os.path.dirname(full_path))
296 return False
296 return False
297 except VCSError:
297 except VCSError:
298 pass
298 pass
299
299
300 # check if it's a valid path
300 # check if it's a valid path
301 if skip_path_check or os.path.isdir(full_path):
301 if skip_path_check or os.path.isdir(full_path):
302 return True
302 return True
303
303
304 return False
304 return False
305
305
306
306
307 def ask_ok(prompt, retries=4, complaint='Yes or no please!'):
307 def ask_ok(prompt, retries=4, complaint='Yes or no please!'):
308 while True:
308 while True:
309 ok = raw_input(prompt)
309 ok = raw_input(prompt)
310 if ok in ('y', 'ye', 'yes'):
310 if ok in ('y', 'ye', 'yes'):
311 return True
311 return True
312 if ok in ('n', 'no', 'nop', 'nope'):
312 if ok in ('n', 'no', 'nop', 'nope'):
313 return False
313 return False
314 retries = retries - 1
314 retries = retries - 1
315 if retries < 0:
315 if retries < 0:
316 raise IOError
316 raise IOError
317 print complaint
317 print complaint
318
318
319 #propagated from mercurial documentation
319 #propagated from mercurial documentation
320 ui_sections = ['alias', 'auth',
320 ui_sections = ['alias', 'auth',
321 'decode/encode', 'defaults',
321 'decode/encode', 'defaults',
322 'diff', 'email',
322 'diff', 'email',
323 'extensions', 'format',
323 'extensions', 'format',
324 'merge-patterns', 'merge-tools',
324 'merge-patterns', 'merge-tools',
325 'hooks', 'http_proxy',
325 'hooks', 'http_proxy',
326 'smtp', 'patch',
326 'smtp', 'patch',
327 'paths', 'profiling',
327 'paths', 'profiling',
328 'server', 'trusted',
328 'server', 'trusted',
329 'ui', 'web', ]
329 'ui', 'web', ]
330
330
331
331
332 def make_ui(read_from='file', path=None, checkpaths=True, clear_session=True):
332 def make_ui(read_from='file', path=None, checkpaths=True, clear_session=True):
333 """
333 """
334 A function that will read python rc files or database
334 A function that will read python rc files or database
335 and make a Mercurial ui object from the options read
335 and make a Mercurial ui object from the options read
336
336
337 :param path: path to mercurial config file
337 :param path: path to mercurial config file
338 :param checkpaths: check the path
338 :param checkpaths: check the path
339 :param read_from: read from 'file' or 'db'
339 :param read_from: read from 'file' or 'db'
340 """
340 """
341
341
342 baseui = ui.ui()
342 baseui = ui.ui()
343
343
344 # clean the baseui object
344 # clean the baseui object
345 baseui._ocfg = config.config()
345 baseui._ocfg = config.config()
346 baseui._ucfg = config.config()
346 baseui._ucfg = config.config()
347 baseui._tcfg = config.config()
347 baseui._tcfg = config.config()
348
348
349 if read_from == 'file':
349 if read_from == 'file':
350 if not os.path.isfile(path):
350 if not os.path.isfile(path):
351 log.debug('hgrc file is not present at %s, skipping...', path)
351 log.debug('hgrc file is not present at %s, skipping...', path)
352 return False
352 return False
353 log.debug('reading hgrc from %s', path)
353 log.debug('reading hgrc from %s', path)
354 cfg = config.config()
354 cfg = config.config()
355 cfg.read(path)
355 cfg.read(path)
356 for section in ui_sections:
356 for section in ui_sections:
357 for k, v in cfg.items(section):
357 for k, v in cfg.items(section):
358 log.debug('settings ui from file: [%s] %s=%s', section, k, v)
358 log.debug('settings ui from file: [%s] %s=%s', section, k, v)
359 baseui.setconfig(safe_str(section), safe_str(k), safe_str(v))
359 baseui.setconfig(safe_str(section), safe_str(k), safe_str(v))
360
360
361 elif read_from == 'db':
361 elif read_from == 'db':
362 sa = meta.Session()
362 sa = meta.Session()
363 ret = sa.query(Ui).all()
363 ret = sa.query(Ui).all()
364
364
365 hg_ui = ret
365 hg_ui = ret
366 for ui_ in hg_ui:
366 for ui_ in hg_ui:
367 if ui_.ui_active:
367 if ui_.ui_active:
368 ui_val = safe_str(ui_.ui_value)
368 ui_val = safe_str(ui_.ui_value)
369 if ui_.ui_section == 'hooks' and BRAND != 'kallithea' and ui_val.startswith('python:' + BRAND + '.lib.hooks.'):
369 if ui_.ui_section == 'hooks' and BRAND != 'kallithea' and ui_val.startswith('python:' + BRAND + '.lib.hooks.'):
370 ui_val = ui_val.replace('python:' + BRAND + '.lib.hooks.', 'python:kallithea.lib.hooks.')
370 ui_val = ui_val.replace('python:' + BRAND + '.lib.hooks.', 'python:kallithea.lib.hooks.')
371 log.debug('settings ui from db: [%s] %s=%s', ui_.ui_section,
371 log.debug('settings ui from db: [%s] %s=%s', ui_.ui_section,
372 ui_.ui_key, ui_val)
372 ui_.ui_key, ui_val)
373 baseui.setconfig(safe_str(ui_.ui_section), safe_str(ui_.ui_key),
373 baseui.setconfig(safe_str(ui_.ui_section), safe_str(ui_.ui_key),
374 ui_val)
374 ui_val)
375 if ui_.ui_key == 'push_ssl':
375 if ui_.ui_key == 'push_ssl':
376 # force set push_ssl requirement to False, kallithea
376 # force set push_ssl requirement to False, kallithea
377 # handles that
377 # handles that
378 baseui.setconfig(safe_str(ui_.ui_section), safe_str(ui_.ui_key),
378 baseui.setconfig(safe_str(ui_.ui_section), safe_str(ui_.ui_key),
379 False)
379 False)
380 if clear_session:
380 if clear_session:
381 meta.Session.remove()
381 meta.Session.remove()
382
382
383 # prevent interactive questions for ssh password / passphrase
383 # prevent interactive questions for ssh password / passphrase
384 ssh = baseui.config('ui', 'ssh', default='ssh')
384 ssh = baseui.config('ui', 'ssh', default='ssh')
385 baseui.setconfig('ui', 'ssh', '%s -oBatchMode=yes -oIdentitiesOnly=yes' % ssh)
385 baseui.setconfig('ui', 'ssh', '%s -oBatchMode=yes -oIdentitiesOnly=yes' % ssh)
386
386
387 return baseui
387 return baseui
388
388
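# A minimal usage sketch; reading from the database needs an initialized
# session, and the hgrc path used in the file variant is hypothetical.
from kallithea.lib.utils import make_ui

baseui = make_ui(read_from='db')
# or, from an on-disk config file:
baseui = make_ui(read_from='file', path='/srv/repos/.hgrc')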
389
389
390 def set_app_settings(config):
390 def set_app_settings(config):
391 """
391 """
392 Updates pylons config with new settings from database
392 Updates pylons config with new settings from database
393
393
394 :param config:
394 :param config:
395 """
395 """
396 hgsettings = Setting.get_app_settings()
396 hgsettings = Setting.get_app_settings()
397
397
398 for k, v in hgsettings.items():
398 for k, v in hgsettings.items():
399 config[k] = v
399 config[k] = v
400
400
401
401
402 def set_vcs_config(config):
402 def set_vcs_config(config):
403 """
403 """
404 Patch VCS config with some Kallithea specific stuff
404 Patch VCS config with some Kallithea specific stuff
405
405
406 :param config: kallithea.CONFIG
406 :param config: kallithea.CONFIG
407 """
407 """
408 from kallithea.lib.vcs import conf
408 from kallithea.lib.vcs import conf
409 from kallithea.lib.utils2 import aslist
409 from kallithea.lib.utils2 import aslist
410 conf.settings.BACKENDS = {
410 conf.settings.BACKENDS = {
411 'hg': 'kallithea.lib.vcs.backends.hg.MercurialRepository',
411 'hg': 'kallithea.lib.vcs.backends.hg.MercurialRepository',
412 'git': 'kallithea.lib.vcs.backends.git.GitRepository',
412 'git': 'kallithea.lib.vcs.backends.git.GitRepository',
413 }
413 }
414
414
415 conf.settings.GIT_EXECUTABLE_PATH = config.get('git_path', 'git')
415 conf.settings.GIT_EXECUTABLE_PATH = config.get('git_path', 'git')
416 conf.settings.GIT_REV_FILTER = config.get('git_rev_filter', '--all').strip()
416 conf.settings.GIT_REV_FILTER = config.get('git_rev_filter', '--all').strip()
417 conf.settings.DEFAULT_ENCODINGS = aslist(config.get('default_encoding',
417 conf.settings.DEFAULT_ENCODINGS = aslist(config.get('default_encoding',
418 'utf8'), sep=',')
418 'utf8'), sep=',')
419
419
420
420
421 def set_indexer_config(config):
421 def set_indexer_config(config):
422 """
422 """
423 Update Whoosh index mapping
423 Update Whoosh index mapping
424
424
425 :param config: kallithea.CONFIG
425 :param config: kallithea.CONFIG
426 """
426 """
427 from kallithea.config import conf
427 from kallithea.config import conf
428
428
429 log.debug('adding extra into INDEX_EXTENSIONS')
429 log.debug('adding extra into INDEX_EXTENSIONS')
430 conf.INDEX_EXTENSIONS.extend(re.split('\s+', config.get('index.extensions', '')))
430 conf.INDEX_EXTENSIONS.extend(re.split('\s+', config.get('index.extensions', '')))
431
431
432 log.debug('adding extra into INDEX_FILENAMES')
432 log.debug('adding extra into INDEX_FILENAMES')
433 conf.INDEX_FILENAMES.extend(re.split('\s+', config.get('index.filenames', '')))
433 conf.INDEX_FILENAMES.extend(re.split('\s+', config.get('index.filenames', '')))
434
434
435
435
436 def map_groups(path):
436 def map_groups(path):
437 """
437 """
438 Given a full path to a repository, create all nested groups that this
438 Given a full path to a repository, create all nested groups that this
439 repo is inside. This function creates parent-child relationships between
439 repo is inside. This function creates parent-child relationships between
440 groups and creates default perms for all new groups.
440 groups and creates default perms for all new groups.
441
441
442 :param path: full path to repository
442 :param path: full path to repository
443 """
443 """
444 sa = meta.Session()
444 sa = meta.Session()
445 groups = path.split(Repository.url_sep())
445 groups = path.split(Repository.url_sep())
446 parent = None
446 parent = None
447 group = None
447 group = None
448
448
449 # last element is repo in nested groups structure
449 # last element is repo in nested groups structure
450 groups = groups[:-1]
450 groups = groups[:-1]
451 rgm = RepoGroupModel(sa)
451 rgm = RepoGroupModel(sa)
452 owner = User.get_first_admin()
452 owner = User.get_first_admin()
453 for lvl, group_name in enumerate(groups):
453 for lvl, group_name in enumerate(groups):
454 group_name = u'/'.join(groups[:lvl] + [group_name])
454 group_name = u'/'.join(groups[:lvl] + [group_name])
455 group = RepoGroup.get_by_group_name(group_name)
455 group = RepoGroup.get_by_group_name(group_name)
456 desc = '%s group' % group_name
456 desc = '%s group' % group_name
457
457
458 # skip folders that are now removed repos
458 # skip folders that are now removed repos
459 if REMOVED_REPO_PAT.match(group_name):
459 if REMOVED_REPO_PAT.match(group_name):
460 break
460 break
461
461
462 if group is None:
462 if group is None:
463 log.debug('creating group level: %s group_name: %s',
463 log.debug('creating group level: %s group_name: %s',
464 lvl, group_name)
464 lvl, group_name)
465 group = RepoGroup(group_name, parent)
465 group = RepoGroup(group_name, parent)
466 group.group_description = desc
466 group.group_description = desc
467 group.user = owner
467 group.user = owner
468 sa.add(group)
468 sa.add(group)
469 perm_obj = rgm._create_default_perms(group)
469 perm_obj = rgm._create_default_perms(group)
470 sa.add(perm_obj)
470 sa.add(perm_obj)
471 sa.flush()
471 sa.flush()
472
472
473 parent = group
473 parent = group
474 return group
474 return group
475
475
476
476
477 def repo2db_mapper(initial_repo_list, remove_obsolete=False,
477 def repo2db_mapper(initial_repo_list, remove_obsolete=False,
478 install_git_hooks=False, user=None, overwrite_git_hooks=False):
478 install_git_hooks=False, user=None, overwrite_git_hooks=False):
479 """
479 """
480 maps all repos given in initial_repo_list; non-existing repositories
480 maps all repos given in initial_repo_list; non-existing repositories
481 are created, and if remove_obsolete is True it also checks for db entries
481 are created, and if remove_obsolete is True it also checks for db entries
482 that are not in initial_repo_list and removes them.
482 that are not in initial_repo_list and removes them.
483
483
484 :param initial_repo_list: list of repositories found by scanning methods
484 :param initial_repo_list: list of repositories found by scanning methods
485 :param remove_obsolete: check for obsolete entries in database
485 :param remove_obsolete: check for obsolete entries in database
486 :param install_git_hooks: if this is True, also check and install git hook
486 :param install_git_hooks: if this is True, also check and install git hook
487 for a repo if missing
487 for a repo if missing
488 :param overwrite_git_hooks: if this is True, overwrite any existing git hooks
488 :param overwrite_git_hooks: if this is True, overwrite any existing git hooks
489 that may be encountered (even if user-deployed)
489 that may be encountered (even if user-deployed)
490 """
490 """
491 from kallithea.model.repo import RepoModel
491 from kallithea.model.repo import RepoModel
492 from kallithea.model.scm import ScmModel
492 from kallithea.model.scm import ScmModel
493 sa = meta.Session()
493 sa = meta.Session()
494 repo_model = RepoModel()
494 repo_model = RepoModel()
495 if user is None:
495 if user is None:
496 user = User.get_first_admin()
496 user = User.get_first_admin()
497 added = []
497 added = []
498
498
499 ##creation defaults
499 ##creation defaults
500 defs = Setting.get_default_repo_settings(strip_prefix=True)
500 defs = Setting.get_default_repo_settings(strip_prefix=True)
501 enable_statistics = defs.get('repo_enable_statistics')
501 enable_statistics = defs.get('repo_enable_statistics')
502 enable_locking = defs.get('repo_enable_locking')
502 enable_locking = defs.get('repo_enable_locking')
503 enable_downloads = defs.get('repo_enable_downloads')
503 enable_downloads = defs.get('repo_enable_downloads')
504 private = defs.get('repo_private')
504 private = defs.get('repo_private')
505
505
506 for name, repo in initial_repo_list.items():
506 for name, repo in initial_repo_list.items():
507 group = map_groups(name)
507 group = map_groups(name)
508 unicode_name = safe_unicode(name)
508 unicode_name = safe_unicode(name)
509 db_repo = repo_model.get_by_repo_name(unicode_name)
509 db_repo = repo_model.get_by_repo_name(unicode_name)
510 # found repo that is on filesystem not in Kallithea database
510 # found repo that is on filesystem not in Kallithea database
511 if not db_repo:
511 if not db_repo:
512 log.info('repository %s not found, creating now', name)
512 log.info('repository %s not found, creating now', name)
513 added.append(name)
513 added.append(name)
514 desc = (repo.description
514 desc = (repo.description
515 if repo.description != 'unknown'
515 if repo.description != 'unknown'
516 else '%s repository' % name)
516 else '%s repository' % name)
517
517
518 new_repo = repo_model._create_repo(
518 new_repo = repo_model._create_repo(
519 repo_name=name,
519 repo_name=name,
520 repo_type=repo.alias,
520 repo_type=repo.alias,
521 description=desc,
521 description=desc,
522 repo_group=getattr(group, 'group_id', None),
522 repo_group=getattr(group, 'group_id', None),
523 owner=user,
523 owner=user,
524 enable_locking=enable_locking,
524 enable_locking=enable_locking,
525 enable_downloads=enable_downloads,
525 enable_downloads=enable_downloads,
526 enable_statistics=enable_statistics,
526 enable_statistics=enable_statistics,
527 private=private,
527 private=private,
528 state=Repository.STATE_CREATED
528 state=Repository.STATE_CREATED
529 )
529 )
530 sa.commit()
530 sa.commit()
531 # we added that repo just now, and make sure it has githook
531 # we added that repo just now, and make sure it has githook
532 # installed, and updated server info
532 # installed, and updated server info
533 if new_repo.repo_type == 'git':
533 if new_repo.repo_type == 'git':
534 git_repo = new_repo.scm_instance
534 git_repo = new_repo.scm_instance
535 ScmModel().install_git_hooks(git_repo)
535 ScmModel().install_git_hooks(git_repo)
536 # update repository server-info
536 # update repository server-info
537 log.debug('Running update server info')
537 log.debug('Running update server info')
538 git_repo._update_server_info()
538 git_repo._update_server_info()
539 new_repo.update_changeset_cache()
539 new_repo.update_changeset_cache()
540 elif install_git_hooks:
540 elif install_git_hooks:
541 if db_repo.repo_type == 'git':
541 if db_repo.repo_type == 'git':
542 ScmModel().install_git_hooks(db_repo.scm_instance, force_create=overwrite_git_hooks)
542 ScmModel().install_git_hooks(db_repo.scm_instance, force_create=overwrite_git_hooks)
543
543
544 removed = []
544 removed = []
545 # remove from database those repositories that are not in the filesystem
545 # remove from database those repositories that are not in the filesystem
546 unicode_initial_repo_list = set(safe_unicode(name) for name in initial_repo_list)
546 unicode_initial_repo_list = set(safe_unicode(name) for name in initial_repo_list)
547 for repo in sa.query(Repository).all():
547 for repo in sa.query(Repository).all():
548 if repo.repo_name not in unicode_initial_repo_list:
548 if repo.repo_name not in unicode_initial_repo_list:
549 if remove_obsolete:
549 if remove_obsolete:
550 log.debug("Removing non-existing repository found in db `%s`",
550 log.debug("Removing non-existing repository found in db `%s`",
551 repo.repo_name)
551 repo.repo_name)
552 try:
552 try:
553 RepoModel(sa).delete(repo, forks='detach', fs_remove=False)
553 RepoModel(sa).delete(repo, forks='detach', fs_remove=False)
554 sa.commit()
554 sa.commit()
555 except Exception:
555 except Exception:
556 #don't hold further removals on error
556 #don't hold further removals on error
557 log.error(traceback.format_exc())
557 log.error(traceback.format_exc())
558 sa.rollback()
558 sa.rollback()
559 removed.append(repo.repo_name)
559 removed.append(repo.repo_name)
560 return added, removed
560 return added, removed
561
561
562
562
563 # set cache regions for beaker so celery can utilise it
563 # set cache regions for beaker so celery can utilise it
564 def add_cache(settings):
564 def add_cache(settings):
565 cache_settings = {'regions': None}
565 cache_settings = {'regions': None}
566 for key in settings.keys():
566 for key in settings.keys():
567 for prefix in ['beaker.cache.', 'cache.']:
567 for prefix in ['beaker.cache.', 'cache.']:
568 if key.startswith(prefix):
568 if key.startswith(prefix):
569 name = key.split(prefix)[1].strip()
569 name = key.split(prefix)[1].strip()
570 cache_settings[name] = settings[key].strip()
570 cache_settings[name] = settings[key].strip()
571 if cache_settings['regions']:
571 if cache_settings['regions']:
572 for region in cache_settings['regions'].split(','):
572 for region in cache_settings['regions'].split(','):
573 region = region.strip()
573 region = region.strip()
574 region_settings = {}
574 region_settings = {}
575 for key, value in cache_settings.items():
575 for key, value in cache_settings.items():
576 if key.startswith(region):
576 if key.startswith(region):
577 region_settings[key.split('.')[1]] = value
577 region_settings[key.split('.')[1]] = value
578 region_settings['expire'] = int(region_settings.get('expire',
578 region_settings['expire'] = int(region_settings.get('expire',
579 60))
579 60))
580 region_settings.setdefault('lock_dir',
580 region_settings.setdefault('lock_dir',
581 cache_settings.get('lock_dir'))
581 cache_settings.get('lock_dir'))
582 region_settings.setdefault('data_dir',
582 region_settings.setdefault('data_dir',
583 cache_settings.get('data_dir'))
583 cache_settings.get('data_dir'))
584
584
585 if 'type' not in region_settings:
585 if 'type' not in region_settings:
586 region_settings['type'] = cache_settings.get('type',
586 region_settings['type'] = cache_settings.get('type',
587 'memory')
587 'memory')
588 beaker.cache.cache_regions[region] = region_settings
588 beaker.cache.cache_regions[region] = region_settings
589
589
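# A minimal usage sketch; the keys mirror what a paste .ini file would provide
# (the values here are hypothetical) and end up as beaker cache regions.
from kallithea.lib.utils import add_cache
from beaker.cache import cache_regions

add_cache({
    'beaker.cache.regions': 'short_term,long_term',
    'beaker.cache.data_dir': '/tmp/cache/data',
    'beaker.cache.lock_dir': '/tmp/cache/lock',
    'beaker.cache.short_term.type': 'memory',
    'beaker.cache.short_term.expire': '60',
    'beaker.cache.long_term.type': 'memory',
    'beaker.cache.long_term.expire': '3600',
})
print(cache_regions['short_term'])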
590
590
591 def load_rcextensions(root_path):
591 def load_rcextensions(root_path):
592 import kallithea
592 import kallithea
593 from kallithea.config import conf
593 from kallithea.config import conf
594
594
595 path = os.path.join(root_path, 'rcextensions', '__init__.py')
595 path = os.path.join(root_path, 'rcextensions', '__init__.py')
596 if os.path.isfile(path):
596 if os.path.isfile(path):
597 rcext = create_module('rc', path)
597 rcext = create_module('rc', path)
598 EXT = kallithea.EXTENSIONS = rcext
598 EXT = kallithea.EXTENSIONS = rcext
599 log.debug('Found rcextensions now loading %s...', rcext)
599 log.debug('Found rcextensions now loading %s...', rcext)
600
600
601 # Additional mappings that are not present in the pygments lexers
601 # Additional mappings that are not present in the pygments lexers
602 conf.LANGUAGES_EXTENSIONS_MAP.update(getattr(EXT, 'EXTRA_MAPPINGS', {}))
602 conf.LANGUAGES_EXTENSIONS_MAP.update(getattr(EXT, 'EXTRA_MAPPINGS', {}))
603
603
604 #OVERRIDE OUR EXTENSIONS FROM RC-EXTENSIONS (if present)
604 #OVERRIDE OUR EXTENSIONS FROM RC-EXTENSIONS (if present)
605
605
606 if getattr(EXT, 'INDEX_EXTENSIONS', []):
606 if getattr(EXT, 'INDEX_EXTENSIONS', []):
607 log.debug('settings custom INDEX_EXTENSIONS')
607 log.debug('settings custom INDEX_EXTENSIONS')
608 conf.INDEX_EXTENSIONS = getattr(EXT, 'INDEX_EXTENSIONS', [])
608 conf.INDEX_EXTENSIONS = getattr(EXT, 'INDEX_EXTENSIONS', [])
609
609
610 #ADDITIONAL MAPPINGS
610 #ADDITIONAL MAPPINGS
611 log.debug('adding extra into INDEX_EXTENSIONS')
611 log.debug('adding extra into INDEX_EXTENSIONS')
612 conf.INDEX_EXTENSIONS.extend(getattr(EXT, 'EXTRA_INDEX_EXTENSIONS', []))
612 conf.INDEX_EXTENSIONS.extend(getattr(EXT, 'EXTRA_INDEX_EXTENSIONS', []))
613
613
614 # auto check if the module is not missing any data, set to default if is
614 # auto check if the module is not missing any data, set to default if is
615 # this will help autoupdate new feature of rcext module
615 # this will help autoupdate new feature of rcext module
616 #from kallithea.config import rcextensions
616 #from kallithea.config import rcextensions
617 #for k in dir(rcextensions):
617 #for k in dir(rcextensions):
618 # if not k.startswith('_') and not hasattr(EXT, k):
618 # if not k.startswith('_') and not hasattr(EXT, k):
619 # setattr(EXT, k, getattr(rcextensions, k))
619 # setattr(EXT, k, getattr(rcextensions, k))
620
620
621
621
622 def get_custom_lexer(extension):
622 def get_custom_lexer(extension):
623 """
623 """
624 returns a custom lexer if it's defined in rcextensions module, or None
624 returns a custom lexer if it's defined in rcextensions module, or None
625 if there's no custom lexer defined
625 if there's no custom lexer defined
626 """
626 """
627 import kallithea
627 import kallithea
628 from pygments import lexers
628 from pygments import lexers
629 #check if we didn't define this extension as other lexer
629 #check if we didn't define this extension as other lexer
630 if kallithea.EXTENSIONS and extension in kallithea.EXTENSIONS.EXTRA_LEXERS:
630 if kallithea.EXTENSIONS and extension in kallithea.EXTENSIONS.EXTRA_LEXERS:
631 _lexer_name = kallithea.EXTENSIONS.EXTRA_LEXERS[extension]
631 _lexer_name = kallithea.EXTENSIONS.EXTRA_LEXERS[extension]
632 return lexers.get_lexer_by_name(_lexer_name)
632 return lexers.get_lexer_by_name(_lexer_name)
633
633
634
634
635 #==============================================================================
635 #==============================================================================
636 # TEST FUNCTIONS AND CREATORS
636 # TEST FUNCTIONS AND CREATORS
637 #==============================================================================
637 #==============================================================================
638 def create_test_index(repo_location, config, full_index):
638 def create_test_index(repo_location, config, full_index):
639 """
639 """
640 Makes default test index
640 Makes default test index
641
641
642 :param config: test config
642 :param config: test config
643 :param full_index:
643 :param full_index:
644 """
644 """
645
645
646 from kallithea.lib.indexers.daemon import WhooshIndexingDaemon
646 from kallithea.lib.indexers.daemon import WhooshIndexingDaemon
647 from kallithea.lib.pidlock import DaemonLock, LockHeld
647 from kallithea.lib.pidlock import DaemonLock, LockHeld
648
648
649 repo_location = repo_location
649 repo_location = repo_location
650
650
651 index_location = os.path.join(config['app_conf']['index_dir'])
651 index_location = os.path.join(config['app_conf']['index_dir'])
652 if not os.path.exists(index_location):
652 if not os.path.exists(index_location):
653 os.makedirs(index_location)
653 os.makedirs(index_location)
654
654
655 try:
655 try:
656 l = DaemonLock(file_=jn(dn(index_location), 'make_index.lock'))
656 l = DaemonLock(file_=jn(dn(index_location), 'make_index.lock'))
657 WhooshIndexingDaemon(index_location=index_location,
657 WhooshIndexingDaemon(index_location=index_location,
658 repo_location=repo_location) \
658 repo_location=repo_location) \
659 .run(full_index=full_index)
659 .run(full_index=full_index)
660 l.release()
660 l.release()
661 except LockHeld:
661 except LockHeld:
662 pass
662 pass
663
663
664
664
665 def create_test_env(repos_test_path, config):
665 def create_test_env(repos_test_path, config):
666 """
666 """
667 Makes a fresh database and
667 Makes a fresh database and
668 install test repository into tmp dir
668 install test repository into tmp dir
669 """
669 """
670 from kallithea.lib.db_manage import DbManage
670 from kallithea.lib.db_manage import DbManage
671 from kallithea.tests import HG_REPO, GIT_REPO, TESTS_TMP_PATH
671 from kallithea.tests import HG_REPO, GIT_REPO, TESTS_TMP_PATH
672
672
673 # PART ONE create db
673 # PART ONE create db
674 dbconf = config['sqlalchemy.db1.url']
674 dbconf = config['sqlalchemy.db1.url']
675 log.debug('making test db %s', dbconf)
675 log.debug('making test db %s', dbconf)
676
676
677 # create test dir if it doesn't exist
677 # create test dir if it doesn't exist
678 if not os.path.isdir(repos_test_path):
678 if not os.path.isdir(repos_test_path):
679 log.debug('Creating testdir %s', repos_test_path)
679 log.debug('Creating testdir %s', repos_test_path)
680 os.makedirs(repos_test_path)
680 os.makedirs(repos_test_path)
681
681
682 dbmanage = DbManage(log_sql=True, dbconf=dbconf, root=config['here'],
682 dbmanage = DbManage(log_sql=True, dbconf=dbconf, root=config['here'],
683 tests=True)
683 tests=True)
684 dbmanage.create_tables(override=True)
684 dbmanage.create_tables(override=True)
685 # for tests dynamically set new root paths based on generated content
685 # for tests dynamically set new root paths based on generated content
686 dbmanage.create_settings(dbmanage.config_prompt(repos_test_path))
686 dbmanage.create_settings(dbmanage.config_prompt(repos_test_path))
687 dbmanage.create_default_user()
687 dbmanage.create_default_user()
688 dbmanage.admin_prompt()
688 dbmanage.admin_prompt()
689 dbmanage.create_permissions()
689 dbmanage.create_permissions()
690 dbmanage.populate_default_permissions()
690 dbmanage.populate_default_permissions()
691 Session().commit()
691 Session().commit()
692 # PART TWO make test repo
692 # PART TWO make test repo
693 log.debug('making test vcs repositories')
693 log.debug('making test vcs repositories')
694
694
695 idx_path = config['app_conf']['index_dir']
695 idx_path = config['app_conf']['index_dir']
696 data_path = config['app_conf']['cache_dir']
696 data_path = config['app_conf']['cache_dir']
697
697
698 #clean index and data
698 #clean index and data
699 if idx_path and os.path.exists(idx_path):
699 if idx_path and os.path.exists(idx_path):
700 log.debug('remove %s', idx_path)
700 log.debug('remove %s', idx_path)
701 shutil.rmtree(idx_path)
701 shutil.rmtree(idx_path)
702
702
703 if data_path and os.path.exists(data_path):
703 if data_path and os.path.exists(data_path):
704 log.debug('remove %s', data_path)
704 log.debug('remove %s', data_path)
705 shutil.rmtree(data_path)
705 shutil.rmtree(data_path)
706
706
707 #CREATE DEFAULT TEST REPOS
707 #CREATE DEFAULT TEST REPOS
708 cur_dir = dn(dn(abspath(__file__)))
708 cur_dir = dn(dn(abspath(__file__)))
709 tar = tarfile.open(jn(cur_dir, 'tests', 'fixtures', "vcs_test_hg.tar.gz"))
709 tar = tarfile.open(jn(cur_dir, 'tests', 'fixtures', "vcs_test_hg.tar.gz"))
710 tar.extractall(jn(TESTS_TMP_PATH, HG_REPO))
710 tar.extractall(jn(TESTS_TMP_PATH, HG_REPO))
711 tar.close()
711 tar.close()
712
712
713 cur_dir = dn(dn(abspath(__file__)))
713 cur_dir = dn(dn(abspath(__file__)))
714 tar = tarfile.open(jn(cur_dir, 'tests', 'fixtures', "vcs_test_git.tar.gz"))
714 tar = tarfile.open(jn(cur_dir, 'tests', 'fixtures', "vcs_test_git.tar.gz"))
715 tar.extractall(jn(TESTS_TMP_PATH, GIT_REPO))
715 tar.extractall(jn(TESTS_TMP_PATH, GIT_REPO))
716 tar.close()
716 tar.close()
717
717
718 #LOAD VCS test stuff
718 #LOAD VCS test stuff
719 from kallithea.tests.vcs import setup_package
719 from kallithea.tests.vcs import setup_package
720 setup_package()
720 setup_package()
721
721
722
722
723 #==============================================================================
723 #==============================================================================
724 # PASTER COMMANDS
724 # PASTER COMMANDS
725 #==============================================================================
725 #==============================================================================
726 class BasePasterCommand(Command):
726 class BasePasterCommand(Command):
727 """
727 """
728 Abstract Base Class for paster commands.
728 Abstract Base Class for paster commands.
729
729
730 The celery commands are somewhat aggressive about loading
730 The celery commands are somewhat aggressive about loading
731 celery.conf, and since our module sets the `CELERY_LOADER`
731 celery.conf, and since our module sets the `CELERY_LOADER`
732 environment variable to our loader, we have to bootstrap a bit and
732 environment variable to our loader, we have to bootstrap a bit and
733 make sure we've had a chance to load the pylons config off of the
733 make sure we've had a chance to load the pylons config off of the
734 command line, otherwise everything fails.
734 command line, otherwise everything fails.
735 """
735 """
736 min_args = 1
736 min_args = 1
737 min_args_error = "Please provide a paster config file as an argument."
737 min_args_error = "Please provide a paster config file as an argument."
738 takes_config_file = 1
738 takes_config_file = 1
739 requires_config_file = True
739 requires_config_file = True
740
740
741 def run(self, args):
741 def run(self, args):
742 """
742 """
743 Overrides Command.run
743 Overrides Command.run
744
744
745 Checks for a config file argument and loads it.
745 Checks for a config file argument and loads it.
746 """
746 """
747 if len(args) < self.min_args:
747 if len(args) < self.min_args:
748 raise BadCommand(
748 raise BadCommand(
749 self.min_args_error % {'min_args': self.min_args,
749 self.min_args_error % {'min_args': self.min_args,
750 'actual_args': len(args)})
750 'actual_args': len(args)})
751
751
752 # Decrement because we're going to lob off the first argument.
752 # Decrement because we're going to lob off the first argument.
753 # @@ This is hacky
753 # @@ This is hacky
754 self.min_args -= 1
754 self.min_args -= 1
755 self.bootstrap_config(args[0])
755 self.bootstrap_config(args[0])
756 self.update_parser()
756 self.update_parser()
757 return super(BasePasterCommand, self).run(args[1:])
757 return super(BasePasterCommand, self).run(args[1:])
758
758
759 def update_parser(self):
759 def update_parser(self):
760 """
760 """
761 Abstract method. Allows for the class's parser to be updated
761 Abstract method. Allows for the class's parser to be updated
762 before the superclass's `run` method is called. Necessary to
762 before the superclass's `run` method is called. Necessary to
763 allow options/arguments to be passed through to the underlying
763 allow options/arguments to be passed through to the underlying
764 celery command.
764 celery command.
765 """
765 """
766 raise NotImplementedError("Abstract Method.")
766 raise NotImplementedError("Abstract Method.")
767
767
768 def bootstrap_config(self, conf):
768 def bootstrap_config(self, conf):
769 """
769 """
770 Loads the pylons configuration.
770 Loads the pylons configuration.
771 """
771 """
772 from pylons import config as pylonsconfig
772 from pylons import config as pylonsconfig
773
773
774 self.path_to_ini_file = os.path.realpath(conf)
774 self.path_to_ini_file = os.path.realpath(conf)
775 conf = paste.deploy.appconfig('config:' + self.path_to_ini_file)
775 conf = paste.deploy.appconfig('config:' + self.path_to_ini_file)
776 pylonsconfig.init_app(conf.global_conf, conf.local_conf)
776 pylonsconfig.init_app(conf.global_conf, conf.local_conf)
777
777
778 def _init_session(self):
778 def _init_session(self):
779 """
779 """
780 Inits SqlAlchemy Session
780 Inits SqlAlchemy Session
781 """
781 """
782 logging.config.fileConfig(self.path_to_ini_file)
782 logging.config.fileConfig(self.path_to_ini_file)
783
783
784 from pylons import config
784 from pylons import config
785 from kallithea.model import init_model
785 from kallithea.model import init_model
786 from kallithea.lib.utils2 import engine_from_config
786 from kallithea.lib.utils2 import engine_from_config
787 add_cache(config)
787 add_cache(config)
788 engine = engine_from_config(config, 'sqlalchemy.db1.')
788 engine = engine_from_config(config, 'sqlalchemy.db1.')
789 init_model(engine)
789 init_model(engine)
790
790
791
791
792 def check_git_version():
792 def check_git_version():
793 """
793 """
794 Checks what version of git is installed in system, and issues a warning
794 Checks what version of git is installed in system, and issues a warning
795 if it's too old for Kallithea to work properly.
795 if it's too old for Kallithea to work properly.
796 """
796 """
797 from kallithea import BACKENDS
797 from kallithea import BACKENDS
798 from kallithea.lib.vcs.backends.git.repository import GitRepository
798 from kallithea.lib.vcs.backends.git.repository import GitRepository
799 from kallithea.lib.vcs.conf import settings
799 from kallithea.lib.vcs.conf import settings
800 from distutils.version import StrictVersion
800 from distutils.version import StrictVersion
801
801
802 if 'git' not in BACKENDS:
802 if 'git' not in BACKENDS:
803 return None
803 return None
804
804
805 stdout, stderr = GitRepository._run_git_command(['--version'], _bare=True,
805 stdout, stderr = GitRepository._run_git_command(['--version'], _bare=True,
806 _safe=True)
806 _safe=True)
807
807
808 m = re.search("\d+.\d+.\d+", stdout)
808 m = re.search("\d+.\d+.\d+", stdout)
809 if m:
809 if m:
810 ver = StrictVersion(m.group(0))
810 ver = StrictVersion(m.group(0))
811 else:
811 else:
812 ver = StrictVersion('0.0.0')
812 ver = StrictVersion('0.0.0')
813
813
814 req_ver = StrictVersion('1.7.4')
814 req_ver = StrictVersion('1.7.4')
815
815
816 log.debug('Git executable: "%s" version %s detected: %s',
816 log.debug('Git executable: "%s" version %s detected: %s',
817 settings.GIT_EXECUTABLE_PATH, ver, stdout)
817 settings.GIT_EXECUTABLE_PATH, ver, stdout)
818 if stderr:
818 if stderr:
819 log.warning('Error detecting git version: %r', stderr)
819 log.warning('Error detecting git version: %r', stderr)
820 elif ver < req_ver:
820 elif ver < req_ver:
821 log.warning('Kallithea detected git version %s, which is too old '
821 log.warning('Kallithea detected git version %s, which is too old '
822 'for the system to function properly. '
822 'for the system to function properly. '
823 'Please upgrade to version %s or later.' % (ver, req_ver))
823 'Please upgrade to version %s or later.' % (ver, req_ver))
824 return ver
824 return ver
825
825
826
826
827 @decorator.decorator
827 @decorator.decorator
828 def jsonify(func, *args, **kwargs):
828 def jsonify(func, *args, **kwargs):
829 """Action decorator that formats output for JSON
829 """Action decorator that formats output for JSON
830
830
831 Given a function that will return content, this decorator will turn
831 Given a function that will return content, this decorator will turn
832 the result into JSON, with a content-type of 'application/json' and
832 the result into JSON, with a content-type of 'application/json' and
833 output it.
833 output it.
834
834
835 """
835 """
836 from pylons.decorators.util import get_pylons
836 from pylons.decorators.util import get_pylons
837 from kallithea.lib.compat import json
837 from kallithea.lib.compat import json
838 pylons = get_pylons(args)
838 pylons = get_pylons(args)
839 pylons.response.headers['Content-Type'] = 'application/json; charset=utf-8'
839 pylons.response.headers['Content-Type'] = 'application/json; charset=utf-8'
840 data = func(*args, **kwargs)
840 data = func(*args, **kwargs)
841 if isinstance(data, (list, tuple)):
841 if isinstance(data, (list, tuple)):
842 msg = "JSON responses with Array envelopes are susceptible to " \
842 msg = "JSON responses with Array envelopes are susceptible to " \
843 "cross-site data leak attacks, see " \
843 "cross-site data leak attacks, see " \
844 "http://wiki.pylonshq.com/display/pylonsfaq/Warnings"
844 "http://wiki.pylonshq.com/display/pylonsfaq/Warnings"
845 warnings.warn(msg, Warning, 2)
845 warnings.warn(msg, Warning, 2)
846 log.warning(msg)
846 log.warning(msg)
847 log.debug("Returning JSON wrapped action output")
847 log.debug("Returning JSON wrapped action output")
848 return json.dumps(data, encoding='utf-8')
848 return json.dumps(data, encoding='utf-8')
849
849
850
850
851 def conditional_cache(region, prefix, condition, func):
851 def conditional_cache(region, prefix, condition, func):
852 """
852 """
853
853
854 Conditional caching function use like::
854 Conditional caching function use like::
855 def _c(arg):
855 def _c(arg):
856 #heavy computation function
856 #heavy computation function
857 return data
857 return data
858
858
859 # denpending from condition the compute is wrapped in cache or not
859 # depending from condition the compute is wrapped in cache or not
860 compute = conditional_cache('short_term', 'cache_desc', condition=True, func=func)
860 compute = conditional_cache('short_term', 'cache_desc', condition=True, func=func)
861 return compute(arg)
861 return compute(arg)
862
862
863 :param region: name of cache region
863 :param region: name of cache region
864 :param prefix: cache region prefix
864 :param prefix: cache region prefix
865 :param condition: condition for cache to be triggered, and return data cached
865 :param condition: condition for cache to be triggered, and return data cached
866 :param func: wrapped heavy function to compute
866 :param func: wrapped heavy function to compute
867
867
868 """
868 """
869 wrapped = func
869 wrapped = func
870 if condition:
870 if condition:
871 log.debug('conditional_cache: True, wrapping call of '
871 log.debug('conditional_cache: True, wrapping call of '
872 'func: %s into %s region cache' % (region, func))
872 'func: %s into %s region cache' % (region, func))
873 wrapped = _cache_decorate((prefix,), None, None, region)(func)
873 wrapped = _cache_decorate((prefix,), None, None, region)(func)
874
874
875 return wrapped
875 return wrapped
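# A minimal usage sketch following the docstring above; it assumes a
# 'short_term' beaker cache region has already been set up (e.g. via
# add_cache()), and uses a throwaway prefix and function for illustration.
from kallithea.lib.utils import conditional_cache

def _heavy(arg):
    # stand-in for an expensive computation
    return arg * 2

compute = conditional_cache('short_term', 'demo_prefix', condition=True, func=_heavy)
print(compute(21))   # computed once, then served from the cache region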