use consistent string formatting
marcink
r3981:4c78da22 default
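The changeset touches two `log.warn` calls in `_get_repos`, switching them to the `%`-interpolation style used by the other logging calls in this module. A minimal sketch of the two styles for context (the logger and variable names below are illustrative, not taken from this changeset):

    import logging

    log = logging.getLogger(__name__)
    path = '/srv/repos/example'

    # style this module standardizes on: build the message with %-interpolation
    log.warn('ignoring repo path without access: %s' % (path,))

    # lazy style that was replaced here: let the logging module interpolate the args
    log.warn('ignoring repo path without access: %s', path)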
@@ -1,817 +1,817 @@
# -*- coding: utf-8 -*-
"""
    rhodecode.lib.utils
    ~~~~~~~~~~~~~~~~~~~

    Utilities library for RhodeCode

    :created_on: Apr 18, 2010
    :author: marcink
    :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
    :license: GPLv3, see COPYING for more details.
"""
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import os
import re
import logging
import datetime
import traceback
import paste
import beaker
import tarfile
import shutil
import decorator
import warnings
from os.path import abspath
from os.path import dirname as dn, join as jn

from paste.script.command import Command, BadCommand

from webhelpers.text import collapse, remove_formatting, strip_tags

from rhodecode.lib.vcs import get_backend
from rhodecode.lib.vcs.backends.base import BaseChangeset
from rhodecode.lib.vcs.utils.lazy import LazyProperty
from rhodecode.lib.vcs.utils.hgcompat import ui, config
from rhodecode.lib.vcs.utils.helpers import get_scm
from rhodecode.lib.vcs.exceptions import VCSError

from rhodecode.lib.caching_query import FromCache

from rhodecode.model import meta
from rhodecode.model.db import Repository, User, RhodeCodeUi, \
    UserLog, RepoGroup, RhodeCodeSetting, CacheInvalidation, UserGroup
from rhodecode.model.meta import Session
from rhodecode.model.repos_group import ReposGroupModel
from rhodecode.lib.utils2 import safe_str, safe_unicode
from rhodecode.lib.vcs.utils.fakemod import create_module
from rhodecode.model.users_group import UserGroupModel

log = logging.getLogger(__name__)

REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')


def recursive_replace(str_, replace=' '):
    """
    Recursive replace of given sign to just one instance

    :param str_: given string
    :param replace: char to find and replace multiple instances

    Examples::
    >>> recursive_replace("Mighty---Mighty-Bo--sstones",'-')
    'Mighty-Mighty-Bo-sstones'
    """

    if str_.find(replace * 2) == -1:
        return str_
    else:
        str_ = str_.replace(replace * 2, replace)
        return recursive_replace(str_, replace)


def repo_name_slug(value):
    """
    Return slug of name of repository
    This function is called on each creation/modification
    of repository to prevent bad names in repo
    """

    slug = remove_formatting(value)
    slug = strip_tags(slug)

    for c in """`?=[]\;'"<>,/~!@#$%^&*()+{}|: """:
        slug = slug.replace(c, '-')
    slug = recursive_replace(slug, '-')
    slug = collapse(slug, '-')
    return slug


#==============================================================================
# PERM DECORATOR HELPERS FOR EXTRACTING NAMES FOR PERM CHECKS
#==============================================================================
def get_repo_slug(request):
    _repo = request.environ['pylons.routes_dict'].get('repo_name')
    if _repo:
        _repo = _repo.rstrip('/')
    return _repo


def get_repos_group_slug(request):
    _group = request.environ['pylons.routes_dict'].get('group_name')
    if _group:
        _group = _group.rstrip('/')
    return _group


def get_user_group_slug(request):
    _group = request.environ['pylons.routes_dict'].get('id')
    try:
        _group = UserGroup.get(_group)
        if _group:
            _group = _group.users_group_name
    except Exception:
        log.debug(traceback.format_exc())
        #catch all failures here
        pass

    return _group


def action_logger(user, action, repo, ipaddr='', sa=None, commit=False):
    """
    Action logger for various actions made by users

    :param user: user that made this action, can be a unique username string or
        object containing user_id attribute
    :param action: action to log, should be on of predefined unique actions for
        easy translations
    :param repo: string name of repository or object containing repo_id,
        that action was made on
    :param ipaddr: optional ip address from what the action was made
    :param sa: optional sqlalchemy session

    """

    if not sa:
        sa = meta.Session()

    try:
        if hasattr(user, 'user_id'):
            user_obj = User.get(user.user_id)
        elif isinstance(user, basestring):
            user_obj = User.get_by_username(user)
        else:
            raise Exception('You have to provide a user object or a username')

        if hasattr(repo, 'repo_id'):
            repo_obj = Repository.get(repo.repo_id)
            repo_name = repo_obj.repo_name
        elif isinstance(repo, basestring):
            repo_name = repo.lstrip('/')
            repo_obj = Repository.get_by_repo_name(repo_name)
        else:
            repo_obj = None
            repo_name = ''

        user_log = UserLog()
        user_log.user_id = user_obj.user_id
        user_log.username = user_obj.username
        user_log.action = safe_unicode(action)

        user_log.repository = repo_obj
        user_log.repository_name = repo_name

        user_log.action_date = datetime.datetime.now()
        user_log.user_ip = ipaddr
        sa.add(user_log)

        log.info('Logging action:%s on %s by user:%s ip:%s' %
                 (action, safe_unicode(repo), user_obj, ipaddr))
        if commit:
            sa.commit()
    except Exception:
        log.error(traceback.format_exc())
        raise


def get_filesystem_repos(path, recursive=False, skip_removed_repos=True):
    """
    Scans given path for repos and return (name,(type,path)) tuple

    :param path: path to scan for repositories
    :param recursive: recursive search and return names with subdirs in front
    """

    # remove ending slash for better results
    path = path.rstrip(os.sep)
    log.debug('now scanning in %s location recursive:%s...' % (path, recursive))

    def _get_repos(p):
        if not os.access(p, os.R_OK) or not os.access(p, os.X_OK):
-            log.warn('ignoring repo path without access: %s', p)
+            log.warn('ignoring repo path without access: %s' % (p,))
            return
        if not os.access(p, os.W_OK):
-            log.warn('repo path without write access: %s', p)
+            log.warn('repo path without write access: %s' % (p,))
        for dirpath in os.listdir(p):
            if os.path.isfile(os.path.join(p, dirpath)):
                continue
            cur_path = os.path.join(p, dirpath)

            # skip removed repos
            if skip_removed_repos and REMOVED_REPO_PAT.match(dirpath):
                continue

            #skip .<somethin> dirs
            if dirpath.startswith('.'):
                continue

            try:
                scm_info = get_scm(cur_path)
                yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
            except VCSError:
                if not recursive:
                    continue
                #check if this dir containts other repos for recursive scan
                rec_path = os.path.join(p, dirpath)
                if os.path.isdir(rec_path):
                    for inner_scm in _get_repos(rec_path):
                        yield inner_scm

    return _get_repos(path)


def is_valid_repo(repo_name, base_path, scm=None):
    """
    Returns True if given path is a valid repository False otherwise.
    If scm param is given also compare if given scm is the same as expected
    from scm parameter

    :param repo_name:
    :param base_path:
    :param scm:

    :return True: if given path is a valid repository
    """
    full_path = os.path.join(safe_str(base_path), safe_str(repo_name))

    try:
        scm_ = get_scm(full_path)
        if scm:
            return scm_[0] == scm
        return True
    except VCSError:
        return False


def is_valid_repos_group(repos_group_name, base_path, skip_path_check=False):
    """
    Returns True if given path is a repository group False otherwise

    :param repo_name:
    :param base_path:
    """
    full_path = os.path.join(safe_str(base_path), safe_str(repos_group_name))

    # check if it's not a repo
    if is_valid_repo(repos_group_name, base_path):
        return False

    try:
        # we need to check bare git repos at higher level
        # since we might match branches/hooks/info/objects or possible
        # other things inside bare git repo
        get_scm(os.path.dirname(full_path))
        return False
    except VCSError:
        pass

    # check if it's a valid path
    if skip_path_check or os.path.isdir(full_path):
        return True

    return False


def ask_ok(prompt, retries=4, complaint='Yes or no please!'):
    while True:
        ok = raw_input(prompt)
        if ok in ('y', 'ye', 'yes'):
            return True
        if ok in ('n', 'no', 'nop', 'nope'):
            return False
        retries = retries - 1
        if retries < 0:
            raise IOError
        print complaint

#propagated from mercurial documentation
ui_sections = ['alias', 'auth',
               'decode/encode', 'defaults',
               'diff', 'email',
               'extensions', 'format',
               'merge-patterns', 'merge-tools',
               'hooks', 'http_proxy',
               'smtp', 'patch',
               'paths', 'profiling',
               'server', 'trusted',
               'ui', 'web', ]


def make_ui(read_from='file', path=None, checkpaths=True, clear_session=True):
    """
    A function that will read python rc files or database
    and make an mercurial ui object from read options

    :param path: path to mercurial config file
    :param checkpaths: check the path
    :param read_from: read from 'file' or 'db'
    """

    baseui = ui.ui()

    # clean the baseui object
    baseui._ocfg = config.config()
    baseui._ucfg = config.config()
    baseui._tcfg = config.config()

    if read_from == 'file':
        if not os.path.isfile(path):
            log.debug('hgrc file is not present at %s, skipping...' % path)
            return False
        log.debug('reading hgrc from %s' % path)
        cfg = config.config()
        cfg.read(path)
        for section in ui_sections:
            for k, v in cfg.items(section):
                log.debug('settings ui from file: [%s] %s=%s' % (section, k, v))
                baseui.setconfig(safe_str(section), safe_str(k), safe_str(v))

    elif read_from == 'db':
        sa = meta.Session()
        ret = sa.query(RhodeCodeUi)\
            .options(FromCache("sql_cache_short", "get_hg_ui_settings"))\
            .all()

        hg_ui = ret
        for ui_ in hg_ui:
            if ui_.ui_active:
                log.debug('settings ui from db: [%s] %s=%s', ui_.ui_section,
                          ui_.ui_key, ui_.ui_value)
                baseui.setconfig(safe_str(ui_.ui_section), safe_str(ui_.ui_key),
                                 safe_str(ui_.ui_value))
                if ui_.ui_key == 'push_ssl':
                    # force set push_ssl requirement to False, rhodecode
                    # handles that
                    baseui.setconfig(safe_str(ui_.ui_section), safe_str(ui_.ui_key),
                                     False)
        if clear_session:
            meta.Session.remove()
    return baseui


def set_rhodecode_config(config):
    """
    Updates pylons config with new settings from database

    :param config:
    """
    hgsettings = RhodeCodeSetting.get_app_settings()

    for k, v in hgsettings.items():
        config[k] = v


def set_vcs_config(config):
    """
    Patch VCS config with some RhodeCode specific stuff

    :param config: rhodecode.CONFIG
    """
    import rhodecode
    from rhodecode.lib.vcs import conf
    from rhodecode.lib.utils2 import aslist
    conf.settings.BACKENDS = {
        'hg': 'rhodecode.lib.vcs.backends.hg.MercurialRepository',
        'git': 'rhodecode.lib.vcs.backends.git.GitRepository',
    }

    conf.settings.GIT_EXECUTABLE_PATH = config.get('git_path', 'git')
    conf.settings.GIT_REV_FILTER = config.get('git_rev_filter', '--all').strip()
    conf.settings.DEFAULT_ENCODINGS = aslist(config.get('default_encoding',
                                                        'utf8'), sep=',')


def map_groups(path):
    """
    Given a full path to a repository, create all nested groups that this
    repo is inside. This function creates parent-child relationships between
    groups and creates default perms for all new groups.

    :param paths: full path to repository
    """
    sa = meta.Session()
    groups = path.split(Repository.url_sep())
    parent = None
    group = None

    # last element is repo in nested groups structure
    groups = groups[:-1]
    rgm = ReposGroupModel(sa)
    owner = User.get_first_admin()
    for lvl, group_name in enumerate(groups):
        group_name = '/'.join(groups[:lvl] + [group_name])
        group = RepoGroup.get_by_group_name(group_name)
        desc = '%s group' % group_name

        # skip folders that are now removed repos
        if REMOVED_REPO_PAT.match(group_name):
            break

        if group is None:
            log.debug('creating group level: %s group_name: %s'
                      % (lvl, group_name))
            group = RepoGroup(group_name, parent)
            group.group_description = desc
            group.user = owner
            sa.add(group)
            perm_obj = rgm._create_default_perms(group)
            sa.add(perm_obj)
            sa.flush()

        parent = group
    return group


def repo2db_mapper(initial_repo_list, remove_obsolete=False,
                   install_git_hook=False):
    """
    maps all repos given in initial_repo_list, non existing repositories
    are created, if remove_obsolete is True it also check for db entries
    that are not in initial_repo_list and removes them.

    :param initial_repo_list: list of repositories found by scanning methods
    :param remove_obsolete: check for obsolete entries in database
    :param install_git_hook: if this is True, also check and install githook
        for a repo if missing
    """
    from rhodecode.model.repo import RepoModel
    from rhodecode.model.scm import ScmModel
    sa = meta.Session()
    rm = RepoModel()
    user = User.get_first_admin()
    added = []

    ##creation defaults
    defs = RhodeCodeSetting.get_default_repo_settings(strip_prefix=True)
    enable_statistics = defs.get('repo_enable_statistics')
    enable_locking = defs.get('repo_enable_locking')
    enable_downloads = defs.get('repo_enable_downloads')
    private = defs.get('repo_private')

    for name, repo in initial_repo_list.items():
        group = map_groups(name)
        db_repo = rm.get_by_repo_name(name)
        # found repo that is on filesystem not in RhodeCode database
        if not db_repo:
            log.info('repository %s not found, creating now' % name)
            added.append(name)
            desc = (repo.description
                    if repo.description != 'unknown'
                    else '%s repository' % name)

            new_repo = rm.create_repo(
                repo_name=name,
                repo_type=repo.alias,
                description=desc,
                repos_group=getattr(group, 'group_id', None),
                owner=user,
                just_db=True,
                enable_locking=enable_locking,
                enable_downloads=enable_downloads,
                enable_statistics=enable_statistics,
                private=private
            )
            # we added that repo just now, and make sure it has githook
            # installed
            if new_repo.repo_type == 'git':
                ScmModel().install_git_hook(new_repo.scm_instance)
            new_repo.update_changeset_cache()
        elif install_git_hook:
            if db_repo.repo_type == 'git':
                ScmModel().install_git_hook(db_repo.scm_instance)

    sa.commit()
    removed = []
    if remove_obsolete:
        # remove from database those repositories that are not in the filesystem
        for repo in sa.query(Repository).all():
            if repo.repo_name not in initial_repo_list.keys():
                log.debug("Removing non-existing repository found in db `%s`" %
                          repo.repo_name)
                try:
                    removed.append(repo.repo_name)
                    RepoModel(sa).delete(repo, forks='detach', fs_remove=False)
                    sa.commit()
                except Exception:
                    #don't hold further removals on error
                    log.error(traceback.format_exc())
                    sa.rollback()
    return added, removed


# set cache regions for beaker so celery can utilise it
def add_cache(settings):
    cache_settings = {'regions': None}
    for key in settings.keys():
        for prefix in ['beaker.cache.', 'cache.']:
            if key.startswith(prefix):
                name = key.split(prefix)[1].strip()
                cache_settings[name] = settings[key].strip()
    if cache_settings['regions']:
        for region in cache_settings['regions'].split(','):
            region = region.strip()
            region_settings = {}
            for key, value in cache_settings.items():
                if key.startswith(region):
                    region_settings[key.split('.')[1]] = value
            region_settings['expire'] = int(region_settings.get('expire',
                                                                60))
            region_settings.setdefault('lock_dir',
                                       cache_settings.get('lock_dir'))
            region_settings.setdefault('data_dir',
                                       cache_settings.get('data_dir'))

            if 'type' not in region_settings:
                region_settings['type'] = cache_settings.get('type',
                                                             'memory')
            beaker.cache.cache_regions[region] = region_settings


def load_rcextensions(root_path):
    import rhodecode
    from rhodecode.config import conf

    path = os.path.join(root_path, 'rcextensions', '__init__.py')
    if os.path.isfile(path):
        rcext = create_module('rc', path)
        EXT = rhodecode.EXTENSIONS = rcext
        log.debug('Found rcextensions now loading %s...' % rcext)

        # Additional mappings that are not present in the pygments lexers
        conf.LANGUAGES_EXTENSIONS_MAP.update(getattr(EXT, 'EXTRA_MAPPINGS', {}))

        #OVERRIDE OUR EXTENSIONS FROM RC-EXTENSIONS (if present)

        if getattr(EXT, 'INDEX_EXTENSIONS', []):
            log.debug('settings custom INDEX_EXTENSIONS')
            conf.INDEX_EXTENSIONS = getattr(EXT, 'INDEX_EXTENSIONS', [])

        #ADDITIONAL MAPPINGS
        log.debug('adding extra into INDEX_EXTENSIONS')
        conf.INDEX_EXTENSIONS.extend(getattr(EXT, 'EXTRA_INDEX_EXTENSIONS', []))

        # auto check if the module is not missing any data, set to default if is
        # this will help autoupdate new feature of rcext module
        from rhodecode.config import rcextensions
        for k in dir(rcextensions):
            if not k.startswith('_') and not hasattr(EXT, k):
                setattr(EXT, k, getattr(rcextensions, k))


def get_custom_lexer(extension):
    """
    returns a custom lexer if it's defined in rcextensions module, or None
    if there's no custom lexer defined
    """
    import rhodecode
    from pygments import lexers
    #check if we didn't define this extension as other lexer
    if rhodecode.EXTENSIONS and extension in rhodecode.EXTENSIONS.EXTRA_LEXERS:
        _lexer_name = rhodecode.EXTENSIONS.EXTRA_LEXERS[extension]
        return lexers.get_lexer_by_name(_lexer_name)


#==============================================================================
# TEST FUNCTIONS AND CREATORS
#==============================================================================
def create_test_index(repo_location, config, full_index):
    """
    Makes default test index

    :param config: test config
    :param full_index:
    """

    from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon
    from rhodecode.lib.pidlock import DaemonLock, LockHeld

    repo_location = repo_location

    index_location = os.path.join(config['app_conf']['index_dir'])
    if not os.path.exists(index_location):
        os.makedirs(index_location)

    try:
        l = DaemonLock(file_=jn(dn(index_location), 'make_index.lock'))
        WhooshIndexingDaemon(index_location=index_location,
                             repo_location=repo_location)\
            .run(full_index=full_index)
        l.release()
    except LockHeld:
        pass


def create_test_env(repos_test_path, config):
    """
    Makes a fresh database and
    install test repository into tmp dir
    """
    from rhodecode.lib.db_manage import DbManage
    from rhodecode.tests import HG_REPO, GIT_REPO, TESTS_TMP_PATH

    # PART ONE create db
    dbconf = config['sqlalchemy.db1.url']
    log.debug('making test db %s' % dbconf)

    # create test dir if it doesn't exist
    if not os.path.isdir(repos_test_path):
        log.debug('Creating testdir %s' % repos_test_path)
        os.makedirs(repos_test_path)

    dbmanage = DbManage(log_sql=True, dbconf=dbconf, root=config['here'],
                        tests=True)
    dbmanage.create_tables(override=True)
    dbmanage.create_settings(dbmanage.config_prompt(repos_test_path))
    dbmanage.create_default_user()
    dbmanage.admin_prompt()
    dbmanage.create_permissions()
    dbmanage.populate_default_permissions()
    Session().commit()
    # PART TWO make test repo
    log.debug('making test vcs repositories')

    idx_path = config['app_conf']['index_dir']
    data_path = config['app_conf']['cache_dir']

    #clean index and data
    if idx_path and os.path.exists(idx_path):
        log.debug('remove %s' % idx_path)
        shutil.rmtree(idx_path)

    if data_path and os.path.exists(data_path):
        log.debug('remove %s' % data_path)
        shutil.rmtree(data_path)

    #CREATE DEFAULT TEST REPOS
    cur_dir = dn(dn(abspath(__file__)))
    tar = tarfile.open(jn(cur_dir, 'tests', 'fixtures', "vcs_test_hg.tar.gz"))
    tar.extractall(jn(TESTS_TMP_PATH, HG_REPO))
    tar.close()

    cur_dir = dn(dn(abspath(__file__)))
    tar = tarfile.open(jn(cur_dir, 'tests', 'fixtures', "vcs_test_git.tar.gz"))
    tar.extractall(jn(TESTS_TMP_PATH, GIT_REPO))
    tar.close()

    #LOAD VCS test stuff
    from rhodecode.tests.vcs import setup_package
    setup_package()


#==============================================================================
# PASTER COMMANDS
#==============================================================================
class BasePasterCommand(Command):
    """
    Abstract Base Class for paster commands.

    The celery commands are somewhat aggressive about loading
    celery.conf, and since our module sets the `CELERY_LOADER`
    environment variable to our loader, we have to bootstrap a bit and
    make sure we've had a chance to load the pylons config off of the
    command line, otherwise everything fails.
    """
    min_args = 1
    min_args_error = "Please provide a paster config file as an argument."
    takes_config_file = 1
    requires_config_file = True

    def notify_msg(self, msg, log=False):
        """Make a notification to user, additionally if logger is passed
        it logs this action using given logger

        :param msg: message that will be printed to user
        :param log: logging instance, to use to additionally log this message

        """
        if log and isinstance(log, logging):
            log(msg)

    def run(self, args):
        """
        Overrides Command.run

        Checks for a config file argument and loads it.
        """
        if len(args) < self.min_args:
            raise BadCommand(
                self.min_args_error % {'min_args': self.min_args,
                                       'actual_args': len(args)})

        # Decrement because we're going to lob off the first argument.
        # @@ This is hacky
        self.min_args -= 1
        self.bootstrap_config(args[0])
        self.update_parser()
        return super(BasePasterCommand, self).run(args[1:])

    def update_parser(self):
        """
        Abstract method. Allows for the class's parser to be updated
        before the superclass's `run` method is called. Necessary to
        allow options/arguments to be passed through to the underlying
        celery command.
        """
        raise NotImplementedError("Abstract Method.")

    def bootstrap_config(self, conf):
        """
        Loads the pylons configuration.
        """
        from pylons import config as pylonsconfig

        self.path_to_ini_file = os.path.realpath(conf)
        conf = paste.deploy.appconfig('config:' + self.path_to_ini_file)
        pylonsconfig.init_app(conf.global_conf, conf.local_conf)

    def _init_session(self):
        """
        Inits SqlAlchemy Session
        """
        logging.config.fileConfig(self.path_to_ini_file)
        from pylons import config
        from rhodecode.model import init_model
        from rhodecode.lib.utils2 import engine_from_config

        #get to remove repos !!
        add_cache(config)
        engine = engine_from_config(config, 'sqlalchemy.db1.')
        init_model(engine)


def check_git_version():
    """
    Checks what version of git is installed in system, and issues a warning
    if it's too old for RhodeCode to properly work.
    """
    from rhodecode import BACKENDS
    from rhodecode.lib.vcs.backends.git.repository import GitRepository
    from rhodecode.lib.vcs.conf import settings
    from distutils.version import StrictVersion

    stdout, stderr = GitRepository._run_git_command('--version', _bare=True,
                                                    _safe=True)

    ver = (stdout.split(' ')[-1] or '').strip() or '0.0.0'
    if len(ver.split('.')) > 3:
        #StrictVersion needs to be only 3 element type
        ver = '.'.join(ver.split('.')[:3])
    try:
        _ver = StrictVersion(ver)
    except Exception:
        _ver = StrictVersion('0.0.0')
        stderr = traceback.format_exc()

    req_ver = '1.7.4'
    to_old_git = False
    if _ver < StrictVersion(req_ver):
        to_old_git = True

    if 'git' in BACKENDS:
        log.debug('GIT executable: "%s" version detected: %s'
                  % (settings.GIT_EXECUTABLE_PATH, stdout))
        if stderr:
            log.warning('Unable to detect git version, org error was: %r' % stderr)
        elif to_old_git:
            log.warning('RhodeCode detected git version %s, which is too old '
                        'for the system to function properly. Make sure '
                        'its version is at least %s' % (ver, req_ver))
    return _ver


@decorator.decorator
def jsonify(func, *args, **kwargs):
    """Action decorator that formats output for JSON

    Given a function that will return content, this decorator will turn
    the result into JSON, with a content-type of 'application/json' and
    output it.

    """
    from pylons.decorators.util import get_pylons
    from rhodecode.lib.compat import json
    pylons = get_pylons(args)
    pylons.response.headers['Content-Type'] = 'application/json; charset=utf-8'
    data = func(*args, **kwargs)
    if isinstance(data, (list, tuple)):
        msg = "JSON responses with Array envelopes are susceptible to " \
              "cross-site data leak attacks, see " \
              "http://wiki.pylonshq.com/display/pylonsfaq/Warnings"
        warnings.warn(msg, Warning, 2)
        log.warning(msg)
    log.debug("Returning JSON wrapped action output")
    return json.dumps(data, encoding='utf-8')