action logger will try to get IP address automatically based...
marcink
r4001:5ee34120 default
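
The change is small: when action_logger is called without an explicit ipaddr, it now falls back to the IP address attached to the request-bound user (pylons tmpl_context.rhodecode_user.ip_addr), as the added lines in the diff below show. What follows is a minimal, self-contained sketch of that fallback only; _Ctx, _User, _resolve_ip and the sample addresses are illustrative stand-ins, not RhodeCode code.

class _Ctx(object):
    # stand-in for pylons.tmpl_context, a bare namespace object
    pass

class _User(object):
    def __init__(self, ip_addr):
        self.ip_addr = ip_addr

def _resolve_ip(ipaddr='', ctx=None):
    # mirror the added lines: keep an explicitly passed address, otherwise
    # fall back to the user object registered on the template context
    if not ipaddr and ctx is not None and hasattr(ctx, 'rhodecode_user'):
        ipaddr = ctx.rhodecode_user.ip_addr
    return ipaddr

ctx = _Ctx()
ctx.rhodecode_user = _User('10.0.0.5')
assert _resolve_ip('', ctx) == '10.0.0.5'            # no explicit address: fallback used
assert _resolve_ip('192.0.2.1', ctx) == '192.0.2.1'  # explicit address still wins
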
@@ -1,817 +1,822 @@
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2 """
2 """
3 rhodecode.lib.utils
3 rhodecode.lib.utils
4 ~~~~~~~~~~~~~~~~~~~
4 ~~~~~~~~~~~~~~~~~~~
5
5
6 Utilities library for RhodeCode
6 Utilities library for RhodeCode
7
7
8 :created_on: Apr 18, 2010
8 :created_on: Apr 18, 2010
9 :author: marcink
9 :author: marcink
10 :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
10 :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
11 :license: GPLv3, see COPYING for more details.
11 :license: GPLv3, see COPYING for more details.
12 """
12 """
13 # This program is free software: you can redistribute it and/or modify
13 # This program is free software: you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation, either version 3 of the License, or
15 # the Free Software Foundation, either version 3 of the License, or
16 # (at your option) any later version.
16 # (at your option) any later version.
17 #
17 #
18 # This program is distributed in the hope that it will be useful,
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
21 # GNU General Public License for more details.
22 #
22 #
23 # You should have received a copy of the GNU General Public License
23 # You should have received a copy of the GNU General Public License
24 # along with this program. If not, see <http://www.gnu.org/licenses/>.
24 # along with this program. If not, see <http://www.gnu.org/licenses/>.
25
25
26 import os
26 import os
27 import re
27 import re
28 import logging
28 import logging
29 import datetime
29 import datetime
30 import traceback
30 import traceback
31 import paste
31 import paste
32 import beaker
32 import beaker
33 import tarfile
33 import tarfile
34 import shutil
34 import shutil
35 import decorator
35 import decorator
36 import warnings
36 import warnings
37 from os.path import abspath
37 from os.path import abspath
38 from os.path import dirname as dn, join as jn
38 from os.path import dirname as dn, join as jn
39
39
40 from paste.script.command import Command, BadCommand
40 from paste.script.command import Command, BadCommand
41
41
42 from webhelpers.text import collapse, remove_formatting, strip_tags
42 from webhelpers.text import collapse, remove_formatting, strip_tags
43
43
44 from rhodecode.lib.vcs import get_backend
44 from rhodecode.lib.vcs import get_backend
45 from rhodecode.lib.vcs.backends.base import BaseChangeset
45 from rhodecode.lib.vcs.backends.base import BaseChangeset
46 from rhodecode.lib.vcs.utils.lazy import LazyProperty
46 from rhodecode.lib.vcs.utils.lazy import LazyProperty
47 from rhodecode.lib.vcs.utils.hgcompat import ui, config
47 from rhodecode.lib.vcs.utils.hgcompat import ui, config
48 from rhodecode.lib.vcs.utils.helpers import get_scm
48 from rhodecode.lib.vcs.utils.helpers import get_scm
49 from rhodecode.lib.vcs.exceptions import VCSError
49 from rhodecode.lib.vcs.exceptions import VCSError
50
50
51 from rhodecode.lib.caching_query import FromCache
51 from rhodecode.lib.caching_query import FromCache
52
52
53 from rhodecode.model import meta
53 from rhodecode.model import meta
54 from rhodecode.model.db import Repository, User, RhodeCodeUi, \
54 from rhodecode.model.db import Repository, User, RhodeCodeUi, \
55 UserLog, RepoGroup, RhodeCodeSetting, CacheInvalidation, UserGroup
55 UserLog, RepoGroup, RhodeCodeSetting, CacheInvalidation, UserGroup
56 from rhodecode.model.meta import Session
56 from rhodecode.model.meta import Session
57 from rhodecode.model.repos_group import ReposGroupModel
57 from rhodecode.model.repos_group import ReposGroupModel
58 from rhodecode.lib.utils2 import safe_str, safe_unicode
58 from rhodecode.lib.utils2 import safe_str, safe_unicode
59 from rhodecode.lib.vcs.utils.fakemod import create_module
59 from rhodecode.lib.vcs.utils.fakemod import create_module
60 from rhodecode.model.users_group import UserGroupModel
60 from rhodecode.model.users_group import UserGroupModel
61
61
62 log = logging.getLogger(__name__)
62 log = logging.getLogger(__name__)
63
63
64 REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')
64 REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')
65
65
66
66
67 def recursive_replace(str_, replace=' '):
67 def recursive_replace(str_, replace=' '):
68 """
68 """
69 Recursive replace of given sign to just one instance
69 Recursive replace of given sign to just one instance
70
70
71 :param str_: given string
71 :param str_: given string
72 :param replace: char to find and replace multiple instances
72 :param replace: char to find and replace multiple instances
73
73
74 Examples::
74 Examples::
75 >>> recursive_replace("Mighty---Mighty-Bo--sstones",'-')
75 >>> recursive_replace("Mighty---Mighty-Bo--sstones",'-')
76 'Mighty-Mighty-Bo-sstones'
76 'Mighty-Mighty-Bo-sstones'
77 """
77 """
78
78
79 if str_.find(replace * 2) == -1:
79 if str_.find(replace * 2) == -1:
80 return str_
80 return str_
81 else:
81 else:
82 str_ = str_.replace(replace * 2, replace)
82 str_ = str_.replace(replace * 2, replace)
83 return recursive_replace(str_, replace)
83 return recursive_replace(str_, replace)
84
84
85
85
86 def repo_name_slug(value):
86 def repo_name_slug(value):
87 """
87 """
88 Return slug of name of repository
88 Return slug of name of repository
89 This function is called on each creation/modification
89 This function is called on each creation/modification
90 of repository to prevent bad names in repo
90 of repository to prevent bad names in repo
91 """
91 """
92
92
93 slug = remove_formatting(value)
93 slug = remove_formatting(value)
94 slug = strip_tags(slug)
94 slug = strip_tags(slug)
95
95
96 for c in """`?=[]\;'"<>,/~!@#$%^&*()+{}|: """:
96 for c in """`?=[]\;'"<>,/~!@#$%^&*()+{}|: """:
97 slug = slug.replace(c, '-')
97 slug = slug.replace(c, '-')
98 slug = recursive_replace(slug, '-')
98 slug = recursive_replace(slug, '-')
99 slug = collapse(slug, '-')
99 slug = collapse(slug, '-')
100 return slug
100 return slug
101
101
102
102
103 #==============================================================================
103 #==============================================================================
104 # PERM DECORATOR HELPERS FOR EXTRACTING NAMES FOR PERM CHECKS
104 # PERM DECORATOR HELPERS FOR EXTRACTING NAMES FOR PERM CHECKS
105 #==============================================================================
105 #==============================================================================
106 def get_repo_slug(request):
106 def get_repo_slug(request):
107 _repo = request.environ['pylons.routes_dict'].get('repo_name')
107 _repo = request.environ['pylons.routes_dict'].get('repo_name')
108 if _repo:
108 if _repo:
109 _repo = _repo.rstrip('/')
109 _repo = _repo.rstrip('/')
110 return _repo
110 return _repo
111
111
112
112
113 def get_repos_group_slug(request):
113 def get_repos_group_slug(request):
114 _group = request.environ['pylons.routes_dict'].get('group_name')
114 _group = request.environ['pylons.routes_dict'].get('group_name')
115 if _group:
115 if _group:
116 _group = _group.rstrip('/')
116 _group = _group.rstrip('/')
117 return _group
117 return _group
118
118
119
119
120 def get_user_group_slug(request):
120 def get_user_group_slug(request):
121 _group = request.environ['pylons.routes_dict'].get('id')
121 _group = request.environ['pylons.routes_dict'].get('id')
122 try:
122 try:
123 _group = UserGroup.get(_group)
123 _group = UserGroup.get(_group)
124 if _group:
124 if _group:
125 _group = _group.users_group_name
125 _group = _group.users_group_name
126 except Exception:
126 except Exception:
127 log.debug(traceback.format_exc())
127 log.debug(traceback.format_exc())
128 #catch all failures here
128 #catch all failures here
129 pass
129 pass
130
130
131 return _group
131 return _group
132
132
133
133
134 def action_logger(user, action, repo, ipaddr='', sa=None, commit=False):
134 def action_logger(user, action, repo, ipaddr='', sa=None, commit=False):
135 """
135 """
136 Action logger for various actions made by users
136 Action logger for various actions made by users
137
137
138 :param user: user that made this action, can be a unique username string or
138 :param user: user that made this action, can be a unique username string or
139 object containing user_id attribute
139 object containing user_id attribute
140 :param action: action to log, should be on of predefined unique actions for
140 :param action: action to log, should be on of predefined unique actions for
141 easy translations
141 easy translations
142 :param repo: string name of repository or object containing repo_id,
142 :param repo: string name of repository or object containing repo_id,
143 that action was made on
143 that action was made on
144 :param ipaddr: optional ip address from what the action was made
144 :param ipaddr: optional ip address from what the action was made
145 :param sa: optional sqlalchemy session
145 :param sa: optional sqlalchemy session
146
146
147 """
147 """
148
148
149 if not sa:
149 if not sa:
150 sa = meta.Session()
150 sa = meta.Session()
151 # if we don't get explicit IP address try to get one from registered user
152 # in tmpl context var
153 from pylons import tmpl_context
154 if not ipaddr and hasattr(tmpl_context, 'rhodecode_user'):
155 ipaddr = tmpl_context.rhodecode_user.ip_addr
151
156
152 try:
157 try:
153 if hasattr(user, 'user_id'):
158 if hasattr(user, 'user_id'):
154 user_obj = User.get(user.user_id)
159 user_obj = User.get(user.user_id)
155 elif isinstance(user, basestring):
160 elif isinstance(user, basestring):
156 user_obj = User.get_by_username(user)
161 user_obj = User.get_by_username(user)
157 else:
162 else:
158 raise Exception('You have to provide a user object or a username')
163 raise Exception('You have to provide a user object or a username')
159
164
160 if hasattr(repo, 'repo_id'):
165 if hasattr(repo, 'repo_id'):
161 repo_obj = Repository.get(repo.repo_id)
166 repo_obj = Repository.get(repo.repo_id)
162 repo_name = repo_obj.repo_name
167 repo_name = repo_obj.repo_name
163 elif isinstance(repo, basestring):
168 elif isinstance(repo, basestring):
164 repo_name = repo.lstrip('/')
169 repo_name = repo.lstrip('/')
165 repo_obj = Repository.get_by_repo_name(repo_name)
170 repo_obj = Repository.get_by_repo_name(repo_name)
166 else:
171 else:
167 repo_obj = None
172 repo_obj = None
168 repo_name = ''
173 repo_name = ''
169
174
170 user_log = UserLog()
175 user_log = UserLog()
171 user_log.user_id = user_obj.user_id
176 user_log.user_id = user_obj.user_id
172 user_log.username = user_obj.username
177 user_log.username = user_obj.username
173 user_log.action = safe_unicode(action)
178 user_log.action = safe_unicode(action)
174
179
175 user_log.repository = repo_obj
180 user_log.repository = repo_obj
176 user_log.repository_name = repo_name
181 user_log.repository_name = repo_name
177
182
178 user_log.action_date = datetime.datetime.now()
183 user_log.action_date = datetime.datetime.now()
179 user_log.user_ip = ipaddr
184 user_log.user_ip = ipaddr
180 sa.add(user_log)
185 sa.add(user_log)
181
186
182 log.info('Logging action:%s on %s by user:%s ip:%s' %
187 log.info('Logging action:%s on %s by user:%s ip:%s' %
183 (action, safe_unicode(repo), user_obj, ipaddr))
188 (action, safe_unicode(repo), user_obj, ipaddr))
184 if commit:
189 if commit:
185 sa.commit()
190 sa.commit()
186 except Exception:
191 except Exception:
187 log.error(traceback.format_exc())
192 log.error(traceback.format_exc())
188 raise
193 raise
189
194
190
195
191 def get_filesystem_repos(path, recursive=False, skip_removed_repos=True):
196 def get_filesystem_repos(path, recursive=False, skip_removed_repos=True):
192 """
197 """
193 Scans given path for repos and return (name,(type,path)) tuple
198 Scans given path for repos and return (name,(type,path)) tuple
194
199
195 :param path: path to scan for repositories
200 :param path: path to scan for repositories
196 :param recursive: recursive search and return names with subdirs in front
201 :param recursive: recursive search and return names with subdirs in front
197 """
202 """
198
203
199 # remove ending slash for better results
204 # remove ending slash for better results
200 path = path.rstrip(os.sep)
205 path = path.rstrip(os.sep)
201 log.debug('now scanning in %s location recursive:%s...' % (path, recursive))
206 log.debug('now scanning in %s location recursive:%s...' % (path, recursive))
202
207
203 def _get_repos(p):
208 def _get_repos(p):
204 if not os.access(p, os.R_OK) or not os.access(p, os.X_OK):
209 if not os.access(p, os.R_OK) or not os.access(p, os.X_OK):
205 log.warn('ignoring repo path without access: %s' % (p,))
210 log.warn('ignoring repo path without access: %s' % (p,))
206 return
211 return
207 if not os.access(p, os.W_OK):
212 if not os.access(p, os.W_OK):
208 log.warn('repo path without write access: %s' % (p,))
213 log.warn('repo path without write access: %s' % (p,))
209 for dirpath in os.listdir(p):
214 for dirpath in os.listdir(p):
210 if os.path.isfile(os.path.join(p, dirpath)):
215 if os.path.isfile(os.path.join(p, dirpath)):
211 continue
216 continue
212 cur_path = os.path.join(p, dirpath)
217 cur_path = os.path.join(p, dirpath)
213
218
214 # skip removed repos
219 # skip removed repos
215 if skip_removed_repos and REMOVED_REPO_PAT.match(dirpath):
220 if skip_removed_repos and REMOVED_REPO_PAT.match(dirpath):
216 continue
221 continue
217
222
218 #skip .<somethin> dirs
223 #skip .<somethin> dirs
219 if dirpath.startswith('.'):
224 if dirpath.startswith('.'):
220 continue
225 continue
221
226
222 try:
227 try:
223 scm_info = get_scm(cur_path)
228 scm_info = get_scm(cur_path)
224 yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
229 yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
225 except VCSError:
230 except VCSError:
226 if not recursive:
231 if not recursive:
227 continue
232 continue
228 #check if this dir containts other repos for recursive scan
233 #check if this dir containts other repos for recursive scan
229 rec_path = os.path.join(p, dirpath)
234 rec_path = os.path.join(p, dirpath)
230 if os.path.isdir(rec_path):
235 if os.path.isdir(rec_path):
231 for inner_scm in _get_repos(rec_path):
236 for inner_scm in _get_repos(rec_path):
232 yield inner_scm
237 yield inner_scm
233
238
234 return _get_repos(path)
239 return _get_repos(path)
235
240
236
241
237 def is_valid_repo(repo_name, base_path, scm=None):
242 def is_valid_repo(repo_name, base_path, scm=None):
238 """
243 """
239 Returns True if given path is a valid repository False otherwise.
244 Returns True if given path is a valid repository False otherwise.
240 If scm param is given also compare if given scm is the same as expected
245 If scm param is given also compare if given scm is the same as expected
241 from scm parameter
246 from scm parameter
242
247
243 :param repo_name:
248 :param repo_name:
244 :param base_path:
249 :param base_path:
245 :param scm:
250 :param scm:
246
251
247 :return True: if given path is a valid repository
252 :return True: if given path is a valid repository
248 """
253 """
249 full_path = os.path.join(safe_str(base_path), safe_str(repo_name))
254 full_path = os.path.join(safe_str(base_path), safe_str(repo_name))
250
255
251 try:
256 try:
252 scm_ = get_scm(full_path)
257 scm_ = get_scm(full_path)
253 if scm:
258 if scm:
254 return scm_[0] == scm
259 return scm_[0] == scm
255 return True
260 return True
256 except VCSError:
261 except VCSError:
257 return False
262 return False
258
263
259
264
260 def is_valid_repos_group(repos_group_name, base_path, skip_path_check=False):
265 def is_valid_repos_group(repos_group_name, base_path, skip_path_check=False):
261 """
266 """
262 Returns True if given path is a repository group False otherwise
267 Returns True if given path is a repository group False otherwise
263
268
264 :param repo_name:
269 :param repo_name:
265 :param base_path:
270 :param base_path:
266 """
271 """
267 full_path = os.path.join(safe_str(base_path), safe_str(repos_group_name))
272 full_path = os.path.join(safe_str(base_path), safe_str(repos_group_name))
268
273
269 # check if it's not a repo
274 # check if it's not a repo
270 if is_valid_repo(repos_group_name, base_path):
275 if is_valid_repo(repos_group_name, base_path):
271 return False
276 return False
272
277
273 try:
278 try:
274 # we need to check bare git repos at higher level
279 # we need to check bare git repos at higher level
275 # since we might match branches/hooks/info/objects or possible
280 # since we might match branches/hooks/info/objects or possible
276 # other things inside bare git repo
281 # other things inside bare git repo
277 get_scm(os.path.dirname(full_path))
282 get_scm(os.path.dirname(full_path))
278 return False
283 return False
279 except VCSError:
284 except VCSError:
280 pass
285 pass
281
286
282 # check if it's a valid path
287 # check if it's a valid path
283 if skip_path_check or os.path.isdir(full_path):
288 if skip_path_check or os.path.isdir(full_path):
284 return True
289 return True
285
290
286 return False
291 return False
287
292
288
293
289 def ask_ok(prompt, retries=4, complaint='Yes or no please!'):
294 def ask_ok(prompt, retries=4, complaint='Yes or no please!'):
290 while True:
295 while True:
291 ok = raw_input(prompt)
296 ok = raw_input(prompt)
292 if ok in ('y', 'ye', 'yes'):
297 if ok in ('y', 'ye', 'yes'):
293 return True
298 return True
294 if ok in ('n', 'no', 'nop', 'nope'):
299 if ok in ('n', 'no', 'nop', 'nope'):
295 return False
300 return False
296 retries = retries - 1
301 retries = retries - 1
297 if retries < 0:
302 if retries < 0:
298 raise IOError
303 raise IOError
299 print complaint
304 print complaint
300
305
301 #propagated from mercurial documentation
306 #propagated from mercurial documentation
302 ui_sections = ['alias', 'auth',
307 ui_sections = ['alias', 'auth',
303 'decode/encode', 'defaults',
308 'decode/encode', 'defaults',
304 'diff', 'email',
309 'diff', 'email',
305 'extensions', 'format',
310 'extensions', 'format',
306 'merge-patterns', 'merge-tools',
311 'merge-patterns', 'merge-tools',
307 'hooks', 'http_proxy',
312 'hooks', 'http_proxy',
308 'smtp', 'patch',
313 'smtp', 'patch',
309 'paths', 'profiling',
314 'paths', 'profiling',
310 'server', 'trusted',
315 'server', 'trusted',
311 'ui', 'web', ]
316 'ui', 'web', ]
312
317
313
318
314 def make_ui(read_from='file', path=None, checkpaths=True, clear_session=True):
319 def make_ui(read_from='file', path=None, checkpaths=True, clear_session=True):
315 """
320 """
316 A function that will read python rc files or database
321 A function that will read python rc files or database
317 and make an mercurial ui object from read options
322 and make an mercurial ui object from read options
318
323
319 :param path: path to mercurial config file
324 :param path: path to mercurial config file
320 :param checkpaths: check the path
325 :param checkpaths: check the path
321 :param read_from: read from 'file' or 'db'
326 :param read_from: read from 'file' or 'db'
322 """
327 """
323
328
324 baseui = ui.ui()
329 baseui = ui.ui()
325
330
326 # clean the baseui object
331 # clean the baseui object
327 baseui._ocfg = config.config()
332 baseui._ocfg = config.config()
328 baseui._ucfg = config.config()
333 baseui._ucfg = config.config()
329 baseui._tcfg = config.config()
334 baseui._tcfg = config.config()
330
335
331 if read_from == 'file':
336 if read_from == 'file':
332 if not os.path.isfile(path):
337 if not os.path.isfile(path):
333 log.debug('hgrc file is not present at %s, skipping...' % path)
338 log.debug('hgrc file is not present at %s, skipping...' % path)
334 return False
339 return False
335 log.debug('reading hgrc from %s' % path)
340 log.debug('reading hgrc from %s' % path)
336 cfg = config.config()
341 cfg = config.config()
337 cfg.read(path)
342 cfg.read(path)
338 for section in ui_sections:
343 for section in ui_sections:
339 for k, v in cfg.items(section):
344 for k, v in cfg.items(section):
340 log.debug('settings ui from file: [%s] %s=%s' % (section, k, v))
345 log.debug('settings ui from file: [%s] %s=%s' % (section, k, v))
341 baseui.setconfig(safe_str(section), safe_str(k), safe_str(v))
346 baseui.setconfig(safe_str(section), safe_str(k), safe_str(v))
342
347
343 elif read_from == 'db':
348 elif read_from == 'db':
344 sa = meta.Session()
349 sa = meta.Session()
345 ret = sa.query(RhodeCodeUi)\
350 ret = sa.query(RhodeCodeUi)\
346 .options(FromCache("sql_cache_short", "get_hg_ui_settings"))\
351 .options(FromCache("sql_cache_short", "get_hg_ui_settings"))\
347 .all()
352 .all()
348
353
349 hg_ui = ret
354 hg_ui = ret
350 for ui_ in hg_ui:
355 for ui_ in hg_ui:
351 if ui_.ui_active:
356 if ui_.ui_active:
352 log.debug('settings ui from db: [%s] %s=%s', ui_.ui_section,
357 log.debug('settings ui from db: [%s] %s=%s', ui_.ui_section,
353 ui_.ui_key, ui_.ui_value)
358 ui_.ui_key, ui_.ui_value)
354 baseui.setconfig(safe_str(ui_.ui_section), safe_str(ui_.ui_key),
359 baseui.setconfig(safe_str(ui_.ui_section), safe_str(ui_.ui_key),
355 safe_str(ui_.ui_value))
360 safe_str(ui_.ui_value))
356 if ui_.ui_key == 'push_ssl':
361 if ui_.ui_key == 'push_ssl':
357 # force set push_ssl requirement to False, rhodecode
362 # force set push_ssl requirement to False, rhodecode
358 # handles that
363 # handles that
359 baseui.setconfig(safe_str(ui_.ui_section), safe_str(ui_.ui_key),
364 baseui.setconfig(safe_str(ui_.ui_section), safe_str(ui_.ui_key),
360 False)
365 False)
361 if clear_session:
366 if clear_session:
362 meta.Session.remove()
367 meta.Session.remove()
363 return baseui
368 return baseui
364
369
365
370
366 def set_rhodecode_config(config):
371 def set_rhodecode_config(config):
367 """
372 """
368 Updates pylons config with new settings from database
373 Updates pylons config with new settings from database
369
374
370 :param config:
375 :param config:
371 """
376 """
372 hgsettings = RhodeCodeSetting.get_app_settings()
377 hgsettings = RhodeCodeSetting.get_app_settings()
373
378
374 for k, v in hgsettings.items():
379 for k, v in hgsettings.items():
375 config[k] = v
380 config[k] = v
376
381
377
382
378 def set_vcs_config(config):
383 def set_vcs_config(config):
379 """
384 """
380 Patch VCS config with some RhodeCode specific stuff
385 Patch VCS config with some RhodeCode specific stuff
381
386
382 :param config: rhodecode.CONFIG
387 :param config: rhodecode.CONFIG
383 """
388 """
384 import rhodecode
389 import rhodecode
385 from rhodecode.lib.vcs import conf
390 from rhodecode.lib.vcs import conf
386 from rhodecode.lib.utils2 import aslist
391 from rhodecode.lib.utils2 import aslist
387 conf.settings.BACKENDS = {
392 conf.settings.BACKENDS = {
388 'hg': 'rhodecode.lib.vcs.backends.hg.MercurialRepository',
393 'hg': 'rhodecode.lib.vcs.backends.hg.MercurialRepository',
389 'git': 'rhodecode.lib.vcs.backends.git.GitRepository',
394 'git': 'rhodecode.lib.vcs.backends.git.GitRepository',
390 }
395 }
391
396
392 conf.settings.GIT_EXECUTABLE_PATH = config.get('git_path', 'git')
397 conf.settings.GIT_EXECUTABLE_PATH = config.get('git_path', 'git')
393 conf.settings.GIT_REV_FILTER = config.get('git_rev_filter', '--all').strip()
398 conf.settings.GIT_REV_FILTER = config.get('git_rev_filter', '--all').strip()
394 conf.settings.DEFAULT_ENCODINGS = aslist(config.get('default_encoding',
399 conf.settings.DEFAULT_ENCODINGS = aslist(config.get('default_encoding',
395 'utf8'), sep=',')
400 'utf8'), sep=',')
396
401
397
402
398 def map_groups(path):
403 def map_groups(path):
399 """
404 """
400 Given a full path to a repository, create all nested groups that this
405 Given a full path to a repository, create all nested groups that this
401 repo is inside. This function creates parent-child relationships between
406 repo is inside. This function creates parent-child relationships between
402 groups and creates default perms for all new groups.
407 groups and creates default perms for all new groups.
403
408
404 :param paths: full path to repository
409 :param paths: full path to repository
405 """
410 """
406 sa = meta.Session()
411 sa = meta.Session()
407 groups = path.split(Repository.url_sep())
412 groups = path.split(Repository.url_sep())
408 parent = None
413 parent = None
409 group = None
414 group = None
410
415
411 # last element is repo in nested groups structure
416 # last element is repo in nested groups structure
412 groups = groups[:-1]
417 groups = groups[:-1]
413 rgm = ReposGroupModel(sa)
418 rgm = ReposGroupModel(sa)
414 owner = User.get_first_admin()
419 owner = User.get_first_admin()
415 for lvl, group_name in enumerate(groups):
420 for lvl, group_name in enumerate(groups):
416 group_name = '/'.join(groups[:lvl] + [group_name])
421 group_name = '/'.join(groups[:lvl] + [group_name])
417 group = RepoGroup.get_by_group_name(group_name)
422 group = RepoGroup.get_by_group_name(group_name)
418 desc = '%s group' % group_name
423 desc = '%s group' % group_name
419
424
420 # skip folders that are now removed repos
425 # skip folders that are now removed repos
421 if REMOVED_REPO_PAT.match(group_name):
426 if REMOVED_REPO_PAT.match(group_name):
422 break
427 break
423
428
424 if group is None:
429 if group is None:
425 log.debug('creating group level: %s group_name: %s'
430 log.debug('creating group level: %s group_name: %s'
426 % (lvl, group_name))
431 % (lvl, group_name))
427 group = RepoGroup(group_name, parent)
432 group = RepoGroup(group_name, parent)
428 group.group_description = desc
433 group.group_description = desc
429 group.user = owner
434 group.user = owner
430 sa.add(group)
435 sa.add(group)
431 perm_obj = rgm._create_default_perms(group)
436 perm_obj = rgm._create_default_perms(group)
432 sa.add(perm_obj)
437 sa.add(perm_obj)
433 sa.flush()
438 sa.flush()
434
439
435 parent = group
440 parent = group
436 return group
441 return group
437
442
438
443
439 def repo2db_mapper(initial_repo_list, remove_obsolete=False,
444 def repo2db_mapper(initial_repo_list, remove_obsolete=False,
440 install_git_hook=False):
445 install_git_hook=False):
441 """
446 """
442 maps all repos given in initial_repo_list, non existing repositories
447 maps all repos given in initial_repo_list, non existing repositories
443 are created, if remove_obsolete is True it also check for db entries
448 are created, if remove_obsolete is True it also check for db entries
444 that are not in initial_repo_list and removes them.
449 that are not in initial_repo_list and removes them.
445
450
446 :param initial_repo_list: list of repositories found by scanning methods
451 :param initial_repo_list: list of repositories found by scanning methods
447 :param remove_obsolete: check for obsolete entries in database
452 :param remove_obsolete: check for obsolete entries in database
448 :param install_git_hook: if this is True, also check and install githook
453 :param install_git_hook: if this is True, also check and install githook
449 for a repo if missing
454 for a repo if missing
450 """
455 """
451 from rhodecode.model.repo import RepoModel
456 from rhodecode.model.repo import RepoModel
452 from rhodecode.model.scm import ScmModel
457 from rhodecode.model.scm import ScmModel
453 sa = meta.Session()
458 sa = meta.Session()
454 rm = RepoModel()
459 rm = RepoModel()
455 user = User.get_first_admin()
460 user = User.get_first_admin()
456 added = []
461 added = []
457
462
458 ##creation defaults
463 ##creation defaults
459 defs = RhodeCodeSetting.get_default_repo_settings(strip_prefix=True)
464 defs = RhodeCodeSetting.get_default_repo_settings(strip_prefix=True)
460 enable_statistics = defs.get('repo_enable_statistics')
465 enable_statistics = defs.get('repo_enable_statistics')
461 enable_locking = defs.get('repo_enable_locking')
466 enable_locking = defs.get('repo_enable_locking')
462 enable_downloads = defs.get('repo_enable_downloads')
467 enable_downloads = defs.get('repo_enable_downloads')
463 private = defs.get('repo_private')
468 private = defs.get('repo_private')
464
469
465 for name, repo in initial_repo_list.items():
470 for name, repo in initial_repo_list.items():
466 group = map_groups(name)
471 group = map_groups(name)
467 db_repo = rm.get_by_repo_name(name)
472 db_repo = rm.get_by_repo_name(name)
468 # found repo that is on filesystem not in RhodeCode database
473 # found repo that is on filesystem not in RhodeCode database
469 if not db_repo:
474 if not db_repo:
470 log.info('repository %s not found, creating now' % name)
475 log.info('repository %s not found, creating now' % name)
471 added.append(name)
476 added.append(name)
472 desc = (repo.description
477 desc = (repo.description
473 if repo.description != 'unknown'
478 if repo.description != 'unknown'
474 else '%s repository' % name)
479 else '%s repository' % name)
475
480
476 new_repo = rm.create_repo(
481 new_repo = rm.create_repo(
477 repo_name=name,
482 repo_name=name,
478 repo_type=repo.alias,
483 repo_type=repo.alias,
479 description=desc,
484 description=desc,
480 repos_group=getattr(group, 'group_id', None),
485 repos_group=getattr(group, 'group_id', None),
481 owner=user,
486 owner=user,
482 just_db=True,
487 just_db=True,
483 enable_locking=enable_locking,
488 enable_locking=enable_locking,
484 enable_downloads=enable_downloads,
489 enable_downloads=enable_downloads,
485 enable_statistics=enable_statistics,
490 enable_statistics=enable_statistics,
486 private=private
491 private=private
487 )
492 )
488 # we added that repo just now, and make sure it has githook
493 # we added that repo just now, and make sure it has githook
489 # installed
494 # installed
490 if new_repo.repo_type == 'git':
495 if new_repo.repo_type == 'git':
491 ScmModel().install_git_hook(new_repo.scm_instance)
496 ScmModel().install_git_hook(new_repo.scm_instance)
492 new_repo.update_changeset_cache()
497 new_repo.update_changeset_cache()
493 elif install_git_hook:
498 elif install_git_hook:
494 if db_repo.repo_type == 'git':
499 if db_repo.repo_type == 'git':
495 ScmModel().install_git_hook(db_repo.scm_instance)
500 ScmModel().install_git_hook(db_repo.scm_instance)
496
501
497 sa.commit()
502 sa.commit()
498 removed = []
503 removed = []
499 if remove_obsolete:
504 if remove_obsolete:
500 # remove from database those repositories that are not in the filesystem
505 # remove from database those repositories that are not in the filesystem
501 for repo in sa.query(Repository).all():
506 for repo in sa.query(Repository).all():
502 if repo.repo_name not in initial_repo_list.keys():
507 if repo.repo_name not in initial_repo_list.keys():
503 log.debug("Removing non-existing repository found in db `%s`" %
508 log.debug("Removing non-existing repository found in db `%s`" %
504 repo.repo_name)
509 repo.repo_name)
505 try:
510 try:
506 removed.append(repo.repo_name)
511 removed.append(repo.repo_name)
507 RepoModel(sa).delete(repo, forks='detach', fs_remove=False)
512 RepoModel(sa).delete(repo, forks='detach', fs_remove=False)
508 sa.commit()
513 sa.commit()
509 except Exception:
514 except Exception:
510 #don't hold further removals on error
515 #don't hold further removals on error
511 log.error(traceback.format_exc())
516 log.error(traceback.format_exc())
512 sa.rollback()
517 sa.rollback()
513 return added, removed
518 return added, removed
514
519
515
520
516 # set cache regions for beaker so celery can utilise it
521 # set cache regions for beaker so celery can utilise it
517 def add_cache(settings):
522 def add_cache(settings):
518 cache_settings = {'regions': None}
523 cache_settings = {'regions': None}
519 for key in settings.keys():
524 for key in settings.keys():
520 for prefix in ['beaker.cache.', 'cache.']:
525 for prefix in ['beaker.cache.', 'cache.']:
521 if key.startswith(prefix):
526 if key.startswith(prefix):
522 name = key.split(prefix)[1].strip()
527 name = key.split(prefix)[1].strip()
523 cache_settings[name] = settings[key].strip()
528 cache_settings[name] = settings[key].strip()
524 if cache_settings['regions']:
529 if cache_settings['regions']:
525 for region in cache_settings['regions'].split(','):
530 for region in cache_settings['regions'].split(','):
526 region = region.strip()
531 region = region.strip()
527 region_settings = {}
532 region_settings = {}
528 for key, value in cache_settings.items():
533 for key, value in cache_settings.items():
529 if key.startswith(region):
534 if key.startswith(region):
530 region_settings[key.split('.')[1]] = value
535 region_settings[key.split('.')[1]] = value
531 region_settings['expire'] = int(region_settings.get('expire',
536 region_settings['expire'] = int(region_settings.get('expire',
532 60))
537 60))
533 region_settings.setdefault('lock_dir',
538 region_settings.setdefault('lock_dir',
534 cache_settings.get('lock_dir'))
539 cache_settings.get('lock_dir'))
535 region_settings.setdefault('data_dir',
540 region_settings.setdefault('data_dir',
536 cache_settings.get('data_dir'))
541 cache_settings.get('data_dir'))
537
542
538 if 'type' not in region_settings:
543 if 'type' not in region_settings:
539 region_settings['type'] = cache_settings.get('type',
544 region_settings['type'] = cache_settings.get('type',
540 'memory')
545 'memory')
541 beaker.cache.cache_regions[region] = region_settings
546 beaker.cache.cache_regions[region] = region_settings
542
547
543
548
544 def load_rcextensions(root_path):
549 def load_rcextensions(root_path):
545 import rhodecode
550 import rhodecode
546 from rhodecode.config import conf
551 from rhodecode.config import conf
547
552
548 path = os.path.join(root_path, 'rcextensions', '__init__.py')
553 path = os.path.join(root_path, 'rcextensions', '__init__.py')
549 if os.path.isfile(path):
554 if os.path.isfile(path):
550 rcext = create_module('rc', path)
555 rcext = create_module('rc', path)
551 EXT = rhodecode.EXTENSIONS = rcext
556 EXT = rhodecode.EXTENSIONS = rcext
552 log.debug('Found rcextensions now loading %s...' % rcext)
557 log.debug('Found rcextensions now loading %s...' % rcext)
553
558
554 # Additional mappings that are not present in the pygments lexers
559 # Additional mappings that are not present in the pygments lexers
555 conf.LANGUAGES_EXTENSIONS_MAP.update(getattr(EXT, 'EXTRA_MAPPINGS', {}))
560 conf.LANGUAGES_EXTENSIONS_MAP.update(getattr(EXT, 'EXTRA_MAPPINGS', {}))
556
561
557 #OVERRIDE OUR EXTENSIONS FROM RC-EXTENSIONS (if present)
562 #OVERRIDE OUR EXTENSIONS FROM RC-EXTENSIONS (if present)
558
563
559 if getattr(EXT, 'INDEX_EXTENSIONS', []):
564 if getattr(EXT, 'INDEX_EXTENSIONS', []):
560 log.debug('settings custom INDEX_EXTENSIONS')
565 log.debug('settings custom INDEX_EXTENSIONS')
561 conf.INDEX_EXTENSIONS = getattr(EXT, 'INDEX_EXTENSIONS', [])
566 conf.INDEX_EXTENSIONS = getattr(EXT, 'INDEX_EXTENSIONS', [])
562
567
563 #ADDITIONAL MAPPINGS
568 #ADDITIONAL MAPPINGS
564 log.debug('adding extra into INDEX_EXTENSIONS')
569 log.debug('adding extra into INDEX_EXTENSIONS')
565 conf.INDEX_EXTENSIONS.extend(getattr(EXT, 'EXTRA_INDEX_EXTENSIONS', []))
570 conf.INDEX_EXTENSIONS.extend(getattr(EXT, 'EXTRA_INDEX_EXTENSIONS', []))
566
571
567 # auto check if the module is not missing any data, set to default if is
572 # auto check if the module is not missing any data, set to default if is
568 # this will help autoupdate new feature of rcext module
573 # this will help autoupdate new feature of rcext module
569 from rhodecode.config import rcextensions
574 from rhodecode.config import rcextensions
570 for k in dir(rcextensions):
575 for k in dir(rcextensions):
571 if not k.startswith('_') and not hasattr(EXT, k):
576 if not k.startswith('_') and not hasattr(EXT, k):
572 setattr(EXT, k, getattr(rcextensions, k))
577 setattr(EXT, k, getattr(rcextensions, k))
573
578
574
579
575 def get_custom_lexer(extension):
580 def get_custom_lexer(extension):
576 """
581 """
577 returns a custom lexer if it's defined in rcextensions module, or None
582 returns a custom lexer if it's defined in rcextensions module, or None
578 if there's no custom lexer defined
583 if there's no custom lexer defined
579 """
584 """
580 import rhodecode
585 import rhodecode
581 from pygments import lexers
586 from pygments import lexers
582 #check if we didn't define this extension as other lexer
587 #check if we didn't define this extension as other lexer
583 if rhodecode.EXTENSIONS and extension in rhodecode.EXTENSIONS.EXTRA_LEXERS:
588 if rhodecode.EXTENSIONS and extension in rhodecode.EXTENSIONS.EXTRA_LEXERS:
584 _lexer_name = rhodecode.EXTENSIONS.EXTRA_LEXERS[extension]
589 _lexer_name = rhodecode.EXTENSIONS.EXTRA_LEXERS[extension]
585 return lexers.get_lexer_by_name(_lexer_name)
590 return lexers.get_lexer_by_name(_lexer_name)
586
591
587
592
588 #==============================================================================
593 #==============================================================================
589 # TEST FUNCTIONS AND CREATORS
594 # TEST FUNCTIONS AND CREATORS
590 #==============================================================================
595 #==============================================================================
591 def create_test_index(repo_location, config, full_index):
596 def create_test_index(repo_location, config, full_index):
592 """
597 """
593 Makes default test index
598 Makes default test index
594
599
595 :param config: test config
600 :param config: test config
596 :param full_index:
601 :param full_index:
597 """
602 """
598
603
599 from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon
604 from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon
600 from rhodecode.lib.pidlock import DaemonLock, LockHeld
605 from rhodecode.lib.pidlock import DaemonLock, LockHeld
601
606
602 repo_location = repo_location
607 repo_location = repo_location
603
608
604 index_location = os.path.join(config['app_conf']['index_dir'])
609 index_location = os.path.join(config['app_conf']['index_dir'])
605 if not os.path.exists(index_location):
610 if not os.path.exists(index_location):
606 os.makedirs(index_location)
611 os.makedirs(index_location)
607
612
608 try:
613 try:
609 l = DaemonLock(file_=jn(dn(index_location), 'make_index.lock'))
614 l = DaemonLock(file_=jn(dn(index_location), 'make_index.lock'))
610 WhooshIndexingDaemon(index_location=index_location,
615 WhooshIndexingDaemon(index_location=index_location,
611 repo_location=repo_location)\
616 repo_location=repo_location)\
612 .run(full_index=full_index)
617 .run(full_index=full_index)
613 l.release()
618 l.release()
614 except LockHeld:
619 except LockHeld:
615 pass
620 pass
616
621
617
622
618 def create_test_env(repos_test_path, config):
623 def create_test_env(repos_test_path, config):
619 """
624 """
620 Makes a fresh database and
625 Makes a fresh database and
621 install test repository into tmp dir
626 install test repository into tmp dir
622 """
627 """
623 from rhodecode.lib.db_manage import DbManage
628 from rhodecode.lib.db_manage import DbManage
624 from rhodecode.tests import HG_REPO, GIT_REPO, TESTS_TMP_PATH
629 from rhodecode.tests import HG_REPO, GIT_REPO, TESTS_TMP_PATH
625
630
626 # PART ONE create db
631 # PART ONE create db
627 dbconf = config['sqlalchemy.db1.url']
632 dbconf = config['sqlalchemy.db1.url']
628 log.debug('making test db %s' % dbconf)
633 log.debug('making test db %s' % dbconf)
629
634
630 # create test dir if it doesn't exist
635 # create test dir if it doesn't exist
631 if not os.path.isdir(repos_test_path):
636 if not os.path.isdir(repos_test_path):
632 log.debug('Creating testdir %s' % repos_test_path)
637 log.debug('Creating testdir %s' % repos_test_path)
633 os.makedirs(repos_test_path)
638 os.makedirs(repos_test_path)
634
639
635 dbmanage = DbManage(log_sql=True, dbconf=dbconf, root=config['here'],
640 dbmanage = DbManage(log_sql=True, dbconf=dbconf, root=config['here'],
636 tests=True)
641 tests=True)
637 dbmanage.create_tables(override=True)
642 dbmanage.create_tables(override=True)
638 dbmanage.create_settings(dbmanage.config_prompt(repos_test_path))
643 dbmanage.create_settings(dbmanage.config_prompt(repos_test_path))
639 dbmanage.create_default_user()
644 dbmanage.create_default_user()
640 dbmanage.admin_prompt()
645 dbmanage.admin_prompt()
641 dbmanage.create_permissions()
646 dbmanage.create_permissions()
642 dbmanage.populate_default_permissions()
647 dbmanage.populate_default_permissions()
643 Session().commit()
648 Session().commit()
644 # PART TWO make test repo
649 # PART TWO make test repo
645 log.debug('making test vcs repositories')
650 log.debug('making test vcs repositories')
646
651
647 idx_path = config['app_conf']['index_dir']
652 idx_path = config['app_conf']['index_dir']
648 data_path = config['app_conf']['cache_dir']
653 data_path = config['app_conf']['cache_dir']
649
654
650 #clean index and data
655 #clean index and data
651 if idx_path and os.path.exists(idx_path):
656 if idx_path and os.path.exists(idx_path):
652 log.debug('remove %s' % idx_path)
657 log.debug('remove %s' % idx_path)
653 shutil.rmtree(idx_path)
658 shutil.rmtree(idx_path)
654
659
655 if data_path and os.path.exists(data_path):
660 if data_path and os.path.exists(data_path):
656 log.debug('remove %s' % data_path)
661 log.debug('remove %s' % data_path)
657 shutil.rmtree(data_path)
662 shutil.rmtree(data_path)
658
663
659 #CREATE DEFAULT TEST REPOS
664 #CREATE DEFAULT TEST REPOS
660 cur_dir = dn(dn(abspath(__file__)))
665 cur_dir = dn(dn(abspath(__file__)))
661 tar = tarfile.open(jn(cur_dir, 'tests', 'fixtures', "vcs_test_hg.tar.gz"))
666 tar = tarfile.open(jn(cur_dir, 'tests', 'fixtures', "vcs_test_hg.tar.gz"))
662 tar.extractall(jn(TESTS_TMP_PATH, HG_REPO))
667 tar.extractall(jn(TESTS_TMP_PATH, HG_REPO))
663 tar.close()
668 tar.close()
664
669
665 cur_dir = dn(dn(abspath(__file__)))
670 cur_dir = dn(dn(abspath(__file__)))
666 tar = tarfile.open(jn(cur_dir, 'tests', 'fixtures', "vcs_test_git.tar.gz"))
671 tar = tarfile.open(jn(cur_dir, 'tests', 'fixtures', "vcs_test_git.tar.gz"))
667 tar.extractall(jn(TESTS_TMP_PATH, GIT_REPO))
672 tar.extractall(jn(TESTS_TMP_PATH, GIT_REPO))
668 tar.close()
673 tar.close()
669
674
670 #LOAD VCS test stuff
675 #LOAD VCS test stuff
671 from rhodecode.tests.vcs import setup_package
676 from rhodecode.tests.vcs import setup_package
672 setup_package()
677 setup_package()
673
678
674
679
675 #==============================================================================
680 #==============================================================================
676 # PASTER COMMANDS
681 # PASTER COMMANDS
677 #==============================================================================
682 #==============================================================================
678 class BasePasterCommand(Command):
683 class BasePasterCommand(Command):
679 """
684 """
680 Abstract Base Class for paster commands.
685 Abstract Base Class for paster commands.
681
686
682 The celery commands are somewhat aggressive about loading
687 The celery commands are somewhat aggressive about loading
683 celery.conf, and since our module sets the `CELERY_LOADER`
688 celery.conf, and since our module sets the `CELERY_LOADER`
684 environment variable to our loader, we have to bootstrap a bit and
689 environment variable to our loader, we have to bootstrap a bit and
685 make sure we've had a chance to load the pylons config off of the
690 make sure we've had a chance to load the pylons config off of the
686 command line, otherwise everything fails.
691 command line, otherwise everything fails.
687 """
692 """
688 min_args = 1
693 min_args = 1
689 min_args_error = "Please provide a paster config file as an argument."
694 min_args_error = "Please provide a paster config file as an argument."
690 takes_config_file = 1
695 takes_config_file = 1
691 requires_config_file = True
696 requires_config_file = True
692
697
693 def notify_msg(self, msg, log=False):
698 def notify_msg(self, msg, log=False):
694 """Make a notification to user, additionally if logger is passed
699 """Make a notification to user, additionally if logger is passed
695 it logs this action using given logger
700 it logs this action using given logger
696
701
697 :param msg: message that will be printed to user
702 :param msg: message that will be printed to user
698 :param log: logging instance, to use to additionally log this message
703 :param log: logging instance, to use to additionally log this message
699
704
700 """
705 """
701 if log and isinstance(log, logging):
706 if log and isinstance(log, logging):
702 log(msg)
707 log(msg)
703
708
704 def run(self, args):
709 def run(self, args):
705 """
710 """
706 Overrides Command.run
711 Overrides Command.run
707
712
708 Checks for a config file argument and loads it.
713 Checks for a config file argument and loads it.
709 """
714 """
710 if len(args) < self.min_args:
715 if len(args) < self.min_args:
711 raise BadCommand(
716 raise BadCommand(
712 self.min_args_error % {'min_args': self.min_args,
717 self.min_args_error % {'min_args': self.min_args,
713 'actual_args': len(args)})
718 'actual_args': len(args)})
714
719
715 # Decrement because we're going to lob off the first argument.
720 # Decrement because we're going to lob off the first argument.
716 # @@ This is hacky
721 # @@ This is hacky
717 self.min_args -= 1
722 self.min_args -= 1
718 self.bootstrap_config(args[0])
723 self.bootstrap_config(args[0])
719 self.update_parser()
724 self.update_parser()
720 return super(BasePasterCommand, self).run(args[1:])
725 return super(BasePasterCommand, self).run(args[1:])
721
726
722 def update_parser(self):
727 def update_parser(self):
723 """
728 """
724 Abstract method. Allows for the class's parser to be updated
729 Abstract method. Allows for the class's parser to be updated
725 before the superclass's `run` method is called. Necessary to
730 before the superclass's `run` method is called. Necessary to
726 allow options/arguments to be passed through to the underlying
731 allow options/arguments to be passed through to the underlying
727 celery command.
732 celery command.
728 """
733 """
729 raise NotImplementedError("Abstract Method.")
734 raise NotImplementedError("Abstract Method.")
730
735
731 def bootstrap_config(self, conf):
736 def bootstrap_config(self, conf):
732 """
737 """
733 Loads the pylons configuration.
738 Loads the pylons configuration.
734 """
739 """
735 from pylons import config as pylonsconfig
740 from pylons import config as pylonsconfig
736
741
737 self.path_to_ini_file = os.path.realpath(conf)
742 self.path_to_ini_file = os.path.realpath(conf)
738 conf = paste.deploy.appconfig('config:' + self.path_to_ini_file)
743 conf = paste.deploy.appconfig('config:' + self.path_to_ini_file)
739 pylonsconfig.init_app(conf.global_conf, conf.local_conf)
744 pylonsconfig.init_app(conf.global_conf, conf.local_conf)
740
745
741 def _init_session(self):
746 def _init_session(self):
742 """
747 """
743 Inits SqlAlchemy Session
748 Inits SqlAlchemy Session
744 """
749 """
745 logging.config.fileConfig(self.path_to_ini_file)
750 logging.config.fileConfig(self.path_to_ini_file)
746 from pylons import config
751 from pylons import config
747 from rhodecode.model import init_model
752 from rhodecode.model import init_model
748 from rhodecode.lib.utils2 import engine_from_config
753 from rhodecode.lib.utils2 import engine_from_config
749
754
750 #get to remove repos !!
755 #get to remove repos !!
751 add_cache(config)
756 add_cache(config)
752 engine = engine_from_config(config, 'sqlalchemy.db1.')
757 engine = engine_from_config(config, 'sqlalchemy.db1.')
753 init_model(engine)
758 init_model(engine)
754
759
755
760
756 def check_git_version():
761 def check_git_version():
757 """
762 """
758 Checks what version of git is installed in system, and issues a warning
763 Checks what version of git is installed in system, and issues a warning
759 if it's too old for RhodeCode to properly work.
764 if it's too old for RhodeCode to properly work.
760 """
765 """
761 from rhodecode import BACKENDS
766 from rhodecode import BACKENDS
762 from rhodecode.lib.vcs.backends.git.repository import GitRepository
767 from rhodecode.lib.vcs.backends.git.repository import GitRepository
763 from rhodecode.lib.vcs.conf import settings
768 from rhodecode.lib.vcs.conf import settings
764 from distutils.version import StrictVersion
769 from distutils.version import StrictVersion
765
770
766 stdout, stderr = GitRepository._run_git_command('--version', _bare=True,
771 stdout, stderr = GitRepository._run_git_command('--version', _bare=True,
767 _safe=True)
772 _safe=True)
768
773
769 ver = (stdout.split(' ')[-1] or '').strip() or '0.0.0'
774 ver = (stdout.split(' ')[-1] or '').strip() or '0.0.0'
770 if len(ver.split('.')) > 3:
775 if len(ver.split('.')) > 3:
771 #StrictVersion needs to be only 3 element type
776 #StrictVersion needs to be only 3 element type
772 ver = '.'.join(ver.split('.')[:3])
777 ver = '.'.join(ver.split('.')[:3])
773 try:
778 try:
774 _ver = StrictVersion(ver)
779 _ver = StrictVersion(ver)
775 except Exception:
780 except Exception:
776 _ver = StrictVersion('0.0.0')
781 _ver = StrictVersion('0.0.0')
777 stderr = traceback.format_exc()
782 stderr = traceback.format_exc()
778
783
779 req_ver = '1.7.4'
784 req_ver = '1.7.4'
780 to_old_git = False
785 to_old_git = False
781 if _ver < StrictVersion(req_ver):
786 if _ver < StrictVersion(req_ver):
782 to_old_git = True
787 to_old_git = True
783
788
784 if 'git' in BACKENDS:
789 if 'git' in BACKENDS:
785 log.debug('GIT executable: "%s" version detected: %s'
790 log.debug('GIT executable: "%s" version detected: %s'
786 % (settings.GIT_EXECUTABLE_PATH, stdout))
791 % (settings.GIT_EXECUTABLE_PATH, stdout))
787 if stderr:
792 if stderr:
788 log.warning('Unable to detect git version, org error was: %r' % stderr)
793 log.warning('Unable to detect git version, org error was: %r' % stderr)
789 elif to_old_git:
794 elif to_old_git:
790 log.warning('RhodeCode detected git version %s, which is too old '
795 log.warning('RhodeCode detected git version %s, which is too old '
791 'for the system to function properly. Make sure '
796 'for the system to function properly. Make sure '
792 'its version is at least %s' % (ver, req_ver))
797 'its version is at least %s' % (ver, req_ver))
793 return _ver
798 return _ver
794
799
795
800
796 @decorator.decorator
801 @decorator.decorator
797 def jsonify(func, *args, **kwargs):
802 def jsonify(func, *args, **kwargs):
798 """Action decorator that formats output for JSON
803 """Action decorator that formats output for JSON
799
804
800 Given a function that will return content, this decorator will turn
805 Given a function that will return content, this decorator will turn
801 the result into JSON, with a content-type of 'application/json' and
806 the result into JSON, with a content-type of 'application/json' and
802 output it.
807 output it.
803
808
804 """
809 """
805 from pylons.decorators.util import get_pylons
810 from pylons.decorators.util import get_pylons
806 from rhodecode.lib.compat import json
811 from rhodecode.lib.compat import json
807 pylons = get_pylons(args)
812 pylons = get_pylons(args)
808 pylons.response.headers['Content-Type'] = 'application/json; charset=utf-8'
813 pylons.response.headers['Content-Type'] = 'application/json; charset=utf-8'
809 data = func(*args, **kwargs)
814 data = func(*args, **kwargs)
810 if isinstance(data, (list, tuple)):
815 if isinstance(data, (list, tuple)):
811 msg = "JSON responses with Array envelopes are susceptible to " \
816 msg = "JSON responses with Array envelopes are susceptible to " \
812 "cross-site data leak attacks, see " \
817 "cross-site data leak attacks, see " \
813 "http://wiki.pylonshq.com/display/pylonsfaq/Warnings"
818 "http://wiki.pylonshq.com/display/pylonsfaq/Warnings"
814 warnings.warn(msg, Warning, 2)
819 warnings.warn(msg, Warning, 2)
815 log.warning(msg)
820 log.warning(msg)
816 log.debug("Returning JSON wrapped action output")
821 log.debug("Returning JSON wrapped action output")
817 return json.dumps(data, encoding='utf-8')
822 return json.dumps(data, encoding='utf-8')
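
With the fallback in place, callers that run inside a web request no longer need to pass the client address through explicitly; a hypothetical call such as the one below (illustrative argument values, not taken from this changeset) would have its user_ip filled in from tmpl_context.rhodecode_user.

action_logger(user='admin', action='user_forked_repo',
              repo='some-repo', commit=True)  # ipaddr resolved automatically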