better logging
marcink
r3545:e2fad0c6 beta
@@ -1,800 +1,800 @@
# -*- coding: utf-8 -*-
"""
rhodecode.lib.utils
~~~~~~~~~~~~~~~~~~~

Utilities library for RhodeCode

:created_on: Apr 18, 2010
:author: marcink
:copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
:license: GPLv3, see COPYING for more details.
"""
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import os
import re
import logging
import datetime
import traceback
import paste
import beaker
import tarfile
import shutil
import decorator
import warnings
from os.path import abspath
from os.path import dirname as dn, join as jn

from paste.script.command import Command, BadCommand

from mercurial import ui, config

from webhelpers.text import collapse, remove_formatting, strip_tags

from rhodecode.lib.vcs import get_backend
from rhodecode.lib.vcs.backends.base import BaseChangeset
from rhodecode.lib.vcs.utils.lazy import LazyProperty
from rhodecode.lib.vcs.utils.helpers import get_scm
from rhodecode.lib.vcs.exceptions import VCSError

from rhodecode.lib.caching_query import FromCache

from rhodecode.model import meta
from rhodecode.model.db import Repository, User, RhodeCodeUi, \
    UserLog, RepoGroup, RhodeCodeSetting, CacheInvalidation
from rhodecode.model.meta import Session
from rhodecode.model.repos_group import ReposGroupModel
from rhodecode.lib.utils2 import safe_str, safe_unicode
from rhodecode.lib.vcs.utils.fakemod import create_module

log = logging.getLogger(__name__)

REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')

def recursive_replace(str_, replace=' '):
    """
    Recursively replace repeated instances of the given character with a
    single one

    :param str_: given string
    :param replace: char whose repeated instances will be collapsed

    Examples::
        >>> recursive_replace("Mighty---Mighty-Bo--sstones",'-')
        'Mighty-Mighty-Bo-sstones'
    """

    if str_.find(replace * 2) == -1:
        return str_
    else:
        str_ = str_.replace(replace * 2, replace)
        return recursive_replace(str_, replace)


def repo_name_slug(value):
    """
    Return a slug of the repository name. This function is called on each
    creation/modification of a repository to prevent bad names in repos
    """

    slug = remove_formatting(value)
    slug = strip_tags(slug)

    for c in """`?=[]\;'"<>,/~!@#$%^&*()+{}|: """:
        slug = slug.replace(c, '-')
    slug = recursive_replace(slug, '-')
    slug = collapse(slug, '-')
    return slug

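# Illustrative usage sketch, not part of the original file: the input below is
# a made-up name; spaces and other disallowed characters get collapsed into
# single dashes.
#
#   >>> repo_name_slug('My  Repo Name')
#   'My-Repo-Name'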

def get_repo_slug(request):
    _repo = request.environ['pylons.routes_dict'].get('repo_name')
    if _repo:
        _repo = _repo.rstrip('/')
    return _repo


def get_repos_group_slug(request):
    _group = request.environ['pylons.routes_dict'].get('group_name')
    if _group:
        _group = _group.rstrip('/')
    return _group


def action_logger(user, action, repo, ipaddr='', sa=None, commit=False):
    """
    Action logger for various actions made by users

    :param user: user that made this action; can be a unique username string
        or an object containing a user_id attribute
    :param action: action to log; should be one of the predefined unique
        actions, for easy translations
    :param repo: string name of the repository, or an object containing
        repo_id, that the action was made on
    :param ipaddr: optional IP address from which the action was made
    :param sa: optional sqlalchemy session

    """

    if not sa:
        sa = meta.Session()

    try:
        if hasattr(user, 'user_id'):
            user_obj = User.get(user.user_id)
        elif isinstance(user, basestring):
            user_obj = User.get_by_username(user)
        else:
            raise Exception('You have to provide a user object or a username')

        if hasattr(repo, 'repo_id'):
            repo_obj = Repository.get(repo.repo_id)
            repo_name = repo_obj.repo_name
        elif isinstance(repo, basestring):
            repo_name = repo.lstrip('/')
            repo_obj = Repository.get_by_repo_name(repo_name)
        else:
            repo_obj = None
            repo_name = ''

        user_log = UserLog()
        user_log.user_id = user_obj.user_id
        user_log.username = user_obj.username
        user_log.action = safe_unicode(action)

        user_log.repository = repo_obj
        user_log.repository_name = repo_name

        user_log.action_date = datetime.datetime.now()
        user_log.user_ip = ipaddr
        sa.add(user_log)

-        log.info('Logging action %s on %s by %s' %
-                 (action, safe_unicode(repo), user_obj))
+        log.info('Logging action:%s on %s by user:%s ip:%s' %
+                 (action, safe_unicode(repo), user_obj, ipaddr))
        if commit:
            sa.commit()
    except:
        log.error(traceback.format_exc())
        raise

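# Illustrative usage sketch, not part of the original file: how a caller might
# record a pull action. The username, repository name and IP address below are
# made-up values; only the keyword arguments defined above are used.
#
#   action_logger(user='john', action='pull', repo='my-repo',
#                 ipaddr='127.0.0.1', commit=True)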

def get_repos(path, recursive=False, skip_removed_repos=True):
    """
    Scans the given path for repos and returns (name, (type, path)) tuples

    :param path: path to scan for repositories
    :param recursive: recursive search, returning names with their
        subdirectories in front
    """

    # remove ending slash for better results
    path = path.rstrip(os.sep)
    log.debug('now scanning in %s location recursive:%s...' % (path, recursive))

    def _get_repos(p):
        if not os.access(p, os.W_OK):
            return
        for dirpath in os.listdir(p):
            if os.path.isfile(os.path.join(p, dirpath)):
                continue
            cur_path = os.path.join(p, dirpath)

            # skip removed repos
            if skip_removed_repos and REMOVED_REPO_PAT.match(dirpath):
                continue

            # skip .<something> dirs
            if dirpath.startswith('.'):
                continue

            try:
                scm_info = get_scm(cur_path)
                yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
            except VCSError:
                if not recursive:
                    continue
                # check if this dir contains other repos, for recursive scan
                rec_path = os.path.join(p, dirpath)
                if os.path.isdir(rec_path):
                    for inner_scm in _get_repos(rec_path):
                        yield inner_scm

    return _get_repos(path)

# alias for backward compat
get_filesystem_repos = get_repos

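# Illustrative sketch, not part of the original file: consuming the generator
# returned above. The scan root '/srv/repos' is a made-up path; each item is a
# (name, (scm_type, scm_path)) pair as described in the docstring.
#
#   for name, (scm_type, scm_path) in get_repos('/srv/repos', recursive=True):
#       print name, scm_type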

def is_valid_repo(repo_name, base_path, scm=None):
    """
    Returns True if the given path is a valid repository, False otherwise.
    If the scm param is given, also check that the repository's scm type
    matches the one expected from the scm parameter

    :param repo_name:
    :param base_path:
    :param scm:

    :return True: if given path is a valid repository
    """
    full_path = os.path.join(safe_str(base_path), safe_str(repo_name))

    try:
        scm_ = get_scm(full_path)
        if scm:
            return scm_[0] == scm
        return True
    except VCSError:
        return False

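# Illustrative sketch, not part of the original file: the repository name and
# base path below are made-up values.
#
#   if is_valid_repo('my-repo', '/srv/repos', scm='hg'):
#       ...  # safe to open it with the 'hg' backend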

def is_valid_repos_group(repos_group_name, base_path, skip_path_check=False):
    """
    Returns True if the given path is a repos group, False otherwise

    :param repos_group_name:
    :param base_path:
    """
    full_path = os.path.join(safe_str(base_path), safe_str(repos_group_name))

    # check if it's not a repo
    if is_valid_repo(repos_group_name, base_path):
        return False

    try:
        # we need to check bare git repos at higher level
        # since we might match branches/hooks/info/objects or possible
        # other things inside bare git repo
        get_scm(os.path.dirname(full_path))
        return False
    except VCSError:
        pass

    # check if it's a valid path
    if skip_path_check or os.path.isdir(full_path):
        return True

    return False


def ask_ok(prompt, retries=4, complaint='Yes or no please!'):
    while True:
        ok = raw_input(prompt)
        if ok in ('y', 'ye', 'yes'):
            return True
        if ok in ('n', 'no', 'nop', 'nope'):
            return False
        retries = retries - 1
        if retries < 0:
            raise IOError
        print complaint

# propagated from mercurial documentation
ui_sections = ['alias', 'auth',
               'decode/encode', 'defaults',
               'diff', 'email',
               'extensions', 'format',
               'merge-patterns', 'merge-tools',
               'hooks', 'http_proxy',
               'smtp', 'patch',
               'paths', 'profiling',
               'server', 'trusted',
               'ui', 'web', ]


def make_ui(read_from='file', path=None, checkpaths=True, clear_session=True):
    """
    A function that will read python rc files or the database
    and make a mercurial ui object from the read options

    :param path: path to mercurial config file
    :param checkpaths: check the path
    :param read_from: read from 'file' or 'db'
    """

    baseui = ui.ui()

    # clean the baseui object
    baseui._ocfg = config.config()
    baseui._ucfg = config.config()
    baseui._tcfg = config.config()

    if read_from == 'file':
        if not os.path.isfile(path):
            log.debug('hgrc file is not present at %s, skipping...' % path)
            return False
        log.debug('reading hgrc from %s' % path)
        cfg = config.config()
        cfg.read(path)
        for section in ui_sections:
            for k, v in cfg.items(section):
                log.debug('settings ui from file: [%s] %s=%s' % (section, k, v))
                baseui.setconfig(safe_str(section), safe_str(k), safe_str(v))

    elif read_from == 'db':
        sa = meta.Session()
        ret = sa.query(RhodeCodeUi)\
            .options(FromCache("sql_cache_short", "get_hg_ui_settings"))\
            .all()

        hg_ui = ret
        for ui_ in hg_ui:
            if ui_.ui_active:
                log.debug('settings ui from db: [%s] %s=%s', ui_.ui_section,
                          ui_.ui_key, ui_.ui_value)
                baseui.setconfig(safe_str(ui_.ui_section), safe_str(ui_.ui_key),
                                 safe_str(ui_.ui_value))
            if ui_.ui_key == 'push_ssl':
                # force set push_ssl requirement to False, rhodecode
                # handles that
                baseui.setconfig(safe_str(ui_.ui_section), safe_str(ui_.ui_key),
                                 False)
        if clear_session:
            meta.Session.remove()
    return baseui

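# Illustrative sketch, not part of the original file: the two read modes the
# function supports; the hgrc path below is a made-up example.
#
#   baseui = make_ui(read_from='db')
#   baseui = make_ui(read_from='file', path='/etc/mercurial/hgrc')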

def set_rhodecode_config(config):
    """
    Updates the pylons config with new settings from the database

    :param config:
    """
    hgsettings = RhodeCodeSetting.get_app_settings()

    for k, v in hgsettings.items():
        config[k] = v


def invalidate_cache(cache_key, *args):
    """
    Puts a cache invalidation task into the db for
    further global cache invalidation
    """

    from rhodecode.model.scm import ScmModel

    if cache_key.startswith('get_repo_cached_'):
        name = cache_key.split('get_repo_cached_')[-1]
        ScmModel().mark_for_invalidation(name)


def map_groups(path):
    """
    Given a full path to a repository, create all nested groups that this
    repo is inside. This function creates parent-child relationships between
    groups and creates default perms for all new groups.

    :param path: full path to the repository
    """
    sa = meta.Session()
    groups = path.split(Repository.url_sep())
    parent = None
    group = None

    # last element is the repo in a nested groups structure
    groups = groups[:-1]
    rgm = ReposGroupModel(sa)
    for lvl, group_name in enumerate(groups):
        group_name = '/'.join(groups[:lvl] + [group_name])
        group = RepoGroup.get_by_group_name(group_name)
        desc = '%s group' % group_name

        # skip folders that are now removed repos
        if REMOVED_REPO_PAT.match(group_name):
            break

        if group is None:
            log.debug('creating group level: %s group_name: %s' % (lvl,
                                                                   group_name))
            group = RepoGroup(group_name, parent)
            group.group_description = desc
            sa.add(group)
            rgm._create_default_perms(group)
            sa.flush()
        parent = group
    return group

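# Illustrative sketch, not part of the original file: for a made-up repository
# path 'projects/web/my-repo', the call below would create the 'projects' and
# 'projects/web' groups (if missing) and return the innermost one.
#
#   group = map_groups('projects/web/my-repo')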

def repo2db_mapper(initial_repo_list, remove_obsolete=False,
                   install_git_hook=False):
    """
    Maps all repos given in initial_repo_list; non-existing repositories are
    created. If remove_obsolete is True, it also checks for db entries that
    are not in initial_repo_list and removes them.

    :param initial_repo_list: list of repositories found by scanning methods
    :param remove_obsolete: check for obsolete entries in database
    :param install_git_hook: if this is True, also check and install the git
        hook for a repo if missing
    """
    from rhodecode.model.repo import RepoModel
    from rhodecode.model.scm import ScmModel
    sa = meta.Session()
    rm = RepoModel()
    user = sa.query(User).filter(User.admin == True).first()
    if user is None:
        raise Exception('Missing administrative account!')
    added = []

    # # clear cache keys
    # log.debug("Clearing cache keys now...")
    # CacheInvalidation.clear_cache()
    # sa.commit()

    ## creation defaults
    defs = RhodeCodeSetting.get_default_repo_settings(strip_prefix=True)
    enable_statistics = defs.get('repo_enable_statistics')
    enable_locking = defs.get('repo_enable_locking')
    enable_downloads = defs.get('repo_enable_downloads')
    private = defs.get('repo_private')

    for name, repo in initial_repo_list.items():
        group = map_groups(name)
        db_repo = rm.get_by_repo_name(name)
        # found a repo that is on the filesystem but not in the RhodeCode db
        if not db_repo:
            log.info('repository %s not found, creating now' % name)
            added.append(name)
            desc = (repo.description
                    if repo.description != 'unknown'
                    else '%s repository' % name)

            new_repo = rm.create_repo(
                repo_name=name,
                repo_type=repo.alias,
                description=desc,
                repos_group=getattr(group, 'group_id', None),
                owner=user,
                just_db=True,
                enable_locking=enable_locking,
                enable_downloads=enable_downloads,
                enable_statistics=enable_statistics,
                private=private
            )
            # we just added that repo, so make sure it has the git hook
            # installed
            if new_repo.repo_type == 'git':
                ScmModel().install_git_hook(new_repo.scm_instance)
            new_repo.update_changeset_cache()
        elif install_git_hook:
            if db_repo.repo_type == 'git':
                ScmModel().install_git_hook(db_repo.scm_instance)
        # during startup install cache keys for all repositories in the
        # system; this will register all repos and multiple instances
        key, _prefix, _org_key = CacheInvalidation._get_key(name)
        CacheInvalidation.invalidate(name)
        log.debug("Creating a cache key for %s, instance_id %s"
                  % (name, _prefix or 'unknown'))

    sa.commit()
    removed = []
    if remove_obsolete:
        # remove from the database those repositories that are not on the
        # filesystem
        for repo in sa.query(Repository).all():
            if repo.repo_name not in initial_repo_list.keys():
                log.debug("Removing non-existing repository found in db `%s`" %
                          repo.repo_name)
                try:
                    sa.delete(repo)
                    sa.commit()
                    removed.append(repo.repo_name)
                except:
                    # don't hold further removals on error
                    log.error(traceback.format_exc())
                    sa.rollback()
    return added, removed


# set cache regions for beaker so celery can utilise it
def add_cache(settings):
    cache_settings = {'regions': None}
    for key in settings.keys():
        for prefix in ['beaker.cache.', 'cache.']:
            if key.startswith(prefix):
                name = key.split(prefix)[1].strip()
                cache_settings[name] = settings[key].strip()
    if cache_settings['regions']:
        for region in cache_settings['regions'].split(','):
            region = region.strip()
            region_settings = {}
            for key, value in cache_settings.items():
                if key.startswith(region):
                    region_settings[key.split('.')[1]] = value
            region_settings['expire'] = int(region_settings.get('expire',
                                                                60))
            region_settings.setdefault('lock_dir',
                                       cache_settings.get('lock_dir'))
            region_settings.setdefault('data_dir',
                                       cache_settings.get('data_dir'))

            if 'type' not in region_settings:
                region_settings['type'] = cache_settings.get('type',
                                                             'memory')
            beaker.cache.cache_regions[region] = region_settings

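# Illustrative sketch, not part of the original file: a minimal settings dict
# with made-up region names and paths, showing the key prefixes the loop above
# strips before building the beaker region config.
#
#   add_cache({
#       'beaker.cache.regions': 'super_short_term, sql_cache_short',
#       'beaker.cache.super_short_term.expire': '10',
#       'beaker.cache.lock_dir': '/tmp/rc/data/cache/lock',
#       'beaker.cache.data_dir': '/tmp/rc/data/cache/data',
#   })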

def load_rcextensions(root_path):
    import rhodecode
    from rhodecode.config import conf

    path = os.path.join(root_path, 'rcextensions', '__init__.py')
    if os.path.isfile(path):
        rcext = create_module('rc', path)
        EXT = rhodecode.EXTENSIONS = rcext
        log.debug('Found rcextensions now loading %s...' % rcext)

        # Additional mappings that are not present in the pygments lexers
        conf.LANGUAGES_EXTENSIONS_MAP.update(getattr(EXT, 'EXTRA_MAPPINGS', {}))

        # OVERRIDE OUR EXTENSIONS FROM RC-EXTENSIONS (if present)

        if getattr(EXT, 'INDEX_EXTENSIONS', []) != []:
            log.debug('settings custom INDEX_EXTENSIONS')
            conf.INDEX_EXTENSIONS = getattr(EXT, 'INDEX_EXTENSIONS', [])

        # ADDITIONAL MAPPINGS
        log.debug('adding extra into INDEX_EXTENSIONS')
        conf.INDEX_EXTENSIONS.extend(getattr(EXT, 'EXTRA_INDEX_EXTENSIONS', []))

        # automatically check if the module is not missing any data; set it to
        # the default if it is. this helps auto-update new features of the
        # rcextensions module
        from rhodecode.config import rcextensions
        for k in dir(rcextensions):
            if not k.startswith('_') and not hasattr(EXT, k):
                setattr(EXT, k, getattr(rcextensions, k))


def get_custom_lexer(extension):
    """
    returns a custom lexer if it's defined in the rcextensions module, or None
    if there's no custom lexer defined
    """
    import rhodecode
    from pygments import lexers
    # check if this extension was assigned another lexer
    if rhodecode.EXTENSIONS and extension in rhodecode.EXTENSIONS.EXTRA_LEXERS:
        _lexer_name = rhodecode.EXTENSIONS.EXTRA_LEXERS[extension]
        return lexers.get_lexer_by_name(_lexer_name)

#==============================================================================
# TEST FUNCTIONS AND CREATORS
#==============================================================================
def create_test_index(repo_location, config, full_index):
    """
    Makes the default test index

    :param config: test config
    :param full_index:
    """

    from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon
    from rhodecode.lib.pidlock import DaemonLock, LockHeld

    repo_location = repo_location

    index_location = os.path.join(config['app_conf']['index_dir'])
    if not os.path.exists(index_location):
        os.makedirs(index_location)

    try:
        l = DaemonLock(file_=jn(dn(index_location), 'make_index.lock'))
        WhooshIndexingDaemon(index_location=index_location,
                             repo_location=repo_location)\
            .run(full_index=full_index)
        l.release()
    except LockHeld:
        pass


def create_test_env(repos_test_path, config):
    """
    Makes a fresh database and installs the test repositories into a tmp dir
    """
    from rhodecode.lib.db_manage import DbManage
    from rhodecode.tests import HG_REPO, GIT_REPO, TESTS_TMP_PATH

    # PART ONE create db
    dbconf = config['sqlalchemy.db1.url']
    log.debug('making test db %s' % dbconf)

    # create test dir if it doesn't exist
    if not os.path.isdir(repos_test_path):
        log.debug('Creating testdir %s' % repos_test_path)
        os.makedirs(repos_test_path)

    dbmanage = DbManage(log_sql=True, dbconf=dbconf, root=config['here'],
                        tests=True)
    dbmanage.create_tables(override=True)
    dbmanage.create_settings(dbmanage.config_prompt(repos_test_path))
    dbmanage.create_default_user()
    dbmanage.admin_prompt()
    dbmanage.create_permissions()
    dbmanage.populate_default_permissions()
    Session().commit()
    # PART TWO make test repo
    log.debug('making test vcs repositories')

    idx_path = config['app_conf']['index_dir']
    data_path = config['app_conf']['cache_dir']

    # clean index and data
    if idx_path and os.path.exists(idx_path):
        log.debug('remove %s' % idx_path)
        shutil.rmtree(idx_path)

    if data_path and os.path.exists(data_path):
        log.debug('remove %s' % data_path)
        shutil.rmtree(data_path)

    # CREATE DEFAULT TEST REPOS
    cur_dir = dn(dn(abspath(__file__)))
    tar = tarfile.open(jn(cur_dir, 'tests', "vcs_test_hg.tar.gz"))
    tar.extractall(jn(TESTS_TMP_PATH, HG_REPO))
    tar.close()

    cur_dir = dn(dn(abspath(__file__)))
    tar = tarfile.open(jn(cur_dir, 'tests', "vcs_test_git.tar.gz"))
    tar.extractall(jn(TESTS_TMP_PATH, GIT_REPO))
    tar.close()

    # LOAD VCS test stuff
    from rhodecode.tests.vcs import setup_package
    setup_package()


#==============================================================================
# PASTER COMMANDS
#==============================================================================
class BasePasterCommand(Command):
    """
    Abstract Base Class for paster commands.

    The celery commands are somewhat aggressive about loading
    celery.conf, and since our module sets the `CELERY_LOADER`
    environment variable to our loader, we have to bootstrap a bit and
    make sure we've had a chance to load the pylons config off of the
    command line, otherwise everything fails.
    """
    min_args = 1
    min_args_error = "Please provide a paster config file as an argument."
    takes_config_file = 1
    requires_config_file = True

    def notify_msg(self, msg, log=False):
        """Makes a notification to the user; additionally, if a logger is
        passed, it logs this action using the given logger

        :param msg: message that will be printed to the user
        :param log: logging instance, to use to additionally log this message

        """
        if log and isinstance(log, logging):
            log(msg)

    def run(self, args):
        """
        Overrides Command.run

        Checks for a config file argument and loads it.
        """
        if len(args) < self.min_args:
            raise BadCommand(
                self.min_args_error % {'min_args': self.min_args,
                                       'actual_args': len(args)})

        # Decrement because we're going to lob off the first argument.
        # @@ This is hacky
        self.min_args -= 1
        self.bootstrap_config(args[0])
        self.update_parser()
        return super(BasePasterCommand, self).run(args[1:])

    def update_parser(self):
        """
        Abstract method. Allows for the class's parser to be updated
        before the superclass's `run` method is called. Necessary to
        allow options/arguments to be passed through to the underlying
        celery command.
        """
        raise NotImplementedError("Abstract Method.")

    def bootstrap_config(self, conf):
        """
        Loads the pylons configuration.
        """
        from pylons import config as pylonsconfig

        self.path_to_ini_file = os.path.realpath(conf)
        conf = paste.deploy.appconfig('config:' + self.path_to_ini_file)
        pylonsconfig.init_app(conf.global_conf, conf.local_conf)

    def _init_session(self):
        """
        Inits the SqlAlchemy Session
        """
        logging.config.fileConfig(self.path_to_ini_file)
        from pylons import config
        from rhodecode.model import init_model
        from rhodecode.lib.utils2 import engine_from_config

        # get to remove repos !!
        add_cache(config)
        engine = engine_from_config(config, 'sqlalchemy.db1.')
        init_model(engine)


def check_git_version():
    """
    Checks which version of git is installed on the system, and issues a
    warning if it is too old for RhodeCode to work properly.
    """
    from rhodecode import BACKENDS
    from rhodecode.lib.vcs.backends.git.repository import GitRepository
    from distutils.version import StrictVersion

    stdout, stderr = GitRepository._run_git_command('--version', _bare=True,
                                                    _safe=True)

    ver = (stdout.split(' ')[-1] or '').strip() or '0.0.0'
    if len(ver.split('.')) > 3:
        # StrictVersion accepts only 3-element version strings
        ver = '.'.join(ver.split('.')[:3])
    try:
        _ver = StrictVersion(ver)
    except:
        _ver = StrictVersion('0.0.0')
        stderr = traceback.format_exc()

    req_ver = '1.7.4'
    to_old_git = False
    if _ver < StrictVersion(req_ver):
        to_old_git = True

    if 'git' in BACKENDS:
        log.debug('GIT version detected: %s' % stdout)
        if stderr:
            log.warning('Unable to detect git version org error was:%r' % stderr)
        elif to_old_git:
            log.warning('RhodeCode detected git version %s, which is too old '
                        'for the system to function properly. Make sure '
                        'its version is at least %s' % (ver, req_ver))
    return _ver


@decorator.decorator
def jsonify(func, *args, **kwargs):
    """Action decorator that formats output for JSON

    Given a function that will return content, this decorator will turn
    the result into JSON, with a content-type of 'application/json' and
    output it.

    """
    from pylons.decorators.util import get_pylons
    from rhodecode.lib.ext_json import json
    pylons = get_pylons(args)
    pylons.response.headers['Content-Type'] = 'application/json; charset=utf-8'
    data = func(*args, **kwargs)
    if isinstance(data, (list, tuple)):
        msg = "JSON responses with Array envelopes are susceptible to " \
              "cross-site data leak attacks, see " \
              "http://wiki.pylonshq.com/display/pylonsfaq/Warnings"
        warnings.warn(msg, Warning, 2)
        log.warning(msg)
    log.debug("Returning JSON wrapped action output")
    return json.dumps(data, encoding='utf-8')
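# Illustrative sketch, not part of the original file: a hypothetical pylons
# controller action decorated with the helper above; the returned dict is
# serialized to JSON and the response content-type is set accordingly.
#
#   class ReposController(BaseController):
#       @jsonify
#       def repo_info(self):
#           return {'name': 'my-repo', 'type': 'hg'}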