util: remove the attempted rename of get_filesystem_repos and its backward-compatibility alias
Mads Kiilerich
r3649:24e24661 beta
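The change below restores the get_filesystem_repos name and drops the short-lived get_repos alias. For context, a minimal usage sketch of the restored helper (the repository root path is a hypothetical placeholder); per its docstring it yields (name, (type, path)) tuples for every repository found under the given path:

    # Hypothetical caller of the restored helper; '/srv/repos' is a placeholder path.
    from rhodecode.lib.utils import get_filesystem_repos

    for name, (scm_type, scm_path) in get_filesystem_repos('/srv/repos', recursive=True):
        print name, scm_type, scm_path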
@@ -1,795 +1,792 @@
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2 """
2 """
3 rhodecode.lib.utils
3 rhodecode.lib.utils
4 ~~~~~~~~~~~~~~~~~~~
4 ~~~~~~~~~~~~~~~~~~~
5
5
6 Utilities library for RhodeCode
6 Utilities library for RhodeCode
7
7
8 :created_on: Apr 18, 2010
8 :created_on: Apr 18, 2010
9 :author: marcink
9 :author: marcink
10 :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
10 :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
11 :license: GPLv3, see COPYING for more details.
11 :license: GPLv3, see COPYING for more details.
12 """
12 """
13 # This program is free software: you can redistribute it and/or modify
13 # This program is free software: you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation, either version 3 of the License, or
15 # the Free Software Foundation, either version 3 of the License, or
16 # (at your option) any later version.
16 # (at your option) any later version.
17 #
17 #
18 # This program is distributed in the hope that it will be useful,
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
21 # GNU General Public License for more details.
22 #
22 #
23 # You should have received a copy of the GNU General Public License
23 # You should have received a copy of the GNU General Public License
24 # along with this program. If not, see <http://www.gnu.org/licenses/>.
24 # along with this program. If not, see <http://www.gnu.org/licenses/>.
25
25
26 import os
26 import os
27 import re
27 import re
28 import logging
28 import logging
29 import datetime
29 import datetime
30 import traceback
30 import traceback
31 import paste
31 import paste
32 import beaker
32 import beaker
33 import tarfile
33 import tarfile
34 import shutil
34 import shutil
35 import decorator
35 import decorator
36 import warnings
36 import warnings
37 from os.path import abspath
37 from os.path import abspath
38 from os.path import dirname as dn, join as jn
38 from os.path import dirname as dn, join as jn
39
39
40 from paste.script.command import Command, BadCommand
40 from paste.script.command import Command, BadCommand
41
41
42 from mercurial import ui, config
42 from mercurial import ui, config
43
43
44 from webhelpers.text import collapse, remove_formatting, strip_tags
44 from webhelpers.text import collapse, remove_formatting, strip_tags
45
45
46 from rhodecode.lib.vcs import get_backend
46 from rhodecode.lib.vcs import get_backend
47 from rhodecode.lib.vcs.backends.base import BaseChangeset
47 from rhodecode.lib.vcs.backends.base import BaseChangeset
48 from rhodecode.lib.vcs.utils.lazy import LazyProperty
48 from rhodecode.lib.vcs.utils.lazy import LazyProperty
49 from rhodecode.lib.vcs.utils.helpers import get_scm
49 from rhodecode.lib.vcs.utils.helpers import get_scm
50 from rhodecode.lib.vcs.exceptions import VCSError
50 from rhodecode.lib.vcs.exceptions import VCSError
51
51
52 from rhodecode.lib.caching_query import FromCache
52 from rhodecode.lib.caching_query import FromCache
53
53
54 from rhodecode.model import meta
54 from rhodecode.model import meta
55 from rhodecode.model.db import Repository, User, RhodeCodeUi, \
55 from rhodecode.model.db import Repository, User, RhodeCodeUi, \
56 UserLog, RepoGroup, RhodeCodeSetting, CacheInvalidation
56 UserLog, RepoGroup, RhodeCodeSetting, CacheInvalidation
57 from rhodecode.model.meta import Session
57 from rhodecode.model.meta import Session
58 from rhodecode.model.repos_group import ReposGroupModel
58 from rhodecode.model.repos_group import ReposGroupModel
59 from rhodecode.lib.utils2 import safe_str, safe_unicode
59 from rhodecode.lib.utils2 import safe_str, safe_unicode
60 from rhodecode.lib.vcs.utils.fakemod import create_module
60 from rhodecode.lib.vcs.utils.fakemod import create_module
61
61
62 log = logging.getLogger(__name__)
62 log = logging.getLogger(__name__)
63
63
64 REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')
64 REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')
65
65
66
66
67 def recursive_replace(str_, replace=' '):
67 def recursive_replace(str_, replace=' '):
68 """
68 """
69 Recursively replace repeated occurrences of the given character with a single instance
69 Recursively replace repeated occurrences of the given character with a single instance
70
70
71 :param str_: given string
71 :param str_: given string
72 :param replace: char to find and replace multiple instances
72 :param replace: char to find and replace multiple instances
73
73
74 Examples::
74 Examples::
75 >>> recursive_replace("Mighty---Mighty-Bo--sstones",'-')
75 >>> recursive_replace("Mighty---Mighty-Bo--sstones",'-')
76 'Mighty-Mighty-Bo-sstones'
76 'Mighty-Mighty-Bo-sstones'
77 """
77 """
78
78
79 if str_.find(replace * 2) == -1:
79 if str_.find(replace * 2) == -1:
80 return str_
80 return str_
81 else:
81 else:
82 str_ = str_.replace(replace * 2, replace)
82 str_ = str_.replace(replace * 2, replace)
83 return recursive_replace(str_, replace)
83 return recursive_replace(str_, replace)
84
84
85
85
86 def repo_name_slug(value):
86 def repo_name_slug(value):
87 """
87 """
88 Return a slug of the repository name.
88 Return a slug of the repository name.
89 This function is called on each creation/modification
89 This function is called on each creation/modification
90 of a repository to prevent bad names.
90 of a repository to prevent bad names.
91 """
91 """
92
92
93 slug = remove_formatting(value)
93 slug = remove_formatting(value)
94 slug = strip_tags(slug)
94 slug = strip_tags(slug)
95
95
96 for c in """`?=[]\;'"<>,/~!@#$%^&*()+{}|: """:
96 for c in """`?=[]\;'"<>,/~!@#$%^&*()+{}|: """:
97 slug = slug.replace(c, '-')
97 slug = slug.replace(c, '-')
98 slug = recursive_replace(slug, '-')
98 slug = recursive_replace(slug, '-')
99 slug = collapse(slug, '-')
99 slug = collapse(slug, '-')
100 return slug
100 return slug
101
101
102
102
103 def get_repo_slug(request):
103 def get_repo_slug(request):
104 _repo = request.environ['pylons.routes_dict'].get('repo_name')
104 _repo = request.environ['pylons.routes_dict'].get('repo_name')
105 if _repo:
105 if _repo:
106 _repo = _repo.rstrip('/')
106 _repo = _repo.rstrip('/')
107 return _repo
107 return _repo
108
108
109
109
110 def get_repos_group_slug(request):
110 def get_repos_group_slug(request):
111 _group = request.environ['pylons.routes_dict'].get('group_name')
111 _group = request.environ['pylons.routes_dict'].get('group_name')
112 if _group:
112 if _group:
113 _group = _group.rstrip('/')
113 _group = _group.rstrip('/')
114 return _group
114 return _group
115
115
116
116
117 def action_logger(user, action, repo, ipaddr='', sa=None, commit=False):
117 def action_logger(user, action, repo, ipaddr='', sa=None, commit=False):
118 """
118 """
119 Action logger for various actions made by users
119 Action logger for various actions made by users
120
120
121 :param user: user that made this action, can be a unique username string or
121 :param user: user that made this action, can be a unique username string or
122 object containing user_id attribute
122 object containing user_id attribute
123 :param action: action to log, should be one of the predefined unique actions for
123 :param action: action to log, should be one of the predefined unique actions for
124 easy translations
124 easy translations
125 :param repo: string name of repository or object containing repo_id,
125 :param repo: string name of repository or object containing repo_id,
126 that action was made on
126 that action was made on
127 :param ipaddr: optional ip address from which the action was made
127 :param ipaddr: optional ip address from which the action was made
128 :param sa: optional sqlalchemy session
128 :param sa: optional sqlalchemy session
129
129
130 """
130 """
131
131
132 if not sa:
132 if not sa:
133 sa = meta.Session()
133 sa = meta.Session()
134
134
135 try:
135 try:
136 if hasattr(user, 'user_id'):
136 if hasattr(user, 'user_id'):
137 user_obj = User.get(user.user_id)
137 user_obj = User.get(user.user_id)
138 elif isinstance(user, basestring):
138 elif isinstance(user, basestring):
139 user_obj = User.get_by_username(user)
139 user_obj = User.get_by_username(user)
140 else:
140 else:
141 raise Exception('You have to provide a user object or a username')
141 raise Exception('You have to provide a user object or a username')
142
142
143 if hasattr(repo, 'repo_id'):
143 if hasattr(repo, 'repo_id'):
144 repo_obj = Repository.get(repo.repo_id)
144 repo_obj = Repository.get(repo.repo_id)
145 repo_name = repo_obj.repo_name
145 repo_name = repo_obj.repo_name
146 elif isinstance(repo, basestring):
146 elif isinstance(repo, basestring):
147 repo_name = repo.lstrip('/')
147 repo_name = repo.lstrip('/')
148 repo_obj = Repository.get_by_repo_name(repo_name)
148 repo_obj = Repository.get_by_repo_name(repo_name)
149 else:
149 else:
150 repo_obj = None
150 repo_obj = None
151 repo_name = ''
151 repo_name = ''
152
152
153 user_log = UserLog()
153 user_log = UserLog()
154 user_log.user_id = user_obj.user_id
154 user_log.user_id = user_obj.user_id
155 user_log.username = user_obj.username
155 user_log.username = user_obj.username
156 user_log.action = safe_unicode(action)
156 user_log.action = safe_unicode(action)
157
157
158 user_log.repository = repo_obj
158 user_log.repository = repo_obj
159 user_log.repository_name = repo_name
159 user_log.repository_name = repo_name
160
160
161 user_log.action_date = datetime.datetime.now()
161 user_log.action_date = datetime.datetime.now()
162 user_log.user_ip = ipaddr
162 user_log.user_ip = ipaddr
163 sa.add(user_log)
163 sa.add(user_log)
164
164
165 log.info('Logging action:%s on %s by user:%s ip:%s' %
165 log.info('Logging action:%s on %s by user:%s ip:%s' %
166 (action, safe_unicode(repo), user_obj, ipaddr))
166 (action, safe_unicode(repo), user_obj, ipaddr))
167 if commit:
167 if commit:
168 sa.commit()
168 sa.commit()
169 except Exception:
169 except Exception:
170 log.error(traceback.format_exc())
170 log.error(traceback.format_exc())
171 raise
171 raise
172
172
173
173
174 def get_repos(path, recursive=False, skip_removed_repos=True):
174 def get_filesystem_repos(path, recursive=False, skip_removed_repos=True):
175 """
175 """
176 Scans the given path for repos and returns (name, (type, path)) tuples
176 Scans the given path for repos and returns (name, (type, path)) tuples
177
177
178 :param path: path to scan for repositories
178 :param path: path to scan for repositories
179 :param recursive: recursive search and return names with subdirs in front
179 :param recursive: recursive search and return names with subdirs in front
180 """
180 """
181
181
182 # remove ending slash for better results
182 # remove ending slash for better results
183 path = path.rstrip(os.sep)
183 path = path.rstrip(os.sep)
184 log.debug('now scanning in %s location recursive:%s...' % (path, recursive))
184 log.debug('now scanning in %s location recursive:%s...' % (path, recursive))
185
185
186 def _get_repos(p):
186 def _get_repos(p):
187 if not os.access(p, os.W_OK):
187 if not os.access(p, os.W_OK):
188 log.warn('ignoring repo path without write access: %s', p)
188 log.warn('ignoring repo path without write access: %s', p)
189 return
189 return
190 for dirpath in os.listdir(p):
190 for dirpath in os.listdir(p):
191 if os.path.isfile(os.path.join(p, dirpath)):
191 if os.path.isfile(os.path.join(p, dirpath)):
192 continue
192 continue
193 cur_path = os.path.join(p, dirpath)
193 cur_path = os.path.join(p, dirpath)
194
194
195 # skip removed repos
195 # skip removed repos
196 if skip_removed_repos and REMOVED_REPO_PAT.match(dirpath):
196 if skip_removed_repos and REMOVED_REPO_PAT.match(dirpath):
197 continue
197 continue
198
198
199 #skip .<something> dirs
199 #skip .<something> dirs
200 if dirpath.startswith('.'):
200 if dirpath.startswith('.'):
201 continue
201 continue
202
202
203 try:
203 try:
204 scm_info = get_scm(cur_path)
204 scm_info = get_scm(cur_path)
205 yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
205 yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
206 except VCSError:
206 except VCSError:
207 if not recursive:
207 if not recursive:
208 continue
208 continue
209 #check if this dir contains other repos for recursive scan
209 #check if this dir contains other repos for recursive scan
210 rec_path = os.path.join(p, dirpath)
210 rec_path = os.path.join(p, dirpath)
211 if os.path.isdir(rec_path):
211 if os.path.isdir(rec_path):
212 for inner_scm in _get_repos(rec_path):
212 for inner_scm in _get_repos(rec_path):
213 yield inner_scm
213 yield inner_scm
214
214
215 return _get_repos(path)
215 return _get_repos(path)
216
216
217 #alias for backward compat
218 get_filesystem_repos = get_repos
219
220
217
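The hunk above drops the backward-compatibility alias, so get_repos is no longer exported. A minimal, hypothetical shim (an assumption, not part of this change) for third-party code that adopted the short-lived get_repos name; code that always used get_filesystem_repos needs no change, since that name was available both before (as the alias) and after (as the function) this commit:

    # Hypothetical downstream shim, not part of this commit: prefer the
    # short-lived name if present, otherwise fall back to the restored one.
    try:
        from rhodecode.lib.utils import get_repos
    except ImportError:
        from rhodecode.lib.utils import get_filesystem_repos as get_repos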
221 def is_valid_repo(repo_name, base_path, scm=None):
218 def is_valid_repo(repo_name, base_path, scm=None):
222 """
219 """
223 Returns True if the given path is a valid repository, False otherwise.
220 Returns True if the given path is a valid repository, False otherwise.
224 If the scm param is given, also check whether the repository's scm type
221 If the scm param is given, also check whether the repository's scm type
225 matches the given scm parameter
222 matches the given scm parameter
226
223
227 :param repo_name:
224 :param repo_name:
228 :param base_path:
225 :param base_path:
229 :param scm:
226 :param scm:
230
227
231 :return True: if given path is a valid repository
228 :return True: if given path is a valid repository
232 """
229 """
233 full_path = os.path.join(safe_str(base_path), safe_str(repo_name))
230 full_path = os.path.join(safe_str(base_path), safe_str(repo_name))
234
231
235 try:
232 try:
236 scm_ = get_scm(full_path)
233 scm_ = get_scm(full_path)
237 if scm:
234 if scm:
238 return scm_[0] == scm
235 return scm_[0] == scm
239 return True
236 return True
240 except VCSError:
237 except VCSError:
241 return False
238 return False
242
239
243
240
244 def is_valid_repos_group(repos_group_name, base_path, skip_path_check=False):
241 def is_valid_repos_group(repos_group_name, base_path, skip_path_check=False):
245 """
242 """
246 Returns True if the given path is a repos group, False otherwise
243 Returns True if the given path is a repos group, False otherwise
247
244
248 :param repos_group_name:
245 :param repos_group_name:
249 :param base_path:
246 :param base_path:
250 """
247 """
251 full_path = os.path.join(safe_str(base_path), safe_str(repos_group_name))
248 full_path = os.path.join(safe_str(base_path), safe_str(repos_group_name))
252
249
253 # check if it's not a repo
250 # check if it's not a repo
254 if is_valid_repo(repos_group_name, base_path):
251 if is_valid_repo(repos_group_name, base_path):
255 return False
252 return False
256
253
257 try:
254 try:
258 # we need to check bare git repos at higher level
255 # we need to check bare git repos at higher level
259 # since we might match branches/hooks/info/objects or possible
256 # since we might match branches/hooks/info/objects or possible
260 # other things inside bare git repo
257 # other things inside bare git repo
261 get_scm(os.path.dirname(full_path))
258 get_scm(os.path.dirname(full_path))
262 return False
259 return False
263 except VCSError:
260 except VCSError:
264 pass
261 pass
265
262
266 # check if it's a valid path
263 # check if it's a valid path
267 if skip_path_check or os.path.isdir(full_path):
264 if skip_path_check or os.path.isdir(full_path):
268 return True
265 return True
269
266
270 return False
267 return False
271
268
272
269
273 def ask_ok(prompt, retries=4, complaint='Yes or no please!'):
270 def ask_ok(prompt, retries=4, complaint='Yes or no please!'):
274 while True:
271 while True:
275 ok = raw_input(prompt)
272 ok = raw_input(prompt)
276 if ok in ('y', 'ye', 'yes'):
273 if ok in ('y', 'ye', 'yes'):
277 return True
274 return True
278 if ok in ('n', 'no', 'nop', 'nope'):
275 if ok in ('n', 'no', 'nop', 'nope'):
279 return False
276 return False
280 retries = retries - 1
277 retries = retries - 1
281 if retries < 0:
278 if retries < 0:
282 raise IOError
279 raise IOError
283 print complaint
280 print complaint
284
281
285 #propagated from mercurial documentation
282 #propagated from mercurial documentation
286 ui_sections = ['alias', 'auth',
283 ui_sections = ['alias', 'auth',
287 'decode/encode', 'defaults',
284 'decode/encode', 'defaults',
288 'diff', 'email',
285 'diff', 'email',
289 'extensions', 'format',
286 'extensions', 'format',
290 'merge-patterns', 'merge-tools',
287 'merge-patterns', 'merge-tools',
291 'hooks', 'http_proxy',
288 'hooks', 'http_proxy',
292 'smtp', 'patch',
289 'smtp', 'patch',
293 'paths', 'profiling',
290 'paths', 'profiling',
294 'server', 'trusted',
291 'server', 'trusted',
295 'ui', 'web', ]
292 'ui', 'web', ]
296
293
297
294
298 def make_ui(read_from='file', path=None, checkpaths=True, clear_session=True):
295 def make_ui(read_from='file', path=None, checkpaths=True, clear_session=True):
299 """
296 """
300 A function that will read python rc files or database
297 A function that will read python rc files or database
301 and make a mercurial ui object from the read options
298 and make a mercurial ui object from the read options
302
299
303 :param path: path to mercurial config file
300 :param path: path to mercurial config file
304 :param checkpaths: check the path
301 :param checkpaths: check the path
305 :param read_from: read from 'file' or 'db'
302 :param read_from: read from 'file' or 'db'
306 """
303 """
307
304
308 baseui = ui.ui()
305 baseui = ui.ui()
309
306
310 # clean the baseui object
307 # clean the baseui object
311 baseui._ocfg = config.config()
308 baseui._ocfg = config.config()
312 baseui._ucfg = config.config()
309 baseui._ucfg = config.config()
313 baseui._tcfg = config.config()
310 baseui._tcfg = config.config()
314
311
315 if read_from == 'file':
312 if read_from == 'file':
316 if not os.path.isfile(path):
313 if not os.path.isfile(path):
317 log.debug('hgrc file is not present at %s, skipping...' % path)
314 log.debug('hgrc file is not present at %s, skipping...' % path)
318 return False
315 return False
319 log.debug('reading hgrc from %s' % path)
316 log.debug('reading hgrc from %s' % path)
320 cfg = config.config()
317 cfg = config.config()
321 cfg.read(path)
318 cfg.read(path)
322 for section in ui_sections:
319 for section in ui_sections:
323 for k, v in cfg.items(section):
320 for k, v in cfg.items(section):
324 log.debug('settings ui from file: [%s] %s=%s' % (section, k, v))
321 log.debug('settings ui from file: [%s] %s=%s' % (section, k, v))
325 baseui.setconfig(safe_str(section), safe_str(k), safe_str(v))
322 baseui.setconfig(safe_str(section), safe_str(k), safe_str(v))
326
323
327 elif read_from == 'db':
324 elif read_from == 'db':
328 sa = meta.Session()
325 sa = meta.Session()
329 ret = sa.query(RhodeCodeUi)\
326 ret = sa.query(RhodeCodeUi)\
330 .options(FromCache("sql_cache_short", "get_hg_ui_settings"))\
327 .options(FromCache("sql_cache_short", "get_hg_ui_settings"))\
331 .all()
328 .all()
332
329
333 hg_ui = ret
330 hg_ui = ret
334 for ui_ in hg_ui:
331 for ui_ in hg_ui:
335 if ui_.ui_active:
332 if ui_.ui_active:
336 log.debug('settings ui from db: [%s] %s=%s', ui_.ui_section,
333 log.debug('settings ui from db: [%s] %s=%s', ui_.ui_section,
337 ui_.ui_key, ui_.ui_value)
334 ui_.ui_key, ui_.ui_value)
338 baseui.setconfig(safe_str(ui_.ui_section), safe_str(ui_.ui_key),
335 baseui.setconfig(safe_str(ui_.ui_section), safe_str(ui_.ui_key),
339 safe_str(ui_.ui_value))
336 safe_str(ui_.ui_value))
340 if ui_.ui_key == 'push_ssl':
337 if ui_.ui_key == 'push_ssl':
341 # force set push_ssl requirement to False, rhodecode
338 # force set push_ssl requirement to False, rhodecode
342 # handles that
339 # handles that
343 baseui.setconfig(safe_str(ui_.ui_section), safe_str(ui_.ui_key),
340 baseui.setconfig(safe_str(ui_.ui_section), safe_str(ui_.ui_key),
344 False)
341 False)
345 if clear_session:
342 if clear_session:
346 meta.Session.remove()
343 meta.Session.remove()
347 return baseui
344 return baseui
348
345
349
346
350 def set_rhodecode_config(config):
347 def set_rhodecode_config(config):
351 """
348 """
352 Updates pylons config with new settings from database
349 Updates pylons config with new settings from database
353
350
354 :param config:
351 :param config:
355 """
352 """
356 hgsettings = RhodeCodeSetting.get_app_settings()
353 hgsettings = RhodeCodeSetting.get_app_settings()
357
354
358 for k, v in hgsettings.items():
355 for k, v in hgsettings.items():
359 config[k] = v
356 config[k] = v
360
357
361
358
362 def invalidate_cache(cache_key, *args):
359 def invalidate_cache(cache_key, *args):
363 """
360 """
364 Puts cache invalidation task into db for
361 Puts cache invalidation task into db for
365 further global cache invalidation
362 further global cache invalidation
366 """
363 """
367
364
368 from rhodecode.model.scm import ScmModel
365 from rhodecode.model.scm import ScmModel
369
366
370 if cache_key.startswith('get_repo_cached_'):
367 if cache_key.startswith('get_repo_cached_'):
371 name = cache_key.split('get_repo_cached_')[-1]
368 name = cache_key.split('get_repo_cached_')[-1]
372 ScmModel().mark_for_invalidation(name)
369 ScmModel().mark_for_invalidation(name)
373
370
374
371
375 def map_groups(path):
372 def map_groups(path):
376 """
373 """
377 Given a full path to a repository, create all nested groups that this
374 Given a full path to a repository, create all nested groups that this
378 repo is inside. This function creates parent-child relationships between
375 repo is inside. This function creates parent-child relationships between
379 groups and creates default perms for all new groups.
376 groups and creates default perms for all new groups.
380
377
381 :param path: full path to repository
378 :param path: full path to repository
382 """
379 """
383 sa = meta.Session()
380 sa = meta.Session()
384 groups = path.split(Repository.url_sep())
381 groups = path.split(Repository.url_sep())
385 parent = None
382 parent = None
386 group = None
383 group = None
387
384
388 # last element is repo in nested groups structure
385 # last element is repo in nested groups structure
389 groups = groups[:-1]
386 groups = groups[:-1]
390 rgm = ReposGroupModel(sa)
387 rgm = ReposGroupModel(sa)
391 for lvl, group_name in enumerate(groups):
388 for lvl, group_name in enumerate(groups):
392 group_name = '/'.join(groups[:lvl] + [group_name])
389 group_name = '/'.join(groups[:lvl] + [group_name])
393 group = RepoGroup.get_by_group_name(group_name)
390 group = RepoGroup.get_by_group_name(group_name)
394 desc = '%s group' % group_name
391 desc = '%s group' % group_name
395
392
396 # skip folders that are now removed repos
393 # skip folders that are now removed repos
397 if REMOVED_REPO_PAT.match(group_name):
394 if REMOVED_REPO_PAT.match(group_name):
398 break
395 break
399
396
400 if group is None:
397 if group is None:
401 log.debug('creating group level: %s group_name: %s' % (lvl,
398 log.debug('creating group level: %s group_name: %s' % (lvl,
402 group_name))
399 group_name))
403 group = RepoGroup(group_name, parent)
400 group = RepoGroup(group_name, parent)
404 group.group_description = desc
401 group.group_description = desc
405 sa.add(group)
402 sa.add(group)
406 rgm._create_default_perms(group)
403 rgm._create_default_perms(group)
407 sa.flush()
404 sa.flush()
408 parent = group
405 parent = group
409 return group
406 return group
410
407
411
408
412 def repo2db_mapper(initial_repo_list, remove_obsolete=False,
409 def repo2db_mapper(initial_repo_list, remove_obsolete=False,
413 install_git_hook=False):
410 install_git_hook=False):
414 """
411 """
415 maps all repos given in initial_repo_list, non-existing repositories
412 maps all repos given in initial_repo_list, non-existing repositories
416 are created, if remove_obsolete is True it also checks for db entries
413 are created, if remove_obsolete is True it also checks for db entries
417 that are not in initial_repo_list and removes them.
414 that are not in initial_repo_list and removes them.
418
415
419 :param initial_repo_list: list of repositories found by scanning methods
416 :param initial_repo_list: list of repositories found by scanning methods
420 :param remove_obsolete: check for obsolete entries in database
417 :param remove_obsolete: check for obsolete entries in database
421 :param install_git_hook: if this is True, also check and install githook
418 :param install_git_hook: if this is True, also check and install githook
422 for a repo if missing
419 for a repo if missing
423 """
420 """
424 from rhodecode.model.repo import RepoModel
421 from rhodecode.model.repo import RepoModel
425 from rhodecode.model.scm import ScmModel
422 from rhodecode.model.scm import ScmModel
426 sa = meta.Session()
423 sa = meta.Session()
427 rm = RepoModel()
424 rm = RepoModel()
428 user = sa.query(User).filter(User.admin == True).first()
425 user = sa.query(User).filter(User.admin == True).first()
429 if user is None:
426 if user is None:
430 raise Exception('Missing administrative account!')
427 raise Exception('Missing administrative account!')
431 added = []
428 added = []
432
429
433 ##creation defaults
430 ##creation defaults
434 defs = RhodeCodeSetting.get_default_repo_settings(strip_prefix=True)
431 defs = RhodeCodeSetting.get_default_repo_settings(strip_prefix=True)
435 enable_statistics = defs.get('repo_enable_statistics')
432 enable_statistics = defs.get('repo_enable_statistics')
436 enable_locking = defs.get('repo_enable_locking')
433 enable_locking = defs.get('repo_enable_locking')
437 enable_downloads = defs.get('repo_enable_downloads')
434 enable_downloads = defs.get('repo_enable_downloads')
438 private = defs.get('repo_private')
435 private = defs.get('repo_private')
439
436
440 for name, repo in initial_repo_list.items():
437 for name, repo in initial_repo_list.items():
441 group = map_groups(name)
438 group = map_groups(name)
442 db_repo = rm.get_by_repo_name(name)
439 db_repo = rm.get_by_repo_name(name)
443 # found repo that is on filesystem not in RhodeCode database
440 # found repo that is on filesystem not in RhodeCode database
444 if not db_repo:
441 if not db_repo:
445 log.info('repository %s not found, creating now' % name)
442 log.info('repository %s not found, creating now' % name)
446 added.append(name)
443 added.append(name)
447 desc = (repo.description
444 desc = (repo.description
448 if repo.description != 'unknown'
445 if repo.description != 'unknown'
449 else '%s repository' % name)
446 else '%s repository' % name)
450
447
451 new_repo = rm.create_repo(
448 new_repo = rm.create_repo(
452 repo_name=name,
449 repo_name=name,
453 repo_type=repo.alias,
450 repo_type=repo.alias,
454 description=desc,
451 description=desc,
455 repos_group=getattr(group, 'group_id', None),
452 repos_group=getattr(group, 'group_id', None),
456 owner=user,
453 owner=user,
457 just_db=True,
454 just_db=True,
458 enable_locking=enable_locking,
455 enable_locking=enable_locking,
459 enable_downloads=enable_downloads,
456 enable_downloads=enable_downloads,
460 enable_statistics=enable_statistics,
457 enable_statistics=enable_statistics,
461 private=private
458 private=private
462 )
459 )
463 # we added that repo just now, and make sure it has githook
460 # we added that repo just now, and make sure it has githook
464 # installed
461 # installed
465 if new_repo.repo_type == 'git':
462 if new_repo.repo_type == 'git':
466 ScmModel().install_git_hook(new_repo.scm_instance)
463 ScmModel().install_git_hook(new_repo.scm_instance)
467 new_repo.update_changeset_cache()
464 new_repo.update_changeset_cache()
468 elif install_git_hook:
465 elif install_git_hook:
469 if db_repo.repo_type == 'git':
466 if db_repo.repo_type == 'git':
470 ScmModel().install_git_hook(db_repo.scm_instance)
467 ScmModel().install_git_hook(db_repo.scm_instance)
471 # during starting install all cache keys for all repositories in the
468 # during starting install all cache keys for all repositories in the
472 # system, this will register all repos and multiple instances
469 # system, this will register all repos and multiple instances
473 cache_key = CacheInvalidation._get_cache_key(name)
470 cache_key = CacheInvalidation._get_cache_key(name)
474 log.debug("Creating invalidation cache key for %s: %s", name, cache_key)
471 log.debug("Creating invalidation cache key for %s: %s", name, cache_key)
475 CacheInvalidation.invalidate(name)
472 CacheInvalidation.invalidate(name)
476
473
477 sa.commit()
474 sa.commit()
478 removed = []
475 removed = []
479 if remove_obsolete:
476 if remove_obsolete:
480 # remove from database those repositories that are not in the filesystem
477 # remove from database those repositories that are not in the filesystem
481 for repo in sa.query(Repository).all():
478 for repo in sa.query(Repository).all():
482 if repo.repo_name not in initial_repo_list.keys():
479 if repo.repo_name not in initial_repo_list.keys():
483 log.debug("Removing non-existing repository found in db `%s`" %
480 log.debug("Removing non-existing repository found in db `%s`" %
484 repo.repo_name)
481 repo.repo_name)
485 try:
482 try:
486 sa.delete(repo)
483 sa.delete(repo)
487 sa.commit()
484 sa.commit()
488 removed.append(repo.repo_name)
485 removed.append(repo.repo_name)
489 except Exception:
486 except Exception:
490 #don't hold further removals on error
487 #don't hold further removals on error
491 log.error(traceback.format_exc())
488 log.error(traceback.format_exc())
492 sa.rollback()
489 sa.rollback()
493 return added, removed
490 return added, removed
494
491
495
492
496 # set cache regions for beaker so celery can utilise it
493 # set cache regions for beaker so celery can utilise it
497 def add_cache(settings):
494 def add_cache(settings):
498 cache_settings = {'regions': None}
495 cache_settings = {'regions': None}
499 for key in settings.keys():
496 for key in settings.keys():
500 for prefix in ['beaker.cache.', 'cache.']:
497 for prefix in ['beaker.cache.', 'cache.']:
501 if key.startswith(prefix):
498 if key.startswith(prefix):
502 name = key.split(prefix)[1].strip()
499 name = key.split(prefix)[1].strip()
503 cache_settings[name] = settings[key].strip()
500 cache_settings[name] = settings[key].strip()
504 if cache_settings['regions']:
501 if cache_settings['regions']:
505 for region in cache_settings['regions'].split(','):
502 for region in cache_settings['regions'].split(','):
506 region = region.strip()
503 region = region.strip()
507 region_settings = {}
504 region_settings = {}
508 for key, value in cache_settings.items():
505 for key, value in cache_settings.items():
509 if key.startswith(region):
506 if key.startswith(region):
510 region_settings[key.split('.')[1]] = value
507 region_settings[key.split('.')[1]] = value
511 region_settings['expire'] = int(region_settings.get('expire',
508 region_settings['expire'] = int(region_settings.get('expire',
512 60))
509 60))
513 region_settings.setdefault('lock_dir',
510 region_settings.setdefault('lock_dir',
514 cache_settings.get('lock_dir'))
511 cache_settings.get('lock_dir'))
515 region_settings.setdefault('data_dir',
512 region_settings.setdefault('data_dir',
516 cache_settings.get('data_dir'))
513 cache_settings.get('data_dir'))
517
514
518 if 'type' not in region_settings:
515 if 'type' not in region_settings:
519 region_settings['type'] = cache_settings.get('type',
516 region_settings['type'] = cache_settings.get('type',
520 'memory')
517 'memory')
521 beaker.cache.cache_regions[region] = region_settings
518 beaker.cache.cache_regions[region] = region_settings
522
519
523
520
524 def load_rcextensions(root_path):
521 def load_rcextensions(root_path):
525 import rhodecode
522 import rhodecode
526 from rhodecode.config import conf
523 from rhodecode.config import conf
527
524
528 path = os.path.join(root_path, 'rcextensions', '__init__.py')
525 path = os.path.join(root_path, 'rcextensions', '__init__.py')
529 if os.path.isfile(path):
526 if os.path.isfile(path):
530 rcext = create_module('rc', path)
527 rcext = create_module('rc', path)
531 EXT = rhodecode.EXTENSIONS = rcext
528 EXT = rhodecode.EXTENSIONS = rcext
532 log.debug('Found rcextensions now loading %s...' % rcext)
529 log.debug('Found rcextensions now loading %s...' % rcext)
533
530
534 # Additional mappings that are not present in the pygments lexers
531 # Additional mappings that are not present in the pygments lexers
535 conf.LANGUAGES_EXTENSIONS_MAP.update(getattr(EXT, 'EXTRA_MAPPINGS', {}))
532 conf.LANGUAGES_EXTENSIONS_MAP.update(getattr(EXT, 'EXTRA_MAPPINGS', {}))
536
533
537 #OVERRIDE OUR EXTENSIONS FROM RC-EXTENSIONS (if present)
534 #OVERRIDE OUR EXTENSIONS FROM RC-EXTENSIONS (if present)
538
535
539 if getattr(EXT, 'INDEX_EXTENSIONS', []) != []:
536 if getattr(EXT, 'INDEX_EXTENSIONS', []) != []:
540 log.debug('settings custom INDEX_EXTENSIONS')
537 log.debug('settings custom INDEX_EXTENSIONS')
541 conf.INDEX_EXTENSIONS = getattr(EXT, 'INDEX_EXTENSIONS', [])
538 conf.INDEX_EXTENSIONS = getattr(EXT, 'INDEX_EXTENSIONS', [])
542
539
543 #ADDITIONAL MAPPINGS
540 #ADDITIONAL MAPPINGS
544 log.debug('adding extra into INDEX_EXTENSIONS')
541 log.debug('adding extra into INDEX_EXTENSIONS')
545 conf.INDEX_EXTENSIONS.extend(getattr(EXT, 'EXTRA_INDEX_EXTENSIONS', []))
542 conf.INDEX_EXTENSIONS.extend(getattr(EXT, 'EXTRA_INDEX_EXTENSIONS', []))
546
543
547 # auto check if the module is not missing any data, set to default if is
544 # auto check if the module is not missing any data, set to default if is
548 # this will help autoupdate new feature of rcext module
545 # this will help autoupdate new feature of rcext module
549 from rhodecode.config import rcextensions
546 from rhodecode.config import rcextensions
550 for k in dir(rcextensions):
547 for k in dir(rcextensions):
551 if not k.startswith('_') and not hasattr(EXT, k):
548 if not k.startswith('_') and not hasattr(EXT, k):
552 setattr(EXT, k, getattr(rcextensions, k))
549 setattr(EXT, k, getattr(rcextensions, k))
553
550
554
551
555 def get_custom_lexer(extension):
552 def get_custom_lexer(extension):
556 """
553 """
557 returns a custom lexer if it's defined in rcextensions module, or None
554 returns a custom lexer if it's defined in rcextensions module, or None
558 if there's no custom lexer defined
555 if there's no custom lexer defined
559 """
556 """
560 import rhodecode
557 import rhodecode
561 from pygments import lexers
558 from pygments import lexers
562 #check if we didn't define this extension as other lexer
559 #check if we didn't define this extension as other lexer
563 if rhodecode.EXTENSIONS and extension in rhodecode.EXTENSIONS.EXTRA_LEXERS:
560 if rhodecode.EXTENSIONS and extension in rhodecode.EXTENSIONS.EXTRA_LEXERS:
564 _lexer_name = rhodecode.EXTENSIONS.EXTRA_LEXERS[extension]
561 _lexer_name = rhodecode.EXTENSIONS.EXTRA_LEXERS[extension]
565 return lexers.get_lexer_by_name(_lexer_name)
562 return lexers.get_lexer_by_name(_lexer_name)
566
563
567
564
568 #==============================================================================
565 #==============================================================================
569 # TEST FUNCTIONS AND CREATORS
566 # TEST FUNCTIONS AND CREATORS
570 #==============================================================================
567 #==============================================================================
571 def create_test_index(repo_location, config, full_index):
568 def create_test_index(repo_location, config, full_index):
572 """
569 """
573 Makes default test index
570 Makes default test index
574
571
575 :param config: test config
572 :param config: test config
576 :param full_index:
573 :param full_index:
577 """
574 """
578
575
579 from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon
576 from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon
580 from rhodecode.lib.pidlock import DaemonLock, LockHeld
577 from rhodecode.lib.pidlock import DaemonLock, LockHeld
581
578
582 repo_location = repo_location
579 repo_location = repo_location
583
580
584 index_location = os.path.join(config['app_conf']['index_dir'])
581 index_location = os.path.join(config['app_conf']['index_dir'])
585 if not os.path.exists(index_location):
582 if not os.path.exists(index_location):
586 os.makedirs(index_location)
583 os.makedirs(index_location)
587
584
588 try:
585 try:
589 l = DaemonLock(file_=jn(dn(index_location), 'make_index.lock'))
586 l = DaemonLock(file_=jn(dn(index_location), 'make_index.lock'))
590 WhooshIndexingDaemon(index_location=index_location,
587 WhooshIndexingDaemon(index_location=index_location,
591 repo_location=repo_location)\
588 repo_location=repo_location)\
592 .run(full_index=full_index)
589 .run(full_index=full_index)
593 l.release()
590 l.release()
594 except LockHeld:
591 except LockHeld:
595 pass
592 pass
596
593
597
594
598 def create_test_env(repos_test_path, config):
595 def create_test_env(repos_test_path, config):
599 """
596 """
600 Makes a fresh database and
597 Makes a fresh database and
601 install test repository into tmp dir
598 install test repository into tmp dir
602 """
599 """
603 from rhodecode.lib.db_manage import DbManage
600 from rhodecode.lib.db_manage import DbManage
604 from rhodecode.tests import HG_REPO, GIT_REPO, TESTS_TMP_PATH
601 from rhodecode.tests import HG_REPO, GIT_REPO, TESTS_TMP_PATH
605
602
606 # PART ONE create db
603 # PART ONE create db
607 dbconf = config['sqlalchemy.db1.url']
604 dbconf = config['sqlalchemy.db1.url']
608 log.debug('making test db %s' % dbconf)
605 log.debug('making test db %s' % dbconf)
609
606
610 # create test dir if it doesn't exist
607 # create test dir if it doesn't exist
611 if not os.path.isdir(repos_test_path):
608 if not os.path.isdir(repos_test_path):
612 log.debug('Creating testdir %s' % repos_test_path)
609 log.debug('Creating testdir %s' % repos_test_path)
613 os.makedirs(repos_test_path)
610 os.makedirs(repos_test_path)
614
611
615 dbmanage = DbManage(log_sql=True, dbconf=dbconf, root=config['here'],
612 dbmanage = DbManage(log_sql=True, dbconf=dbconf, root=config['here'],
616 tests=True)
613 tests=True)
617 dbmanage.create_tables(override=True)
614 dbmanage.create_tables(override=True)
618 dbmanage.create_settings(dbmanage.config_prompt(repos_test_path))
615 dbmanage.create_settings(dbmanage.config_prompt(repos_test_path))
619 dbmanage.create_default_user()
616 dbmanage.create_default_user()
620 dbmanage.admin_prompt()
617 dbmanage.admin_prompt()
621 dbmanage.create_permissions()
618 dbmanage.create_permissions()
622 dbmanage.populate_default_permissions()
619 dbmanage.populate_default_permissions()
623 Session().commit()
620 Session().commit()
624 # PART TWO make test repo
621 # PART TWO make test repo
625 log.debug('making test vcs repositories')
622 log.debug('making test vcs repositories')
626
623
627 idx_path = config['app_conf']['index_dir']
624 idx_path = config['app_conf']['index_dir']
628 data_path = config['app_conf']['cache_dir']
625 data_path = config['app_conf']['cache_dir']
629
626
630 #clean index and data
627 #clean index and data
631 if idx_path and os.path.exists(idx_path):
628 if idx_path and os.path.exists(idx_path):
632 log.debug('remove %s' % idx_path)
629 log.debug('remove %s' % idx_path)
633 shutil.rmtree(idx_path)
630 shutil.rmtree(idx_path)
634
631
635 if data_path and os.path.exists(data_path):
632 if data_path and os.path.exists(data_path):
636 log.debug('remove %s' % data_path)
633 log.debug('remove %s' % data_path)
637 shutil.rmtree(data_path)
634 shutil.rmtree(data_path)
638
635
639 #CREATE DEFAULT TEST REPOS
636 #CREATE DEFAULT TEST REPOS
640 cur_dir = dn(dn(abspath(__file__)))
637 cur_dir = dn(dn(abspath(__file__)))
641 tar = tarfile.open(jn(cur_dir, 'tests', "vcs_test_hg.tar.gz"))
638 tar = tarfile.open(jn(cur_dir, 'tests', "vcs_test_hg.tar.gz"))
642 tar.extractall(jn(TESTS_TMP_PATH, HG_REPO))
639 tar.extractall(jn(TESTS_TMP_PATH, HG_REPO))
643 tar.close()
640 tar.close()
644
641
645 cur_dir = dn(dn(abspath(__file__)))
642 cur_dir = dn(dn(abspath(__file__)))
646 tar = tarfile.open(jn(cur_dir, 'tests', "vcs_test_git.tar.gz"))
643 tar = tarfile.open(jn(cur_dir, 'tests', "vcs_test_git.tar.gz"))
647 tar.extractall(jn(TESTS_TMP_PATH, GIT_REPO))
644 tar.extractall(jn(TESTS_TMP_PATH, GIT_REPO))
648 tar.close()
645 tar.close()
649
646
650 #LOAD VCS test stuff
647 #LOAD VCS test stuff
651 from rhodecode.tests.vcs import setup_package
648 from rhodecode.tests.vcs import setup_package
652 setup_package()
649 setup_package()
653
650
654
651
655 #==============================================================================
652 #==============================================================================
656 # PASTER COMMANDS
653 # PASTER COMMANDS
657 #==============================================================================
654 #==============================================================================
658 class BasePasterCommand(Command):
655 class BasePasterCommand(Command):
659 """
656 """
660 Abstract Base Class for paster commands.
657 Abstract Base Class for paster commands.
661
658
662 The celery commands are somewhat aggressive about loading
659 The celery commands are somewhat aggressive about loading
663 celery.conf, and since our module sets the `CELERY_LOADER`
660 celery.conf, and since our module sets the `CELERY_LOADER`
664 environment variable to our loader, we have to bootstrap a bit and
661 environment variable to our loader, we have to bootstrap a bit and
665 make sure we've had a chance to load the pylons config off of the
662 make sure we've had a chance to load the pylons config off of the
666 command line, otherwise everything fails.
663 command line, otherwise everything fails.
667 """
664 """
668 min_args = 1
665 min_args = 1
669 min_args_error = "Please provide a paster config file as an argument."
666 min_args_error = "Please provide a paster config file as an argument."
670 takes_config_file = 1
667 takes_config_file = 1
671 requires_config_file = True
668 requires_config_file = True
672
669
673 def notify_msg(self, msg, log=False):
670 def notify_msg(self, msg, log=False):
674 """Make a notification to user, additionally if logger is passed
671 """Make a notification to user, additionally if logger is passed
675 it logs this action using given logger
672 it logs this action using given logger
676
673
677 :param msg: message that will be printed to user
674 :param msg: message that will be printed to user
678 :param log: logging instance, to use to additionally log this message
675 :param log: logging instance, to use to additionally log this message
679
676
680 """
677 """
681 if log and isinstance(log, logging):
678 if log and isinstance(log, logging):
682 log(msg)
679 log(msg)
683
680
684 def run(self, args):
681 def run(self, args):
685 """
682 """
686 Overrides Command.run
683 Overrides Command.run
687
684
688 Checks for a config file argument and loads it.
685 Checks for a config file argument and loads it.
689 """
686 """
690 if len(args) < self.min_args:
687 if len(args) < self.min_args:
691 raise BadCommand(
688 raise BadCommand(
692 self.min_args_error % {'min_args': self.min_args,
689 self.min_args_error % {'min_args': self.min_args,
693 'actual_args': len(args)})
690 'actual_args': len(args)})
694
691
695 # Decrement because we're going to lob off the first argument.
692 # Decrement because we're going to lob off the first argument.
696 # @@ This is hacky
693 # @@ This is hacky
697 self.min_args -= 1
694 self.min_args -= 1
698 self.bootstrap_config(args[0])
695 self.bootstrap_config(args[0])
699 self.update_parser()
696 self.update_parser()
700 return super(BasePasterCommand, self).run(args[1:])
697 return super(BasePasterCommand, self).run(args[1:])
701
698
702 def update_parser(self):
699 def update_parser(self):
703 """
700 """
704 Abstract method. Allows for the class's parser to be updated
701 Abstract method. Allows for the class's parser to be updated
705 before the superclass's `run` method is called. Necessary to
702 before the superclass's `run` method is called. Necessary to
706 allow options/arguments to be passed through to the underlying
703 allow options/arguments to be passed through to the underlying
707 celery command.
704 celery command.
708 """
705 """
709 raise NotImplementedError("Abstract Method.")
706 raise NotImplementedError("Abstract Method.")
710
707
711 def bootstrap_config(self, conf):
708 def bootstrap_config(self, conf):
712 """
709 """
713 Loads the pylons configuration.
710 Loads the pylons configuration.
714 """
711 """
715 from pylons import config as pylonsconfig
712 from pylons import config as pylonsconfig
716
713
717 self.path_to_ini_file = os.path.realpath(conf)
714 self.path_to_ini_file = os.path.realpath(conf)
718 conf = paste.deploy.appconfig('config:' + self.path_to_ini_file)
715 conf = paste.deploy.appconfig('config:' + self.path_to_ini_file)
719 pylonsconfig.init_app(conf.global_conf, conf.local_conf)
716 pylonsconfig.init_app(conf.global_conf, conf.local_conf)
720
717
721 def _init_session(self):
718 def _init_session(self):
722 """
719 """
723 Inits SqlAlchemy Session
720 Inits SqlAlchemy Session
724 """
721 """
725 logging.config.fileConfig(self.path_to_ini_file)
722 logging.config.fileConfig(self.path_to_ini_file)
726 from pylons import config
723 from pylons import config
727 from rhodecode.model import init_model
724 from rhodecode.model import init_model
728 from rhodecode.lib.utils2 import engine_from_config
725 from rhodecode.lib.utils2 import engine_from_config
729
726
730 #get to remove repos !!
727 #get to remove repos !!
731 add_cache(config)
728 add_cache(config)
732 engine = engine_from_config(config, 'sqlalchemy.db1.')
729 engine = engine_from_config(config, 'sqlalchemy.db1.')
733 init_model(engine)
730 init_model(engine)
734
731
735
732
736 def check_git_version():
733 def check_git_version():
737 """
734 """
738 Checks what version of git is installed in the system, and issues a warning
735 Checks what version of git is installed in the system, and issues a warning
739 if it's too old for RhodeCode to work properly.
736 if it's too old for RhodeCode to work properly.
740 """
737 """
741 from rhodecode import BACKENDS
738 from rhodecode import BACKENDS
742 from rhodecode.lib.vcs.backends.git.repository import GitRepository
739 from rhodecode.lib.vcs.backends.git.repository import GitRepository
743 from distutils.version import StrictVersion
740 from distutils.version import StrictVersion
744
741
745 stdout, stderr = GitRepository._run_git_command('--version', _bare=True,
742 stdout, stderr = GitRepository._run_git_command('--version', _bare=True,
746 _safe=True)
743 _safe=True)
747
744
748 ver = (stdout.split(' ')[-1] or '').strip() or '0.0.0'
745 ver = (stdout.split(' ')[-1] or '').strip() or '0.0.0'
749 if len(ver.split('.')) > 3:
746 if len(ver.split('.')) > 3:
750 #StrictVersion needs to be only 3 element type
747 #StrictVersion needs to be only 3 element type
751 ver = '.'.join(ver.split('.')[:3])
748 ver = '.'.join(ver.split('.')[:3])
752 try:
749 try:
753 _ver = StrictVersion(ver)
750 _ver = StrictVersion(ver)
754 except Exception:
751 except Exception:
755 _ver = StrictVersion('0.0.0')
752 _ver = StrictVersion('0.0.0')
756 stderr = traceback.format_exc()
753 stderr = traceback.format_exc()
757
754
758 req_ver = '1.7.4'
755 req_ver = '1.7.4'
759 to_old_git = False
756 to_old_git = False
760 if _ver < StrictVersion(req_ver):
757 if _ver < StrictVersion(req_ver):
761 to_old_git = True
758 to_old_git = True
762
759
763 if 'git' in BACKENDS:
760 if 'git' in BACKENDS:
764 log.debug('GIT version detected: %s' % stdout)
761 log.debug('GIT version detected: %s' % stdout)
765 if stderr:
762 if stderr:
766 log.warning('Unable to detect git version, original error was: %r' % stderr)
763 log.warning('Unable to detect git version, original error was: %r' % stderr)
767 elif to_old_git:
764 elif to_old_git:
768 log.warning('RhodeCode detected git version %s, which is too old '
765 log.warning('RhodeCode detected git version %s, which is too old '
769 'for the system to function properly. Make sure '
766 'for the system to function properly. Make sure '
770 'its version is at least %s' % (ver, req_ver))
767 'its version is at least %s' % (ver, req_ver))
771 return _ver
768 return _ver
772
769
773
770
774 @decorator.decorator
771 @decorator.decorator
775 def jsonify(func, *args, **kwargs):
772 def jsonify(func, *args, **kwargs):
776 """Action decorator that formats output for JSON
773 """Action decorator that formats output for JSON
777
774
778 Given a function that will return content, this decorator will turn
775 Given a function that will return content, this decorator will turn
779 the result into JSON, with a content-type of 'application/json' and
776 the result into JSON, with a content-type of 'application/json' and
780 output it.
777 output it.
781
778
782 """
779 """
783 from pylons.decorators.util import get_pylons
780 from pylons.decorators.util import get_pylons
784 from rhodecode.lib.ext_json import json
781 from rhodecode.lib.ext_json import json
785 pylons = get_pylons(args)
782 pylons = get_pylons(args)
786 pylons.response.headers['Content-Type'] = 'application/json; charset=utf-8'
783 pylons.response.headers['Content-Type'] = 'application/json; charset=utf-8'
787 data = func(*args, **kwargs)
784 data = func(*args, **kwargs)
788 if isinstance(data, (list, tuple)):
785 if isinstance(data, (list, tuple)):
789 msg = "JSON responses with Array envelopes are susceptible to " \
786 msg = "JSON responses with Array envelopes are susceptible to " \
790 "cross-site data leak attacks, see " \
787 "cross-site data leak attacks, see " \
791 "http://wiki.pylonshq.com/display/pylonsfaq/Warnings"
788 "http://wiki.pylonshq.com/display/pylonsfaq/Warnings"
792 warnings.warn(msg, Warning, 2)
789 warnings.warn(msg, Warning, 2)
793 log.warning(msg)
790 log.warning(msg)
794 log.debug("Returning JSON wrapped action output")
791 log.debug("Returning JSON wrapped action output")
795 return json.dumps(data, encoding='utf-8')
792 return json.dumps(data, encoding='utf-8')