warn when repo path is skipped because it is read-only
Mads Kiilerich
r3607:891be8b0 beta
@@ -1,794 +1,795 @@
# -*- coding: utf-8 -*-
"""
    rhodecode.lib.utils
    ~~~~~~~~~~~~~~~~~~~

    Utilities library for RhodeCode

    :created_on: Apr 18, 2010
    :author: marcink
    :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
    :license: GPLv3, see COPYING for more details.
"""
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import os
import re
import logging
import datetime
import traceback
import paste
import beaker
import tarfile
import shutil
import decorator
import warnings
from os.path import abspath
from os.path import dirname as dn, join as jn

from paste.script.command import Command, BadCommand

from mercurial import ui, config

from webhelpers.text import collapse, remove_formatting, strip_tags

from rhodecode.lib.vcs import get_backend
from rhodecode.lib.vcs.backends.base import BaseChangeset
from rhodecode.lib.vcs.utils.lazy import LazyProperty
from rhodecode.lib.vcs.utils.helpers import get_scm
from rhodecode.lib.vcs.exceptions import VCSError

from rhodecode.lib.caching_query import FromCache

from rhodecode.model import meta
from rhodecode.model.db import Repository, User, RhodeCodeUi, \
    UserLog, RepoGroup, RhodeCodeSetting, CacheInvalidation
from rhodecode.model.meta import Session
from rhodecode.model.repos_group import ReposGroupModel
from rhodecode.lib.utils2 import safe_str, safe_unicode
from rhodecode.lib.vcs.utils.fakemod import create_module

log = logging.getLogger(__name__)

REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')


def recursive_replace(str_, replace=' '):
    """
    Recursive replace of given sign to just one instance

    :param str_: given string
    :param replace: char to find and replace multiple instances

    Examples::
    >>> recursive_replace("Mighty---Mighty-Bo--sstones",'-')
    'Mighty-Mighty-Bo-sstones'
    """

    if str_.find(replace * 2) == -1:
        return str_
    else:
        str_ = str_.replace(replace * 2, replace)
        return recursive_replace(str_, replace)


def repo_name_slug(value):
    """
    Return slug of name of repository
    This function is called on each creation/modification
    of repository to prevent bad names in repo
    """

    slug = remove_formatting(value)
    slug = strip_tags(slug)

    for c in """`?=[]\;'"<>,/~!@#$%^&*()+{}|: """:
        slug = slug.replace(c, '-')
    slug = recursive_replace(slug, '-')
    slug = collapse(slug, '-')
    return slug


def get_repo_slug(request):
    _repo = request.environ['pylons.routes_dict'].get('repo_name')
    if _repo:
        _repo = _repo.rstrip('/')
    return _repo


def get_repos_group_slug(request):
    _group = request.environ['pylons.routes_dict'].get('group_name')
    if _group:
        _group = _group.rstrip('/')
    return _group


def action_logger(user, action, repo, ipaddr='', sa=None, commit=False):
    """
    Action logger for various actions made by users

    :param user: user that made this action, can be a unique username string or
        object containing user_id attribute
    :param action: action to log, should be on of predefined unique actions for
        easy translations
    :param repo: string name of repository or object containing repo_id,
        that action was made on
    :param ipaddr: optional ip address from what the action was made
    :param sa: optional sqlalchemy session

    """

    if not sa:
        sa = meta.Session()

    try:
        if hasattr(user, 'user_id'):
            user_obj = User.get(user.user_id)
        elif isinstance(user, basestring):
            user_obj = User.get_by_username(user)
        else:
            raise Exception('You have to provide a user object or a username')

        if hasattr(repo, 'repo_id'):
            repo_obj = Repository.get(repo.repo_id)
            repo_name = repo_obj.repo_name
        elif isinstance(repo, basestring):
            repo_name = repo.lstrip('/')
            repo_obj = Repository.get_by_repo_name(repo_name)
        else:
            repo_obj = None
            repo_name = ''

        user_log = UserLog()
        user_log.user_id = user_obj.user_id
        user_log.username = user_obj.username
        user_log.action = safe_unicode(action)

        user_log.repository = repo_obj
        user_log.repository_name = repo_name

        user_log.action_date = datetime.datetime.now()
        user_log.user_ip = ipaddr
        sa.add(user_log)

        log.info('Logging action:%s on %s by user:%s ip:%s' %
                 (action, safe_unicode(repo), user_obj, ipaddr))
        if commit:
            sa.commit()
    except:
        log.error(traceback.format_exc())
        raise

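Not part of the changeset: a minimal usage sketch for action_logger. The username, action key and repository name below are illustrative assumptions; both the user and the repository would have to exist in the database.

    # illustrative values only
    from rhodecode.lib.utils import action_logger
    action_logger('admin', 'pull', 'myrepo', ipaddr='127.0.0.1', commit=True)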

def get_repos(path, recursive=False, skip_removed_repos=True):
    """
    Scans given path for repos and return (name,(type,path)) tuple

    :param path: path to scan for repositories
    :param recursive: recursive search and return names with subdirs in front
    """

    # remove ending slash for better results
    path = path.rstrip(os.sep)
    log.debug('now scanning in %s location recursive:%s...' % (path, recursive))

    def _get_repos(p):
        if not os.access(p, os.W_OK):
+            log.warn('ignoring repo path without write access: %s', p)
            return
        for dirpath in os.listdir(p):
            if os.path.isfile(os.path.join(p, dirpath)):
                continue
            cur_path = os.path.join(p, dirpath)

            # skip removed repos
            if skip_removed_repos and REMOVED_REPO_PAT.match(dirpath):
                continue

            #skip .<somethin> dirs
            if dirpath.startswith('.'):
                continue

            try:
                scm_info = get_scm(cur_path)
                yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
            except VCSError:
                if not recursive:
                    continue
                #check if this dir containts other repos for recursive scan
                rec_path = os.path.join(p, dirpath)
                if os.path.isdir(rec_path):
                    for inner_scm in _get_repos(rec_path):
                        yield inner_scm

    return _get_repos(path)

#alias for backward compat
get_filesystem_repos = get_repos

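Not part of the changeset: a hedged sketch of the behaviour the added warning covers, assuming a repositories root /srv/repos that contains one subdirectory the server process cannot write to.

    from rhodecode.lib.utils import get_repos

    # before this change a read-only directory was skipped silently; with the
    # added line the scan logs "ignoring repo path without write access: ..."
    # for that directory and still yields the accessible repositories
    repos = dict(get_repos('/srv/repos'))  # path is an assumed example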

def is_valid_repo(repo_name, base_path, scm=None):
    """
    Returns True if given path is a valid repository False otherwise.
    If scm param is given also compare if given scm is the same as expected
    from scm parameter

    :param repo_name:
    :param base_path:
    :param scm:

    :return True: if given path is a valid repository
    """
    full_path = os.path.join(safe_str(base_path), safe_str(repo_name))

    try:
        scm_ = get_scm(full_path)
        if scm:
            return scm_[0] == scm
        return True
    except VCSError:
        return False


def is_valid_repos_group(repos_group_name, base_path, skip_path_check=False):
    """
    Returns True if given path is a repos group False otherwise

    :param repo_name:
    :param base_path:
    """
    full_path = os.path.join(safe_str(base_path), safe_str(repos_group_name))

    # check if it's not a repo
    if is_valid_repo(repos_group_name, base_path):
        return False

    try:
        # we need to check bare git repos at higher level
        # since we might match branches/hooks/info/objects or possible
        # other things inside bare git repo
        get_scm(os.path.dirname(full_path))
        return False
    except VCSError:
        pass

    # check if it's a valid path
    if skip_path_check or os.path.isdir(full_path):
        return True

    return False
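Not part of the changeset: a hedged sketch of how the two validity helpers are typically called; the repositories root and names are assumptions.

    from rhodecode.lib.utils import is_valid_repo, is_valid_repos_group

    base = '/srv/repos'                        # assumed repositories root
    is_valid_repo('myrepo', base)              # True if it holds a vcs repo
    is_valid_repo('myrepo', base, scm='hg')    # additionally require Mercurial
    is_valid_repos_group('projects', base)     # True for a plain directory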


def ask_ok(prompt, retries=4, complaint='Yes or no please!'):
    while True:
        ok = raw_input(prompt)
        if ok in ('y', 'ye', 'yes'):
            return True
        if ok in ('n', 'no', 'nop', 'nope'):
            return False
        retries = retries - 1
        if retries < 0:
            raise IOError
        print complaint

#propagated from mercurial documentation
ui_sections = ['alias', 'auth',
               'decode/encode', 'defaults',
               'diff', 'email',
               'extensions', 'format',
               'merge-patterns', 'merge-tools',
               'hooks', 'http_proxy',
               'smtp', 'patch',
               'paths', 'profiling',
               'server', 'trusted',
               'ui', 'web', ]


def make_ui(read_from='file', path=None, checkpaths=True, clear_session=True):
    """
    A function that will read python rc files or database
    and make an mercurial ui object from read options

    :param path: path to mercurial config file
    :param checkpaths: check the path
    :param read_from: read from 'file' or 'db'
    """

    baseui = ui.ui()

    # clean the baseui object
    baseui._ocfg = config.config()
    baseui._ucfg = config.config()
    baseui._tcfg = config.config()

    if read_from == 'file':
        if not os.path.isfile(path):
            log.debug('hgrc file is not present at %s, skipping...' % path)
            return False
        log.debug('reading hgrc from %s' % path)
        cfg = config.config()
        cfg.read(path)
        for section in ui_sections:
            for k, v in cfg.items(section):
                log.debug('settings ui from file: [%s] %s=%s' % (section, k, v))
                baseui.setconfig(safe_str(section), safe_str(k), safe_str(v))

    elif read_from == 'db':
        sa = meta.Session()
        ret = sa.query(RhodeCodeUi)\
            .options(FromCache("sql_cache_short", "get_hg_ui_settings"))\
            .all()

        hg_ui = ret
        for ui_ in hg_ui:
            if ui_.ui_active:
                log.debug('settings ui from db: [%s] %s=%s', ui_.ui_section,
                          ui_.ui_key, ui_.ui_value)
                baseui.setconfig(safe_str(ui_.ui_section), safe_str(ui_.ui_key),
                                 safe_str(ui_.ui_value))
            if ui_.ui_key == 'push_ssl':
                # force set push_ssl requirement to False, rhodecode
                # handles that
                baseui.setconfig(safe_str(ui_.ui_section), safe_str(ui_.ui_key),
                                 False)
        if clear_session:
            meta.Session.remove()
    return baseui

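Not part of the changeset: a hedged sketch of the two read modes of make_ui; the hgrc path is an assumption.

    from rhodecode.lib.utils import make_ui

    baseui = make_ui(read_from='db')              # ui settings from the database
    baseui = make_ui(read_from='file',
                     path='/etc/rhodecode/hgrc')  # returns False if the file is missing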

def set_rhodecode_config(config):
    """
    Updates pylons config with new settings from database

    :param config:
    """
    hgsettings = RhodeCodeSetting.get_app_settings()

    for k, v in hgsettings.items():
        config[k] = v


def invalidate_cache(cache_key, *args):
    """
    Puts cache invalidation task into db for
    further global cache invalidation
    """

    from rhodecode.model.scm import ScmModel

    if cache_key.startswith('get_repo_cached_'):
        name = cache_key.split('get_repo_cached_')[-1]
        ScmModel().mark_for_invalidation(name)


def map_groups(path):
    """
    Given a full path to a repository, create all nested groups that this
    repo is inside. This function creates parent-child relationships between
    groups and creates default perms for all new groups.

    :param paths: full path to repository
    """
    sa = meta.Session()
    groups = path.split(Repository.url_sep())
    parent = None
    group = None

    # last element is repo in nested groups structure
    groups = groups[:-1]
    rgm = ReposGroupModel(sa)
    for lvl, group_name in enumerate(groups):
        group_name = '/'.join(groups[:lvl] + [group_name])
        group = RepoGroup.get_by_group_name(group_name)
        desc = '%s group' % group_name

        # skip folders that are now removed repos
        if REMOVED_REPO_PAT.match(group_name):
            break

        if group is None:
            log.debug('creating group level: %s group_name: %s' % (lvl,
                                                                    group_name))
            group = RepoGroup(group_name, parent)
            group.group_description = desc
            sa.add(group)
            rgm._create_default_perms(group)
            sa.flush()
        parent = group
    return group

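Not part of the changeset: a hedged example of what map_groups creates for a nested repository path; the path is illustrative.

    from rhodecode.lib.utils import map_groups

    # for 'projects/web/site' this ensures RepoGroup rows 'projects' and
    # 'projects/web' exist (with default perms) and returns the deepest group
    group = map_groups('projects/web/site')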

def repo2db_mapper(initial_repo_list, remove_obsolete=False,
                   install_git_hook=False):
    """
    maps all repos given in initial_repo_list, non existing repositories
    are created, if remove_obsolete is True it also check for db entries
    that are not in initial_repo_list and removes them.

    :param initial_repo_list: list of repositories found by scanning methods
    :param remove_obsolete: check for obsolete entries in database
    :param install_git_hook: if this is True, also check and install githook
        for a repo if missing
    """
    from rhodecode.model.repo import RepoModel
    from rhodecode.model.scm import ScmModel
    sa = meta.Session()
    rm = RepoModel()
    user = sa.query(User).filter(User.admin == True).first()
    if user is None:
        raise Exception('Missing administrative account!')
    added = []

    ##creation defaults
    defs = RhodeCodeSetting.get_default_repo_settings(strip_prefix=True)
    enable_statistics = defs.get('repo_enable_statistics')
    enable_locking = defs.get('repo_enable_locking')
    enable_downloads = defs.get('repo_enable_downloads')
    private = defs.get('repo_private')

    for name, repo in initial_repo_list.items():
        group = map_groups(name)
        db_repo = rm.get_by_repo_name(name)
        # found repo that is on filesystem not in RhodeCode database
        if not db_repo:
            log.info('repository %s not found, creating now' % name)
            added.append(name)
            desc = (repo.description
                    if repo.description != 'unknown'
                    else '%s repository' % name)

            new_repo = rm.create_repo(
                repo_name=name,
                repo_type=repo.alias,
                description=desc,
                repos_group=getattr(group, 'group_id', None),
                owner=user,
                just_db=True,
                enable_locking=enable_locking,
                enable_downloads=enable_downloads,
                enable_statistics=enable_statistics,
                private=private
            )
            # we added that repo just now, and make sure it has githook
            # installed
            if new_repo.repo_type == 'git':
                ScmModel().install_git_hook(new_repo.scm_instance)
            new_repo.update_changeset_cache()
        elif install_git_hook:
            if db_repo.repo_type == 'git':
                ScmModel().install_git_hook(db_repo.scm_instance)
        # during starting install all cache keys for all repositories in the
        # system, this will register all repos and multiple instances
        cache_key = CacheInvalidation._get_cache_key(name)
        log.debug("Creating invalidation cache key for %s: %s", name, cache_key)
        CacheInvalidation.invalidate(name)

    sa.commit()
    removed = []
    if remove_obsolete:
        # remove from database those repositories that are not in the filesystem
        for repo in sa.query(Repository).all():
            if repo.repo_name not in initial_repo_list.keys():
                log.debug("Removing non-existing repository found in db `%s`" %
                          repo.repo_name)
                try:
                    sa.delete(repo)
                    sa.commit()
                    removed.append(repo.repo_name)
                except:
                    #don't hold further removals on error
                    log.error(traceback.format_exc())
                    sa.rollback()
    return added, removed


# set cache regions for beaker so celery can utilise it
def add_cache(settings):
    cache_settings = {'regions': None}
    for key in settings.keys():
        for prefix in ['beaker.cache.', 'cache.']:
            if key.startswith(prefix):
                name = key.split(prefix)[1].strip()
                cache_settings[name] = settings[key].strip()
    if cache_settings['regions']:
        for region in cache_settings['regions'].split(','):
            region = region.strip()
            region_settings = {}
            for key, value in cache_settings.items():
                if key.startswith(region):
                    region_settings[key.split('.')[1]] = value
            region_settings['expire'] = int(region_settings.get('expire',
                                                                60))
            region_settings.setdefault('lock_dir',
                                       cache_settings.get('lock_dir'))
            region_settings.setdefault('data_dir',
                                       cache_settings.get('data_dir'))

            if 'type' not in region_settings:
                region_settings['type'] = cache_settings.get('type',
                                                             'memory')
            beaker.cache.cache_regions[region] = region_settings

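Not part of the changeset: a hedged sketch of the beaker-style settings dict add_cache expects; the region names and paths are illustrative.

    import beaker
    from rhodecode.lib.utils import add_cache

    settings = {
        'beaker.cache.regions': 'short_term, sql_cache_short',
        'beaker.cache.short_term.expire': '300',
        'beaker.cache.sql_cache_short.expire': '30',
        'beaker.cache.type': 'memory',
        'beaker.cache.lock_dir': '/tmp/cache/lock',   # assumed paths
        'beaker.cache.data_dir': '/tmp/cache/data',
    }
    add_cache(settings)
    # beaker.cache.cache_regions now holds both configured regions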

def load_rcextensions(root_path):
    import rhodecode
    from rhodecode.config import conf

    path = os.path.join(root_path, 'rcextensions', '__init__.py')
    if os.path.isfile(path):
        rcext = create_module('rc', path)
        EXT = rhodecode.EXTENSIONS = rcext
        log.debug('Found rcextensions now loading %s...' % rcext)

        # Additional mappings that are not present in the pygments lexers
        conf.LANGUAGES_EXTENSIONS_MAP.update(getattr(EXT, 'EXTRA_MAPPINGS', {}))

        #OVERRIDE OUR EXTENSIONS FROM RC-EXTENSIONS (if present)

        if getattr(EXT, 'INDEX_EXTENSIONS', []) != []:
            log.debug('settings custom INDEX_EXTENSIONS')
            conf.INDEX_EXTENSIONS = getattr(EXT, 'INDEX_EXTENSIONS', [])

        #ADDITIONAL MAPPINGS
        log.debug('adding extra into INDEX_EXTENSIONS')
        conf.INDEX_EXTENSIONS.extend(getattr(EXT, 'EXTRA_INDEX_EXTENSIONS', []))

        # auto check if the module is not missing any data, set to default if is
        # this will help autoupdate new feature of rcext module
        from rhodecode.config import rcextensions
        for k in dir(rcextensions):
            if not k.startswith('_') and not hasattr(EXT, k):
                setattr(EXT, k, getattr(rcextensions, k))


def get_custom_lexer(extension):
    """
    returns a custom lexer if it's defined in rcextensions module, or None
    if there's no custom lexer defined
    """
    import rhodecode
    from pygments import lexers
    #check if we didn't define this extension as other lexer
    if rhodecode.EXTENSIONS and extension in rhodecode.EXTENSIONS.EXTRA_LEXERS:
        _lexer_name = rhodecode.EXTENSIONS.EXTRA_LEXERS[extension]
        return lexers.get_lexer_by_name(_lexer_name)


#==============================================================================
# TEST FUNCTIONS AND CREATORS
#==============================================================================
def create_test_index(repo_location, config, full_index):
    """
    Makes default test index

    :param config: test config
    :param full_index:
    """

    from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon
    from rhodecode.lib.pidlock import DaemonLock, LockHeld

    repo_location = repo_location

    index_location = os.path.join(config['app_conf']['index_dir'])
    if not os.path.exists(index_location):
        os.makedirs(index_location)

    try:
        l = DaemonLock(file_=jn(dn(index_location), 'make_index.lock'))
        WhooshIndexingDaemon(index_location=index_location,
                             repo_location=repo_location)\
            .run(full_index=full_index)
        l.release()
    except LockHeld:
        pass


def create_test_env(repos_test_path, config):
    """
    Makes a fresh database and
    install test repository into tmp dir
    """
    from rhodecode.lib.db_manage import DbManage
    from rhodecode.tests import HG_REPO, GIT_REPO, TESTS_TMP_PATH

    # PART ONE create db
    dbconf = config['sqlalchemy.db1.url']
    log.debug('making test db %s' % dbconf)

    # create test dir if it doesn't exist
    if not os.path.isdir(repos_test_path):
        log.debug('Creating testdir %s' % repos_test_path)
        os.makedirs(repos_test_path)

    dbmanage = DbManage(log_sql=True, dbconf=dbconf, root=config['here'],
                        tests=True)
    dbmanage.create_tables(override=True)
    dbmanage.create_settings(dbmanage.config_prompt(repos_test_path))
    dbmanage.create_default_user()
    dbmanage.admin_prompt()
    dbmanage.create_permissions()
    dbmanage.populate_default_permissions()
    Session().commit()
    # PART TWO make test repo
    log.debug('making test vcs repositories')

    idx_path = config['app_conf']['index_dir']
    data_path = config['app_conf']['cache_dir']

    #clean index and data
    if idx_path and os.path.exists(idx_path):
        log.debug('remove %s' % idx_path)
        shutil.rmtree(idx_path)

    if data_path and os.path.exists(data_path):
        log.debug('remove %s' % data_path)
        shutil.rmtree(data_path)

    #CREATE DEFAULT TEST REPOS
    cur_dir = dn(dn(abspath(__file__)))
    tar = tarfile.open(jn(cur_dir, 'tests', "vcs_test_hg.tar.gz"))
    tar.extractall(jn(TESTS_TMP_PATH, HG_REPO))
    tar.close()

    cur_dir = dn(dn(abspath(__file__)))
    tar = tarfile.open(jn(cur_dir, 'tests', "vcs_test_git.tar.gz"))
    tar.extractall(jn(TESTS_TMP_PATH, GIT_REPO))
    tar.close()

    #LOAD VCS test stuff
    from rhodecode.tests.vcs import setup_package
    setup_package()


#==============================================================================
# PASTER COMMANDS
#==============================================================================
class BasePasterCommand(Command):
    """
    Abstract Base Class for paster commands.

    The celery commands are somewhat aggressive about loading
    celery.conf, and since our module sets the `CELERY_LOADER`
    environment variable to our loader, we have to bootstrap a bit and
    make sure we've had a chance to load the pylons config off of the
    command line, otherwise everything fails.
    """
    min_args = 1
    min_args_error = "Please provide a paster config file as an argument."
    takes_config_file = 1
    requires_config_file = True

    def notify_msg(self, msg, log=False):
        """Make a notification to user, additionally if logger is passed
        it logs this action using given logger

        :param msg: message that will be printed to user
        :param log: logging instance, to use to additionally log this message

        """
        if log and isinstance(log, logging):
            log(msg)

    def run(self, args):
        """
        Overrides Command.run

        Checks for a config file argument and loads it.
        """
        if len(args) < self.min_args:
            raise BadCommand(
                self.min_args_error % {'min_args': self.min_args,
                                       'actual_args': len(args)})

        # Decrement because we're going to lob off the first argument.
        # @@ This is hacky
        self.min_args -= 1
        self.bootstrap_config(args[0])
        self.update_parser()
        return super(BasePasterCommand, self).run(args[1:])

    def update_parser(self):
        """
        Abstract method. Allows for the class's parser to be updated
        before the superclass's `run` method is called. Necessary to
        allow options/arguments to be passed through to the underlying
        celery command.
        """
        raise NotImplementedError("Abstract Method.")

    def bootstrap_config(self, conf):
        """
        Loads the pylons configuration.
        """
        from pylons import config as pylonsconfig

        self.path_to_ini_file = os.path.realpath(conf)
        conf = paste.deploy.appconfig('config:' + self.path_to_ini_file)
        pylonsconfig.init_app(conf.global_conf, conf.local_conf)

    def _init_session(self):
        """
        Inits SqlAlchemy Session
        """
        logging.config.fileConfig(self.path_to_ini_file)
        from pylons import config
        from rhodecode.model import init_model
        from rhodecode.lib.utils2 import engine_from_config

        #get to remove repos !!
        add_cache(config)
        engine = engine_from_config(config, 'sqlalchemy.db1.')
        init_model(engine)

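Not part of the changeset: a hedged sketch of how a concrete paster command would subclass this base; the command name and behaviour are hypothetical.

    from paste.script.command import Command
    from rhodecode.lib.utils import BasePasterCommand

    class ExampleCommand(BasePasterCommand):       # hypothetical command
        summary = "Example maintenance command"
        group_name = "RhodeCode"
        parser = Command.standard_parser(verbose=True)

        def update_parser(self):
            pass                                   # no extra CLI options here

        def command(self):
            self._init_session()                   # pylons config + SQLAlchemy
            self.notify_msg('example command finished')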

def check_git_version():
    """
    Checks what version of git is installed in system, and issues a warning
    if it's too old for RhodeCode to properly work.
    """
    from rhodecode import BACKENDS
    from rhodecode.lib.vcs.backends.git.repository import GitRepository
    from distutils.version import StrictVersion

    stdout, stderr = GitRepository._run_git_command('--version', _bare=True,
                                                    _safe=True)

    ver = (stdout.split(' ')[-1] or '').strip() or '0.0.0'
    if len(ver.split('.')) > 3:
        #StrictVersion needs to be only 3 element type
        ver = '.'.join(ver.split('.')[:3])
    try:
        _ver = StrictVersion(ver)
    except:
        _ver = StrictVersion('0.0.0')
        stderr = traceback.format_exc()

    req_ver = '1.7.4'
    to_old_git = False
    if _ver < StrictVersion(req_ver):
        to_old_git = True

    if 'git' in BACKENDS:
        log.debug('GIT version detected: %s' % stdout)
        if stderr:
            log.warning('Unable to detect git version, org error was: %r' % stderr)
        elif to_old_git:
            log.warning('RhodeCode detected git version %s, which is too old '
                        'for the system to function properly. Make sure '
                        'its version is at least %s' % (ver, req_ver))
    return _ver


@decorator.decorator
def jsonify(func, *args, **kwargs):
    """Action decorator that formats output for JSON

    Given a function that will return content, this decorator will turn
    the result into JSON, with a content-type of 'application/json' and
    output it.

    """
    from pylons.decorators.util import get_pylons
    from rhodecode.lib.ext_json import json
    pylons = get_pylons(args)
    pylons.response.headers['Content-Type'] = 'application/json; charset=utf-8'
    data = func(*args, **kwargs)
    if isinstance(data, (list, tuple)):
        msg = "JSON responses with Array envelopes are susceptible to " \
              "cross-site data leak attacks, see " \
              "http://wiki.pylonshq.com/display/pylonsfaq/Warnings"
        warnings.warn(msg, Warning, 2)
        log.warning(msg)
    log.debug("Returning JSON wrapped action output")
    return json.dumps(data, encoding='utf-8')
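Not part of the changeset: a hedged sketch of the decorator on a controller action; the controller class and payload are illustrative, assuming rhodecode.lib.base.BaseController.

    from rhodecode.lib.base import BaseController
    from rhodecode.lib.utils import jsonify

    class ExampleController(BaseController):       # hypothetical controller
        @jsonify
        def repo_info(self):
            # returning a dict (not a list/tuple) avoids the array-envelope warning
            return {'name': 'myrepo', 'type': 'hg'}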