pytest: Create test search index from rc_testdata package.
Martin Bornhold
r212:c1cbe8e0 default
@@ -1,982 +1,982 @@
# -*- coding: utf-8 -*-

# Copyright (C) 2010-2016 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

"""
Utilities library for RhodeCode
"""

import datetime
import decorator
import json
import logging
import os
import re
import shutil
import tempfile
import traceback
import tarfile
import warnings
from os.path import abspath
from os.path import dirname as dn, join as jn

import paste
import pkg_resources
from paste.script.command import Command, BadCommand
from webhelpers.text import collapse, remove_formatting, strip_tags
from mako import exceptions

from rhodecode.lib.fakemod import create_module
from rhodecode.lib.vcs.backends.base import Config
from rhodecode.lib.vcs.exceptions import VCSError
from rhodecode.lib.vcs.utils.helpers import get_scm, get_scm_backend
from rhodecode.lib.utils2 import (
    safe_str, safe_unicode, get_current_rhodecode_user, md5)
from rhodecode.model import meta
from rhodecode.model.db import (
    Repository, User, RhodeCodeUi, UserLog, RepoGroup, UserGroup)
from rhodecode.model.meta import Session
from rhodecode.model.repo_group import RepoGroupModel
from rhodecode.model.settings import VcsSettingsModel, SettingsModel

log = logging.getLogger(__name__)

REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')

_license_cache = None


def recursive_replace(str_, replace=' '):
    """
    Recursively replace repeated occurrences of the given character with a
    single instance.

    :param str_: given string
    :param replace: char to find and replace multiple instances of

    Examples::
        >>> recursive_replace("Mighty---Mighty-Bo--sstones",'-')
        'Mighty-Mighty-Bo-sstones'
    """

    if str_.find(replace * 2) == -1:
        return str_
    else:
        str_ = str_.replace(replace * 2, replace)
        return recursive_replace(str_, replace)


def repo_name_slug(value):
    """
    Return a slug of the repository name.
    This function is called on each creation/modification
    of a repository to prevent bad names.
    """

    slug = remove_formatting(value)
    slug = strip_tags(slug)

    for c in """`?=[]\;'"<>,/~!@#$%^&*()+{}|: """:
        slug = slug.replace(c, '-')
    slug = recursive_replace(slug, '-')
    slug = collapse(slug, '-')
    return slug

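# Illustrative sketch (not lines from the module above): how the slug helper
# might be used when a display name is submitted; the input value and the
# shown result are assumptions, not captured output.
#
#   user_input = 'My Repo / With "quotes"!'
#   slug = repo_name_slug(user_input)
#   # yields a dash-separated name along the lines of 'My-Repo-With-quotes'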

#==============================================================================
# PERM DECORATOR HELPERS FOR EXTRACTING NAMES FOR PERM CHECKS
#==============================================================================
def get_repo_slug(request):
    _repo = request.environ['pylons.routes_dict'].get('repo_name')
    if _repo:
        _repo = _repo.rstrip('/')
    return _repo


def get_repo_group_slug(request):
    _group = request.environ['pylons.routes_dict'].get('group_name')
    if _group:
        _group = _group.rstrip('/')
    return _group


def get_user_group_slug(request):
    _group = request.environ['pylons.routes_dict'].get('user_group_id')
    try:
        _group = UserGroup.get(_group)
        if _group:
            _group = _group.users_group_name
    except Exception:
        log.debug(traceback.format_exc())
        # catch all failures here
        pass

    return _group
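
# Illustrative sketch: the permission decorators read these helpers to find
# out which repo/group a request targets. It assumes a Pylons-style request
# whose WSGI environ carries 'pylons.routes_dict', mirroring the lookups above.
#
#   repo_name = get_repo_slug(request)        # e.g. 'mygroup/myrepo' or None
#   group_name = get_repo_group_slug(request)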


def action_logger(user, action, repo, ipaddr='', sa=None, commit=False):
    """
    Action logger for various actions made by users

    :param user: user that made this action, can be a unique username string or
        object containing user_id attribute
    :param action: action to log, should be one of the predefined unique
        actions for easy translations
    :param repo: string name of repository or object containing repo_id,
        that action was made on
    :param ipaddr: optional ip address from which the action was made
    :param sa: optional sqlalchemy session

    """

    if not sa:
        sa = meta.Session()
    # if we don't get explicit IP address try to get one from registered user
    # in tmpl context var
    if not ipaddr:
        ipaddr = getattr(get_current_rhodecode_user(), 'ip_addr', '')

    try:
        if getattr(user, 'user_id', None):
            user_obj = User.get(user.user_id)
        elif isinstance(user, basestring):
            user_obj = User.get_by_username(user)
        else:
            raise Exception('You have to provide a user object or a username')

        if getattr(repo, 'repo_id', None):
            repo_obj = Repository.get(repo.repo_id)
            repo_name = repo_obj.repo_name
        elif isinstance(repo, basestring):
            repo_name = repo.lstrip('/')
            repo_obj = Repository.get_by_repo_name(repo_name)
        else:
            repo_obj = None
            repo_name = ''

        user_log = UserLog()
        user_log.user_id = user_obj.user_id
        user_log.username = user_obj.username
        action = safe_unicode(action)
        user_log.action = action[:1200000]

        user_log.repository = repo_obj
        user_log.repository_name = repo_name

        user_log.action_date = datetime.datetime.now()
        user_log.user_ip = ipaddr
        sa.add(user_log)

        log.info('Logging action:`%s` on repo:`%s` by user:%s ip:%s',
                 action, safe_unicode(repo), user_obj, ipaddr)
        if commit:
            sa.commit()
    except Exception:
        log.error(traceback.format_exc())
        raise
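
# Illustrative call (the action name and values below are hypothetical; the
# real predefined action strings live elsewhere in RhodeCode):
#
#   action_logger(request_user, 'admin_created_repo', 'mygroup/myrepo',
#                 ipaddr=request_ip, commit=True)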


def get_filesystem_repos(path, recursive=False, skip_removed_repos=True):
    """
    Scans the given path for repos and returns (name, (type, path)) tuples

    :param path: path to scan for repositories
    :param recursive: recursive search and return names with subdirs in front
    """

    # remove ending slash for better results
    path = path.rstrip(os.sep)
    log.debug('now scanning in %s location recursive:%s...', path, recursive)

    def _get_repos(p):
        dirpaths = _get_dirpaths(p)
        if not _is_dir_writable(p):
            log.warning('repo path without write access: %s', p)

        for dirpath in dirpaths:
            if os.path.isfile(os.path.join(p, dirpath)):
                continue
            cur_path = os.path.join(p, dirpath)

            # skip removed repos
            if skip_removed_repos and REMOVED_REPO_PAT.match(dirpath):
                continue

            # skip .<something> dirs
            if dirpath.startswith('.'):
                continue

            try:
                scm_info = get_scm(cur_path)
                yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
            except VCSError:
                if not recursive:
                    continue
                # check if this dir contains other repos for recursive scan
                rec_path = os.path.join(p, dirpath)
                if os.path.isdir(rec_path):
                    for inner_scm in _get_repos(rec_path):
                        yield inner_scm

    return _get_repos(path)
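
# Illustrative sketch of consuming the generator above; '/srv/repos' is a
# placeholder path, not a value used by RhodeCode itself:
#
#   for repo_name, (scm_type, scm_path) in get_filesystem_repos(
#           '/srv/repos', recursive=True):
#       print repo_name, scm_type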


def _get_dirpaths(p):
    try:
        # OS-independent way of checking if we have at least read-only
        # access or not.
        dirpaths = os.listdir(p)
    except OSError:
        log.warning('ignoring repo path without read access: %s', p)
        return []

    # os.listdir has a quirk: If a unicode is passed into it, then it tries to
    # decode paths and suddenly returns unicode objects itself. The items it
    # cannot decode are returned as strings and cause issues.
    #
    # Those paths are ignored here until a solid solution for path handling has
    # been built.
    expected_type = type(p)

    def _has_correct_type(item):
        if type(item) is not expected_type:
            log.error(
                u"Ignoring path %s since it cannot be decoded into unicode.",
                # Using "repr" to make sure that we see the byte value in case
                # of a support request.
                repr(item))
            return False
        return True

    dirpaths = [item for item in dirpaths if _has_correct_type(item)]

    return dirpaths


def _is_dir_writable(path):
    """
    Probe if `path` is writable.

    Due to trouble on Cygwin / Windows, this is actually probing if it is
    possible to create a file inside of `path`, stat does not produce reliable
    results in this case.
    """
    try:
        with tempfile.TemporaryFile(dir=path):
            pass
    except OSError:
        return False
    return True


def is_valid_repo(repo_name, base_path, expect_scm=None, explicit_scm=None):
    """
    Returns True if the given path is a valid repository, False otherwise.
    If expect_scm is given, also compare whether the detected scm is the same
    as the expected one. If explicit_scm is given, don't try to detect the
    scm; just use the given one to check if the repo is valid.

    :param repo_name:
    :param base_path:
    :param expect_scm:
    :param explicit_scm:

    :return True: if given path is a valid repository
    """
    full_path = os.path.join(safe_str(base_path), safe_str(repo_name))
    log.debug('Checking if `%s` is a valid path for repository', repo_name)

    try:
        if explicit_scm:
            detected_scms = [get_scm_backend(explicit_scm)]
        else:
            detected_scms = get_scm(full_path)

        if expect_scm:
            return detected_scms[0] == expect_scm
        log.debug('path: %s is a vcs object:%s', full_path, detected_scms)
        return True
    except VCSError:
        log.debug('path: %s is not a valid repo !', full_path)
        return False


def is_valid_repo_group(repo_group_name, base_path, skip_path_check=False):
    """
    Returns True if given path is a repository group, False otherwise

    :param repo_group_name:
    :param base_path:
    """
    full_path = os.path.join(safe_str(base_path), safe_str(repo_group_name))
    log.debug('Checking if `%s` is a valid path for repository group',
              repo_group_name)

    # check if it's not a repo
    if is_valid_repo(repo_group_name, base_path):
        log.debug('Repo called %s exists, it is not a valid '
                  'repo group' % repo_group_name)
        return False

    try:
        # we need to check bare git repos at higher level
        # since we might match branches/hooks/info/objects or possible
        # other things inside bare git repo
        scm_ = get_scm(os.path.dirname(full_path))
        log.debug('path: %s is a vcs object:%s, not valid '
                  'repo group' % (full_path, scm_))
        return False
    except VCSError:
        pass

    # check if it's a valid path
    if skip_path_check or os.path.isdir(full_path):
        log.debug('path: %s is a valid repo group !', full_path)
        return True

    log.debug('path: %s is not a valid repo group !', full_path)
    return False


def ask_ok(prompt, retries=4, complaint='Yes or no please!'):
    while True:
        ok = raw_input(prompt)
        if ok in ('y', 'ye', 'yes'):
            return True
        if ok in ('n', 'no', 'nop', 'nope'):
            return False
        retries = retries - 1
        if retries < 0:
            raise IOError
        print complaint

# propagated from mercurial documentation
ui_sections = [
    'alias', 'auth',
    'decode/encode', 'defaults',
    'diff', 'email',
    'extensions', 'format',
    'merge-patterns', 'merge-tools',
    'hooks', 'http_proxy',
    'smtp', 'patch',
    'paths', 'profiling',
    'server', 'trusted',
    'ui', 'web', ]


def config_data_from_db(clear_session=True, repo=None):
    """
    Read the configuration data from the database and return configuration
    tuples.
    """
    config = []

    sa = meta.Session()
    settings_model = VcsSettingsModel(repo=repo, sa=sa)

    ui_settings = settings_model.get_ui_settings()

    for setting in ui_settings:
        if setting.active:
            log.debug(
                'settings ui from db: [%s] %s=%s',
                setting.section, setting.key, setting.value)
            config.append((
                safe_str(setting.section), safe_str(setting.key),
                safe_str(setting.value)))
            if setting.key == 'push_ssl':
                # force set push_ssl requirement to False, rhodecode
                # handles that
                config.append((
                    safe_str(setting.section), safe_str(setting.key), False))
    if clear_session:
        meta.Session.remove()

    # TODO: mikhail: probably it makes no sense to re-read hooks information.
    # It's already there and activated/deactivated
    skip_entries = []
    enabled_hook_classes = get_enabled_hook_classes(ui_settings)
    if 'pull' not in enabled_hook_classes:
        skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PULL))
    if 'push' not in enabled_hook_classes:
        skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PUSH))

    config = [entry for entry in config if entry[:2] not in skip_entries]

    return config


def make_db_config(clear_session=True, repo=None):
    """
    Create a :class:`Config` instance based on the values in the database.
    """
    config = Config()
    config_data = config_data_from_db(clear_session=clear_session, repo=repo)
    for section, option, value in config_data:
        config.set(section, option, value)
    return config


def get_enabled_hook_classes(ui_settings):
    """
    Return the enabled hook classes.

    :param ui_settings: List of ui_settings as returned
        by :meth:`VcsSettingsModel.get_ui_settings`

    :return: a list with the enabled hook classes. The order is not guaranteed.
    :rtype: list
    """
    enabled_hooks = []
    active_hook_keys = [
        key for section, key, value, active in ui_settings
        if section == 'hooks' and active]

    hook_names = {
        RhodeCodeUi.HOOK_PUSH: 'push',
        RhodeCodeUi.HOOK_PULL: 'pull',
        RhodeCodeUi.HOOK_REPO_SIZE: 'repo_size'
    }

    for key in active_hook_keys:
        hook = hook_names.get(key)
        if hook:
            enabled_hooks.append(hook)

    return enabled_hooks
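
# Illustrative sketch of the expected input/output shape; the entries below
# are made up, only their (section, key, value, active) layout follows the
# unpacking in the function above:
#
#   ui_settings = [('hooks', RhodeCodeUi.HOOK_PUSH, '...', True),
#                  ('hooks', RhodeCodeUi.HOOK_REPO_SIZE, '...', False)]
#   get_enabled_hook_classes(ui_settings)  # -> ['push']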


def set_rhodecode_config(config):
    """
    Updates pylons config with new settings from database

    :param config:
    """
    app_settings = SettingsModel().get_all_settings()

    for k, v in app_settings.items():
        config[k] = v


def map_groups(path):
    """
    Given a full path to a repository, create all nested groups that this
    repo is inside. This function creates parent-child relationships between
    groups and creates default perms for all new groups.

    :param path: full path to repository
    """
    sa = meta.Session()
    groups = path.split(Repository.NAME_SEP)
    parent = None
    group = None

    # last element is repo in nested groups structure
    groups = groups[:-1]
    rgm = RepoGroupModel(sa)
    owner = User.get_first_admin()
    for lvl, group_name in enumerate(groups):
        group_name = '/'.join(groups[:lvl] + [group_name])
        group = RepoGroup.get_by_group_name(group_name)
        desc = '%s group' % group_name

        # skip folders that are now removed repos
        if REMOVED_REPO_PAT.match(group_name):
            break

        if group is None:
            log.debug('creating group level: %s group_name: %s',
                      lvl, group_name)
            group = RepoGroup(group_name, parent)
            group.group_description = desc
            group.user = owner
            sa.add(group)
            perm_obj = rgm._create_default_perms(group)
            sa.add(perm_obj)
            sa.flush()

        parent = group
    return group
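
# Illustrative sketch (the path is a placeholder): for a repository stored at
# 'company/projects/backend', map_groups creates the groups 'company' and
# 'company/projects' if they are missing and returns the deepest one:
#
#   group = map_groups('company/projects/backend')
#   # group.group_name would then be 'company/projects'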


def repo2db_mapper(initial_repo_list, remove_obsolete=False):
    """
    Maps all repos given in initial_repo_list; non-existing repositories are
    created. If remove_obsolete is True it also checks for db entries that are
    not in initial_repo_list and removes them.

    :param initial_repo_list: list of repositories found by scanning methods
    :param remove_obsolete: check for obsolete entries in database
    """
    from rhodecode.model.repo import RepoModel
    from rhodecode.model.scm import ScmModel
    sa = meta.Session()
    repo_model = RepoModel()
    user = User.get_first_admin()
    added = []

    # creation defaults
    defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
    enable_statistics = defs.get('repo_enable_statistics')
    enable_locking = defs.get('repo_enable_locking')
    enable_downloads = defs.get('repo_enable_downloads')
    private = defs.get('repo_private')

    for name, repo in initial_repo_list.items():
        group = map_groups(name)
        unicode_name = safe_unicode(name)
        db_repo = repo_model.get_by_repo_name(unicode_name)
        # found repo that is on the filesystem but not in the RhodeCode database
        if not db_repo:
            log.info('repository %s not found, creating now', name)
            added.append(name)
            desc = (repo.description
                    if repo.description != 'unknown'
                    else '%s repository' % name)

            db_repo = repo_model._create_repo(
                repo_name=name,
                repo_type=repo.alias,
                description=desc,
                repo_group=getattr(group, 'group_id', None),
                owner=user,
                enable_locking=enable_locking,
                enable_downloads=enable_downloads,
                enable_statistics=enable_statistics,
                private=private,
                state=Repository.STATE_CREATED
            )
            sa.commit()
            # we added that repo just now; make sure we update the server info
            if db_repo.repo_type == 'git':
                git_repo = db_repo.scm_instance()
                # update repository server-info
                log.debug('Running update server info')
                git_repo._update_server_info()

            db_repo.update_commit_cache()

            config = db_repo._config
            config.set('extensions', 'largefiles', '')
            ScmModel().install_hooks(
                db_repo.scm_instance(config=config),
                repo_type=db_repo.repo_type)

    removed = []
    if remove_obsolete:
        # remove from database those repositories that are not in the filesystem
        for repo in sa.query(Repository).all():
            if repo.repo_name not in initial_repo_list.keys():
                log.debug("Removing non-existing repository found in db `%s`",
                          repo.repo_name)
                try:
                    RepoModel(sa).delete(repo, forks='detach', fs_remove=False)
                    sa.commit()
                    removed.append(repo.repo_name)
                except Exception:
                    # don't hold further removals on error
                    log.error(traceback.format_exc())
                    sa.rollback()

        def splitter(full_repo_name):
            _parts = full_repo_name.rsplit(RepoGroup.url_sep(), 1)
            gr_name = None
            if len(_parts) == 2:
                gr_name = _parts[0]
            return gr_name

        initial_repo_group_list = [splitter(x) for x in
                                   initial_repo_list.keys() if splitter(x)]

        # remove from database those repository groups that are not in the
        # filesystem; due to parent-child relationships we need to delete them
        # in a specific order, most nested first
        all_groups = [x.group_name for x in sa.query(RepoGroup).all()]
        nested_sort = lambda gr: len(gr.split('/'))
        for group_name in sorted(all_groups, key=nested_sort, reverse=True):
            if group_name not in initial_repo_group_list:
                repo_group = RepoGroup.get_by_group_name(group_name)
                if (repo_group.children.all() or
                        not RepoGroupModel().check_exist_filesystem(
                            group_name=group_name, exc_on_failure=False)):
                    continue

                log.info(
                    'Removing non-existing repository group found in db `%s`',
                    group_name)
                try:
                    RepoGroupModel(sa).delete(group_name, fs_remove=False)
                    sa.commit()
                    removed.append(group_name)
                except Exception:
                    # don't hold further removals on error
                    log.exception(
                        'Unable to remove repository group `%s`',
                        group_name)
                    sa.rollback()
                    raise

    return added, removed


def get_default_cache_settings(settings):
    cache_settings = {}
    for key in settings.keys():
        for prefix in ['beaker.cache.', 'cache.']:
            if key.startswith(prefix):
                name = key.split(prefix)[1].strip()
                cache_settings[name] = settings[key].strip()
    return cache_settings


# set cache regions for beaker so celery can utilise it
def add_cache(settings):
    from rhodecode.lib import caches
    cache_settings = {'regions': None}
    # main cache settings used as default ...
    cache_settings.update(get_default_cache_settings(settings))

    if cache_settings['regions']:
        for region in cache_settings['regions'].split(','):
            region = region.strip()
            region_settings = {}
            for key, value in cache_settings.items():
                if key.startswith(region):
                    region_settings[key.split('.')[1]] = value

            caches.configure_cache_region(
                region, region_settings, cache_settings)
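
# Illustrative .ini fragment that the two functions above would pick up; the
# region name 'long_term' and its values are examples, not required settings:
#
#   beaker.cache.regions = long_term
#   beaker.cache.long_term.type = memory
#   beaker.cache.long_term.expire = 36000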


def load_rcextensions(root_path):
    import rhodecode
    from rhodecode.config import conf

    path = os.path.join(root_path, 'rcextensions', '__init__.py')
    if os.path.isfile(path):
        rcext = create_module('rc', path)
        EXT = rhodecode.EXTENSIONS = rcext
        log.debug('Found rcextensions now loading %s...', rcext)

        # Additional mappings that are not present in the pygments lexers
        conf.LANGUAGES_EXTENSIONS_MAP.update(getattr(EXT, 'EXTRA_MAPPINGS', {}))

        # auto check if the module is not missing any data, set to default if it is
        # this will help auto-update new features of the rcext module
        #from rhodecode.config import rcextensions
        #for k in dir(rcextensions):
        #    if not k.startswith('_') and not hasattr(EXT, k):
        #        setattr(EXT, k, getattr(rcextensions, k))


def get_custom_lexer(extension):
    """
    returns a custom lexer if it is defined in the rcextensions module, or None
    if there's no custom lexer defined
    """
    import rhodecode
    from pygments import lexers
    # check if we didn't define this extension as another lexer
    extensions = rhodecode.EXTENSIONS and getattr(rhodecode.EXTENSIONS, 'EXTRA_LEXERS', None)
    if extensions and extension in rhodecode.EXTENSIONS.EXTRA_LEXERS:
        _lexer_name = rhodecode.EXTENSIONS.EXTRA_LEXERS[extension]
        return lexers.get_lexer_by_name(_lexer_name)
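
# Illustrative rcextensions snippet (the mapping below is an example; the key
# is a file extension and the value any valid pygments lexer name):
#
#   # in rcextensions/__init__.py
#   EXTRA_LEXERS = {'tmpl': 'mako'}
#
#   get_custom_lexer('tmpl')  # -> pygments lexer for Mako templates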


#==============================================================================
# TEST FUNCTIONS AND CREATORS
#==============================================================================
def create_test_index(repo_location, config, full_index):
    """
    Makes default test index

    :param config: test config
    :param full_index:
    # start test server:
    rcserver --with-vcsserver test.ini

    # build index and store it in /tmp/rc/index:
    rhodecode-index --force --api-host=http://vps1.dev:5000 --api-key=xxx --engine-location=/tmp/rc/index

    # package and move new packages
    tar -zcvf vcs_search_index.tar.gz -C /tmp/rc index
    mv vcs_search_index.tar.gz rhodecode/tests/fixtures/

    """
-    cur_dir = dn(dn(abspath(__file__)))
-    with tarfile.open(jn(cur_dir, 'tests', 'fixtures',
-                         'vcs_search_index.tar.gz')) as tar:
-        tar.extractall(os.path.dirname(config['search.location']))
+    import rc_testdata
+
+    rc_testdata.extract_search_index(
+        'vcs_search_index', os.path.dirname(config['search.location']))
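
# Illustrative sketch of what the new call does (the paths are placeholders,
# not values from the commit): with config['search.location'] set to
# '/tmp/rc/index', the index bundled with the rc_testdata package is unpacked
# into '/tmp/rc' instead of being read from rhodecode/tests/fixtures:
#
#   import rc_testdata
#   rc_testdata.extract_search_index('vcs_search_index', '/tmp/rc')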


def create_test_env(repos_test_path, config):
    """
    Makes a fresh database and
    installs test repository into tmp dir
    """
    from rhodecode.lib.db_manage import DbManage
    from rhodecode.tests import HG_REPO, GIT_REPO, SVN_REPO, TESTS_TMP_PATH

    # PART ONE create db
    dbconf = config['sqlalchemy.db1.url']
    log.debug('making test db %s', dbconf)

    # create test dir if it doesn't exist
    if not os.path.isdir(repos_test_path):
        log.debug('Creating testdir %s', repos_test_path)
        os.makedirs(repos_test_path)

    dbmanage = DbManage(log_sql=False, dbconf=dbconf, root=config['here'],
                        tests=True, cli_args={'force_ask': True})
    dbmanage.create_tables(override=True)
    dbmanage.set_db_version()
    # for tests dynamically set new root paths based on generated content
    dbmanage.create_settings(dbmanage.config_prompt(repos_test_path))
    dbmanage.create_default_user()
    dbmanage.create_test_admin_and_users()
    dbmanage.create_permissions()
    dbmanage.populate_default_permissions()
    Session().commit()
    # PART TWO make test repo
    log.debug('making test vcs repositories')

    idx_path = config['search.location']
    data_path = config['cache_dir']

    # clean index and data
    if idx_path and os.path.exists(idx_path):
        log.debug('remove %s', idx_path)
        shutil.rmtree(idx_path)

    if data_path and os.path.exists(data_path):
        log.debug('remove %s', data_path)
        shutil.rmtree(data_path)

    # CREATE DEFAULT TEST REPOS
    cur_dir = dn(dn(abspath(__file__)))
    with tarfile.open(jn(cur_dir, 'tests', 'fixtures',
                         'vcs_test_hg.tar.gz')) as tar:
        tar.extractall(jn(TESTS_TMP_PATH, HG_REPO))

    cur_dir = dn(dn(abspath(__file__)))
    with tarfile.open(jn(cur_dir, 'tests', 'fixtures',
                         'vcs_test_git.tar.gz')) as tar:
        tar.extractall(jn(TESTS_TMP_PATH, GIT_REPO))

    # Note: Subversion is in the process of being integrated with the system,
    # until we have a properly packed version of the test svn repository, this
    # tries to copy over the repo from a package "rc_testdata"
    import rc_testdata
    svn_repo_path = rc_testdata.get_svn_repo_archive()
    with tarfile.open(svn_repo_path) as tar:
        tar.extractall(jn(TESTS_TMP_PATH, SVN_REPO))


#==============================================================================
# PASTER COMMANDS
#==============================================================================
class BasePasterCommand(Command):
    """
    Abstract Base Class for paster commands.

    The celery commands are somewhat aggressive about loading
    celery.conf, and since our module sets the `CELERY_LOADER`
    environment variable to our loader, we have to bootstrap a bit and
    make sure we've had a chance to load the pylons config off of the
    command line, otherwise everything fails.
    """
    min_args = 1
    min_args_error = "Please provide a paster config file as an argument."
    takes_config_file = 1
    requires_config_file = True

    def notify_msg(self, msg, log=False):
        """Make a notification to the user; additionally, if a logger is
        passed, log this action using the given logger.

        :param msg: message that will be printed to user
        :param log: logging instance, to use to additionally log this message

        """
        if log and isinstance(log, logging.Logger):
            log.info(msg)

    def run(self, args):
        """
        Overrides Command.run

        Checks for a config file argument and loads it.
        """
        if len(args) < self.min_args:
            raise BadCommand(
                self.min_args_error % {'min_args': self.min_args,
                                       'actual_args': len(args)})

        # Decrement because we're going to lob off the first argument.
        # @@ This is hacky
        self.min_args -= 1
        self.bootstrap_config(args[0])
        self.update_parser()
        return super(BasePasterCommand, self).run(args[1:])

    def update_parser(self):
        """
        Abstract method. Allows for the class' parser to be updated
        before the superclass' `run` method is called. Necessary to
        allow options/arguments to be passed through to the underlying
        celery command.
        """
        raise NotImplementedError("Abstract Method.")

    def bootstrap_config(self, conf):
        """
        Loads the pylons configuration.
        """
        from pylons import config as pylonsconfig

        self.path_to_ini_file = os.path.realpath(conf)
        conf = paste.deploy.appconfig('config:' + self.path_to_ini_file)
        pylonsconfig.init_app(conf.global_conf, conf.local_conf)

    def _init_session(self):
        """
        Inits SqlAlchemy Session
        """
        logging.config.fileConfig(self.path_to_ini_file)
        from pylons import config
        from rhodecode.config.utils import initialize_database

        # get to remove repos !!
        add_cache(config)
        initialize_database(config)


@decorator.decorator
def jsonify(func, *args, **kwargs):
    """Action decorator that formats output for JSON

    Given a function that will return content, this decorator will turn
    the result into JSON, with a content-type of 'application/json' and
    output it.

    """
    from pylons.decorators.util import get_pylons
    from rhodecode.lib.ext_json import json
    pylons = get_pylons(args)
    pylons.response.headers['Content-Type'] = 'application/json; charset=utf-8'
    data = func(*args, **kwargs)
    if isinstance(data, (list, tuple)):
        msg = "JSON responses with Array envelopes are susceptible to " \
              "cross-site data leak attacks, see " \
              "http://wiki.pylonshq.com/display/pylonsfaq/Warnings"
        warnings.warn(msg, Warning, 2)
        log.warning(msg)
    log.debug("Returning JSON wrapped action output")
    return json.dumps(data, encoding='utf-8')
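
# Illustrative use of the decorator above inside a Pylons controller; the
# controller and action names are made up for the example:
#
#   class MyController(BaseController):
#       @jsonify
#       def repo_stats(self):
#           return {'ok': True, 'count': 42}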


class PartialRenderer(object):
    """
    Partial renderer used to render chunks of html used in datagrids.
    Use like::

        _render = PartialRenderer('data_table/_dt_elements.html')
        _render('quick_menu', args, kwargs)

    PartialRenderer.h, c, _, ungettext are the template helpers initialized
    inside and can be re-used later.

    :param tmpl_name: template path relative to /templates/ dir
    """

    def __init__(self, tmpl_name):
        import rhodecode
        from pylons import request, tmpl_context as c
        from pylons.i18n.translation import _, ungettext
        from rhodecode.lib import helpers as h

        self.tmpl_name = tmpl_name
        self.rhodecode = rhodecode
        self.c = c
        self._ = _
        self.ungettext = ungettext
        self.h = h
        self.request = request

    def _mako_lookup(self):
        _tmpl_lookup = self.rhodecode.CONFIG['pylons.app_globals'].mako_lookup
        return _tmpl_lookup.get_template(self.tmpl_name)

    def _update_kwargs_for_render(self, kwargs):
        """
        Inject params required for Mako rendering
        """
        _kwargs = {
            '_': self._,
            'h': self.h,
            'c': self.c,
            'request': self.request,
            'ungettext': self.ungettext,
        }
        _kwargs.update(kwargs)
        return _kwargs

    def _render_with_exc(self, render_func, args, kwargs):
        try:
            return render_func.render(*args, **kwargs)
        except:
            log.error(exceptions.text_error_template().render())
            raise

    def _get_template(self, template_obj, def_name):
        if def_name:
            tmpl = template_obj.get_def(def_name)
        else:
            tmpl = template_obj
        return tmpl

    def render(self, def_name, *args, **kwargs):
        lookup_obj = self._mako_lookup()
        tmpl = self._get_template(lookup_obj, def_name=def_name)
        kwargs = self._update_kwargs_for_render(kwargs)
        return self._render_with_exc(tmpl, args, kwargs)

    def __call__(self, tmpl, *args, **kwargs):
        return self.render(tmpl, *args, **kwargs)


def password_changed(auth_user, session):
    if auth_user.username == User.DEFAULT_USER:
        return False
    password_hash = md5(auth_user.password) if auth_user.password else None
    rhodecode_user = session.get('rhodecode_user', {})
    session_password_hash = rhodecode_user.get('password', '')
    return password_hash != session_password_hash


def read_opensource_licenses():
    global _license_cache

    if not _license_cache:
        licenses = pkg_resources.resource_string(
            'rhodecode', 'config/licenses.json')
        _license_cache = json.loads(licenses)

    return _license_cache