logging: added additional log info to vcs detection util.
marcink
r1315:72279f5c default
@@ -1,1019 +1,1020 @@
# -*- coding: utf-8 -*-

# Copyright (C) 2010-2017 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

"""
Utilities library for RhodeCode
"""

import datetime
import decorator
import json
import logging
import os
import re
import shutil
import tempfile
import traceback
import tarfile
import warnings
import hashlib
from os.path import join as jn

import paste
import pkg_resources
from paste.script.command import Command, BadCommand
from webhelpers.text import collapse, remove_formatting, strip_tags
from mako import exceptions
from pyramid.threadlocal import get_current_registry

from rhodecode.lib.fakemod import create_module
from rhodecode.lib.vcs.backends.base import Config
from rhodecode.lib.vcs.exceptions import VCSError
from rhodecode.lib.vcs.utils.helpers import get_scm, get_scm_backend
from rhodecode.lib.utils2 import (
    safe_str, safe_unicode, get_current_rhodecode_user, md5)
from rhodecode.model import meta
from rhodecode.model.db import (
    Repository, User, RhodeCodeUi, UserLog, RepoGroup, UserGroup)
from rhodecode.model.meta import Session


log = logging.getLogger(__name__)

REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')

# String which contains characters that are not allowed in slug names for
# repositories or repository groups. It is properly escaped to use it in
# regular expressions.
SLUG_BAD_CHARS = re.escape('`?=[]\;\'"<>,/~!@#$%^&*()+{}|:')

# Regex that matches forbidden characters in repo/group slugs.
SLUG_BAD_CHAR_RE = re.compile('[{}]'.format(SLUG_BAD_CHARS))

# Regex that matches allowed characters in repo/group slugs.
SLUG_GOOD_CHAR_RE = re.compile('[^{}]'.format(SLUG_BAD_CHARS))

# Regex that matches whole repo/group slugs.
SLUG_RE = re.compile('[^{}]+'.format(SLUG_BAD_CHARS))

_license_cache = None


def repo_name_slug(value):
    """
    Return a slug of the repository name.
    This function is called on each creation/modification of a repository
    to prevent bad names in repo.
    """
    replacement_char = '-'

    slug = remove_formatting(value)
    slug = SLUG_BAD_CHAR_RE.sub('', slug)
    slug = re.sub('[\s]+', '-', slug)
    slug = collapse(slug, replacement_char)
    return slug


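# Illustrative sketch, not part of the original module: approximate behaviour
# of repo_name_slug(). The exact output depends on webhelpers'
# remove_formatting() and collapse(), but roughly:
#
#   >>> repo_name_slug('My Repo?!')
#   'My-Repo'   # forbidden characters dropped, whitespace collapsed to '-'

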
#==============================================================================
# PERM DECORATOR HELPERS FOR EXTRACTING NAMES FOR PERM CHECKS
#==============================================================================
def get_repo_slug(request):
    _repo = request.environ['pylons.routes_dict'].get('repo_name')
    if _repo:
        _repo = _repo.rstrip('/')
    return _repo


def get_repo_group_slug(request):
    _group = request.environ['pylons.routes_dict'].get('group_name')
    if _group:
        _group = _group.rstrip('/')
    return _group


def get_user_group_slug(request):
    _group = request.environ['pylons.routes_dict'].get('user_group_id')
    try:
        _group = UserGroup.get(_group)
        if _group:
            _group = _group.users_group_name
    except Exception:
        log.debug(traceback.format_exc())
        # catch all failures here
        pass

    return _group


def action_logger(user, action, repo, ipaddr='', sa=None, commit=False):
    """
    Action logger for various actions made by users

    :param user: user that made this action, can be a unique username string or
        object containing user_id attribute
    :param action: action to log, should be one of predefined unique actions for
        easy translations
    :param repo: string name of repository or object containing repo_id,
        that action was made on
    :param ipaddr: optional ip address from which the action was made
    :param sa: optional sqlalchemy session

    """

    if not sa:
        sa = meta.Session()
    # if we don't get explicit IP address try to get one from registered user
    # in tmpl context var
    if not ipaddr:
        ipaddr = getattr(get_current_rhodecode_user(), 'ip_addr', '')

    try:
        if getattr(user, 'user_id', None):
            user_obj = User.get(user.user_id)
        elif isinstance(user, basestring):
            user_obj = User.get_by_username(user)
        else:
            raise Exception('You have to provide a user object or a username')

        if getattr(repo, 'repo_id', None):
            repo_obj = Repository.get(repo.repo_id)
            repo_name = repo_obj.repo_name
        elif isinstance(repo, basestring):
            repo_name = repo.lstrip('/')
            repo_obj = Repository.get_by_repo_name(repo_name)
        else:
            repo_obj = None
            repo_name = ''

        user_log = UserLog()
        user_log.user_id = user_obj.user_id
        user_log.username = user_obj.username
        action = safe_unicode(action)
        user_log.action = action[:1200000]

        user_log.repository = repo_obj
        user_log.repository_name = repo_name

        user_log.action_date = datetime.datetime.now()
        user_log.user_ip = ipaddr
        sa.add(user_log)

        log.info('Logging action:`%s` on repo:`%s` by user:%s ip:%s',
                 action, safe_unicode(repo), user_obj, ipaddr)
        if commit:
            sa.commit()
    except Exception:
        log.error(traceback.format_exc())
        raise


def get_filesystem_repos(path, recursive=False, skip_removed_repos=True):
    """
    Scans given path for repos and returns (name, (type, path)) tuples

    :param path: path to scan for repositories
    :param recursive: recursive search and return names with subdirs in front
    """

    # remove ending slash for better results
    path = path.rstrip(os.sep)
    log.debug('now scanning in %s location recursive:%s...', path, recursive)

    def _get_repos(p):
        dirpaths = _get_dirpaths(p)
        if not _is_dir_writable(p):
            log.warning('repo path without write access: %s', p)

        for dirpath in dirpaths:
            if os.path.isfile(os.path.join(p, dirpath)):
                continue
            cur_path = os.path.join(p, dirpath)

            # skip removed repos
            if skip_removed_repos and REMOVED_REPO_PAT.match(dirpath):
                continue

            # skip .<something> dirs
            if dirpath.startswith('.'):
                continue

            try:
                scm_info = get_scm(cur_path)
                yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
            except VCSError:
                if not recursive:
                    continue
                # check if this dir contains other repos for recursive scan
                rec_path = os.path.join(p, dirpath)
                if os.path.isdir(rec_path):
                    for inner_scm in _get_repos(rec_path):
                        yield inner_scm

    return _get_repos(path)


def _get_dirpaths(p):
    try:
        # OS-independent way of checking if we have at least read-only
        # access or not.
        dirpaths = os.listdir(p)
    except OSError:
        log.warning('ignoring repo path without read access: %s', p)
        return []

    # os.listdir has a tweak: If a unicode is passed into it, then it tries to
    # decode paths and suddenly returns unicode objects itself. The items it
    # cannot decode are returned as strings and cause issues.
    #
    # Those paths are ignored here until a solid solution for path handling has
    # been built.
    expected_type = type(p)

    def _has_correct_type(item):
        if type(item) is not expected_type:
            log.error(
                u"Ignoring path %s since it cannot be decoded into unicode.",
                # Using "repr" to make sure that we see the byte value in case
                # of support.
                repr(item))
            return False
        return True

    dirpaths = [item for item in dirpaths if _has_correct_type(item)]

    return dirpaths


def _is_dir_writable(path):
    """
    Probe if `path` is writable.

    Due to trouble on Cygwin / Windows, this is actually probing if it is
    possible to create a file inside of `path`, stat does not produce reliable
    results in this case.
    """
    try:
        with tempfile.TemporaryFile(dir=path):
            pass
    except OSError:
        return False
    return True


def is_valid_repo(repo_name, base_path, expect_scm=None, explicit_scm=None):
    """
    Returns True if given path is a valid repository, False otherwise.
    If expect_scm param is given also, compare if given scm is the same
    as expected from scm parameter. If explicit_scm is given don't try to
    detect the scm, just use the given one to check if repo is valid

    :param repo_name:
    :param base_path:
    :param expect_scm:
    :param explicit_scm:

    :return True: if given path is a valid repository
    """
    full_path = os.path.join(safe_str(base_path), safe_str(repo_name))
-    log.debug('Checking if `%s` is a valid path for repository', repo_name)
+    log.debug('Checking if `%s` is a valid path for repository. '
+              'Explicit type: %s', repo_name, explicit_scm)

    try:
        if explicit_scm:
            detected_scms = [get_scm_backend(explicit_scm)]
        else:
            detected_scms = get_scm(full_path)

        if expect_scm:
            return detected_scms[0] == expect_scm
        log.debug('path: %s is an vcs object:%s', full_path, detected_scms)
        return True
    except VCSError:
        log.debug('path: %s is not a valid repo !', full_path)
        return False


def is_valid_repo_group(repo_group_name, base_path, skip_path_check=False):
    """
    Returns True if given path is a repository group, False otherwise

    :param repo_group_name:
    :param base_path:
    """
    full_path = os.path.join(safe_str(base_path), safe_str(repo_group_name))
    log.debug('Checking if `%s` is a valid path for repository group',
              repo_group_name)

    # check if it's not a repo
    if is_valid_repo(repo_group_name, base_path):
        log.debug('Repo called %s exist, it is not a valid '
                  'repo group' % repo_group_name)
        return False

    try:
        # we need to check bare git repos at higher level
        # since we might match branches/hooks/info/objects or possible
        # other things inside bare git repo
        scm_ = get_scm(os.path.dirname(full_path))
        log.debug('path: %s is a vcs object:%s, not valid '
                  'repo group' % (full_path, scm_))
        return False
    except VCSError:
        pass

    # check if it's a valid path
    if skip_path_check or os.path.isdir(full_path):
        log.debug('path: %s is a valid repo group !', full_path)
        return True

    log.debug('path: %s is not a valid repo group !', full_path)
    return False


def ask_ok(prompt, retries=4, complaint='[y]es or [n]o please!'):
    while True:
        ok = raw_input(prompt)
        if ok.lower() in ('y', 'ye', 'yes'):
            return True
        if ok.lower() in ('n', 'no', 'nop', 'nope'):
            return False
        retries = retries - 1
        if retries < 0:
            raise IOError
        print(complaint)

# propagated from mercurial documentation
ui_sections = [
    'alias', 'auth',
    'decode/encode', 'defaults',
    'diff', 'email',
    'extensions', 'format',
    'merge-patterns', 'merge-tools',
    'hooks', 'http_proxy',
    'smtp', 'patch',
    'paths', 'profiling',
    'server', 'trusted',
    'ui', 'web', ]


def config_data_from_db(clear_session=True, repo=None):
    """
    Read the configuration data from the database and return configuration
    tuples.
    """
    from rhodecode.model.settings import VcsSettingsModel

    config = []

    sa = meta.Session()
    settings_model = VcsSettingsModel(repo=repo, sa=sa)

    ui_settings = settings_model.get_ui_settings()

    for setting in ui_settings:
        if setting.active:
            log.debug(
                'settings ui from db: [%s] %s=%s',
                setting.section, setting.key, setting.value)
            config.append((
                safe_str(setting.section), safe_str(setting.key),
                safe_str(setting.value)))
            if setting.key == 'push_ssl':
                # force set push_ssl requirement to False, rhodecode
                # handles that
                config.append((
                    safe_str(setting.section), safe_str(setting.key), False))
    if clear_session:
        meta.Session.remove()

    # TODO: mikhail: probably it makes no sense to re-read hooks information.
    # It's already there and activated/deactivated
    skip_entries = []
    enabled_hook_classes = get_enabled_hook_classes(ui_settings)
    if 'pull' not in enabled_hook_classes:
        skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PULL))
    if 'push' not in enabled_hook_classes:
        skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PUSH))

    config = [entry for entry in config if entry[:2] not in skip_entries]

    return config


def make_db_config(clear_session=True, repo=None):
    """
    Create a :class:`Config` instance based on the values in the database.
    """
    config = Config()
    config_data = config_data_from_db(clear_session=clear_session, repo=repo)
    for section, option, value in config_data:
        config.set(section, option, value)
    return config


def get_enabled_hook_classes(ui_settings):
    """
    Return the enabled hook classes.

    :param ui_settings: List of ui_settings as returned
        by :meth:`VcsSettingsModel.get_ui_settings`

    :return: a list with the enabled hook classes. The order is not guaranteed.
    :rtype: list
    """
    enabled_hooks = []
    active_hook_keys = [
        key for section, key, value, active in ui_settings
        if section == 'hooks' and active]

    hook_names = {
        RhodeCodeUi.HOOK_PUSH: 'push',
        RhodeCodeUi.HOOK_PULL: 'pull',
        RhodeCodeUi.HOOK_REPO_SIZE: 'repo_size'
    }

    for key in active_hook_keys:
        hook = hook_names.get(key)
        if hook:
            enabled_hooks.append(hook)

    return enabled_hooks


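# Illustrative sketch, not part of the original module: the ui_settings
# entries consumed by get_enabled_hook_classes() unpack as
# (section, key, value, active) tuples; the values below are hypothetical.
#
#   >>> ui_settings = [
#   ...     ('hooks', RhodeCodeUi.HOOK_PUSH, 'python:...', True),
#   ...     ('hooks', RhodeCodeUi.HOOK_REPO_SIZE, 'python:...', False),
#   ... ]
#   >>> get_enabled_hook_classes(ui_settings)
#   ['push']

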
def set_rhodecode_config(config):
    """
    Updates pylons config with new settings from database

    :param config:
    """
    from rhodecode.model.settings import SettingsModel
    app_settings = SettingsModel().get_all_settings()

    for k, v in app_settings.items():
        config[k] = v


def get_rhodecode_realm():
    """
    Return the rhodecode realm from database.
    """
    from rhodecode.model.settings import SettingsModel
    realm = SettingsModel().get_setting_by_name('realm')
    return safe_str(realm.app_settings_value)


def get_rhodecode_base_path():
    """
    Returns the base path. The base path is the filesystem path which points
    to the repository store.
    """
    from rhodecode.model.settings import SettingsModel
    paths_ui = SettingsModel().get_ui_by_section_and_key('paths', '/')
    return safe_str(paths_ui.ui_value)


def map_groups(path):
    """
    Given a full path to a repository, create all nested groups that this
    repo is inside. This function creates parent-child relationships between
    groups and creates default perms for all new groups.

    :param path: full path to repository
    """
    from rhodecode.model.repo_group import RepoGroupModel
    sa = meta.Session()
    groups = path.split(Repository.NAME_SEP)
    parent = None
    group = None

    # last element is repo in nested groups structure
    groups = groups[:-1]
    rgm = RepoGroupModel(sa)
    owner = User.get_first_super_admin()
    for lvl, group_name in enumerate(groups):
        group_name = '/'.join(groups[:lvl] + [group_name])
        group = RepoGroup.get_by_group_name(group_name)
        desc = '%s group' % group_name

        # skip folders that are now removed repos
        if REMOVED_REPO_PAT.match(group_name):
            break

        if group is None:
            log.debug('creating group level: %s group_name: %s',
                      lvl, group_name)
            group = RepoGroup(group_name, parent)
            group.group_description = desc
            group.user = owner
            sa.add(group)
            perm_obj = rgm._create_default_perms(group)
            sa.add(perm_obj)
            sa.flush()

        parent = group
    return group


def repo2db_mapper(initial_repo_list, remove_obsolete=False):
    """
    Maps all repos given in initial_repo_list; non-existing repositories
    are created. If remove_obsolete is True it also checks for db entries
    that are not in initial_repo_list and removes them.

    :param initial_repo_list: list of repositories found by scanning methods
    :param remove_obsolete: check for obsolete entries in database
    """
    from rhodecode.model.repo import RepoModel
    from rhodecode.model.scm import ScmModel
    from rhodecode.model.repo_group import RepoGroupModel
    from rhodecode.model.settings import SettingsModel

    sa = meta.Session()
    repo_model = RepoModel()
    user = User.get_first_super_admin()
    added = []

    # creation defaults
    defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
    enable_statistics = defs.get('repo_enable_statistics')
    enable_locking = defs.get('repo_enable_locking')
    enable_downloads = defs.get('repo_enable_downloads')
    private = defs.get('repo_private')

    for name, repo in initial_repo_list.items():
        group = map_groups(name)
        unicode_name = safe_unicode(name)
        db_repo = repo_model.get_by_repo_name(unicode_name)
        # found repo that is on filesystem not in RhodeCode database
        if not db_repo:
            log.info('repository %s not found, creating now', name)
            added.append(name)
            desc = (repo.description
                    if repo.description != 'unknown'
                    else '%s repository' % name)

            db_repo = repo_model._create_repo(
                repo_name=name,
                repo_type=repo.alias,
                description=desc,
                repo_group=getattr(group, 'group_id', None),
                owner=user,
                enable_locking=enable_locking,
                enable_downloads=enable_downloads,
                enable_statistics=enable_statistics,
                private=private,
                state=Repository.STATE_CREATED
            )
            sa.commit()
            # we added that repo just now, and make sure we updated server info
            if db_repo.repo_type == 'git':
                git_repo = db_repo.scm_instance()
                # update repository server-info
                log.debug('Running update server info')
                git_repo._update_server_info()

            db_repo.update_commit_cache()

        config = db_repo._config
        config.set('extensions', 'largefiles', '')
        ScmModel().install_hooks(
            db_repo.scm_instance(config=config),
            repo_type=db_repo.repo_type)

    removed = []
    if remove_obsolete:
        # remove from database those repositories that are not in the filesystem
        for repo in sa.query(Repository).all():
            if repo.repo_name not in initial_repo_list.keys():
                log.debug("Removing non-existing repository found in db `%s`",
                          repo.repo_name)
                try:
                    RepoModel(sa).delete(repo, forks='detach', fs_remove=False)
                    sa.commit()
                    removed.append(repo.repo_name)
                except Exception:
                    # don't hold further removals on error
                    log.error(traceback.format_exc())
                    sa.rollback()

    def splitter(full_repo_name):
        _parts = full_repo_name.rsplit(RepoGroup.url_sep(), 1)
        gr_name = None
        if len(_parts) == 2:
            gr_name = _parts[0]
        return gr_name

    initial_repo_group_list = [splitter(x) for x in
                               initial_repo_list.keys() if splitter(x)]

    # remove from database those repository groups that are not in the
    # filesystem due to parent child relationships we need to delete them
    # in a specific order of most nested first
    all_groups = [x.group_name for x in sa.query(RepoGroup).all()]
    nested_sort = lambda gr: len(gr.split('/'))
    for group_name in sorted(all_groups, key=nested_sort, reverse=True):
        if group_name not in initial_repo_group_list:
            repo_group = RepoGroup.get_by_group_name(group_name)
            if (repo_group.children.all() or
                    not RepoGroupModel().check_exist_filesystem(
                        group_name=group_name, exc_on_failure=False)):
                continue

            log.info(
                'Removing non-existing repository group found in db `%s`',
                group_name)
            try:
                RepoGroupModel(sa).delete(group_name, fs_remove=False)
                sa.commit()
                removed.append(group_name)
            except Exception:
                # don't hold further removals on error
                log.exception(
                    'Unable to remove repository group `%s`',
                    group_name)
                sa.rollback()
                raise

    return added, removed


def get_default_cache_settings(settings):
    cache_settings = {}
    for key in settings.keys():
        for prefix in ['beaker.cache.', 'cache.']:
            if key.startswith(prefix):
                name = key.split(prefix)[1].strip()
                cache_settings[name] = settings[key].strip()
    return cache_settings


# set cache regions for beaker so celery can utilise it
def add_cache(settings):
    from rhodecode.lib import caches
    cache_settings = {'regions': None}
    # main cache settings used as default ...
    cache_settings.update(get_default_cache_settings(settings))

    if cache_settings['regions']:
        for region in cache_settings['regions'].split(','):
            region = region.strip()
            region_settings = {}
            for key, value in cache_settings.items():
                if key.startswith(region):
                    region_settings[key.split('.')[1]] = value

            caches.configure_cache_region(
                region, region_settings, cache_settings)


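# Illustrative sketch, not part of the original module: how the two cache
# helpers above interpret beaker settings from the .ini file (keys and
# values below are hypothetical).
#
#   >>> settings = {
#   ...     'beaker.cache.regions': 'long_term',
#   ...     'beaker.cache.long_term.type': 'memory',
#   ...     'beaker.cache.long_term.expire': '36000',
#   ... }
#   >>> get_default_cache_settings(settings)
#   {'regions': 'long_term',
#    'long_term.type': 'memory',
#    'long_term.expire': '36000'}
#
#   add_cache(settings) would then register a beaker cache region named
#   'long_term' configured with {'type': 'memory', 'expire': '36000'}.

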
def load_rcextensions(root_path):
    import rhodecode
    from rhodecode.config import conf

    path = os.path.join(root_path, 'rcextensions', '__init__.py')
    if os.path.isfile(path):
        rcext = create_module('rc', path)
        EXT = rhodecode.EXTENSIONS = rcext
        log.debug('Found rcextensions now loading %s...', rcext)

        # Additional mappings that are not present in the pygments lexers
        conf.LANGUAGES_EXTENSIONS_MAP.update(getattr(EXT, 'EXTRA_MAPPINGS', {}))

        # auto check if the module is not missing any data, set to default if is
        # this will help autoupdate new feature of rcext module
        #from rhodecode.config import rcextensions
        #for k in dir(rcextensions):
        #    if not k.startswith('_') and not hasattr(EXT, k):
        #        setattr(EXT, k, getattr(rcextensions, k))


def get_custom_lexer(extension):
    """
    returns a custom lexer if it is defined in rcextensions module, or None
    if there's no custom lexer defined
    """
    import rhodecode
    from pygments import lexers
    # check if we didn't define this extension as other lexer
    extensions = rhodecode.EXTENSIONS and getattr(rhodecode.EXTENSIONS, 'EXTRA_LEXERS', None)
    if extensions and extension in rhodecode.EXTENSIONS.EXTRA_LEXERS:
        _lexer_name = rhodecode.EXTENSIONS.EXTRA_LEXERS[extension]
        return lexers.get_lexer_by_name(_lexer_name)


#==============================================================================
# TEST FUNCTIONS AND CREATORS
#==============================================================================
def create_test_index(repo_location, config):
    """
    Makes default test index.
    """
    import rc_testdata

    rc_testdata.extract_search_index(
        'vcs_search_index', os.path.dirname(config['search.location']))


def create_test_directory(test_path):
    """
    Create test directory if it doesn't exist.
    """
    if not os.path.isdir(test_path):
        log.debug('Creating testdir %s', test_path)
        os.makedirs(test_path)


def create_test_database(test_path, config):
    """
    Makes a fresh database.
    """
    from rhodecode.lib.db_manage import DbManage

    # PART ONE create db
    dbconf = config['sqlalchemy.db1.url']
    log.debug('making test db %s', dbconf)

    dbmanage = DbManage(log_sql=False, dbconf=dbconf, root=config['here'],
                        tests=True, cli_args={'force_ask': True})
    dbmanage.create_tables(override=True)
    dbmanage.set_db_version()
    # for tests dynamically set new root paths based on generated content
    dbmanage.create_settings(dbmanage.config_prompt(test_path))
    dbmanage.create_default_user()
    dbmanage.create_test_admin_and_users()
    dbmanage.create_permissions()
    dbmanage.populate_default_permissions()
    Session().commit()


def create_test_repositories(test_path, config):
    """
    Creates test repositories in the temporary directory. Repositories are
    extracted from archives within the rc_testdata package.
    """
    import rc_testdata
    from rhodecode.tests import HG_REPO, GIT_REPO, SVN_REPO

    log.debug('making test vcs repositories')

    idx_path = config['search.location']
    data_path = config['cache_dir']

    # clean index and data
    if idx_path and os.path.exists(idx_path):
        log.debug('remove %s', idx_path)
        shutil.rmtree(idx_path)

    if data_path and os.path.exists(data_path):
        log.debug('remove %s', data_path)
        shutil.rmtree(data_path)

    rc_testdata.extract_hg_dump('vcs_test_hg', jn(test_path, HG_REPO))
    rc_testdata.extract_git_dump('vcs_test_git', jn(test_path, GIT_REPO))

    # Note: Subversion is in the process of being integrated with the system,
    # until we have a properly packed version of the test svn repository, this
    # tries to copy over the repo from a package "rc_testdata"
    svn_repo_path = rc_testdata.get_svn_repo_archive()
    with tarfile.open(svn_repo_path) as tar:
        tar.extractall(jn(test_path, SVN_REPO))


#==============================================================================
# PASTER COMMANDS
#==============================================================================
class BasePasterCommand(Command):
    """
    Abstract Base Class for paster commands.

    The celery commands are somewhat aggressive about loading
    celery.conf, and since our module sets the `CELERY_LOADER`
    environment variable to our loader, we have to bootstrap a bit and
    make sure we've had a chance to load the pylons config off of the
    command line, otherwise everything fails.
    """
    min_args = 1
    min_args_error = "Please provide a paster config file as an argument."
    takes_config_file = 1
    requires_config_file = True

    def notify_msg(self, msg, log=False):
        """Make a notification to user, additionally if logger is passed
        it logs this action using given logger

        :param msg: message that will be printed to user
        :param log: logging instance, to use to additionally log this message

        """
        if log and isinstance(log, logging):
            log(msg)

    def run(self, args):
        """
        Overrides Command.run

        Checks for a config file argument and loads it.
        """
        if len(args) < self.min_args:
            raise BadCommand(
                self.min_args_error % {'min_args': self.min_args,
                                       'actual_args': len(args)})

        # Decrement because we're going to lob off the first argument.
        # @@ This is hacky
        self.min_args -= 1
        self.bootstrap_config(args[0])
        self.update_parser()
        return super(BasePasterCommand, self).run(args[1:])

    def update_parser(self):
        """
        Abstract method. Allows for the class' parser to be updated
        before the superclass' `run` method is called. Necessary to
        allow options/arguments to be passed through to the underlying
        celery command.
        """
        raise NotImplementedError("Abstract Method.")

    def bootstrap_config(self, conf):
        """
        Loads the pylons configuration.
        """
        from pylons import config as pylonsconfig

        self.path_to_ini_file = os.path.realpath(conf)
        conf = paste.deploy.appconfig('config:' + self.path_to_ini_file)
        pylonsconfig.init_app(conf.global_conf, conf.local_conf)

    def _init_session(self):
        """
        Inits SqlAlchemy Session
        """
        logging.config.fileConfig(self.path_to_ini_file)
        from pylons import config
        from rhodecode.config.utils import initialize_database

        # get to remove repos !!
        add_cache(config)
        initialize_database(config)


@decorator.decorator
def jsonify(func, *args, **kwargs):
    """Action decorator that formats output for JSON

    Given a function that will return content, this decorator will turn
    the result into JSON, with a content-type of 'application/json' and
    output it.

    """
    from pylons.decorators.util import get_pylons
    from rhodecode.lib.ext_json import json
    pylons = get_pylons(args)
    pylons.response.headers['Content-Type'] = 'application/json; charset=utf-8'
    data = func(*args, **kwargs)
    if isinstance(data, (list, tuple)):
        msg = "JSON responses with Array envelopes are susceptible to " \
              "cross-site data leak attacks, see " \
              "http://wiki.pylonshq.com/display/pylonsfaq/Warnings"
        warnings.warn(msg, Warning, 2)
        log.warning(msg)
    log.debug("Returning JSON wrapped action output")
    return json.dumps(data, encoding='utf-8')


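# A minimal sketch (not part of the original module) of how ``jsonify`` is
# applied to a pylons-style controller action; the controller class and the
# payload it returns are hypothetical. Returning a dict instead of a list
# avoids the Array-envelope warning emitted above.
class _ExampleReposController(object):

    @jsonify
    def repo_summary(self):
        # the returned dict is serialized to JSON and the response
        # Content-Type is set to application/json by the decorator
        return {'repos': 10, 'followers': 3}

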
class PartialRenderer(object):
    """
    Partial renderer used to render chunks of html used in datagrids.
    Use it like::

        _render = PartialRenderer('data_table/_dt_elements.mako')
        _render('quick_menu', args, kwargs)

    The attributes PartialRenderer.h, c, _ and ungettext are the template
    globals initialized inside the renderer and can be re-used later.

    :param tmpl_name: template path relative to the /templates/ dir
    """

    def __init__(self, tmpl_name):
        import rhodecode
        from pylons import request, tmpl_context as c
        from pylons.i18n.translation import _, ungettext
        from rhodecode.lib import helpers as h

        self.tmpl_name = tmpl_name
        self.rhodecode = rhodecode
        self.c = c
        self._ = _
        self.ungettext = ungettext
        self.h = h
        self.request = request

    def _mako_lookup(self):
        _tmpl_lookup = self.rhodecode.CONFIG['pylons.app_globals'].mako_lookup
        return _tmpl_lookup.get_template(self.tmpl_name)

    def _update_kwargs_for_render(self, kwargs):
        """
        Inject params required for Mako rendering
        """
        _kwargs = {
            '_': self._,
            'h': self.h,
            'c': self.c,
            'request': self.request,
            'ungettext': self.ungettext,
        }
        _kwargs.update(kwargs)
        return _kwargs

    def _render_with_exc(self, render_func, args, kwargs):
        try:
            return render_func.render(*args, **kwargs)
        except:
            log.error(exceptions.text_error_template().render())
            raise

    def _get_template(self, template_obj, def_name):
        if def_name:
            tmpl = template_obj.get_def(def_name)
        else:
            tmpl = template_obj
        return tmpl

    def render(self, def_name, *args, **kwargs):
        lookup_obj = self._mako_lookup()
        tmpl = self._get_template(lookup_obj, def_name=def_name)
        kwargs = self._update_kwargs_for_render(kwargs)
        return self._render_with_exc(tmpl, args, kwargs)

    def __call__(self, tmpl, *args, **kwargs):
        return self.render(tmpl, *args, **kwargs)


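# A minimal sketch (not part of the original module) of rendering a single
# datagrid cell with the partial renderer above; the ``user_name`` def and
# its arguments are hypothetical and must exist as a ``<%def>`` inside the
# referenced mako template.
def _example_render_user_cell(user_id, username):
    _render = PartialRenderer('data_table/_dt_elements.mako')
    # positional and keyword arguments are forwarded to the named template def
    return _render('user_name', user_id, username)

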
def password_changed(auth_user, session):
    # Never report password change in case of default user or anonymous user.
    if auth_user.username == User.DEFAULT_USER or auth_user.user_id is None:
        return False

    password_hash = md5(auth_user.password) if auth_user.password else None
    rhodecode_user = session.get('rhodecode_user', {})
    session_password_hash = rhodecode_user.get('password', '')
    return password_hash != session_password_hash


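# A minimal sketch (not part of the original module): ``password_changed``
# compares the hash of the currently loaded user password with the hash kept
# in the session, which is typically used to force a re-login after a
# password change. The session invalidation below is illustrative only.
def _example_force_relogin_if_needed(auth_user, session):
    if password_changed(auth_user, session):
        # dropping the session forces the user to authenticate again
        session.invalidate()
        return True
    return False

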
def read_opensource_licenses():
    global _license_cache

    if not _license_cache:
        licenses = pkg_resources.resource_string(
            'rhodecode', 'config/licenses.json')
        _license_cache = json.loads(licenses)

    return _license_cache


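# A minimal sketch (not part of the original module): the parsed license
# metadata is cached in ``_license_cache``, so only the first call pays the
# JSON parsing cost. Nothing is assumed here about the layout of
# config/licenses.json beyond it being a JSON container.
def _example_license_count():
    return len(read_opensource_licenses())

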
def get_registry(request):
    """
    Utility to get the pyramid registry from a request. During migration to
    pyramid we sometimes want to use the pyramid registry from pylons context.
    Therefore this utility returns `request.registry` for pyramid requests and
    uses `get_current_registry()` for pylons requests.
    """
    try:
        return request.registry
    except AttributeError:
        return get_current_registry()


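# A minimal sketch (not part of the original module): the helper above hides
# whether the request came through pyramid or pylons, so application settings
# can be read the same way in both cases. In a configured application
# ``registry.settings`` holds the .ini settings; the key used below is
# hypothetical.
def _example_read_setting(request):
    registry = get_registry(request)
    return registry.settings.get('example.feature.enabled', False)

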
def generate_platform_uuid():
    """
    Generates a platform UUID based on its name.
    """
    import platform

    try:
        uuid_list = [platform.platform()]
        return hashlib.sha256(':'.join(uuid_list)).hexdigest()
    except Exception as e:
        log.error('Failed to generate host uuid: %s' % e)
        return 'UNDEFINED'
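

# A minimal sketch (not part of the original module): the platform uuid is a
# stable sha256 fingerprint of ``platform.platform()``, so it can safely be
# attached to logs or usage reports to identify the host type. The log
# message below is illustrative.
def _example_log_platform_uuid():
    platform_uuid = generate_platform_uuid()
    log.debug('running on platform with uuid: %s', platform_uuid)
    return platform_uuid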