files: use html+mako lexer for .mako files.
marcink
r1592:da8c8187 default
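The change below swaps the plain Mako lexer for Pygments' combined HTML+Mako lexer when highlighting .mako files. A minimal sketch of the difference (assuming Pygments is installed; the template string here is a made-up illustration, not part of this commit):

    from pygments import highlight
    from pygments.formatters import HtmlFormatter
    from pygments.lexers import get_lexer_by_name

    template = '<ul>\n% for item in items:\n  <li>${item}</li>\n% endfor\n</ul>\n'

    # 'html+mako' highlights the surrounding HTML markup as well as the
    # embedded Mako directives; the previous 'Mako' lexer only covered
    # the Mako syntax itself.
    print(highlight(template, get_lexer_by_name('html+mako'), HtmlFormatter()))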
@@ -1,1042 +1,1042 @@
# -*- coding: utf-8 -*-

# Copyright (C) 2010-2017 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

"""
Utilities library for RhodeCode
"""

import datetime
import decorator
import json
import logging
import os
import re
import shutil
import tempfile
import traceback
import tarfile
import warnings
import hashlib
from os.path import join as jn

import paste
import pkg_resources
from paste.script.command import Command, BadCommand
from webhelpers.text import collapse, remove_formatting, strip_tags
from mako import exceptions
from pyramid.threadlocal import get_current_registry
from pyramid.request import Request

from rhodecode.lib.fakemod import create_module
from rhodecode.lib.vcs.backends.base import Config
from rhodecode.lib.vcs.exceptions import VCSError
from rhodecode.lib.vcs.utils.helpers import get_scm, get_scm_backend
from rhodecode.lib.utils2 import (
    safe_str, safe_unicode, get_current_rhodecode_user, md5)
from rhodecode.model import meta
from rhodecode.model.db import (
    Repository, User, RhodeCodeUi, UserLog, RepoGroup, UserGroup)
from rhodecode.model.meta import Session


log = logging.getLogger(__name__)

REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')

# String which contains characters that are not allowed in slug names for
# repositories or repository groups. It is properly escaped to use it in
# regular expressions.
SLUG_BAD_CHARS = re.escape('`?=[]\;\'"<>,/~!@#$%^&*()+{}|:')

# Regex that matches forbidden characters in repo/group slugs.
SLUG_BAD_CHAR_RE = re.compile('[{}]'.format(SLUG_BAD_CHARS))

# Regex that matches allowed characters in repo/group slugs.
SLUG_GOOD_CHAR_RE = re.compile('[^{}]'.format(SLUG_BAD_CHARS))

# Regex that matches whole repo/group slugs.
SLUG_RE = re.compile('[^{}]+'.format(SLUG_BAD_CHARS))

_license_cache = None

def repo_name_slug(value):
    """
    Return slug of name of repository
    This function is called on each creation/modification
    of repository to prevent bad names in repo
    """
    replacement_char = '-'

    slug = remove_formatting(value)
    slug = SLUG_BAD_CHAR_RE.sub('', slug)
    slug = re.sub('[\s]+', '-', slug)
    slug = collapse(slug, replacement_char)
    return slug

#==============================================================================
# PERM DECORATOR HELPERS FOR EXTRACTING NAMES FOR PERM CHECKS
#==============================================================================
def get_repo_slug(request):
    if isinstance(request, Request) and getattr(request, 'matchdict', None):
        # pyramid
        _repo = request.matchdict.get('repo_name')
    else:
        _repo = request.environ['pylons.routes_dict'].get('repo_name')

    if _repo:
        _repo = _repo.rstrip('/')
    return _repo


def get_repo_group_slug(request):
    if isinstance(request, Request) and getattr(request, 'matchdict', None):
        # pyramid
        _group = request.matchdict.get('group_name')
    else:
        _group = request.environ['pylons.routes_dict'].get('group_name')

    if _group:
        _group = _group.rstrip('/')
    return _group


def get_user_group_slug(request):
    if isinstance(request, Request) and getattr(request, 'matchdict', None):
        # pyramid
        _group = request.matchdict.get('user_group_id')
    else:
        _group = request.environ['pylons.routes_dict'].get('user_group_id')

    try:
        _group = UserGroup.get(_group)
        if _group:
            _group = _group.users_group_name
    except Exception:
        log.debug(traceback.format_exc())
        # catch all failures here
        pass

    return _group

def action_logger(user, action, repo, ipaddr='', sa=None, commit=False):
    """
    Action logger for various actions made by users

    :param user: user that made this action, can be a unique username string or
        object containing user_id attribute
    :param action: action to log, should be one of predefined unique actions for
        easy translations
    :param repo: string name of repository or object containing repo_id,
        that action was made on
    :param ipaddr: optional ip address from which the action was made
    :param sa: optional sqlalchemy session

    """

    if not sa:
        sa = meta.Session()
    # if we don't get explicit IP address try to get one from registered user
    # in tmpl context var
    if not ipaddr:
        ipaddr = getattr(get_current_rhodecode_user(), 'ip_addr', '')

    try:
        if getattr(user, 'user_id', None):
            user_obj = User.get(user.user_id)
        elif isinstance(user, basestring):
            user_obj = User.get_by_username(user)
        else:
            raise Exception('You have to provide a user object or a username')

        if getattr(repo, 'repo_id', None):
            repo_obj = Repository.get(repo.repo_id)
            repo_name = repo_obj.repo_name
        elif isinstance(repo, basestring):
            repo_name = repo.lstrip('/')
            repo_obj = Repository.get_by_repo_name(repo_name)
        else:
            repo_obj = None
            repo_name = ''

        user_log = UserLog()
        user_log.user_id = user_obj.user_id
        user_log.username = user_obj.username
        action = safe_unicode(action)
        user_log.action = action[:1200000]

        user_log.repository = repo_obj
        user_log.repository_name = repo_name

        user_log.action_date = datetime.datetime.now()
        user_log.user_ip = ipaddr
        sa.add(user_log)

        log.info('Logging action:`%s` on repo:`%s` by user:%s ip:%s',
                 action, safe_unicode(repo), user_obj, ipaddr)
        if commit:
            sa.commit()
    except Exception:
        log.error(traceback.format_exc())
        raise

def get_filesystem_repos(path, recursive=False, skip_removed_repos=True):
    """
    Scans given path for repos and return (name,(type,path)) tuple

    :param path: path to scan for repositories
    :param recursive: recursive search and return names with subdirs in front
    """

    # remove ending slash for better results
    path = path.rstrip(os.sep)
    log.debug('now scanning in %s location recursive:%s...', path, recursive)

    def _get_repos(p):
        dirpaths = _get_dirpaths(p)
        if not _is_dir_writable(p):
            log.warning('repo path without write access: %s', p)

        for dirpath in dirpaths:
            if os.path.isfile(os.path.join(p, dirpath)):
                continue
            cur_path = os.path.join(p, dirpath)

            # skip removed repos
            if skip_removed_repos and REMOVED_REPO_PAT.match(dirpath):
                continue

            # skip .<something> dirs
            if dirpath.startswith('.'):
                continue

            try:
                scm_info = get_scm(cur_path)
                yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
            except VCSError:
                if not recursive:
                    continue
                # check if this dir contains other repos for recursive scan
                rec_path = os.path.join(p, dirpath)
                if os.path.isdir(rec_path):
                    for inner_scm in _get_repos(rec_path):
                        yield inner_scm

    return _get_repos(path)

def _get_dirpaths(p):
    try:
        # OS-independent way of checking if we have at least read-only
        # access or not.
        dirpaths = os.listdir(p)
    except OSError:
        log.warning('ignoring repo path without read access: %s', p)
        return []

    # os.listdir has a quirk: if a unicode is passed into it, then it tries to
    # decode paths and suddenly returns unicode objects itself. The items it
    # cannot decode are returned as strings and cause issues.
    #
    # Those paths are ignored here until a solid solution for path handling has
    # been built.
    expected_type = type(p)

    def _has_correct_type(item):
        if type(item) is not expected_type:
            log.error(
                u"Ignoring path %s since it cannot be decoded into unicode.",
                # Using "repr" to make sure that we see the byte value in case
                # of support.
                repr(item))
            return False
        return True

    dirpaths = [item for item in dirpaths if _has_correct_type(item)]

    return dirpaths

def _is_dir_writable(path):
    """
    Probe if `path` is writable.

    Due to trouble on Cygwin / Windows, this is actually probing if it is
    possible to create a file inside of `path`, stat does not produce reliable
    results in this case.
    """
    try:
        with tempfile.TemporaryFile(dir=path):
            pass
    except OSError:
        return False
    return True

def is_valid_repo(repo_name, base_path, expect_scm=None, explicit_scm=None):
    """
    Returns True if given path is a valid repository, False otherwise.
    If expect_scm param is given also, compare if given scm is the same
    as expected from scm parameter. If explicit_scm is given don't try to
    detect the scm, just use the given one to check if repo is valid

    :param repo_name:
    :param base_path:
    :param expect_scm:
    :param explicit_scm:

    :return True: if given path is a valid repository
    """
    full_path = os.path.join(safe_str(base_path), safe_str(repo_name))
    log.debug('Checking if `%s` is a valid path for repository. '
              'Explicit type: %s', repo_name, explicit_scm)

    try:
        if explicit_scm:
            detected_scms = [get_scm_backend(explicit_scm)]
        else:
            detected_scms = get_scm(full_path)

        if expect_scm:
            return detected_scms[0] == expect_scm
        log.debug('path: %s is an vcs object:%s', full_path, detected_scms)
        return True
    except VCSError:
        log.debug('path: %s is not a valid repo !', full_path)
        return False

def is_valid_repo_group(repo_group_name, base_path, skip_path_check=False):
    """
    Returns True if given path is a repository group, False otherwise

    :param repo_group_name:
    :param base_path:
    """
    full_path = os.path.join(safe_str(base_path), safe_str(repo_group_name))
    log.debug('Checking if `%s` is a valid path for repository group',
              repo_group_name)

    # check if it's not a repo
    if is_valid_repo(repo_group_name, base_path):
        log.debug('Repo called %s exist, it is not a valid '
                  'repo group' % repo_group_name)
        return False

    try:
        # we need to check bare git repos at higher level
        # since we might match branches/hooks/info/objects or possible
        # other things inside bare git repo
        scm_ = get_scm(os.path.dirname(full_path))
        log.debug('path: %s is a vcs object:%s, not valid '
                  'repo group' % (full_path, scm_))
        return False
    except VCSError:
        pass

    # check if it's a valid path
    if skip_path_check or os.path.isdir(full_path):
        log.debug('path: %s is a valid repo group !', full_path)
        return True

    log.debug('path: %s is not a valid repo group !', full_path)
    return False

def ask_ok(prompt, retries=4, complaint='[y]es or [n]o please!'):
    while True:
        ok = raw_input(prompt)
        if ok.lower() in ('y', 'ye', 'yes'):
            return True
        if ok.lower() in ('n', 'no', 'nop', 'nope'):
            return False
        retries = retries - 1
        if retries < 0:
            raise IOError
        print(complaint)

# propagated from mercurial documentation
ui_sections = [
    'alias', 'auth',
    'decode/encode', 'defaults',
    'diff', 'email',
    'extensions', 'format',
    'merge-patterns', 'merge-tools',
    'hooks', 'http_proxy',
    'smtp', 'patch',
    'paths', 'profiling',
    'server', 'trusted',
    'ui', 'web', ]

def config_data_from_db(clear_session=True, repo=None):
    """
    Read the configuration data from the database and return configuration
    tuples.
    """
    from rhodecode.model.settings import VcsSettingsModel

    config = []

    sa = meta.Session()
    settings_model = VcsSettingsModel(repo=repo, sa=sa)

    ui_settings = settings_model.get_ui_settings()

    for setting in ui_settings:
        if setting.active:
            log.debug(
                'settings ui from db: [%s] %s=%s',
                setting.section, setting.key, setting.value)
            config.append((
                safe_str(setting.section), safe_str(setting.key),
                safe_str(setting.value)))
            if setting.key == 'push_ssl':
                # force set push_ssl requirement to False, rhodecode
                # handles that
                config.append((
                    safe_str(setting.section), safe_str(setting.key), False))
    if clear_session:
        meta.Session.remove()

    # TODO: mikhail: probably it makes no sense to re-read hooks information.
    # It's already there and activated/deactivated
    skip_entries = []
    enabled_hook_classes = get_enabled_hook_classes(ui_settings)
    if 'pull' not in enabled_hook_classes:
        skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PULL))
    if 'push' not in enabled_hook_classes:
        skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PUSH))
        skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRETX_PUSH))

    config = [entry for entry in config if entry[:2] not in skip_entries]

    return config

def make_db_config(clear_session=True, repo=None):
    """
    Create a :class:`Config` instance based on the values in the database.
    """
    config = Config()
    config_data = config_data_from_db(clear_session=clear_session, repo=repo)
    for section, option, value in config_data:
        config.set(section, option, value)
    return config


def get_enabled_hook_classes(ui_settings):
    """
    Return the enabled hook classes.

    :param ui_settings: List of ui_settings as returned
        by :meth:`VcsSettingsModel.get_ui_settings`

    :return: a list with the enabled hook classes. The order is not guaranteed.
    :rtype: list
    """
    enabled_hooks = []
    active_hook_keys = [
        key for section, key, value, active in ui_settings
        if section == 'hooks' and active]

    hook_names = {
        RhodeCodeUi.HOOK_PUSH: 'push',
        RhodeCodeUi.HOOK_PULL: 'pull',
        RhodeCodeUi.HOOK_REPO_SIZE: 'repo_size'
    }

    for key in active_hook_keys:
        hook = hook_names.get(key)
        if hook:
            enabled_hooks.append(hook)

    return enabled_hooks

def set_rhodecode_config(config):
    """
    Updates pylons config with new settings from database

    :param config:
    """
    from rhodecode.model.settings import SettingsModel
    app_settings = SettingsModel().get_all_settings()

    for k, v in app_settings.items():
        config[k] = v


def get_rhodecode_realm():
    """
    Return the rhodecode realm from database.
    """
    from rhodecode.model.settings import SettingsModel
    realm = SettingsModel().get_setting_by_name('realm')
    return safe_str(realm.app_settings_value)


def get_rhodecode_base_path():
    """
    Returns the base path. The base path is the filesystem path which points
    to the repository store.
    """
    from rhodecode.model.settings import SettingsModel
    paths_ui = SettingsModel().get_ui_by_section_and_key('paths', '/')
    return safe_str(paths_ui.ui_value)

def map_groups(path):
    """
    Given a full path to a repository, create all nested groups that this
    repo is inside. This function creates parent-child relationships between
    groups and creates default perms for all new groups.

    :param path: full path to repository
    """
    from rhodecode.model.repo_group import RepoGroupModel
    sa = meta.Session()
    groups = path.split(Repository.NAME_SEP)
    parent = None
    group = None

    # last element is repo in nested groups structure
    groups = groups[:-1]
    rgm = RepoGroupModel(sa)
    owner = User.get_first_super_admin()
    for lvl, group_name in enumerate(groups):
        group_name = '/'.join(groups[:lvl] + [group_name])
        group = RepoGroup.get_by_group_name(group_name)
        desc = '%s group' % group_name

        # skip folders that are now removed repos
        if REMOVED_REPO_PAT.match(group_name):
            break

        if group is None:
            log.debug('creating group level: %s group_name: %s',
                      lvl, group_name)
            group = RepoGroup(group_name, parent)
            group.group_description = desc
            group.user = owner
            sa.add(group)
            perm_obj = rgm._create_default_perms(group)
            sa.add(perm_obj)
            sa.flush()

        parent = group
    return group

def repo2db_mapper(initial_repo_list, remove_obsolete=False):
    """
    maps all repos given in initial_repo_list, non-existing repositories
    are created, if remove_obsolete is True it also checks for db entries
    that are not in initial_repo_list and removes them.

    :param initial_repo_list: list of repositories found by scanning methods
    :param remove_obsolete: check for obsolete entries in database
    """
    from rhodecode.model.repo import RepoModel
    from rhodecode.model.scm import ScmModel
    from rhodecode.model.repo_group import RepoGroupModel
    from rhodecode.model.settings import SettingsModel

    sa = meta.Session()
    repo_model = RepoModel()
    user = User.get_first_super_admin()
    added = []

    # creation defaults
    defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
    enable_statistics = defs.get('repo_enable_statistics')
    enable_locking = defs.get('repo_enable_locking')
    enable_downloads = defs.get('repo_enable_downloads')
    private = defs.get('repo_private')

    for name, repo in initial_repo_list.items():
        group = map_groups(name)
        unicode_name = safe_unicode(name)
        db_repo = repo_model.get_by_repo_name(unicode_name)
        # found repo that is on filesystem not in RhodeCode database
        if not db_repo:
            log.info('repository %s not found, creating now', name)
            added.append(name)
            desc = (repo.description
                    if repo.description != 'unknown'
                    else '%s repository' % name)

            db_repo = repo_model._create_repo(
                repo_name=name,
                repo_type=repo.alias,
                description=desc,
                repo_group=getattr(group, 'group_id', None),
                owner=user,
                enable_locking=enable_locking,
                enable_downloads=enable_downloads,
                enable_statistics=enable_statistics,
                private=private,
                state=Repository.STATE_CREATED
            )
            sa.commit()
            # we added that repo just now, and make sure we updated server info
            if db_repo.repo_type == 'git':
                git_repo = db_repo.scm_instance()
                # update repository server-info
                log.debug('Running update server info')
                git_repo._update_server_info()

            db_repo.update_commit_cache()

            config = db_repo._config
            config.set('extensions', 'largefiles', '')
            ScmModel().install_hooks(
                db_repo.scm_instance(config=config),
                repo_type=db_repo.repo_type)

    removed = []
    if remove_obsolete:
        # remove from database those repositories that are not in the filesystem
        for repo in sa.query(Repository).all():
            if repo.repo_name not in initial_repo_list.keys():
                log.debug("Removing non-existing repository found in db `%s`",
                          repo.repo_name)
                try:
                    RepoModel(sa).delete(repo, forks='detach', fs_remove=False)
                    sa.commit()
                    removed.append(repo.repo_name)
                except Exception:
                    # don't hold further removals on error
                    log.error(traceback.format_exc())
                    sa.rollback()

        def splitter(full_repo_name):
            _parts = full_repo_name.rsplit(RepoGroup.url_sep(), 1)
            gr_name = None
            if len(_parts) == 2:
                gr_name = _parts[0]
            return gr_name

        initial_repo_group_list = [splitter(x) for x in
                                   initial_repo_list.keys() if splitter(x)]

        # remove from database those repository groups that are not in the
        # filesystem due to parent child relationships we need to delete them
        # in a specific order of most nested first
        all_groups = [x.group_name for x in sa.query(RepoGroup).all()]
        nested_sort = lambda gr: len(gr.split('/'))
        for group_name in sorted(all_groups, key=nested_sort, reverse=True):
            if group_name not in initial_repo_group_list:
                repo_group = RepoGroup.get_by_group_name(group_name)
                if (repo_group.children.all() or
                        not RepoGroupModel().check_exist_filesystem(
                            group_name=group_name, exc_on_failure=False)):
                    continue

                log.info(
                    'Removing non-existing repository group found in db `%s`',
                    group_name)
                try:
                    RepoGroupModel(sa).delete(group_name, fs_remove=False)
                    sa.commit()
                    removed.append(group_name)
                except Exception:
                    # don't hold further removals on error
                    log.exception(
                        'Unable to remove repository group `%s`',
                        group_name)
                    sa.rollback()
                    raise

    return added, removed

def get_default_cache_settings(settings):
    cache_settings = {}
    for key in settings.keys():
        for prefix in ['beaker.cache.', 'cache.']:
            if key.startswith(prefix):
                name = key.split(prefix)[1].strip()
                cache_settings[name] = settings[key].strip()
    return cache_settings


# set cache regions for beaker so celery can utilise it
def add_cache(settings):
    from rhodecode.lib import caches
    cache_settings = {'regions': None}
    # main cache settings used as default ...
    cache_settings.update(get_default_cache_settings(settings))

    if cache_settings['regions']:
        for region in cache_settings['regions'].split(','):
            region = region.strip()
            region_settings = {}
            for key, value in cache_settings.items():
                if key.startswith(region):
                    region_settings[key.split('.')[1]] = value

            caches.configure_cache_region(
                region, region_settings, cache_settings)

def load_rcextensions(root_path):
    import rhodecode
    from rhodecode.config import conf

    path = os.path.join(root_path, 'rcextensions', '__init__.py')
    if os.path.isfile(path):
        rcext = create_module('rc', path)
        EXT = rhodecode.EXTENSIONS = rcext
        log.debug('Found rcextensions now loading %s...', rcext)

        # Additional mappings that are not present in the pygments lexers
        conf.LANGUAGES_EXTENSIONS_MAP.update(getattr(EXT, 'EXTRA_MAPPINGS', {}))

        # auto check if the module is not missing any data, set to default if is
        # this will help autoupdate new feature of rcext module
        #from rhodecode.config import rcextensions
        #for k in dir(rcextensions):
        #    if not k.startswith('_') and not hasattr(EXT, k):
        #        setattr(EXT, k, getattr(rcextensions, k))

def get_custom_lexer(extension):
    """
    returns a custom lexer if it is defined in rcextensions module, or None
    if there's no custom lexer defined
    """
    import rhodecode
    from pygments import lexers

    # custom override made by RhodeCode
    if extension in ['mako']:
-        return lexers.get_lexer_by_name('Mako')
+        return lexers.get_lexer_by_name('html+mako')

    # check if we didn't define this extension as other lexer
    extensions = rhodecode.EXTENSIONS and getattr(rhodecode.EXTENSIONS, 'EXTRA_LEXERS', None)
    if extensions and extension in rhodecode.EXTENSIONS.EXTRA_LEXERS:
        _lexer_name = rhodecode.EXTENSIONS.EXTRA_LEXERS[extension]
        return lexers.get_lexer_by_name(_lexer_name)

#==============================================================================
# TEST FUNCTIONS AND CREATORS
#==============================================================================
def create_test_index(repo_location, config):
    """
    Makes default test index.
    """
    import rc_testdata

    rc_testdata.extract_search_index(
        'vcs_search_index', os.path.dirname(config['search.location']))


def create_test_directory(test_path):
    """
    Create test directory if it doesn't exist.
    """
    if not os.path.isdir(test_path):
        log.debug('Creating testdir %s', test_path)
        os.makedirs(test_path)


def create_test_database(test_path, config):
    """
    Makes a fresh database.
    """
    from rhodecode.lib.db_manage import DbManage

    # PART ONE create db
    dbconf = config['sqlalchemy.db1.url']
    log.debug('making test db %s', dbconf)

    dbmanage = DbManage(log_sql=False, dbconf=dbconf, root=config['here'],
                        tests=True, cli_args={'force_ask': True})
    dbmanage.create_tables(override=True)
    dbmanage.set_db_version()
    # for tests dynamically set new root paths based on generated content
    dbmanage.create_settings(dbmanage.config_prompt(test_path))
    dbmanage.create_default_user()
    dbmanage.create_test_admin_and_users()
    dbmanage.create_permissions()
    dbmanage.populate_default_permissions()
    Session().commit()

def create_test_repositories(test_path, config):
    """
    Creates test repositories in the temporary directory. Repositories are
    extracted from archives within the rc_testdata package.
    """
    import rc_testdata
    from rhodecode.tests import HG_REPO, GIT_REPO, SVN_REPO

    log.debug('making test vcs repositories')

    idx_path = config['search.location']
    data_path = config['cache_dir']

    # clean index and data
    if idx_path and os.path.exists(idx_path):
        log.debug('remove %s', idx_path)
        shutil.rmtree(idx_path)

    if data_path and os.path.exists(data_path):
        log.debug('remove %s', data_path)
        shutil.rmtree(data_path)

    rc_testdata.extract_hg_dump('vcs_test_hg', jn(test_path, HG_REPO))
    rc_testdata.extract_git_dump('vcs_test_git', jn(test_path, GIT_REPO))

    # Note: Subversion is in the process of being integrated with the system,
    # until we have a properly packed version of the test svn repository, this
    # tries to copy over the repo from a package "rc_testdata"
    svn_repo_path = rc_testdata.get_svn_repo_archive()
    with tarfile.open(svn_repo_path) as tar:
        tar.extractall(jn(test_path, SVN_REPO))

821 #==============================================================================
821 #==============================================================================
822 # PASTER COMMANDS
822 # PASTER COMMANDS
823 #==============================================================================
823 #==============================================================================
824 class BasePasterCommand(Command):
824 class BasePasterCommand(Command):
825 """
825 """
826 Abstract Base Class for paster commands.
826 Abstract Base Class for paster commands.
827
827
828 The celery commands are somewhat aggressive about loading
828 The celery commands are somewhat aggressive about loading
829 celery.conf, and since our module sets the `CELERY_LOADER`
829 celery.conf, and since our module sets the `CELERY_LOADER`
830 environment variable to our loader, we have to bootstrap a bit and
830 environment variable to our loader, we have to bootstrap a bit and
831 make sure we've had a chance to load the pylons config off of the
831 make sure we've had a chance to load the pylons config off of the
832 command line, otherwise everything fails.
832 command line, otherwise everything fails.
833 """
833 """
834 min_args = 1
834 min_args = 1
835 min_args_error = "Please provide a paster config file as an argument."
835 min_args_error = "Please provide a paster config file as an argument."
836 takes_config_file = 1
836 takes_config_file = 1
837 requires_config_file = True
837 requires_config_file = True
838
838
839 def notify_msg(self, msg, log=False):
839 def notify_msg(self, msg, log=False):
840 """Make a notification to user, additionally if logger is passed
840 """Make a notification to user, additionally if logger is passed
841 it logs this action using given logger
841 it logs this action using given logger
842
842
843 :param msg: message that will be printed to user
843 :param msg: message that will be printed to user
844 :param log: logging instance, to use to additionally log this message
844 :param log: logging instance, to use to additionally log this message
845
845
846 """
846 """
847 if log and isinstance(log, logging):
847 if log and isinstance(log, logging):
848 log(msg)
848 log(msg)
849
849
850 def run(self, args):
850 def run(self, args):
851 """
851 """
852 Overrides Command.run
852 Overrides Command.run
853
853
854 Checks for a config file argument and loads it.
854 Checks for a config file argument and loads it.
855 """
855 """
856 if len(args) < self.min_args:
856 if len(args) < self.min_args:
857 raise BadCommand(
857 raise BadCommand(
858 self.min_args_error % {'min_args': self.min_args,
858 self.min_args_error % {'min_args': self.min_args,
859 'actual_args': len(args)})
859 'actual_args': len(args)})
860
860
861 # Decrement because we're going to lob off the first argument.
861 # Decrement because we're going to lob off the first argument.
862 # @@ This is hacky
862 # @@ This is hacky
863 self.min_args -= 1
863 self.min_args -= 1
864 self.bootstrap_config(args[0])
864 self.bootstrap_config(args[0])
865 self.update_parser()
865 self.update_parser()
866 return super(BasePasterCommand, self).run(args[1:])
866 return super(BasePasterCommand, self).run(args[1:])
867
867
868 def update_parser(self):
868 def update_parser(self):
869 """
869 """
870 Abstract method. Allows for the class' parser to be updated
870 Abstract method. Allows for the class' parser to be updated
871 before the superclass' `run` method is called. Necessary to
871 before the superclass' `run` method is called. Necessary to
872 allow options/arguments to be passed through to the underlying
872 allow options/arguments to be passed through to the underlying
873 celery command.
873 celery command.
874 """
874 """
875 raise NotImplementedError("Abstract Method.")
875 raise NotImplementedError("Abstract Method.")

    def bootstrap_config(self, conf):
        """
        Loads the pylons configuration.
        """
        from pylons import config as pylonsconfig

        self.path_to_ini_file = os.path.realpath(conf)
        conf = paste.deploy.appconfig('config:' + self.path_to_ini_file)
        pylonsconfig.init_app(conf.global_conf, conf.local_conf)

    def _init_session(self):
        """
        Initializes the SQLAlchemy session.
        """
        logging.config.fileConfig(self.path_to_ini_file)
        from pylons import config
        from rhodecode.config.utils import initialize_database

        # get to remove repos !!
        add_cache(config)
        initialize_database(config)
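
# A minimal sketch of how a concrete paster command might build on
# BasePasterCommand (illustrative only; the class name and the option below
# are hypothetical and not part of this module). `update_parser` registers
# extra options before the parent `run` parses them, and `command` executes
# once `bootstrap_config` has loaded the .ini file given on the command line.
#
#   class ExampleCommand(BasePasterCommand):
#       summary = "Example command running against a loaded pylons config"
#       parser = Command.standard_parser(quiet=True)
#
#       def update_parser(self):
#           self.parser.add_option(
#               '--dry-run', action='store_true', dest='dry_run',
#               help='print planned actions without executing them')
#
#       def command(self):
#           self._init_session()
#           self.notify_msg('dry_run=%s' % self.options.dry_run)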


@decorator.decorator
def jsonify(func, *args, **kwargs):
    """Action decorator that formats output for JSON

    Given a function that will return content, this decorator will turn
    the result into JSON, with a content-type of 'application/json' and
    output it.

    """
    from pylons.decorators.util import get_pylons
    from rhodecode.lib.ext_json import json
    pylons = get_pylons(args)
    pylons.response.headers['Content-Type'] = 'application/json; charset=utf-8'
    data = func(*args, **kwargs)
    if isinstance(data, (list, tuple)):
        msg = "JSON responses with Array envelopes are susceptible to " \
              "cross-site data leak attacks, see " \
              "http://wiki.pylonshq.com/display/pylonsfaq/Warnings"
        warnings.warn(msg, Warning, 2)
        log.warning(msg)
    log.debug("Returning JSON wrapped action output")
    return json.dumps(data, encoding='utf-8')
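
# Illustrative usage (an assumption, not part of the original module):
# decorating a pylons controller action. Returning a dict rather than a list
# avoids the array-envelope warning raised above; names are hypothetical.
#
#   class ExampleController(BaseController):
#
#       @jsonify
#       def repo_stats(self):
#           return {'repositories': 10, 'users': 3}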


class PartialRenderer(object):
    """
    Partial renderer used to render chunks of html used in datagrids,
    use like::

        _render = PartialRenderer('data_table/_dt_elements.mako')
        _render('quick_menu', args, kwargs)

    The `h`, `c`, `_`, `request` and `ungettext` template helpers are
    initialized inside the renderer and can be re-used by the rendered
    templates later.

    :param tmpl_name: template path relative to the /templates/ dir
    """

    def __init__(self, tmpl_name):
        import rhodecode
        from pylons import request, tmpl_context as c
        from pylons.i18n.translation import _, ungettext
        from rhodecode.lib import helpers as h

        self.tmpl_name = tmpl_name
        self.rhodecode = rhodecode
        self.c = c
        self._ = _
        self.ungettext = ungettext
        self.h = h
        self.request = request

    def _mako_lookup(self):
        _tmpl_lookup = self.rhodecode.CONFIG['pylons.app_globals'].mako_lookup
        return _tmpl_lookup.get_template(self.tmpl_name)

    def _update_kwargs_for_render(self, kwargs):
        """
        Inject params required for Mako rendering
        """
        _kwargs = {
            '_': self._,
            'h': self.h,
            'c': self.c,
            'request': self.request,
            'ungettext': self.ungettext,
        }
        _kwargs.update(kwargs)
        return _kwargs

    def _render_with_exc(self, render_func, args, kwargs):
        try:
            return render_func.render(*args, **kwargs)
        except:
            log.error(exceptions.text_error_template().render())
            raise

    def _get_template(self, template_obj, def_name):
        if def_name:
            tmpl = template_obj.get_def(def_name)
        else:
            tmpl = template_obj
        return tmpl

    def render(self, def_name, *args, **kwargs):
        lookup_obj = self._mako_lookup()
        tmpl = self._get_template(lookup_obj, def_name=def_name)
        kwargs = self._update_kwargs_for_render(kwargs)
        return self._render_with_exc(tmpl, args, kwargs)

    def __call__(self, tmpl, *args, **kwargs):
        return self.render(tmpl, *args, **kwargs)
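
# Illustrative usage (an assumption, not part of the original module): any
# extra positional and keyword arguments are passed to the named template def
# in addition to the injected `h`, `c`, `_`, `request` and `ungettext`
# helpers. The template path and def name below are hypothetical.
#
#   _render = PartialRenderer('data_table/_dt_elements.mako')
#   cell_html = _render('user_name', user_id, username, show_gravatar=True)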


def password_changed(auth_user, session):
    # Never report password change in case of default user or anonymous user.
    if auth_user.username == User.DEFAULT_USER or auth_user.user_id is None:
        return False

    password_hash = md5(auth_user.password) if auth_user.password else None
    rhodecode_user = session.get('rhodecode_user', {})
    session_password_hash = rhodecode_user.get('password', '')
    return password_hash != session_password_hash
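
# Illustrative sketch (an assumption, not part of the original module): the
# session keeps the md5 of the password that was current when the user logged
# in, so the comparison above detects a password change made after login:
#
#   session = {'rhodecode_user': {'password': md5('password-at-login-time')}}
#   password_changed(auth_user, session)  # True once the password differs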


def read_opensource_licenses():
    global _license_cache

    if not _license_cache:
        licenses = pkg_resources.resource_string(
            'rhodecode', 'config/licenses.json')
        _license_cache = json.loads(licenses)

    return _license_cache


def get_registry(request):
    """
    Utility to get the pyramid registry from a request. During migration to
    pyramid we sometimes want to use the pyramid registry from pylons context.
    Therefore this utility returns `request.registry` for pyramid requests and
    uses `get_current_registry()` for pylons requests.
    """
    try:
        return request.registry
    except AttributeError:
        return get_current_registry()
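
# Illustrative usage (an assumption, not part of the original module): the
# returned registry behaves the same for pyramid and pylons requests; the
# settings key below is hypothetical.
#
#   registry = get_registry(request)
#   vcs_server = registry.settings.get('vcs.server')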


def generate_platform_uuid():
    """
    Generates platform UUID based on its name
    """
    import platform

    try:
        uuid_list = [platform.platform()]
        return hashlib.sha256(':'.join(uuid_list)).hexdigest()
    except Exception as e:
        log.error('Failed to generate host uuid: %s' % e)
        return 'UNDEFINED'