fix(service-api): rely on urljoin for constructing the call url
super-admin
r5319:cc68847e default
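The change replaces plain string concatenation of the configured host and API path with urllib.parse.urljoin. A minimal sketch of the difference, using hypothetical values for app.service_api.host and rhodecode.api.url (chosen only to illustrate the slash handling at the boundary):

    from urllib.parse import urljoin

    host = 'http://localhost:10020/'   # hypothetical app.service_api.host value
    api_url = '/_admin/api'            # hypothetical rhodecode.api.url value

    # naive concatenation keeps both separators, producing a double slash
    print(f'{host}{api_url}')          # http://localhost:10020//_admin/api

    # urljoin resolves the path against the host and normalizes the boundary
    print(urljoin(host, api_url))      # http://localhost:10020/_admin/api

Because urljoin treats a leading slash on the second argument as an absolute path, the API path is resolved against the host root regardless of whether the configured host carries a trailing slash.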
@@ -1,857 +1,859 @@
1 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 """
19 """
20 Utilities library for RhodeCode
20 Utilities library for RhodeCode
21 """
21 """
22
22
23 import datetime
23 import datetime
24
24 import decorator
25 import decorator
25 import logging
26 import logging
26 import os
27 import os
27 import re
28 import re
28 import sys
29 import sys
29 import shutil
30 import shutil
30 import socket
31 import socket
31 import tempfile
32 import tempfile
32 import traceback
33 import traceback
33 import tarfile
34 import tarfile
35 import urllib.parse
34 import warnings
36 import warnings
35 from functools import wraps
37 from functools import wraps
36 from os.path import join as jn
38 from os.path import join as jn
37 from configparser import NoOptionError
39 from configparser import NoOptionError
38
40
39 import paste
41 import paste
40 import pkg_resources
42 import pkg_resources
41 from webhelpers2.text import collapse, strip_tags, convert_accented_entities, convert_misc_entities
43 from webhelpers2.text import collapse, strip_tags, convert_accented_entities, convert_misc_entities
42
44
43 from mako import exceptions
45 from mako import exceptions
44
46
45 from rhodecode.lib.hash_utils import sha256_safe, md5, sha1
47 from rhodecode.lib.hash_utils import sha256_safe, md5, sha1
46 from rhodecode.lib.type_utils import AttributeDict
48 from rhodecode.lib.type_utils import AttributeDict
47 from rhodecode.lib.str_utils import safe_bytes, safe_str
49 from rhodecode.lib.str_utils import safe_bytes, safe_str
48 from rhodecode.lib.vcs.backends.base import Config
50 from rhodecode.lib.vcs.backends.base import Config
49 from rhodecode.lib.vcs.exceptions import VCSError
51 from rhodecode.lib.vcs.exceptions import VCSError
50 from rhodecode.lib.vcs.utils.helpers import get_scm, get_scm_backend
52 from rhodecode.lib.vcs.utils.helpers import get_scm, get_scm_backend
51 from rhodecode.lib.ext_json import sjson as json
53 from rhodecode.lib.ext_json import sjson as json
52 from rhodecode.model import meta
54 from rhodecode.model import meta
53 from rhodecode.model.db import (
55 from rhodecode.model.db import (
54 Repository, User, RhodeCodeUi, UserLog, RepoGroup, UserGroup)
56 Repository, User, RhodeCodeUi, UserLog, RepoGroup, UserGroup)
55 from rhodecode.model.meta import Session
57 from rhodecode.model.meta import Session
56 from rhodecode.lib.pyramid_utils import get_config
58 from rhodecode.lib.pyramid_utils import get_config
57 from rhodecode.lib.vcs import CurlSession
59 from rhodecode.lib.vcs import CurlSession
58 from rhodecode.lib.vcs.exceptions import ImproperlyConfiguredError
60 from rhodecode.lib.vcs.exceptions import ImproperlyConfiguredError
59
61
60
62
61 log = logging.getLogger(__name__)
63 log = logging.getLogger(__name__)
62
64
63 REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')
65 REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')
64
66
65 # String which contains characters that are not allowed in slug names for
67 # String which contains characters that are not allowed in slug names for
66 # repositories or repository groups. It is properly escaped to use it in
68 # repositories or repository groups. It is properly escaped to use it in
67 # regular expressions.
69 # regular expressions.
68 SLUG_BAD_CHARS = re.escape(r'`?=[]\;\'"<>,/~!@#$%^&*()+{}|:')
70 SLUG_BAD_CHARS = re.escape(r'`?=[]\;\'"<>,/~!@#$%^&*()+{}|:')
69
71
70 # Regex that matches forbidden characters in repo/group slugs.
72 # Regex that matches forbidden characters in repo/group slugs.
71 SLUG_BAD_CHAR_RE = re.compile(r'[{}\x00-\x08\x0b-\x0c\x0e-\x1f]'.format(SLUG_BAD_CHARS))
73 SLUG_BAD_CHAR_RE = re.compile(r'[{}\x00-\x08\x0b-\x0c\x0e-\x1f]'.format(SLUG_BAD_CHARS))
72
74
73 # Regex that matches allowed characters in repo/group slugs.
75 # Regex that matches allowed characters in repo/group slugs.
74 SLUG_GOOD_CHAR_RE = re.compile(r'[^{}]'.format(SLUG_BAD_CHARS))
76 SLUG_GOOD_CHAR_RE = re.compile(r'[^{}]'.format(SLUG_BAD_CHARS))
75
77
76 # Regex that matches whole repo/group slugs.
78 # Regex that matches whole repo/group slugs.
77 SLUG_RE = re.compile(r'[^{}]+'.format(SLUG_BAD_CHARS))
79 SLUG_RE = re.compile(r'[^{}]+'.format(SLUG_BAD_CHARS))
78
80
79 _license_cache = None
81 _license_cache = None
80
82
81
83
82 def adopt_for_celery(func):
84 def adopt_for_celery(func):
83 """
85 """
84 Decorator designed to adopt hooks (from rhodecode.lib.hooks_base)
86 Decorator designed to adopt hooks (from rhodecode.lib.hooks_base)
85 for further usage as celery tasks.
87 for further usage as celery tasks.
86 """
88 """
87 @wraps(func)
89 @wraps(func)
88 def wrapper(extras):
90 def wrapper(extras):
89 extras = AttributeDict(extras)
91 extras = AttributeDict(extras)
90 # HooksResponse implements to_json method which must be used there.
92 # HooksResponse implements to_json method which must be used there.
91 return func(extras).to_json()
93 return func(extras).to_json()
92 return wrapper
94 return wrapper
93
95
94
96
95 def repo_name_slug(value):
97 def repo_name_slug(value):
96 """
98 """
97 Return slug of name of repository
99 Return slug of name of repository
98 This function is called on each creation/modification
100 This function is called on each creation/modification
99 of repository to prevent bad names in repo
101 of repository to prevent bad names in repo
100 """
102 """
101
103
102 replacement_char = '-'
104 replacement_char = '-'
103
105
104 slug = strip_tags(value)
106 slug = strip_tags(value)
105 slug = convert_accented_entities(slug)
107 slug = convert_accented_entities(slug)
106 slug = convert_misc_entities(slug)
108 slug = convert_misc_entities(slug)
107
109
108 slug = SLUG_BAD_CHAR_RE.sub('', slug)
110 slug = SLUG_BAD_CHAR_RE.sub('', slug)
109 slug = re.sub(r'[\s]+', '-', slug)
111 slug = re.sub(r'[\s]+', '-', slug)
110 slug = collapse(slug, replacement_char)
112 slug = collapse(slug, replacement_char)
111
113
112 return slug
114 return slug
113
115
114
116
115 #==============================================================================
117 #==============================================================================
116 # PERM DECORATOR HELPERS FOR EXTRACTING NAMES FOR PERM CHECKS
118 # PERM DECORATOR HELPERS FOR EXTRACTING NAMES FOR PERM CHECKS
117 #==============================================================================
119 #==============================================================================
118 def get_repo_slug(request):
120 def get_repo_slug(request):
119 _repo = ''
121 _repo = ''
120
122
121 if hasattr(request, 'db_repo_name'):
123 if hasattr(request, 'db_repo_name'):
122 # if our request has a db reference set, use it for the name; this
124 # if our request has a db reference set, use it for the name; this
123 # translates the example.com/_<id> into proper repo names
125 # translates the example.com/_<id> into proper repo names
124 _repo = request.db_repo_name
126 _repo = request.db_repo_name
125 elif getattr(request, 'matchdict', None):
127 elif getattr(request, 'matchdict', None):
126 # pyramid
128 # pyramid
127 _repo = request.matchdict.get('repo_name')
129 _repo = request.matchdict.get('repo_name')
128
130
129 if _repo:
131 if _repo:
130 _repo = _repo.rstrip('/')
132 _repo = _repo.rstrip('/')
131 return _repo
133 return _repo
132
134
133
135
134 def get_repo_group_slug(request):
136 def get_repo_group_slug(request):
135 _group = ''
137 _group = ''
136 if hasattr(request, 'db_repo_group'):
138 if hasattr(request, 'db_repo_group'):
137 # if our request has a db reference set, use it for the name; this
139 # if our request has a db reference set, use it for the name; this
138 # translates the example.com/_<id> into proper repo group names
140 # translates the example.com/_<id> into proper repo group names
139 _group = request.db_repo_group.group_name
141 _group = request.db_repo_group.group_name
140 elif getattr(request, 'matchdict', None):
142 elif getattr(request, 'matchdict', None):
141 # pyramid
143 # pyramid
142 _group = request.matchdict.get('repo_group_name')
144 _group = request.matchdict.get('repo_group_name')
143
145
144 if _group:
146 if _group:
145 _group = _group.rstrip('/')
147 _group = _group.rstrip('/')
146 return _group
148 return _group
147
149
148
150
149 def get_user_group_slug(request):
151 def get_user_group_slug(request):
150 _user_group = ''
152 _user_group = ''
151
153
152 if hasattr(request, 'db_user_group'):
154 if hasattr(request, 'db_user_group'):
153 _user_group = request.db_user_group.users_group_name
155 _user_group = request.db_user_group.users_group_name
154 elif getattr(request, 'matchdict', None):
156 elif getattr(request, 'matchdict', None):
155 # pyramid
157 # pyramid
156 _user_group = request.matchdict.get('user_group_id')
158 _user_group = request.matchdict.get('user_group_id')
157 _user_group_name = request.matchdict.get('user_group_name')
159 _user_group_name = request.matchdict.get('user_group_name')
158 try:
160 try:
159 if _user_group:
161 if _user_group:
160 _user_group = UserGroup.get(_user_group)
162 _user_group = UserGroup.get(_user_group)
161 elif _user_group_name:
163 elif _user_group_name:
162 _user_group = UserGroup.get_by_group_name(_user_group_name)
164 _user_group = UserGroup.get_by_group_name(_user_group_name)
163
165
164 if _user_group:
166 if _user_group:
165 _user_group = _user_group.users_group_name
167 _user_group = _user_group.users_group_name
166 except Exception:
168 except Exception:
167 log.exception('Failed to get user group by id and name')
169 log.exception('Failed to get user group by id and name')
168 # catch all failures here
170 # catch all failures here
169 return None
171 return None
170
172
171 return _user_group
173 return _user_group
172
174
173
175
174 def get_filesystem_repos(path, recursive=False, skip_removed_repos=True):
176 def get_filesystem_repos(path, recursive=False, skip_removed_repos=True):
175 """
177 """
176 Scans the given path for repos and returns (name, (type, path)) tuples
178 Scans the given path for repos and returns (name, (type, path)) tuples
177
179
178 :param path: path to scan for repositories
180 :param path: path to scan for repositories
179 :param recursive: recursive search and return names with subdirs in front
181 :param recursive: recursive search and return names with subdirs in front
180 """
182 """
181
183
182 # remove ending slash for better results
184 # remove ending slash for better results
183 path = path.rstrip(os.sep)
185 path = path.rstrip(os.sep)
184 log.debug('now scanning in %s location recursive:%s...', path, recursive)
186 log.debug('now scanning in %s location recursive:%s...', path, recursive)
185
187
186 def _get_repos(p):
188 def _get_repos(p):
187 dirpaths = get_dirpaths(p)
189 dirpaths = get_dirpaths(p)
188 if not _is_dir_writable(p):
190 if not _is_dir_writable(p):
189 log.warning('repo path without write access: %s', p)
191 log.warning('repo path without write access: %s', p)
190
192
191 for dirpath in dirpaths:
193 for dirpath in dirpaths:
192 if os.path.isfile(os.path.join(p, dirpath)):
194 if os.path.isfile(os.path.join(p, dirpath)):
193 continue
195 continue
194 cur_path = os.path.join(p, dirpath)
196 cur_path = os.path.join(p, dirpath)
195
197
196 # skip removed repos
198 # skip removed repos
197 if skip_removed_repos and REMOVED_REPO_PAT.match(dirpath):
199 if skip_removed_repos and REMOVED_REPO_PAT.match(dirpath):
198 continue
200 continue
199
201
200 # skip .<something> dirs
202 # skip .<something> dirs
201 if dirpath.startswith('.'):
203 if dirpath.startswith('.'):
202 continue
204 continue
203
205
204 try:
206 try:
205 scm_info = get_scm(cur_path)
207 scm_info = get_scm(cur_path)
206 yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
208 yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
207 except VCSError:
209 except VCSError:
208 if not recursive:
210 if not recursive:
209 continue
211 continue
210 # check if this dir contains other repos for recursive scan
212 # check if this dir contains other repos for recursive scan
211 rec_path = os.path.join(p, dirpath)
213 rec_path = os.path.join(p, dirpath)
212 if os.path.isdir(rec_path):
214 if os.path.isdir(rec_path):
213 yield from _get_repos(rec_path)
215 yield from _get_repos(rec_path)
214
216
215 return _get_repos(path)
217 return _get_repos(path)
216
218
217
219
218 def get_dirpaths(p: str) -> list:
220 def get_dirpaths(p: str) -> list:
219 try:
221 try:
220 # OS-independent way of checking if we have at least read-only
222 # OS-independent way of checking if we have at least read-only
221 # access or not.
223 # access or not.
222 dirpaths = os.listdir(p)
224 dirpaths = os.listdir(p)
223 except OSError:
225 except OSError:
224 log.warning('ignoring repo path without read access: %s', p)
226 log.warning('ignoring repo path without read access: %s', p)
225 return []
227 return []
226
228
227 # os.listdir has a quirk: if a unicode path is passed into it, it tries to
229 # os.listdir has a quirk: if a unicode path is passed into it, it tries to
228 # decode paths and suddenly returns unicode objects itself. The items it
230 # decode paths and suddenly returns unicode objects itself. The items it
229 # cannot decode are returned as strings and cause issues.
231 # cannot decode are returned as strings and cause issues.
230 #
232 #
231 # Those paths are ignored here until a solid solution for path handling has
233 # Those paths are ignored here until a solid solution for path handling has
232 # been built.
234 # been built.
233 expected_type = type(p)
235 expected_type = type(p)
234
236
235 def _has_correct_type(item):
237 def _has_correct_type(item):
236 if type(item) is not expected_type:
238 if type(item) is not expected_type:
237 log.error(
239 log.error(
238 "Ignoring path %s since it cannot be decoded into str.",
240 "Ignoring path %s since it cannot be decoded into str.",
239 # Using "repr" to make sure that we see the byte value in case
241 # Using "repr" to make sure that we see the byte value in case
240 # of support.
242 # of support.
241 repr(item))
243 repr(item))
242 return False
244 return False
243 return True
245 return True
244
246
245 dirpaths = [item for item in dirpaths if _has_correct_type(item)]
247 dirpaths = [item for item in dirpaths if _has_correct_type(item)]
246
248
247 return dirpaths
249 return dirpaths
248
250
249
251
250 def _is_dir_writable(path):
252 def _is_dir_writable(path):
251 """
253 """
252 Probe if `path` is writable.
254 Probe if `path` is writable.
253
255
254 Due to trouble on Cygwin / Windows, this is actually probing if it is
256 Due to trouble on Cygwin / Windows, this is actually probing if it is
255 possible to create a file inside of `path`, stat does not produce reliable
257 possible to create a file inside of `path`, stat does not produce reliable
256 results in this case.
258 results in this case.
257 """
259 """
258 try:
260 try:
259 with tempfile.TemporaryFile(dir=path):
261 with tempfile.TemporaryFile(dir=path):
260 pass
262 pass
261 except OSError:
263 except OSError:
262 return False
264 return False
263 return True
265 return True
264
266
265
267
266 def is_valid_repo(repo_name, base_path, expect_scm=None, explicit_scm=None, config=None):
268 def is_valid_repo(repo_name, base_path, expect_scm=None, explicit_scm=None, config=None):
267 """
269 """
268 Returns True if the given path is a valid repository, False otherwise.
270 Returns True if the given path is a valid repository, False otherwise.
269 If expect_scm param is given also, compare if given scm is the same
271 If expect_scm param is given also, compare if given scm is the same
270 as expected from scm parameter. If explicit_scm is given don't try to
272 as expected from scm parameter. If explicit_scm is given don't try to
271 detect the scm, just use the given one to check if repo is valid
273 detect the scm, just use the given one to check if repo is valid
272
274
273 :param repo_name:
275 :param repo_name:
274 :param base_path:
276 :param base_path:
275 :param expect_scm:
277 :param expect_scm:
276 :param explicit_scm:
278 :param explicit_scm:
277 :param config:
279 :param config:
278
280
279 :return True: if given path is a valid repository
281 :return True: if given path is a valid repository
280 """
282 """
281 full_path = os.path.join(safe_str(base_path), safe_str(repo_name))
283 full_path = os.path.join(safe_str(base_path), safe_str(repo_name))
282 log.debug('Checking if `%s` is a valid path for repository. '
284 log.debug('Checking if `%s` is a valid path for repository. '
283 'Explicit type: %s', repo_name, explicit_scm)
285 'Explicit type: %s', repo_name, explicit_scm)
284
286
285 try:
287 try:
286 if explicit_scm:
288 if explicit_scm:
287 detected_scms = [get_scm_backend(explicit_scm)(
289 detected_scms = [get_scm_backend(explicit_scm)(
288 full_path, config=config).alias]
290 full_path, config=config).alias]
289 else:
291 else:
290 detected_scms = get_scm(full_path)
292 detected_scms = get_scm(full_path)
291
293
292 if expect_scm:
294 if expect_scm:
293 return detected_scms[0] == expect_scm
295 return detected_scms[0] == expect_scm
294 log.debug('path: %s is a vcs object:%s', full_path, detected_scms)
296 log.debug('path: %s is a vcs object:%s', full_path, detected_scms)
295 return True
297 return True
296 except VCSError:
298 except VCSError:
297 log.debug('path: %s is not a valid repo !', full_path)
299 log.debug('path: %s is not a valid repo !', full_path)
298 return False
300 return False
299
301
300
302
301 def is_valid_repo_group(repo_group_name, base_path, skip_path_check=False):
303 def is_valid_repo_group(repo_group_name, base_path, skip_path_check=False):
302 """
304 """
303 Returns True if a given path is a repository group, False otherwise
305 Returns True if a given path is a repository group, False otherwise
304
306
305 :param repo_group_name:
307 :param repo_group_name:
306 :param base_path:
308 :param base_path:
307 """
309 """
308 full_path = os.path.join(safe_str(base_path), safe_str(repo_group_name))
310 full_path = os.path.join(safe_str(base_path), safe_str(repo_group_name))
309 log.debug('Checking if `%s` is a valid path for repository group',
311 log.debug('Checking if `%s` is a valid path for repository group',
310 repo_group_name)
312 repo_group_name)
311
313
312 # check if it's not a repo
314 # check if it's not a repo
313 if is_valid_repo(repo_group_name, base_path):
315 if is_valid_repo(repo_group_name, base_path):
314 log.debug('Repo called %s exists, it is not a valid repo group', repo_name)
316 log.debug('Repo called %s exists, it is not a valid repo group', repo_name)
315 return False
317 return False
316
318
317 try:
319 try:
318 # we need to check bare git repos at higher level
320 # we need to check bare git repos at higher level
319 # since we might match branches/hooks/info/objects or possibly
321 # since we might match branches/hooks/info/objects or possibly
320 # other things inside bare git repo
322 # other things inside bare git repo
321 maybe_repo = os.path.dirname(full_path)
323 maybe_repo = os.path.dirname(full_path)
322 if maybe_repo == base_path:
324 if maybe_repo == base_path:
323 # skip root level repo check; we know root location CANNOT BE a repo group
325 # skip root level repo check; we know root location CANNOT BE a repo group
324 return False
326 return False
325
327
326 scm_ = get_scm(maybe_repo)
328 scm_ = get_scm(maybe_repo)
327 log.debug('path: %s is a vcs object:%s, not valid repo group', full_path, scm_)
329 log.debug('path: %s is a vcs object:%s, not valid repo group', full_path, scm_)
328 return False
330 return False
329 except VCSError:
331 except VCSError:
330 pass
332 pass
331
333
332 # check if it's a valid path
334 # check if it's a valid path
333 if skip_path_check or os.path.isdir(full_path):
335 if skip_path_check or os.path.isdir(full_path):
334 log.debug('path: %s is a valid repo group !', full_path)
336 log.debug('path: %s is a valid repo group !', full_path)
335 return True
337 return True
336
338
337 log.debug('path: %s is not a valid repo group !', full_path)
339 log.debug('path: %s is not a valid repo group !', full_path)
338 return False
340 return False
339
341
340
342
341 def ask_ok(prompt, retries=4, complaint='[y]es or [n]o please!'):
343 def ask_ok(prompt, retries=4, complaint='[y]es or [n]o please!'):
342 while True:
344 while True:
343 ok = input(prompt)
345 ok = input(prompt)
344 if ok.lower() in ('y', 'ye', 'yes'):
346 if ok.lower() in ('y', 'ye', 'yes'):
345 return True
347 return True
346 if ok.lower() in ('n', 'no', 'nop', 'nope'):
348 if ok.lower() in ('n', 'no', 'nop', 'nope'):
347 return False
349 return False
348 retries = retries - 1
350 retries = retries - 1
349 if retries < 0:
351 if retries < 0:
350 raise OSError
352 raise OSError
351 print(complaint)
353 print(complaint)
352
354
353 # propagated from mercurial documentation
355 # propagated from mercurial documentation
354 ui_sections = [
356 ui_sections = [
355 'alias', 'auth',
357 'alias', 'auth',
356 'decode/encode', 'defaults',
358 'decode/encode', 'defaults',
357 'diff', 'email',
359 'diff', 'email',
358 'extensions', 'format',
360 'extensions', 'format',
359 'merge-patterns', 'merge-tools',
361 'merge-patterns', 'merge-tools',
360 'hooks', 'http_proxy',
362 'hooks', 'http_proxy',
361 'smtp', 'patch',
363 'smtp', 'patch',
362 'paths', 'profiling',
364 'paths', 'profiling',
363 'server', 'trusted',
365 'server', 'trusted',
364 'ui', 'web', ]
366 'ui', 'web', ]
365
367
366
368
367 def config_data_from_db(clear_session=True, repo=None):
369 def config_data_from_db(clear_session=True, repo=None):
368 """
370 """
369 Read the configuration data from the database and return configuration
371 Read the configuration data from the database and return configuration
370 tuples.
372 tuples.
371 """
373 """
372 from rhodecode.model.settings import VcsSettingsModel
374 from rhodecode.model.settings import VcsSettingsModel
373
375
374 config = []
376 config = []
375
377
376 sa = meta.Session()
378 sa = meta.Session()
377 settings_model = VcsSettingsModel(repo=repo, sa=sa)
379 settings_model = VcsSettingsModel(repo=repo, sa=sa)
378
380
379 ui_settings = settings_model.get_ui_settings()
381 ui_settings = settings_model.get_ui_settings()
380
382
381 ui_data = []
383 ui_data = []
382 for setting in ui_settings:
384 for setting in ui_settings:
383 if setting.active:
385 if setting.active:
384 ui_data.append((setting.section, setting.key, setting.value))
386 ui_data.append((setting.section, setting.key, setting.value))
385 config.append((
387 config.append((
386 safe_str(setting.section), safe_str(setting.key),
388 safe_str(setting.section), safe_str(setting.key),
387 safe_str(setting.value)))
389 safe_str(setting.value)))
388 if setting.key == 'push_ssl':
390 if setting.key == 'push_ssl':
389 # force set push_ssl requirement to False, rhodecode
391 # force set push_ssl requirement to False, rhodecode
390 # handles that
392 # handles that
391 config.append((
393 config.append((
392 safe_str(setting.section), safe_str(setting.key), False))
394 safe_str(setting.section), safe_str(setting.key), False))
393 log.debug(
395 log.debug(
394 'settings ui from db@repo[%s]: %s',
396 'settings ui from db@repo[%s]: %s',
395 repo,
397 repo,
396 ','.join(['[{}] {}={}'.format(*s) for s in ui_data]))
398 ','.join(['[{}] {}={}'.format(*s) for s in ui_data]))
397 if clear_session:
399 if clear_session:
398 meta.Session.remove()
400 meta.Session.remove()
399
401
400 # TODO: mikhail: probably it makes no sense to re-read hooks information.
402 # TODO: mikhail: probably it makes no sense to re-read hooks information.
401 # It's already there and activated/deactivated
403 # It's already there and activated/deactivated
402 skip_entries = []
404 skip_entries = []
403 enabled_hook_classes = get_enabled_hook_classes(ui_settings)
405 enabled_hook_classes = get_enabled_hook_classes(ui_settings)
404 if 'pull' not in enabled_hook_classes:
406 if 'pull' not in enabled_hook_classes:
405 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PULL))
407 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PULL))
406 if 'push' not in enabled_hook_classes:
408 if 'push' not in enabled_hook_classes:
407 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PUSH))
409 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PUSH))
408 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRETX_PUSH))
410 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRETX_PUSH))
409 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PUSH_KEY))
411 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PUSH_KEY))
410
412
411 config = [entry for entry in config if entry[:2] not in skip_entries]
413 config = [entry for entry in config if entry[:2] not in skip_entries]
412
414
413 return config
415 return config
414
416
415
417
416 def make_db_config(clear_session=True, repo=None):
418 def make_db_config(clear_session=True, repo=None):
417 """
419 """
418 Create a :class:`Config` instance based on the values in the database.
420 Create a :class:`Config` instance based on the values in the database.
419 """
421 """
420 config = Config()
422 config = Config()
421 config_data = config_data_from_db(clear_session=clear_session, repo=repo)
423 config_data = config_data_from_db(clear_session=clear_session, repo=repo)
422 for section, option, value in config_data:
424 for section, option, value in config_data:
423 config.set(section, option, value)
425 config.set(section, option, value)
424 return config
426 return config
425
427
426
428
427 def get_enabled_hook_classes(ui_settings):
429 def get_enabled_hook_classes(ui_settings):
428 """
430 """
429 Return the enabled hook classes.
431 Return the enabled hook classes.
430
432
431 :param ui_settings: List of ui_settings as returned
433 :param ui_settings: List of ui_settings as returned
432 by :meth:`VcsSettingsModel.get_ui_settings`
434 by :meth:`VcsSettingsModel.get_ui_settings`
433
435
434 :return: a list with the enabled hook classes. The order is not guaranteed.
436 :return: a list with the enabled hook classes. The order is not guaranteed.
435 :rtype: list
437 :rtype: list
436 """
438 """
437 enabled_hooks = []
439 enabled_hooks = []
438 active_hook_keys = [
440 active_hook_keys = [
439 key for section, key, value, active in ui_settings
441 key for section, key, value, active in ui_settings
440 if section == 'hooks' and active]
442 if section == 'hooks' and active]
441
443
442 hook_names = {
444 hook_names = {
443 RhodeCodeUi.HOOK_PUSH: 'push',
445 RhodeCodeUi.HOOK_PUSH: 'push',
444 RhodeCodeUi.HOOK_PULL: 'pull',
446 RhodeCodeUi.HOOK_PULL: 'pull',
445 RhodeCodeUi.HOOK_REPO_SIZE: 'repo_size'
447 RhodeCodeUi.HOOK_REPO_SIZE: 'repo_size'
446 }
448 }
447
449
448 for key in active_hook_keys:
450 for key in active_hook_keys:
449 hook = hook_names.get(key)
451 hook = hook_names.get(key)
450 if hook:
452 if hook:
451 enabled_hooks.append(hook)
453 enabled_hooks.append(hook)
452
454
453 return enabled_hooks
455 return enabled_hooks
454
456
455
457
456 def set_rhodecode_config(config):
458 def set_rhodecode_config(config):
457 """
459 """
458 Updates pyramid config with new settings from database
460 Updates pyramid config with new settings from database
459
461
460 :param config:
462 :param config:
461 """
463 """
462 from rhodecode.model.settings import SettingsModel
464 from rhodecode.model.settings import SettingsModel
463 app_settings = SettingsModel().get_all_settings()
465 app_settings = SettingsModel().get_all_settings()
464
466
465 for k, v in list(app_settings.items()):
467 for k, v in list(app_settings.items()):
466 config[k] = v
468 config[k] = v
467
469
468
470
469 def get_rhodecode_realm():
471 def get_rhodecode_realm():
470 """
472 """
471 Return the rhodecode realm from database.
473 Return the rhodecode realm from database.
472 """
474 """
473 from rhodecode.model.settings import SettingsModel
475 from rhodecode.model.settings import SettingsModel
474 realm = SettingsModel().get_setting_by_name('realm')
476 realm = SettingsModel().get_setting_by_name('realm')
475 return safe_str(realm.app_settings_value)
477 return safe_str(realm.app_settings_value)
476
478
477
479
478 def get_rhodecode_base_path():
480 def get_rhodecode_base_path():
479 """
481 """
480 Returns the base path. The base path is the filesystem path which points
482 Returns the base path. The base path is the filesystem path which points
481 to the repository store.
483 to the repository store.
482 """
484 """
483
485
484 import rhodecode
486 import rhodecode
485 return rhodecode.CONFIG['default_base_path']
487 return rhodecode.CONFIG['default_base_path']
486
488
487
489
488 def map_groups(path):
490 def map_groups(path):
489 """
491 """
490 Given a full path to a repository, create all nested groups that this
492 Given a full path to a repository, create all nested groups that this
491 repo is inside. This function creates parent-child relationships between
493 repo is inside. This function creates parent-child relationships between
492 groups and creates default perms for all new groups.
494 groups and creates default perms for all new groups.
493
495
494 :param path: full path to repository
496 :param path: full path to repository
495 """
497 """
496 from rhodecode.model.repo_group import RepoGroupModel
498 from rhodecode.model.repo_group import RepoGroupModel
497 sa = meta.Session()
499 sa = meta.Session()
498 groups = path.split(Repository.NAME_SEP)
500 groups = path.split(Repository.NAME_SEP)
499 parent = None
501 parent = None
500 group = None
502 group = None
501
503
502 # last element is repo in nested groups structure
504 # last element is repo in nested groups structure
503 groups = groups[:-1]
505 groups = groups[:-1]
504 rgm = RepoGroupModel(sa)
506 rgm = RepoGroupModel(sa)
505 owner = User.get_first_super_admin()
507 owner = User.get_first_super_admin()
506 for lvl, group_name in enumerate(groups):
508 for lvl, group_name in enumerate(groups):
507 group_name = '/'.join(groups[:lvl] + [group_name])
509 group_name = '/'.join(groups[:lvl] + [group_name])
508 group = RepoGroup.get_by_group_name(group_name)
510 group = RepoGroup.get_by_group_name(group_name)
509 desc = '%s group' % group_name
511 desc = '%s group' % group_name
510
512
511 # skip folders that are now removed repos
513 # skip folders that are now removed repos
512 if REMOVED_REPO_PAT.match(group_name):
514 if REMOVED_REPO_PAT.match(group_name):
513 break
515 break
514
516
515 if group is None:
517 if group is None:
516 log.debug('creating group level: %s group_name: %s',
518 log.debug('creating group level: %s group_name: %s',
517 lvl, group_name)
519 lvl, group_name)
518 group = RepoGroup(group_name, parent)
520 group = RepoGroup(group_name, parent)
519 group.group_description = desc
521 group.group_description = desc
520 group.user = owner
522 group.user = owner
521 sa.add(group)
523 sa.add(group)
522 perm_obj = rgm._create_default_perms(group)
524 perm_obj = rgm._create_default_perms(group)
523 sa.add(perm_obj)
525 sa.add(perm_obj)
524 sa.flush()
526 sa.flush()
525
527
526 parent = group
528 parent = group
527 return group
529 return group
528
530
529
531
530 def repo2db_mapper(initial_repo_list, remove_obsolete=False, force_hooks_rebuild=False):
532 def repo2db_mapper(initial_repo_list, remove_obsolete=False, force_hooks_rebuild=False):
531 """
533 """
532 Maps all repos given in initial_repo_list; non-existing repositories
534 Maps all repos given in initial_repo_list; non-existing repositories
533 are created. If remove_obsolete is True it also checks for db entries
535 are created. If remove_obsolete is True it also checks for db entries
534 that are not in initial_repo_list and removes them.
536 that are not in initial_repo_list and removes them.
535
537
536 :param initial_repo_list: list of repositories found by scanning methods
538 :param initial_repo_list: list of repositories found by scanning methods
537 :param remove_obsolete: check for obsolete entries in database
539 :param remove_obsolete: check for obsolete entries in database
538 """
540 """
539 from rhodecode.model.repo import RepoModel
541 from rhodecode.model.repo import RepoModel
540 from rhodecode.model.repo_group import RepoGroupModel
542 from rhodecode.model.repo_group import RepoGroupModel
541 from rhodecode.model.settings import SettingsModel
543 from rhodecode.model.settings import SettingsModel
542
544
543 sa = meta.Session()
545 sa = meta.Session()
544 repo_model = RepoModel()
546 repo_model = RepoModel()
545 user = User.get_first_super_admin()
547 user = User.get_first_super_admin()
546 added = []
548 added = []
547
549
548 # creation defaults
550 # creation defaults
549 defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
551 defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
550 enable_statistics = defs.get('repo_enable_statistics')
552 enable_statistics = defs.get('repo_enable_statistics')
551 enable_locking = defs.get('repo_enable_locking')
553 enable_locking = defs.get('repo_enable_locking')
552 enable_downloads = defs.get('repo_enable_downloads')
554 enable_downloads = defs.get('repo_enable_downloads')
553 private = defs.get('repo_private')
555 private = defs.get('repo_private')
554
556
555 for name, repo in list(initial_repo_list.items()):
557 for name, repo in list(initial_repo_list.items()):
556 group = map_groups(name)
558 group = map_groups(name)
557 str_name = safe_str(name)
559 str_name = safe_str(name)
558 db_repo = repo_model.get_by_repo_name(str_name)
560 db_repo = repo_model.get_by_repo_name(str_name)
559
561
560 # found repo that is on filesystem not in RhodeCode database
562 # found repo that is on filesystem not in RhodeCode database
561 if not db_repo:
563 if not db_repo:
562 log.info('repository `%s` not found in the database, creating now', name)
564 log.info('repository `%s` not found in the database, creating now', name)
563 added.append(name)
565 added.append(name)
564 desc = (repo.description
566 desc = (repo.description
565 if repo.description != 'unknown'
567 if repo.description != 'unknown'
566 else '%s repository' % name)
568 else '%s repository' % name)
567
569
568 db_repo = repo_model._create_repo(
570 db_repo = repo_model._create_repo(
569 repo_name=name,
571 repo_name=name,
570 repo_type=repo.alias,
572 repo_type=repo.alias,
571 description=desc,
573 description=desc,
572 repo_group=getattr(group, 'group_id', None),
574 repo_group=getattr(group, 'group_id', None),
573 owner=user,
575 owner=user,
574 enable_locking=enable_locking,
576 enable_locking=enable_locking,
575 enable_downloads=enable_downloads,
577 enable_downloads=enable_downloads,
576 enable_statistics=enable_statistics,
578 enable_statistics=enable_statistics,
577 private=private,
579 private=private,
578 state=Repository.STATE_CREATED
580 state=Repository.STATE_CREATED
579 )
581 )
580 sa.commit()
582 sa.commit()
581 # we added that repo just now, and make sure we updated server info
583 # we added that repo just now, and make sure we updated server info
582 if db_repo.repo_type == 'git':
584 if db_repo.repo_type == 'git':
583 git_repo = db_repo.scm_instance()
585 git_repo = db_repo.scm_instance()
584 # update repository server-info
586 # update repository server-info
585 log.debug('Running update server info')
587 log.debug('Running update server info')
586 git_repo._update_server_info(force=True)
588 git_repo._update_server_info(force=True)
587
589
588 db_repo.update_commit_cache()
590 db_repo.update_commit_cache()
589
591
590 config = db_repo._config
592 config = db_repo._config
591 config.set('extensions', 'largefiles', '')
593 config.set('extensions', 'largefiles', '')
592 repo = db_repo.scm_instance(config=config)
594 repo = db_repo.scm_instance(config=config)
593 repo.install_hooks(force=force_hooks_rebuild)
595 repo.install_hooks(force=force_hooks_rebuild)
594
596
595 removed = []
597 removed = []
596 if remove_obsolete:
598 if remove_obsolete:
597 # remove from database those repositories that are not in the filesystem
599 # remove from database those repositories that are not in the filesystem
598 for repo in sa.query(Repository).all():
600 for repo in sa.query(Repository).all():
599 if repo.repo_name not in list(initial_repo_list.keys()):
601 if repo.repo_name not in list(initial_repo_list.keys()):
600 log.debug("Removing non-existing repository found in db `%s`",
602 log.debug("Removing non-existing repository found in db `%s`",
601 repo.repo_name)
603 repo.repo_name)
602 try:
604 try:
603 RepoModel(sa).delete(repo, forks='detach', fs_remove=False)
605 RepoModel(sa).delete(repo, forks='detach', fs_remove=False)
604 sa.commit()
606 sa.commit()
605 removed.append(repo.repo_name)
607 removed.append(repo.repo_name)
606 except Exception:
608 except Exception:
607 # don't hold further removals on error
609 # don't hold further removals on error
608 log.error(traceback.format_exc())
610 log.error(traceback.format_exc())
609 sa.rollback()
611 sa.rollback()
610
612
611 def splitter(full_repo_name):
613 def splitter(full_repo_name):
612 _parts = full_repo_name.rsplit(RepoGroup.url_sep(), 1)
614 _parts = full_repo_name.rsplit(RepoGroup.url_sep(), 1)
613 gr_name = None
615 gr_name = None
614 if len(_parts) == 2:
616 if len(_parts) == 2:
615 gr_name = _parts[0]
617 gr_name = _parts[0]
616 return gr_name
618 return gr_name
617
619
618 initial_repo_group_list = [splitter(x) for x in
620 initial_repo_group_list = [splitter(x) for x in
619 list(initial_repo_list.keys()) if splitter(x)]
621 list(initial_repo_list.keys()) if splitter(x)]
620
622
621 # remove from database those repository groups that are not in the
623 # remove from database those repository groups that are not in the
622 # filesystem; due to parent-child relationships we need to delete them
624 # filesystem; due to parent-child relationships we need to delete them
623 # in a specific order, most nested first
625 # in a specific order, most nested first
624 all_groups = [x.group_name for x in sa.query(RepoGroup).all()]
626 all_groups = [x.group_name for x in sa.query(RepoGroup).all()]
625 def nested_sort(gr):
627 def nested_sort(gr):
626 return len(gr.split('/'))
628 return len(gr.split('/'))
627 for group_name in sorted(all_groups, key=nested_sort, reverse=True):
629 for group_name in sorted(all_groups, key=nested_sort, reverse=True):
628 if group_name not in initial_repo_group_list:
630 if group_name not in initial_repo_group_list:
629 repo_group = RepoGroup.get_by_group_name(group_name)
631 repo_group = RepoGroup.get_by_group_name(group_name)
630 if (repo_group.children.all() or
632 if (repo_group.children.all() or
631 not RepoGroupModel().check_exist_filesystem(
633 not RepoGroupModel().check_exist_filesystem(
632 group_name=group_name, exc_on_failure=False)):
634 group_name=group_name, exc_on_failure=False)):
633 continue
635 continue
634
636
635 log.info(
637 log.info(
636 'Removing non-existing repository group found in db `%s`',
638 'Removing non-existing repository group found in db `%s`',
637 group_name)
639 group_name)
638 try:
640 try:
639 RepoGroupModel(sa).delete(group_name, fs_remove=False)
641 RepoGroupModel(sa).delete(group_name, fs_remove=False)
640 sa.commit()
642 sa.commit()
641 removed.append(group_name)
643 removed.append(group_name)
642 except Exception:
644 except Exception:
643 # don't hold further removals on error
645 # don't hold further removals on error
644 log.exception(
646 log.exception(
645 'Unable to remove repository group `%s`',
647 'Unable to remove repository group `%s`',
646 group_name)
648 group_name)
647 sa.rollback()
649 sa.rollback()
648 raise
650 raise
649
651
650 return added, removed
652 return added, removed
651
653
652
654
653 def load_rcextensions(root_path):
655 def load_rcextensions(root_path):
654 import rhodecode
656 import rhodecode
655 from rhodecode.config import conf
657 from rhodecode.config import conf
656
658
657 path = os.path.join(root_path)
659 path = os.path.join(root_path)
658 sys.path.append(path)
660 sys.path.append(path)
659
661
660 try:
662 try:
661 rcextensions = __import__('rcextensions')
663 rcextensions = __import__('rcextensions')
662 except ImportError:
664 except ImportError:
663 if os.path.isdir(os.path.join(path, 'rcextensions')):
665 if os.path.isdir(os.path.join(path, 'rcextensions')):
664 log.warning('Unable to load rcextensions from %s', path)
666 log.warning('Unable to load rcextensions from %s', path)
665 rcextensions = None
667 rcextensions = None
666
668
667 if rcextensions:
669 if rcextensions:
668 log.info('Loaded rcextensions from %s...', rcextensions)
670 log.info('Loaded rcextensions from %s...', rcextensions)
669 rhodecode.EXTENSIONS = rcextensions
671 rhodecode.EXTENSIONS = rcextensions
670
672
671 # Additional mappings that are not present in the pygments lexers
673 # Additional mappings that are not present in the pygments lexers
672 conf.LANGUAGES_EXTENSIONS_MAP.update(
674 conf.LANGUAGES_EXTENSIONS_MAP.update(
673 getattr(rhodecode.EXTENSIONS, 'EXTRA_MAPPINGS', {}))
675 getattr(rhodecode.EXTENSIONS, 'EXTRA_MAPPINGS', {}))
674
676
675
677
676 def get_custom_lexer(extension):
678 def get_custom_lexer(extension):
677 """
679 """
678 returns a custom lexer if it is defined in rcextensions module, or None
680 returns a custom lexer if it is defined in rcextensions module, or None
679 if there's no custom lexer defined
681 if there's no custom lexer defined
680 """
682 """
681 import rhodecode
683 import rhodecode
682 from pygments import lexers
684 from pygments import lexers
683
685
684 # custom override made by RhodeCode
686 # custom override made by RhodeCode
685 if extension in ['mako']:
687 if extension in ['mako']:
686 return lexers.get_lexer_by_name('html+mako')
688 return lexers.get_lexer_by_name('html+mako')
687
689
688 # check if we didn't define this extension as other lexer
690 # check if we didn't define this extension as other lexer
689 extensions = rhodecode.EXTENSIONS and getattr(rhodecode.EXTENSIONS, 'EXTRA_LEXERS', None)
691 extensions = rhodecode.EXTENSIONS and getattr(rhodecode.EXTENSIONS, 'EXTRA_LEXERS', None)
690 if extensions and extension in rhodecode.EXTENSIONS.EXTRA_LEXERS:
692 if extensions and extension in rhodecode.EXTENSIONS.EXTRA_LEXERS:
691 _lexer_name = rhodecode.EXTENSIONS.EXTRA_LEXERS[extension]
693 _lexer_name = rhodecode.EXTENSIONS.EXTRA_LEXERS[extension]
692 return lexers.get_lexer_by_name(_lexer_name)
694 return lexers.get_lexer_by_name(_lexer_name)
693
695
694
696
695 #==============================================================================
697 #==============================================================================
696 # TEST FUNCTIONS AND CREATORS
698 # TEST FUNCTIONS AND CREATORS
697 #==============================================================================
699 #==============================================================================
698 def create_test_index(repo_location, config):
700 def create_test_index(repo_location, config):
699 """
701 """
700 Makes default test index.
702 Makes default test index.
701 """
703 """
702 try:
704 try:
703 import rc_testdata
705 import rc_testdata
704 except ImportError:
706 except ImportError:
705 raise ImportError('Failed to import rc_testdata, '
707 raise ImportError('Failed to import rc_testdata, '
706 'please make sure this package is installed from requirements_test.txt')
708 'please make sure this package is installed from requirements_test.txt')
707 rc_testdata.extract_search_index(
709 rc_testdata.extract_search_index(
708 'vcs_search_index', os.path.dirname(config['search.location']))
710 'vcs_search_index', os.path.dirname(config['search.location']))
709
711
710
712
711 def create_test_directory(test_path):
713 def create_test_directory(test_path):
712 """
714 """
713 Create test directory if it doesn't exist.
715 Create test directory if it doesn't exist.
714 """
716 """
715 if not os.path.isdir(test_path):
717 if not os.path.isdir(test_path):
716 log.debug('Creating testdir %s', test_path)
718 log.debug('Creating testdir %s', test_path)
717 os.makedirs(test_path)
719 os.makedirs(test_path)
718
720
719
721
720 def create_test_database(test_path, config):
722 def create_test_database(test_path, config):
721 """
723 """
722 Makes a fresh database.
724 Makes a fresh database.
723 """
725 """
724 from rhodecode.lib.db_manage import DbManage
726 from rhodecode.lib.db_manage import DbManage
725 from rhodecode.lib.utils2 import get_encryption_key
727 from rhodecode.lib.utils2 import get_encryption_key
726
728
727 # PART ONE create db
729 # PART ONE create db
728 dbconf = config['sqlalchemy.db1.url']
730 dbconf = config['sqlalchemy.db1.url']
729 enc_key = get_encryption_key(config)
731 enc_key = get_encryption_key(config)
730
732
731 log.debug('making test db %s', dbconf)
733 log.debug('making test db %s', dbconf)
732
734
733 dbmanage = DbManage(log_sql=False, dbconf=dbconf, root=config['here'],
735 dbmanage = DbManage(log_sql=False, dbconf=dbconf, root=config['here'],
734 tests=True, cli_args={'force_ask': True}, enc_key=enc_key)
736 tests=True, cli_args={'force_ask': True}, enc_key=enc_key)
735 dbmanage.create_tables(override=True)
737 dbmanage.create_tables(override=True)
736 dbmanage.set_db_version()
738 dbmanage.set_db_version()
737 # for tests dynamically set new root paths based on generated content
739 # for tests dynamically set new root paths based on generated content
738 dbmanage.create_settings(dbmanage.config_prompt(test_path))
740 dbmanage.create_settings(dbmanage.config_prompt(test_path))
739 dbmanage.create_default_user()
741 dbmanage.create_default_user()
740 dbmanage.create_test_admin_and_users()
742 dbmanage.create_test_admin_and_users()
741 dbmanage.create_permissions()
743 dbmanage.create_permissions()
742 dbmanage.populate_default_permissions()
744 dbmanage.populate_default_permissions()
743 Session().commit()
745 Session().commit()
744
746
745
747
746 def create_test_repositories(test_path, config):
748 def create_test_repositories(test_path, config):
747 """
749 """
748 Creates test repositories in the temporary directory. Repositories are
750 Creates test repositories in the temporary directory. Repositories are
749 extracted from archives within the rc_testdata package.
751 extracted from archives within the rc_testdata package.
750 """
752 """
751 import rc_testdata
753 import rc_testdata
752 from rhodecode.tests import HG_REPO, GIT_REPO, SVN_REPO
754 from rhodecode.tests import HG_REPO, GIT_REPO, SVN_REPO
753
755
754 log.debug('making test vcs repositories')
756 log.debug('making test vcs repositories')
755
757
756 idx_path = config['search.location']
758 idx_path = config['search.location']
757 data_path = config['cache_dir']
759 data_path = config['cache_dir']
758
760
759 # clean index and data
761 # clean index and data
760 if idx_path and os.path.exists(idx_path):
762 if idx_path and os.path.exists(idx_path):
761 log.debug('remove %s', idx_path)
763 log.debug('remove %s', idx_path)
762 shutil.rmtree(idx_path)
764 shutil.rmtree(idx_path)
763
765
764 if data_path and os.path.exists(data_path):
766 if data_path and os.path.exists(data_path):
765 log.debug('remove %s', data_path)
767 log.debug('remove %s', data_path)
766 shutil.rmtree(data_path)
768 shutil.rmtree(data_path)
767
769
768 rc_testdata.extract_hg_dump('vcs_test_hg', jn(test_path, HG_REPO))
770 rc_testdata.extract_hg_dump('vcs_test_hg', jn(test_path, HG_REPO))
769 rc_testdata.extract_git_dump('vcs_test_git', jn(test_path, GIT_REPO))
771 rc_testdata.extract_git_dump('vcs_test_git', jn(test_path, GIT_REPO))
770
772
771 # Note: Subversion is in the process of being integrated with the system,
773 # Note: Subversion is in the process of being integrated with the system,
772 # until we have a properly packed version of the test svn repository, this
774 # until we have a properly packed version of the test svn repository, this
773 # tries to copy over the repo from a package "rc_testdata"
775 # tries to copy over the repo from a package "rc_testdata"
774 svn_repo_path = rc_testdata.get_svn_repo_archive()
776 svn_repo_path = rc_testdata.get_svn_repo_archive()
775 with tarfile.open(svn_repo_path) as tar:
777 with tarfile.open(svn_repo_path) as tar:
776 tar.extractall(jn(test_path, SVN_REPO))
778 tar.extractall(jn(test_path, SVN_REPO))
777
779
778
780
779 def password_changed(auth_user, session):
781 def password_changed(auth_user, session):
780 # Never report password change in case of default user or anonymous user.
782 # Never report password change in case of default user or anonymous user.
781 if auth_user.username == User.DEFAULT_USER or auth_user.user_id is None:
783 if auth_user.username == User.DEFAULT_USER or auth_user.user_id is None:
782 return False
784 return False
783
785
784 password_hash = md5(safe_bytes(auth_user.password)) if auth_user.password else None
786 password_hash = md5(safe_bytes(auth_user.password)) if auth_user.password else None
785 rhodecode_user = session.get('rhodecode_user', {})
787 rhodecode_user = session.get('rhodecode_user', {})
786 session_password_hash = rhodecode_user.get('password', '')
788 session_password_hash = rhodecode_user.get('password', '')
787 return password_hash != session_password_hash
789 return password_hash != session_password_hash
788
790
789
791
790 def read_opensource_licenses():
792 def read_opensource_licenses():
791 global _license_cache
793 global _license_cache
792
794
793 if not _license_cache:
795 if not _license_cache:
794 licenses = pkg_resources.resource_string(
796 licenses = pkg_resources.resource_string(
795 'rhodecode', 'config/licenses.json')
797 'rhodecode', 'config/licenses.json')
796 _license_cache = json.loads(licenses)
798 _license_cache = json.loads(licenses)
797
799
798 return _license_cache
800 return _license_cache
799
801
800
802
801 def generate_platform_uuid():
803 def generate_platform_uuid():
802 """
804 """
803 Generates platform UUID based on its name
805 Generates platform UUID based on its name
804 """
806 """
805 import platform
807 import platform
806
808
807 try:
809 try:
808 uuid_list = [platform.platform()]
810 uuid_list = [platform.platform()]
809 return sha256_safe(':'.join(uuid_list))
811 return sha256_safe(':'.join(uuid_list))
810 except Exception as e:
812 except Exception as e:
811 log.error('Failed to generate host uuid: %s', e)
813 log.error('Failed to generate host uuid: %s', e)
812 return 'UNDEFINED'
814 return 'UNDEFINED'
813
815
814
816
815 def send_test_email(recipients, email_body='TEST EMAIL'):
817 def send_test_email(recipients, email_body='TEST EMAIL'):
816 """
818 """
817 Simple code for generating test emails.
819 Simple code for generating test emails.
818 Usage::
820 Usage::
819
821
820 from rhodecode.lib import utils
822 from rhodecode.lib import utils
821 utils.send_test_email()
823 utils.send_test_email()
822 """
824 """
823 from rhodecode.lib.celerylib import tasks, run_task
825 from rhodecode.lib.celerylib import tasks, run_task
824
826
825 email_body = email_body_plaintext = email_body
827 email_body = email_body_plaintext = email_body
826 subject = f'SUBJECT FROM: {socket.gethostname()}'
828 subject = f'SUBJECT FROM: {socket.gethostname()}'
827 tasks.send_email(recipients, subject, email_body_plaintext, email_body)
829 tasks.send_email(recipients, subject, email_body_plaintext, email_body)
828
830
829
831
830 def call_service_api(ini_path, payload):
832 def call_service_api(ini_path, payload):
831 config = get_config(ini_path)
833 config = get_config(ini_path)
832 try:
834 try:
833 host = config.get('app:main', 'app.service_api.host')
835 host = config.get('app:main', 'app.service_api.host')
834 except NoOptionError:
836 except NoOptionError:
835 raise ImproperlyConfiguredError(
837 raise ImproperlyConfiguredError(
836 "app.service_api.host is missing. "
838 "app.service_api.host is missing. "
837 "Please ensure that app.service_api.host and app.service_api.token are "
839 "Please ensure that app.service_api.host and app.service_api.token are "
838 "defined inside of .ini configuration file."
840 "defined inside of .ini configuration file."
839 )
841 )
840 try:
842 try:
841 api_url = config.get('app:main', 'rhodecode.api.url')
843 api_url = config.get('app:main', 'rhodecode.api.url')
842 except NoOptionError:
844 except NoOptionError:
843 from rhodecode import api
845 from rhodecode import api
844 log.debug('Cannot find rhodecode.api.url, setting API URL to default value')
846 log.debug('Cannot find rhodecode.api.url, setting API URL to default value')
845 api_url = api.DEFAULT_URL
847 api_url = api.DEFAULT_URL
846
848
847 payload.update({
849 payload.update({
848 'id': 'service',
850 'id': 'service',
849 'auth_token': config.get('app:main', 'app.service_api.token')
851 'auth_token': config.get('app:main', 'app.service_api.token')
850 })
852 })
851
853
852 response = CurlSession().post(f'{host}{api_url}', json.dumps(payload))
854 response = CurlSession().post(urllib.parse.urljoin(host, api_url), json.dumps(payload))
853
855
854 if response.status_code != 200:
856 if response.status_code != 200:
855 raise Exception("Service API responded with error")
857 raise Exception("Service API responded with error")
856
858
857 return json.loads(response.content)['result']
859 return json.loads(response.content)['result']
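For context, a usage sketch of the helper touched by this change, assuming the module shown here is rhodecode.lib.utils; the .ini path and the method name in the payload are hypothetical and only illustrate the expected shape (call_service_api fills in 'id' and 'auth_token' from the app.service_api.* settings itself):

    from rhodecode.lib.utils import call_service_api

    # hypothetical .ini path and RPC method; the helper reads
    # app.service_api.host / app.service_api.token from this file
    result = call_service_api(
        '/etc/rhodecode/conf/rhodecode.ini',
        {'method': 'some_service_method', 'args': {}},
    )
    print(result)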