utils: ported to python3 and new app
super-admin
r5076:e00a2a48 default
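
A recurring theme in the port below is that Python 3's `hashlib` only accepts bytes: `generate_platform_uuid()` switches from `hashlib.sha256(...).hexdigest()` to the imported `sha256_safe()`, and `password_changed()` now hashes `md5(safe_bytes(auth_user.password))`. A minimal standalone sketch of that pattern, assuming `sha256_safe` simply encodes `str` input before hashing (the real helper lives in `rhodecode.lib.hash_utils` and is not part of this diff):

```python
import hashlib


def sha256_safe(value) -> str:
    # Accept str or bytes; Python 3 hashlib raises TypeError on str input,
    # so encode to UTF-8 first. Illustrative stand-in for the imported helper.
    if isinstance(value, str):
        value = value.encode('utf-8')
    return hashlib.sha256(value).hexdigest()


# Python 2 tolerated hashlib.sha256('platform-string'); on Python 3 the same
# call fails, which is why generate_platform_uuid() below now delegates to
# sha256_safe(':'.join(uuid_list)) instead of calling hashlib directly.
print(sha256_safe('Linux-5.15-x86_64'))
```
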
@@ -1,798 +1,809 b''
1
1
2 # Copyright (C) 2010-2020 RhodeCode GmbH
2 # Copyright (C) 2010-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software: you can redistribute it and/or modify
4 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
5 # it under the terms of the GNU Affero General Public License, version 3
6 # (only), as published by the Free Software Foundation.
6 # (only), as published by the Free Software Foundation.
7 #
7 #
8 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
11 # GNU General Public License for more details.
12 #
12 #
13 # You should have received a copy of the GNU Affero General Public License
13 # You should have received a copy of the GNU Affero General Public License
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 #
15 #
16 # This program is dual-licensed. If you wish to learn more about the
16 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19
19
20 """
20 """
21 Utilities library for RhodeCode
21 Utilities library for RhodeCode
22 """
22 """
23
23
24 import datetime
24 import datetime
25 import decorator
25 import decorator
26 import json
27 import logging
26 import logging
28 import os
27 import os
29 import re
28 import re
30 import sys
29 import sys
31 import shutil
30 import shutil
32 import socket
31 import socket
33 import tempfile
32 import tempfile
34 import traceback
33 import traceback
35 import tarfile
34 import tarfile
36 import warnings
35 import warnings
37 import hashlib
38 from os.path import join as jn
36 from os.path import join as jn
39
37
40 import paste
38 import paste
41 import pkg_resources
39 import pkg_resources
42 from webhelpers2.text import collapse, remove_formatting
40 from webhelpers2.text import collapse, strip_tags, convert_accented_entities, convert_misc_entities
41
43 from mako import exceptions
42 from mako import exceptions
44 from pyramid.threadlocal import get_current_registry
45
43
44 from rhodecode.lib.hash_utils import sha256_safe, md5, sha1
45 from rhodecode.lib.str_utils import safe_bytes, safe_str
46 from rhodecode.lib.vcs.backends.base import Config
46 from rhodecode.lib.vcs.backends.base import Config
47 from rhodecode.lib.vcs.exceptions import VCSError
47 from rhodecode.lib.vcs.exceptions import VCSError
48 from rhodecode.lib.vcs.utils.helpers import get_scm, get_scm_backend
48 from rhodecode.lib.vcs.utils.helpers import get_scm, get_scm_backend
49 from rhodecode.lib.utils2 import (
49 from rhodecode.lib.ext_json import sjson as json
50 safe_str, safe_unicode, get_current_rhodecode_user, md5, sha1)
51 from rhodecode.model import meta
50 from rhodecode.model import meta
52 from rhodecode.model.db import (
51 from rhodecode.model.db import (
53 Repository, User, RhodeCodeUi, UserLog, RepoGroup, UserGroup)
52 Repository, User, RhodeCodeUi, UserLog, RepoGroup, UserGroup)
54 from rhodecode.model.meta import Session
53 from rhodecode.model.meta import Session
55
54
56
55
57 log = logging.getLogger(__name__)
56 log = logging.getLogger(__name__)
58
57
59 REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')
58 REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')
60
59
61 # String which contains characters that are not allowed in slug names for
60 # String which contains characters that are not allowed in slug names for
62 # repositories or repository groups. It is properly escaped to use it in
61 # repositories or repository groups. It is properly escaped to use it in
63 # regular expressions.
62 # regular expressions.
64 SLUG_BAD_CHARS = re.escape('`?=[]\;\'"<>,/~!@#$%^&*()+{}|:')
63 SLUG_BAD_CHARS = re.escape(r'`?=[]\;\'"<>,/~!@#$%^&*()+{}|:')
65
64
66 # Regex that matches forbidden characters in repo/group slugs.
65 # Regex that matches forbidden characters in repo/group slugs.
67 SLUG_BAD_CHAR_RE = re.compile('[{}\x00-\x08\x0b-\x0c\x0e-\x1f]'.format(SLUG_BAD_CHARS))
66 SLUG_BAD_CHAR_RE = re.compile(r'[{}\x00-\x08\x0b-\x0c\x0e-\x1f]'.format(SLUG_BAD_CHARS))
68
67
69 # Regex that matches allowed characters in repo/group slugs.
68 # Regex that matches allowed characters in repo/group slugs.
70 SLUG_GOOD_CHAR_RE = re.compile('[^{}]'.format(SLUG_BAD_CHARS))
69 SLUG_GOOD_CHAR_RE = re.compile(r'[^{}]'.format(SLUG_BAD_CHARS))
71
70
72 # Regex that matches whole repo/group slugs.
71 # Regex that matches whole repo/group slugs.
73 SLUG_RE = re.compile('[^{}]+'.format(SLUG_BAD_CHARS))
72 SLUG_RE = re.compile(r'[^{}]+'.format(SLUG_BAD_CHARS))
74
73
75 _license_cache = None
74 _license_cache = None
76
75
77
76
78 def repo_name_slug(value):
77 def repo_name_slug(value):
79 """
78 """
80 Return slug of name of repository
79 Return slug of name of repository
81 This function is called on each creation/modification
80 This function is called on each creation/modification
82 of repository to prevent bad names in repo
81 of repository to prevent bad names in repo
83 """
82 """
83
84 replacement_char = '-'
84 replacement_char = '-'
85
85
86 slug = remove_formatting(value)
86 slug = strip_tags(value)
87 slug = convert_accented_entities(slug)
88 slug = convert_misc_entities(slug)
89
87 slug = SLUG_BAD_CHAR_RE.sub('', slug)
90 slug = SLUG_BAD_CHAR_RE.sub('', slug)
88 slug = re.sub('[\s]+', '-', slug)
91 slug = re.sub(r'[\s]+', '-', slug)
89 slug = collapse(slug, replacement_char)
92 slug = collapse(slug, replacement_char)
93
90 return slug
94 return slug
91
95
92
96
93 #==============================================================================
97 #==============================================================================
94 # PERM DECORATOR HELPERS FOR EXTRACTING NAMES FOR PERM CHECKS
98 # PERM DECORATOR HELPERS FOR EXTRACTING NAMES FOR PERM CHECKS
95 #==============================================================================
99 #==============================================================================
96 def get_repo_slug(request):
100 def get_repo_slug(request):
97 _repo = ''
101 _repo = ''
98
102
99 if hasattr(request, 'db_repo'):
103 if hasattr(request, 'db_repo_name'):
100 # if our requests has set db reference use it for name, this
104 # if our requests has set db reference use it for name, this
101 # translates the example.com/_<id> into proper repo names
105 # translates the example.com/_<id> into proper repo names
102 _repo = request.db_repo.repo_name
106 _repo = request.db_repo_name
103 elif getattr(request, 'matchdict', None):
107 elif getattr(request, 'matchdict', None):
104 # pyramid
108 # pyramid
105 _repo = request.matchdict.get('repo_name')
109 _repo = request.matchdict.get('repo_name')
106
110
107 if _repo:
111 if _repo:
108 _repo = _repo.rstrip('/')
112 _repo = _repo.rstrip('/')
109 return _repo
113 return _repo
110
114
111
115
112 def get_repo_group_slug(request):
116 def get_repo_group_slug(request):
113 _group = ''
117 _group = ''
114 if hasattr(request, 'db_repo_group'):
118 if hasattr(request, 'db_repo_group'):
115 # if our requests has set db reference use it for name, this
119 # if our requests has set db reference use it for name, this
116 # translates the example.com/_<id> into proper repo group names
120 # translates the example.com/_<id> into proper repo group names
117 _group = request.db_repo_group.group_name
121 _group = request.db_repo_group.group_name
118 elif getattr(request, 'matchdict', None):
122 elif getattr(request, 'matchdict', None):
119 # pyramid
123 # pyramid
120 _group = request.matchdict.get('repo_group_name')
124 _group = request.matchdict.get('repo_group_name')
121
125
122 if _group:
126 if _group:
123 _group = _group.rstrip('/')
127 _group = _group.rstrip('/')
124 return _group
128 return _group
125
129
126
130
127 def get_user_group_slug(request):
131 def get_user_group_slug(request):
128 _user_group = ''
132 _user_group = ''
129
133
130 if hasattr(request, 'db_user_group'):
134 if hasattr(request, 'db_user_group'):
131 _user_group = request.db_user_group.users_group_name
135 _user_group = request.db_user_group.users_group_name
132 elif getattr(request, 'matchdict', None):
136 elif getattr(request, 'matchdict', None):
133 # pyramid
137 # pyramid
134 _user_group = request.matchdict.get('user_group_id')
138 _user_group = request.matchdict.get('user_group_id')
135 _user_group_name = request.matchdict.get('user_group_name')
139 _user_group_name = request.matchdict.get('user_group_name')
136 try:
140 try:
137 if _user_group:
141 if _user_group:
138 _user_group = UserGroup.get(_user_group)
142 _user_group = UserGroup.get(_user_group)
139 elif _user_group_name:
143 elif _user_group_name:
140 _user_group = UserGroup.get_by_group_name(_user_group_name)
144 _user_group = UserGroup.get_by_group_name(_user_group_name)
141
145
142 if _user_group:
146 if _user_group:
143 _user_group = _user_group.users_group_name
147 _user_group = _user_group.users_group_name
144 except Exception:
148 except Exception:
145 log.exception('Failed to get user group by id and name')
149 log.exception('Failed to get user group by id and name')
146 # catch all failures here
150 # catch all failures here
147 return None
151 return None
148
152
149 return _user_group
153 return _user_group
150
154
151
155
152 def get_filesystem_repos(path, recursive=False, skip_removed_repos=True):
156 def get_filesystem_repos(path, recursive=False, skip_removed_repos=True):
153 """
157 """
154 Scans given path for repos and return (name,(type,path)) tuple
158 Scans given path for repos and return (name,(type,path)) tuple
155
159
156 :param path: path to scan for repositories
160 :param path: path to scan for repositories
157 :param recursive: recursive search and return names with subdirs in front
161 :param recursive: recursive search and return names with subdirs in front
158 """
162 """
159
163
160 # remove ending slash for better results
164 # remove ending slash for better results
161 path = path.rstrip(os.sep)
165 path = path.rstrip(os.sep)
162 log.debug('now scanning in %s location recursive:%s...', path, recursive)
166 log.debug('now scanning in %s location recursive:%s...', path, recursive)
163
167
164 def _get_repos(p):
168 def _get_repos(p):
165 dirpaths = _get_dirpaths(p)
169 dirpaths = get_dirpaths(p)
166 if not _is_dir_writable(p):
170 if not _is_dir_writable(p):
167 log.warning('repo path without write access: %s', p)
171 log.warning('repo path without write access: %s', p)
168
172
169 for dirpath in dirpaths:
173 for dirpath in dirpaths:
170 if os.path.isfile(os.path.join(p, dirpath)):
174 if os.path.isfile(os.path.join(p, dirpath)):
171 continue
175 continue
172 cur_path = os.path.join(p, dirpath)
176 cur_path = os.path.join(p, dirpath)
173
177
174 # skip removed repos
178 # skip removed repos
175 if skip_removed_repos and REMOVED_REPO_PAT.match(dirpath):
179 if skip_removed_repos and REMOVED_REPO_PAT.match(dirpath):
176 continue
180 continue
177
181
178 #skip .<somethin> dirs
182 #skip .<somethin> dirs
179 if dirpath.startswith('.'):
183 if dirpath.startswith('.'):
180 continue
184 continue
181
185
182 try:
186 try:
183 scm_info = get_scm(cur_path)
187 scm_info = get_scm(cur_path)
184 yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
188 yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
185 except VCSError:
189 except VCSError:
186 if not recursive:
190 if not recursive:
187 continue
191 continue
188 #check if this dir containts other repos for recursive scan
192 #check if this dir containts other repos for recursive scan
189 rec_path = os.path.join(p, dirpath)
193 rec_path = os.path.join(p, dirpath)
190 if os.path.isdir(rec_path):
194 if os.path.isdir(rec_path):
191 for inner_scm in _get_repos(rec_path):
195 for inner_scm in _get_repos(rec_path):
192 yield inner_scm
196 yield inner_scm
193
197
194 return _get_repos(path)
198 return _get_repos(path)
195
199
196
200
197 def _get_dirpaths(p):
201 def get_dirpaths(p: str) -> list:
198 try:
202 try:
199 # OS-independable way of checking if we have at least read-only
203 # OS-independable way of checking if we have at least read-only
200 # access or not.
204 # access or not.
201 dirpaths = os.listdir(p)
205 dirpaths = os.listdir(p)
202 except OSError:
206 except OSError:
203 log.warning('ignoring repo path without read access: %s', p)
207 log.warning('ignoring repo path without read access: %s', p)
204 return []
208 return []
205
209
206 # os.listpath has a tweak: If a unicode is passed into it, then it tries to
210 # os.listpath has a tweak: If a unicode is passed into it, then it tries to
207 # decode paths and suddenly returns unicode objects itself. The items it
211 # decode paths and suddenly returns unicode objects itself. The items it
208 # cannot decode are returned as strings and cause issues.
212 # cannot decode are returned as strings and cause issues.
209 #
213 #
210 # Those paths are ignored here until a solid solution for path handling has
214 # Those paths are ignored here until a solid solution for path handling has
211 # been built.
215 # been built.
212 expected_type = type(p)
216 expected_type = type(p)
213
217
214 def _has_correct_type(item):
218 def _has_correct_type(item):
215 if type(item) is not expected_type:
219 if type(item) is not expected_type:
216 log.error(
220 log.error(
217 "Ignoring path %s since it cannot be decoded into unicode.",
221 "Ignoring path %s since it cannot be decoded into str.",
218 # Using "repr" to make sure that we see the byte value in case
222 # Using "repr" to make sure that we see the byte value in case
219 # of support.
223 # of support.
220 repr(item))
224 repr(item))
221 return False
225 return False
222 return True
226 return True
223
227
224 dirpaths = [item for item in dirpaths if _has_correct_type(item)]
228 dirpaths = [item for item in dirpaths if _has_correct_type(item)]
225
229
226 return dirpaths
230 return dirpaths
227
231
228
232
229 def _is_dir_writable(path):
233 def _is_dir_writable(path):
230 """
234 """
231 Probe if `path` is writable.
235 Probe if `path` is writable.
232
236
233 Due to trouble on Cygwin / Windows, this is actually probing if it is
237 Due to trouble on Cygwin / Windows, this is actually probing if it is
234 possible to create a file inside of `path`, stat does not produce reliable
238 possible to create a file inside of `path`, stat does not produce reliable
235 results in this case.
239 results in this case.
236 """
240 """
237 try:
241 try:
238 with tempfile.TemporaryFile(dir=path):
242 with tempfile.TemporaryFile(dir=path):
239 pass
243 pass
240 except OSError:
244 except OSError:
241 return False
245 return False
242 return True
246 return True
243
247
244
248
245 def is_valid_repo(repo_name, base_path, expect_scm=None, explicit_scm=None, config=None):
249 def is_valid_repo(repo_name, base_path, expect_scm=None, explicit_scm=None, config=None):
246 """
250 """
247 Returns True if given path is a valid repository False otherwise.
251 Returns True if given path is a valid repository False otherwise.
248 If expect_scm param is given also, compare if given scm is the same
252 If expect_scm param is given also, compare if given scm is the same
249 as expected from scm parameter. If explicit_scm is given don't try to
253 as expected from scm parameter. If explicit_scm is given don't try to
250 detect the scm, just use the given one to check if repo is valid
254 detect the scm, just use the given one to check if repo is valid
251
255
252 :param repo_name:
256 :param repo_name:
253 :param base_path:
257 :param base_path:
254 :param expect_scm:
258 :param expect_scm:
255 :param explicit_scm:
259 :param explicit_scm:
256 :param config:
260 :param config:
257
261
258 :return True: if given path is a valid repository
262 :return True: if given path is a valid repository
259 """
263 """
260 full_path = os.path.join(safe_str(base_path), safe_str(repo_name))
264 full_path = os.path.join(safe_str(base_path), safe_str(repo_name))
261 log.debug('Checking if `%s` is a valid path for repository. '
265 log.debug('Checking if `%s` is a valid path for repository. '
262 'Explicit type: %s', repo_name, explicit_scm)
266 'Explicit type: %s', repo_name, explicit_scm)
263
267
264 try:
268 try:
265 if explicit_scm:
269 if explicit_scm:
266 detected_scms = [get_scm_backend(explicit_scm)(
270 detected_scms = [get_scm_backend(explicit_scm)(
267 full_path, config=config).alias]
271 full_path, config=config).alias]
268 else:
272 else:
269 detected_scms = get_scm(full_path)
273 detected_scms = get_scm(full_path)
270
274
271 if expect_scm:
275 if expect_scm:
272 return detected_scms[0] == expect_scm
276 return detected_scms[0] == expect_scm
273 log.debug('path: %s is an vcs object:%s', full_path, detected_scms)
277 log.debug('path: %s is an vcs object:%s', full_path, detected_scms)
274 return True
278 return True
275 except VCSError:
279 except VCSError:
276 log.debug('path: %s is not a valid repo !', full_path)
280 log.debug('path: %s is not a valid repo !', full_path)
277 return False
281 return False
278
282
279
283
280 def is_valid_repo_group(repo_group_name, base_path, skip_path_check=False):
284 def is_valid_repo_group(repo_group_name, base_path, skip_path_check=False):
281 """
285 """
282 Returns True if given path is a repository group, False otherwise
286 Returns True if given path is a repository group, False otherwise
283
287
284 :param repo_name:
288 :param repo_name:
285 :param base_path:
289 :param base_path:
286 """
290 """
287 full_path = os.path.join(safe_str(base_path), safe_str(repo_group_name))
291 full_path = os.path.join(safe_str(base_path), safe_str(repo_group_name))
288 log.debug('Checking if `%s` is a valid path for repository group',
292 log.debug('Checking if `%s` is a valid path for repository group',
289 repo_group_name)
293 repo_group_name)
290
294
291 # check if it's not a repo
295 # check if it's not a repo
292 if is_valid_repo(repo_group_name, base_path):
296 if is_valid_repo(repo_group_name, base_path):
293 log.debug('Repo called %s exist, it is not a valid repo group', repo_group_name)
297 log.debug('Repo called %s exist, it is not a valid repo group', repo_group_name)
294 return False
298 return False
295
299
296 try:
300 try:
297 # we need to check bare git repos at higher level
301 # we need to check bare git repos at higher level
298 # since we might match branches/hooks/info/objects or possible
302 # since we might match branches/hooks/info/objects or possible
299 # other things inside bare git repo
303 # other things inside bare git repo
300 maybe_repo = os.path.dirname(full_path)
304 maybe_repo = os.path.dirname(full_path)
301 if maybe_repo == base_path:
305 if maybe_repo == base_path:
302 # skip root level repo check, we know root location CANNOT BE a repo group
306 # skip root level repo check, we know root location CANNOT BE a repo group
303 return False
307 return False
304
308
305 scm_ = get_scm(maybe_repo)
309 scm_ = get_scm(maybe_repo)
306 log.debug('path: %s is a vcs object:%s, not valid repo group', full_path, scm_)
310 log.debug('path: %s is a vcs object:%s, not valid repo group', full_path, scm_)
307 return False
311 return False
308 except VCSError:
312 except VCSError:
309 pass
313 pass
310
314
311 # check if it's a valid path
315 # check if it's a valid path
312 if skip_path_check or os.path.isdir(full_path):
316 if skip_path_check or os.path.isdir(full_path):
313 log.debug('path: %s is a valid repo group !', full_path)
317 log.debug('path: %s is a valid repo group !', full_path)
314 return True
318 return True
315
319
316 log.debug('path: %s is not a valid repo group !', full_path)
320 log.debug('path: %s is not a valid repo group !', full_path)
317 return False
321 return False
318
322
319
323
320 def ask_ok(prompt, retries=4, complaint='[y]es or [n]o please!'):
324 def ask_ok(prompt, retries=4, complaint='[y]es or [n]o please!'):
321 while True:
325 while True:
322 ok = eval(input(prompt))
326 ok = eval(input(prompt))
323 if ok.lower() in ('y', 'ye', 'yes'):
327 if ok.lower() in ('y', 'ye', 'yes'):
324 return True
328 return True
325 if ok.lower() in ('n', 'no', 'nop', 'nope'):
329 if ok.lower() in ('n', 'no', 'nop', 'nope'):
326 return False
330 return False
327 retries = retries - 1
331 retries = retries - 1
328 if retries < 0:
332 if retries < 0:
329 raise IOError
333 raise IOError
330 print(complaint)
334 print(complaint)
331
335
332 # propagated from mercurial documentation
336 # propagated from mercurial documentation
333 ui_sections = [
337 ui_sections = [
334 'alias', 'auth',
338 'alias', 'auth',
335 'decode/encode', 'defaults',
339 'decode/encode', 'defaults',
336 'diff', 'email',
340 'diff', 'email',
337 'extensions', 'format',
341 'extensions', 'format',
338 'merge-patterns', 'merge-tools',
342 'merge-patterns', 'merge-tools',
339 'hooks', 'http_proxy',
343 'hooks', 'http_proxy',
340 'smtp', 'patch',
344 'smtp', 'patch',
341 'paths', 'profiling',
345 'paths', 'profiling',
342 'server', 'trusted',
346 'server', 'trusted',
343 'ui', 'web', ]
347 'ui', 'web', ]
344
348
345
349
346 def config_data_from_db(clear_session=True, repo=None):
350 def config_data_from_db(clear_session=True, repo=None):
347 """
351 """
348 Read the configuration data from the database and return configuration
352 Read the configuration data from the database and return configuration
349 tuples.
353 tuples.
350 """
354 """
351 from rhodecode.model.settings import VcsSettingsModel
355 from rhodecode.model.settings import VcsSettingsModel
352
356
353 config = []
357 config = []
354
358
355 sa = meta.Session()
359 sa = meta.Session()
356 settings_model = VcsSettingsModel(repo=repo, sa=sa)
360 settings_model = VcsSettingsModel(repo=repo, sa=sa)
357
361
358 ui_settings = settings_model.get_ui_settings()
362 ui_settings = settings_model.get_ui_settings()
359
363
360 ui_data = []
364 ui_data = []
361 for setting in ui_settings:
365 for setting in ui_settings:
362 if setting.active:
366 if setting.active:
363 ui_data.append((setting.section, setting.key, setting.value))
367 ui_data.append((setting.section, setting.key, setting.value))
364 config.append((
368 config.append((
365 safe_str(setting.section), safe_str(setting.key),
369 safe_str(setting.section), safe_str(setting.key),
366 safe_str(setting.value)))
370 safe_str(setting.value)))
367 if setting.key == 'push_ssl':
371 if setting.key == 'push_ssl':
368 # force set push_ssl requirement to False, rhodecode
372 # force set push_ssl requirement to False, rhodecode
369 # handles that
373 # handles that
370 config.append((
374 config.append((
371 safe_str(setting.section), safe_str(setting.key), False))
375 safe_str(setting.section), safe_str(setting.key), False))
372 log.debug(
376 log.debug(
373 'settings ui from db@repo[%s]: %s',
377 'settings ui from db@repo[%s]: %s',
374 repo,
378 repo,
375 ','.join(map(lambda s: '[{}] {}={}'.format(*s), ui_data)))
379 ','.join(['[{}] {}={}'.format(*s) for s in ui_data]))
376 if clear_session:
380 if clear_session:
377 meta.Session.remove()
381 meta.Session.remove()
378
382
379 # TODO: mikhail: probably it makes no sense to re-read hooks information.
383 # TODO: mikhail: probably it makes no sense to re-read hooks information.
380 # It's already there and activated/deactivated
384 # It's already there and activated/deactivated
381 skip_entries = []
385 skip_entries = []
382 enabled_hook_classes = get_enabled_hook_classes(ui_settings)
386 enabled_hook_classes = get_enabled_hook_classes(ui_settings)
383 if 'pull' not in enabled_hook_classes:
387 if 'pull' not in enabled_hook_classes:
384 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PULL))
388 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PULL))
385 if 'push' not in enabled_hook_classes:
389 if 'push' not in enabled_hook_classes:
386 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PUSH))
390 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PUSH))
387 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRETX_PUSH))
391 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRETX_PUSH))
388 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PUSH_KEY))
392 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PUSH_KEY))
389
393
390 config = [entry for entry in config if entry[:2] not in skip_entries]
394 config = [entry for entry in config if entry[:2] not in skip_entries]
391
395
392 return config
396 return config
393
397
394
398
395 def make_db_config(clear_session=True, repo=None):
399 def make_db_config(clear_session=True, repo=None):
396 """
400 """
397 Create a :class:`Config` instance based on the values in the database.
401 Create a :class:`Config` instance based on the values in the database.
398 """
402 """
399 config = Config()
403 config = Config()
400 config_data = config_data_from_db(clear_session=clear_session, repo=repo)
404 config_data = config_data_from_db(clear_session=clear_session, repo=repo)
401 for section, option, value in config_data:
405 for section, option, value in config_data:
402 config.set(section, option, value)
406 config.set(section, option, value)
403 return config
407 return config
404
408
405
409
406 def get_enabled_hook_classes(ui_settings):
410 def get_enabled_hook_classes(ui_settings):
407 """
411 """
408 Return the enabled hook classes.
412 Return the enabled hook classes.
409
413
410 :param ui_settings: List of ui_settings as returned
414 :param ui_settings: List of ui_settings as returned
411 by :meth:`VcsSettingsModel.get_ui_settings`
415 by :meth:`VcsSettingsModel.get_ui_settings`
412
416
413 :return: a list with the enabled hook classes. The order is not guaranteed.
417 :return: a list with the enabled hook classes. The order is not guaranteed.
414 :rtype: list
418 :rtype: list
415 """
419 """
416 enabled_hooks = []
420 enabled_hooks = []
417 active_hook_keys = [
421 active_hook_keys = [
418 key for section, key, value, active in ui_settings
422 key for section, key, value, active in ui_settings
419 if section == 'hooks' and active]
423 if section == 'hooks' and active]
420
424
421 hook_names = {
425 hook_names = {
422 RhodeCodeUi.HOOK_PUSH: 'push',
426 RhodeCodeUi.HOOK_PUSH: 'push',
423 RhodeCodeUi.HOOK_PULL: 'pull',
427 RhodeCodeUi.HOOK_PULL: 'pull',
424 RhodeCodeUi.HOOK_REPO_SIZE: 'repo_size'
428 RhodeCodeUi.HOOK_REPO_SIZE: 'repo_size'
425 }
429 }
426
430
427 for key in active_hook_keys:
431 for key in active_hook_keys:
428 hook = hook_names.get(key)
432 hook = hook_names.get(key)
429 if hook:
433 if hook:
430 enabled_hooks.append(hook)
434 enabled_hooks.append(hook)
431
435
432 return enabled_hooks
436 return enabled_hooks
433
437
434
438
435 def set_rhodecode_config(config):
439 def set_rhodecode_config(config):
436 """
440 """
437 Updates pyramid config with new settings from database
441 Updates pyramid config with new settings from database
438
442
439 :param config:
443 :param config:
440 """
444 """
441 from rhodecode.model.settings import SettingsModel
445 from rhodecode.model.settings import SettingsModel
442 app_settings = SettingsModel().get_all_settings()
446 app_settings = SettingsModel().get_all_settings()
443
447
444 for k, v in app_settings.items():
448 for k, v in list(app_settings.items()):
445 config[k] = v
449 config[k] = v
446
450
447
451
448 def get_rhodecode_realm():
452 def get_rhodecode_realm():
449 """
453 """
450 Return the rhodecode realm from database.
454 Return the rhodecode realm from database.
451 """
455 """
452 from rhodecode.model.settings import SettingsModel
456 from rhodecode.model.settings import SettingsModel
453 realm = SettingsModel().get_setting_by_name('realm')
457 realm = SettingsModel().get_setting_by_name('realm')
454 return safe_str(realm.app_settings_value)
458 return safe_str(realm.app_settings_value)
455
459
456
460
457 def get_rhodecode_base_path():
461 def get_rhodecode_base_path():
458 """
462 """
459 Returns the base path. The base path is the filesystem path which points
463 Returns the base path. The base path is the filesystem path which points
460 to the repository store.
464 to the repository store.
461 """
465 """
462 from rhodecode.model.settings import SettingsModel
466
463 paths_ui = SettingsModel().get_ui_by_section_and_key('paths', '/')
467 import rhodecode
464 return safe_str(paths_ui.ui_value)
468 return rhodecode.CONFIG['default_base_path']
465
469
466
470
467 def map_groups(path):
471 def map_groups(path):
468 """
472 """
469 Given a full path to a repository, create all nested groups that this
473 Given a full path to a repository, create all nested groups that this
470 repo is inside. This function creates parent-child relationships between
474 repo is inside. This function creates parent-child relationships between
471 groups and creates default perms for all new groups.
475 groups and creates default perms for all new groups.
472
476
473 :param paths: full path to repository
477 :param paths: full path to repository
474 """
478 """
475 from rhodecode.model.repo_group import RepoGroupModel
479 from rhodecode.model.repo_group import RepoGroupModel
476 sa = meta.Session()
480 sa = meta.Session()
477 groups = path.split(Repository.NAME_SEP)
481 groups = path.split(Repository.NAME_SEP)
478 parent = None
482 parent = None
479 group = None
483 group = None
480
484
481 # last element is repo in nested groups structure
485 # last element is repo in nested groups structure
482 groups = groups[:-1]
486 groups = groups[:-1]
483 rgm = RepoGroupModel(sa)
487 rgm = RepoGroupModel(sa)
484 owner = User.get_first_super_admin()
488 owner = User.get_first_super_admin()
485 for lvl, group_name in enumerate(groups):
489 for lvl, group_name in enumerate(groups):
486 group_name = '/'.join(groups[:lvl] + [group_name])
490 group_name = '/'.join(groups[:lvl] + [group_name])
487 group = RepoGroup.get_by_group_name(group_name)
491 group = RepoGroup.get_by_group_name(group_name)
488 desc = '%s group' % group_name
492 desc = '%s group' % group_name
489
493
490 # skip folders that are now removed repos
494 # skip folders that are now removed repos
491 if REMOVED_REPO_PAT.match(group_name):
495 if REMOVED_REPO_PAT.match(group_name):
492 break
496 break
493
497
494 if group is None:
498 if group is None:
495 log.debug('creating group level: %s group_name: %s',
499 log.debug('creating group level: %s group_name: %s',
496 lvl, group_name)
500 lvl, group_name)
497 group = RepoGroup(group_name, parent)
501 group = RepoGroup(group_name, parent)
498 group.group_description = desc
502 group.group_description = desc
499 group.user = owner
503 group.user = owner
500 sa.add(group)
504 sa.add(group)
501 perm_obj = rgm._create_default_perms(group)
505 perm_obj = rgm._create_default_perms(group)
502 sa.add(perm_obj)
506 sa.add(perm_obj)
503 sa.flush()
507 sa.flush()
504
508
505 parent = group
509 parent = group
506 return group
510 return group
507
511
508
512
509 def repo2db_mapper(initial_repo_list, remove_obsolete=False):
513 def repo2db_mapper(initial_repo_list, remove_obsolete=False):
510 """
514 """
511 maps all repos given in initial_repo_list, non existing repositories
515 maps all repos given in initial_repo_list, non existing repositories
512 are created, if remove_obsolete is True it also checks for db entries
516 are created, if remove_obsolete is True it also checks for db entries
513 that are not in initial_repo_list and removes them.
517 that are not in initial_repo_list and removes them.
514
518
515 :param initial_repo_list: list of repositories found by scanning methods
519 :param initial_repo_list: list of repositories found by scanning methods
516 :param remove_obsolete: check for obsolete entries in database
520 :param remove_obsolete: check for obsolete entries in database
517 """
521 """
518 from rhodecode.model.repo import RepoModel
522 from rhodecode.model.repo import RepoModel
519 from rhodecode.model.repo_group import RepoGroupModel
523 from rhodecode.model.repo_group import RepoGroupModel
520 from rhodecode.model.settings import SettingsModel
524 from rhodecode.model.settings import SettingsModel
521
525
522 sa = meta.Session()
526 sa = meta.Session()
523 repo_model = RepoModel()
527 repo_model = RepoModel()
524 user = User.get_first_super_admin()
528 user = User.get_first_super_admin()
525 added = []
529 added = []
526
530
527 # creation defaults
531 # creation defaults
528 defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
532 defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
529 enable_statistics = defs.get('repo_enable_statistics')
533 enable_statistics = defs.get('repo_enable_statistics')
530 enable_locking = defs.get('repo_enable_locking')
534 enable_locking = defs.get('repo_enable_locking')
531 enable_downloads = defs.get('repo_enable_downloads')
535 enable_downloads = defs.get('repo_enable_downloads')
532 private = defs.get('repo_private')
536 private = defs.get('repo_private')
533
537
534 for name, repo in initial_repo_list.items():
538 for name, repo in list(initial_repo_list.items()):
535 group = map_groups(name)
539 group = map_groups(name)
536 unicode_name = safe_unicode(name)
540 str_name = safe_str(name)
537 db_repo = repo_model.get_by_repo_name(unicode_name)
541 db_repo = repo_model.get_by_repo_name(str_name)
538 # found repo that is on filesystem not in RhodeCode database
542 # found repo that is on filesystem not in RhodeCode database
539 if not db_repo:
543 if not db_repo:
540 log.info('repository %s not found, creating now', name)
544 log.info('repository %s not found, creating now', name)
541 added.append(name)
545 added.append(name)
542 desc = (repo.description
546 desc = (repo.description
543 if repo.description != 'unknown'
547 if repo.description != 'unknown'
544 else '%s repository' % name)
548 else '%s repository' % name)
545
549
546 db_repo = repo_model._create_repo(
550 db_repo = repo_model._create_repo(
547 repo_name=name,
551 repo_name=name,
548 repo_type=repo.alias,
552 repo_type=repo.alias,
549 description=desc,
553 description=desc,
550 repo_group=getattr(group, 'group_id', None),
554 repo_group=getattr(group, 'group_id', None),
551 owner=user,
555 owner=user,
552 enable_locking=enable_locking,
556 enable_locking=enable_locking,
553 enable_downloads=enable_downloads,
557 enable_downloads=enable_downloads,
554 enable_statistics=enable_statistics,
558 enable_statistics=enable_statistics,
555 private=private,
559 private=private,
556 state=Repository.STATE_CREATED
560 state=Repository.STATE_CREATED
557 )
561 )
558 sa.commit()
562 sa.commit()
559 # we added that repo just now, and make sure we updated server info
563 # we added that repo just now, and make sure we updated server info
560 if db_repo.repo_type == 'git':
564 if db_repo.repo_type == 'git':
561 git_repo = db_repo.scm_instance()
565 git_repo = db_repo.scm_instance()
562 # update repository server-info
566 # update repository server-info
563 log.debug('Running update server info')
567 log.debug('Running update server info')
564 git_repo._update_server_info()
568 git_repo._update_server_info()
565
569
566 db_repo.update_commit_cache()
570 db_repo.update_commit_cache()
567
571
568 config = db_repo._config
572 config = db_repo._config
569 config.set('extensions', 'largefiles', '')
573 config.set('extensions', 'largefiles', '')
570 repo = db_repo.scm_instance(config=config)
574 repo = db_repo.scm_instance(config=config)
571 repo.install_hooks()
575 repo.install_hooks()
572
576
573 removed = []
577 removed = []
574 if remove_obsolete:
578 if remove_obsolete:
575 # remove from database those repositories that are not in the filesystem
579 # remove from database those repositories that are not in the filesystem
576 for repo in sa.query(Repository).all():
580 for repo in sa.query(Repository).all():
577 if repo.repo_name not in initial_repo_list.keys():
581 if repo.repo_name not in list(initial_repo_list.keys()):
578 log.debug("Removing non-existing repository found in db `%s`",
582 log.debug("Removing non-existing repository found in db `%s`",
579 repo.repo_name)
583 repo.repo_name)
580 try:
584 try:
581 RepoModel(sa).delete(repo, forks='detach', fs_remove=False)
585 RepoModel(sa).delete(repo, forks='detach', fs_remove=False)
582 sa.commit()
586 sa.commit()
583 removed.append(repo.repo_name)
587 removed.append(repo.repo_name)
584 except Exception:
588 except Exception:
585 # don't hold further removals on error
589 # don't hold further removals on error
586 log.error(traceback.format_exc())
590 log.error(traceback.format_exc())
587 sa.rollback()
591 sa.rollback()
588
592
589 def splitter(full_repo_name):
593 def splitter(full_repo_name):
590 _parts = full_repo_name.rsplit(RepoGroup.url_sep(), 1)
594 _parts = full_repo_name.rsplit(RepoGroup.url_sep(), 1)
591 gr_name = None
595 gr_name = None
592 if len(_parts) == 2:
596 if len(_parts) == 2:
593 gr_name = _parts[0]
597 gr_name = _parts[0]
594 return gr_name
598 return gr_name
595
599
596 initial_repo_group_list = [splitter(x) for x in
600 initial_repo_group_list = [splitter(x) for x in
597 initial_repo_list.keys() if splitter(x)]
601 list(initial_repo_list.keys()) if splitter(x)]
598
602
599 # remove from database those repository groups that are not in the
603 # remove from database those repository groups that are not in the
600 # filesystem due to parent child relationships we need to delete them
604 # filesystem due to parent child relationships we need to delete them
601 # in a specific order of most nested first
605 # in a specific order of most nested first
602 all_groups = [x.group_name for x in sa.query(RepoGroup).all()]
606 all_groups = [x.group_name for x in sa.query(RepoGroup).all()]
603 nested_sort = lambda gr: len(gr.split('/'))
607 def nested_sort(gr):
608 return len(gr.split('/'))
604 for group_name in sorted(all_groups, key=nested_sort, reverse=True):
609 for group_name in sorted(all_groups, key=nested_sort, reverse=True):
605 if group_name not in initial_repo_group_list:
610 if group_name not in initial_repo_group_list:
606 repo_group = RepoGroup.get_by_group_name(group_name)
611 repo_group = RepoGroup.get_by_group_name(group_name)
607 if (repo_group.children.all() or
612 if (repo_group.children.all() or
608 not RepoGroupModel().check_exist_filesystem(
613 not RepoGroupModel().check_exist_filesystem(
609 group_name=group_name, exc_on_failure=False)):
614 group_name=group_name, exc_on_failure=False)):
610 continue
615 continue
611
616
612 log.info(
617 log.info(
613 'Removing non-existing repository group found in db `%s`',
618 'Removing non-existing repository group found in db `%s`',
614 group_name)
619 group_name)
615 try:
620 try:
616 RepoGroupModel(sa).delete(group_name, fs_remove=False)
621 RepoGroupModel(sa).delete(group_name, fs_remove=False)
617 sa.commit()
622 sa.commit()
618 removed.append(group_name)
623 removed.append(group_name)
619 except Exception:
624 except Exception:
620 # don't hold further removals on error
625 # don't hold further removals on error
621 log.exception(
626 log.exception(
622 'Unable to remove repository group `%s`',
627 'Unable to remove repository group `%s`',
623 group_name)
628 group_name)
624 sa.rollback()
629 sa.rollback()
625 raise
630 raise
626
631
627 return added, removed
632 return added, removed
628
633
629
634
630 def load_rcextensions(root_path):
635 def load_rcextensions(root_path):
631 import rhodecode
636 import rhodecode
632 from rhodecode.config import conf
637 from rhodecode.config import conf
633
638
634 path = os.path.join(root_path)
639 path = os.path.join(root_path)
635 sys.path.append(path)
640 sys.path.append(path)
636
641
637 try:
642 try:
638 rcextensions = __import__('rcextensions')
643 rcextensions = __import__('rcextensions')
639 except ImportError:
644 except ImportError:
640 if os.path.isdir(os.path.join(path, 'rcextensions')):
645 if os.path.isdir(os.path.join(path, 'rcextensions')):
641 log.warn('Unable to load rcextensions from %s', path)
646 log.warning('Unable to load rcextensions from %s', path)
642 rcextensions = None
647 rcextensions = None
643
648
644 if rcextensions:
649 if rcextensions:
645 log.info('Loaded rcextensions from %s...', rcextensions)
650 log.info('Loaded rcextensions from %s...', rcextensions)
646 rhodecode.EXTENSIONS = rcextensions
651 rhodecode.EXTENSIONS = rcextensions
647
652
648 # Additional mappings that are not present in the pygments lexers
653 # Additional mappings that are not present in the pygments lexers
649 conf.LANGUAGES_EXTENSIONS_MAP.update(
654 conf.LANGUAGES_EXTENSIONS_MAP.update(
650 getattr(rhodecode.EXTENSIONS, 'EXTRA_MAPPINGS', {}))
655 getattr(rhodecode.EXTENSIONS, 'EXTRA_MAPPINGS', {}))
651
656
652
657
653 def get_custom_lexer(extension):
658 def get_custom_lexer(extension):
654 """
659 """
655 returns a custom lexer if it is defined in rcextensions module, or None
660 returns a custom lexer if it is defined in rcextensions module, or None
656 if there's no custom lexer defined
661 if there's no custom lexer defined
657 """
662 """
658 import rhodecode
663 import rhodecode
659 from pygments import lexers
664 from pygments import lexers
660
665
661 # custom override made by RhodeCode
666 # custom override made by RhodeCode
662 if extension in ['mako']:
667 if extension in ['mako']:
663 return lexers.get_lexer_by_name('html+mako')
668 return lexers.get_lexer_by_name('html+mako')
664
669
665 # check if we didn't define this extension as other lexer
670 # check if we didn't define this extension as other lexer
666 extensions = rhodecode.EXTENSIONS and getattr(rhodecode.EXTENSIONS, 'EXTRA_LEXERS', None)
671 extensions = rhodecode.EXTENSIONS and getattr(rhodecode.EXTENSIONS, 'EXTRA_LEXERS', None)
667 if extensions and extension in rhodecode.EXTENSIONS.EXTRA_LEXERS:
672 if extensions and extension in rhodecode.EXTENSIONS.EXTRA_LEXERS:
668 _lexer_name = rhodecode.EXTENSIONS.EXTRA_LEXERS[extension]
673 _lexer_name = rhodecode.EXTENSIONS.EXTRA_LEXERS[extension]
669 return lexers.get_lexer_by_name(_lexer_name)
674 return lexers.get_lexer_by_name(_lexer_name)
670
675
671
676
672 #==============================================================================
677 #==============================================================================
673 # TEST FUNCTIONS AND CREATORS
678 # TEST FUNCTIONS AND CREATORS
674 #==============================================================================
679 #==============================================================================
675 def create_test_index(repo_location, config):
680 def create_test_index(repo_location, config):
676 """
681 """
677 Makes default test index.
682 Makes default test index.
678 """
683 """
679 import rc_testdata
684 try:
680
685 import rc_testdata
686 except ImportError:
687 raise ImportError('Failed to import rc_testdata, '
688 'please make sure this package is installed from requirements_test.txt')
681 rc_testdata.extract_search_index(
689 rc_testdata.extract_search_index(
682 'vcs_search_index', os.path.dirname(config['search.location']))
690 'vcs_search_index', os.path.dirname(config['search.location']))
683
691
684
692
685 def create_test_directory(test_path):
693 def create_test_directory(test_path):
686 """
694 """
687 Create test directory if it doesn't exist.
695 Create test directory if it doesn't exist.
688 """
696 """
689 if not os.path.isdir(test_path):
697 if not os.path.isdir(test_path):
690 log.debug('Creating testdir %s', test_path)
698 log.debug('Creating testdir %s', test_path)
691 os.makedirs(test_path)
699 os.makedirs(test_path)
692
700
693
701
694 def create_test_database(test_path, config):
702 def create_test_database(test_path, config):
695 """
703 """
696 Makes a fresh database.
704 Makes a fresh database.
697 """
705 """
698 from rhodecode.lib.db_manage import DbManage
706 from rhodecode.lib.db_manage import DbManage
707 from rhodecode.lib.utils2 import get_encryption_key
699
708
700 # PART ONE create db
709 # PART ONE create db
701 dbconf = config['sqlalchemy.db1.url']
710 dbconf = config['sqlalchemy.db1.url']
711 enc_key = get_encryption_key(config)
712
702 log.debug('making test db %s', dbconf)
713 log.debug('making test db %s', dbconf)
703
714
704 dbmanage = DbManage(log_sql=False, dbconf=dbconf, root=config['here'],
715 dbmanage = DbManage(log_sql=False, dbconf=dbconf, root=config['here'],
705 tests=True, cli_args={'force_ask': True})
716 tests=True, cli_args={'force_ask': True}, enc_key=enc_key)
706 dbmanage.create_tables(override=True)
717 dbmanage.create_tables(override=True)
707 dbmanage.set_db_version()
718 dbmanage.set_db_version()
708 # for tests dynamically set new root paths based on generated content
719 # for tests dynamically set new root paths based on generated content
709 dbmanage.create_settings(dbmanage.config_prompt(test_path))
720 dbmanage.create_settings(dbmanage.config_prompt(test_path))
710 dbmanage.create_default_user()
721 dbmanage.create_default_user()
711 dbmanage.create_test_admin_and_users()
722 dbmanage.create_test_admin_and_users()
712 dbmanage.create_permissions()
723 dbmanage.create_permissions()
713 dbmanage.populate_default_permissions()
724 dbmanage.populate_default_permissions()
714 Session().commit()
725 Session().commit()
715
726
716
727
717 def create_test_repositories(test_path, config):
728 def create_test_repositories(test_path, config):
718 """
729 """
719 Creates test repositories in the temporary directory. Repositories are
730 Creates test repositories in the temporary directory. Repositories are
720 extracted from archives within the rc_testdata package.
731 extracted from archives within the rc_testdata package.
721 """
732 """
722 import rc_testdata
733 import rc_testdata
723 from rhodecode.tests import HG_REPO, GIT_REPO, SVN_REPO
734 from rhodecode.tests import HG_REPO, GIT_REPO, SVN_REPO
724
735
725 log.debug('making test vcs repositories')
736 log.debug('making test vcs repositories')
726
737
727 idx_path = config['search.location']
738 idx_path = config['search.location']
728 data_path = config['cache_dir']
739 data_path = config['cache_dir']
729
740
730 # clean index and data
741 # clean index and data
731 if idx_path and os.path.exists(idx_path):
742 if idx_path and os.path.exists(idx_path):
732 log.debug('remove %s', idx_path)
743 log.debug('remove %s', idx_path)
733 shutil.rmtree(idx_path)
744 shutil.rmtree(idx_path)
734
745
735 if data_path and os.path.exists(data_path):
746 if data_path and os.path.exists(data_path):
736 log.debug('remove %s', data_path)
747 log.debug('remove %s', data_path)
737 shutil.rmtree(data_path)
748 shutil.rmtree(data_path)
738
749
739 rc_testdata.extract_hg_dump('vcs_test_hg', jn(test_path, HG_REPO))
750 rc_testdata.extract_hg_dump('vcs_test_hg', jn(test_path, HG_REPO))
740 rc_testdata.extract_git_dump('vcs_test_git', jn(test_path, GIT_REPO))
751 rc_testdata.extract_git_dump('vcs_test_git', jn(test_path, GIT_REPO))
741
752
742 # Note: Subversion is in the process of being integrated with the system,
753 # Note: Subversion is in the process of being integrated with the system,
743 # until we have a properly packed version of the test svn repository, this
754 # until we have a properly packed version of the test svn repository, this
744 # tries to copy over the repo from a package "rc_testdata"
755 # tries to copy over the repo from a package "rc_testdata"
745 svn_repo_path = rc_testdata.get_svn_repo_archive()
756 svn_repo_path = rc_testdata.get_svn_repo_archive()
746 with tarfile.open(svn_repo_path) as tar:
757 with tarfile.open(svn_repo_path) as tar:
747 tar.extractall(jn(test_path, SVN_REPO))
758 tar.extractall(jn(test_path, SVN_REPO))
748
759
749
760
750 def password_changed(auth_user, session):
761 def password_changed(auth_user, session):
751 # Never report password change in case of default user or anonymous user.
762 # Never report password change in case of default user or anonymous user.
752 if auth_user.username == User.DEFAULT_USER or auth_user.user_id is None:
763 if auth_user.username == User.DEFAULT_USER or auth_user.user_id is None:
753 return False
764 return False
754
765
755 password_hash = md5(auth_user.password) if auth_user.password else None
766 password_hash = md5(safe_bytes(auth_user.password)) if auth_user.password else None
756 rhodecode_user = session.get('rhodecode_user', {})
767 rhodecode_user = session.get('rhodecode_user', {})
757 session_password_hash = rhodecode_user.get('password', '')
768 session_password_hash = rhodecode_user.get('password', '')
758 return password_hash != session_password_hash
769 return password_hash != session_password_hash
759
770
760
771
761 def read_opensource_licenses():
772 def read_opensource_licenses():
762 global _license_cache
773 global _license_cache
763
774
764 if not _license_cache:
775 if not _license_cache:
765 licenses = pkg_resources.resource_string(
776 licenses = pkg_resources.resource_string(
766 'rhodecode', 'config/licenses.json')
777 'rhodecode', 'config/licenses.json')
767 _license_cache = json.loads(licenses)
778 _license_cache = json.loads(licenses)
768
779
769 return _license_cache
780 return _license_cache
770
781
771
782
772 def generate_platform_uuid():
783 def generate_platform_uuid():
773 """
784 """
774 Generates platform UUID based on it's name
785 Generates platform UUID based on it's name
775 """
786 """
776 import platform
787 import platform
777
788
778 try:
789 try:
779 uuid_list = [platform.platform()]
790 uuid_list = [platform.platform()]
780 return hashlib.sha256(':'.join(uuid_list)).hexdigest()
791 return sha256_safe(':'.join(uuid_list))
781 except Exception as e:
792 except Exception as e:
782 log.error('Failed to generate host uuid: %s', e)
793 log.error('Failed to generate host uuid: %s', e)
783 return 'UNDEFINED'
794 return 'UNDEFINED'
784
795
785
796
786 def send_test_email(recipients, email_body='TEST EMAIL'):
797 def send_test_email(recipients, email_body='TEST EMAIL'):
787 """
798 """
788 Simple code for generating test emails.
799 Simple code for generating test emails.
789 Usage::
800 Usage::
790
801
791 from rhodecode.lib import utils
802 from rhodecode.lib import utils
792 utils.send_test_email()
803 utils.send_test_email()
793 """
804 """
794 from rhodecode.lib.celerylib import tasks, run_task
805 from rhodecode.lib.celerylib import tasks, run_task
795
806
796 email_body = email_body_plaintext = email_body
807 email_body = email_body_plaintext = email_body
797 subject = 'SUBJECT FROM: {}'.format(socket.gethostname())
808 subject = 'SUBJECT FROM: {}'.format(socket.gethostname())
798 tasks.send_email(recipients, subject, email_body_plaintext, email_body)
809 tasks.send_email(recipients, subject, email_body_plaintext, email_body)
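
Another pattern repeated across this hunk and the one below is wrapping dictionary iteration in `list(...)` (`set_rhodecode_config()`, `repo2db_mapper()`, and `__get_lem()` in the second file). On Python 3, `dict.items()`/`dict.keys()` return live views rather than lists, so materialising them only matters when the dict can change during the loop; the port wraps them defensively. A small sketch of the failure mode this guards against (the `settings` dict and keys are illustrative only):

```python
settings = {'push_ssl': 'false', 'realm': 'rhodecode'}

# Python 3: .items() is a view; resizing the dict mid-iteration raises
# RuntimeError ("dictionary changed size during iteration").
try:
    for key, value in settings.items():
        if key == 'push_ssl':
            settings['push_ssl_forced'] = False  # mutation during iteration
except RuntimeError as exc:
    print('view iteration failed:', exc)

# Copying the items first, as the ported loops do, keeps iteration stable
# even if entries are added along the way.
for key, value in list(settings.items()):
    if key == 'realm':
        settings.setdefault('realm_checked', True)
print(settings)
```
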
@@ -1,1047 +1,989 b''
1
1
2
2
3 # Copyright (C) 2011-2020 RhodeCode GmbH
3 # Copyright (C) 2011-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 """
22 """
23 Some simple helper functions
23 Some simple helper functions
24 """
24 """
25
25
26 import collections
26 import collections
27 import datetime
27 import datetime
28 import dateutil.relativedelta
28 import dateutil.relativedelta
29 import logging
29 import logging
30 import re
30 import re
31 import sys
31 import sys
32 import time
32 import time
33 import urllib.request
33 import urllib.request
34 import urllib.parse
34 import urllib.parse
35 import urllib.error
35 import urllib.error
36 import urlobject
36 import urlobject
37 import uuid
37 import uuid
38 import getpass
38 import getpass
39 import socket
39 import socket
40 import errno
40 import errno
41 import random
41 import random
42 from functools import update_wrapper, partial, wraps
42 import functools
43 from contextlib import closing
43 from contextlib import closing
44
44
45 import pygments.lexers
45 import pygments.lexers
46 import sqlalchemy
46 import sqlalchemy
47 import sqlalchemy.event
47 import sqlalchemy.engine.url
48 import sqlalchemy.engine.url
48 import sqlalchemy.exc
49 import sqlalchemy.exc
49 import sqlalchemy.sql
50 import sqlalchemy.sql
50 import webob
51 import webob
51 import pyramid.threadlocal
52 from pyramid.settings import asbool
52 from pyramid.settings import asbool
53
53
54 import rhodecode
54 import rhodecode
55 from rhodecode.translation import _, _pluralize
55 from rhodecode.translation import _, _pluralize
56 from rhodecode.lib.str_utils import safe_str, safe_int, safe_bytes
56 from rhodecode.lib.str_utils import safe_str, safe_int, safe_bytes
57 from rhodecode.lib.hash_utils import md5, md5_safe, sha1, sha1_safe
57 from rhodecode.lib.hash_utils import md5, md5_safe, sha1, sha1_safe
58 from rhodecode.lib.type_utils import aslist, str2bool
58 from rhodecode.lib.type_utils import aslist, str2bool, StrictAttributeDict, AttributeDict
59 from functools import reduce
60
61 #TODO: there's no longer safe_unicode, we mock it now, but should remove it
62 safe_unicode = safe_str
63
59
64
60
65 def __get_lem(extra_mapping=None):
61 def __get_lem(extra_mapping=None):
66 """
62 """
67 Get language extension map based on what's inside pygments lexers
63 Get language extension map based on what's inside pygments lexers
68 """
64 """
69 d = collections.defaultdict(lambda: [])
65 d = collections.defaultdict(lambda: [])
70
66
71 def __clean(s):
67 def __clean(s):
72 s = s.lstrip('*')
68 s = s.lstrip('*')
73 s = s.lstrip('.')
69 s = s.lstrip('.')
74
70
75 if s.find('[') != -1:
71 if s.find('[') != -1:
76 exts = []
72 exts = []
77 start, stop = s.find('['), s.find(']')
73 start, stop = s.find('['), s.find(']')
78
74
79 for suffix in s[start + 1:stop]:
75 for suffix in s[start + 1:stop]:
80 exts.append(s[:s.find('[')] + suffix)
76 exts.append(s[:s.find('[')] + suffix)
81 return [e.lower() for e in exts]
77 return [e.lower() for e in exts]
82 else:
78 else:
83 return [s.lower()]
79 return [s.lower()]
84
80
85 for lx, t in sorted(pygments.lexers.LEXERS.items()):
81 for lx, t in sorted(pygments.lexers.LEXERS.items()):
86 m = list(map(__clean, t[-2]))
82 m = list(map(__clean, t[-2]))
87 if m:
83 if m:
88 m = reduce(lambda x, y: x + y, m)
84 m = functools.reduce(lambda x, y: x + y, m)
89 for ext in m:
85 for ext in m:
90 desc = lx.replace('Lexer', '')
86 desc = lx.replace('Lexer', '')
91 d[ext].append(desc)
87 d[ext].append(desc)
92
88
93 data = dict(d)
89 data = dict(d)
94
90
95 extra_mapping = extra_mapping or {}
91 extra_mapping = extra_mapping or {}
96 if extra_mapping:
92 if extra_mapping:
97 for k, v in extra_mapping.items():
93 for k, v in list(extra_mapping.items()):
98 if k not in data:
94 if k not in data:
99 # register new mapping2lexer
95 # register new mapping2lexer
100 data[k] = [v]
96 data[k] = [v]
101
97
102 return data
98 return data
103
99
104
100
105 def convert_line_endings(line, mode):
101 def convert_line_endings(line: str, mode) -> str:
106 """
102 """
107 Converts a given line's "line end" according to the given mode
103 Converts a given line's "line end" according to the given mode
108
104
109 Available modes are::
105 Available modes are::
110 0 - Unix
106 0 - Unix
111 1 - Mac
107 1 - Mac
112 2 - DOS
108 2 - DOS
113
109
114 :param line: given line to convert
110 :param line: given line to convert
115 :param mode: mode to convert to
111 :param mode: mode to convert to
116 :rtype: str
117 :return: converted line according to mode
112 :return: converted line according to mode
118 """
113 """
119 if mode == 0:
114 if mode == 0:
120 line = line.replace('\r\n', '\n')
115 line = line.replace('\r\n', '\n')
121 line = line.replace('\r', '\n')
116 line = line.replace('\r', '\n')
122 elif mode == 1:
117 elif mode == 1:
123 line = line.replace('\r\n', '\r')
118 line = line.replace('\r\n', '\r')
124 line = line.replace('\n', '\r')
119 line = line.replace('\n', '\r')
125 elif mode == 2:
120 elif mode == 2:
126 line = re.sub('\r(?!\n)|(?<!\r)\n', '\r\n', line)
121 line = re.sub('\r(?!\n)|(?<!\r)\n', '\r\n', line)
127 return line
122 return line
128
123
129
124
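A minimal usage sketch for convert_line_endings (assuming the helper is imported from rhodecode.lib.utils2, as the docstrings elsewhere in this module suggest); the mode values follow the docstring above:

    # illustrative example only, not part of the diff above
    from rhodecode.lib.utils2 import convert_line_endings

    mixed = 'first\r\nsecond\rthird\n'
    assert convert_line_endings(mixed, 0) == 'first\nsecond\nthird\n'          # 0 - Unix
    assert convert_line_endings(mixed, 1) == 'first\rsecond\rthird\r'          # 1 - Mac
    assert convert_line_endings(mixed, 2) == 'first\r\nsecond\r\nthird\r\n'    # 2 - DOS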
130 def detect_mode(line, default):
125 def detect_mode(line: str, default) -> int:
131 """
126 """
132 Detects the line break for a given line; if the line break couldn't be
127 Detects the line break for a given line; if the line break couldn't be
133 found, the given default value is returned
128 found, the given default value is returned
134
129
135 :param line: str line
130 :param line: str line
136 :param default: default
131 :param default: default
137 :rtype: int
138 :return: value of line end, one of 0 - Unix, 1 - Mac, 2 - DOS
132 :return: value of line end, one of 0 - Unix, 1 - Mac, 2 - DOS
139 """
133 """
140 if line.endswith('\r\n'):
134 if line.endswith('\r\n'):
141 return 2
135 return 2
142 elif line.endswith('\n'):
136 elif line.endswith('\n'):
143 return 0
137 return 0
144 elif line.endswith('\r'):
138 elif line.endswith('\r'):
145 return 1
139 return 1
146 else:
140 else:
147 return default
141 return default
148
142
149
143
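A matching sketch for detect_mode, showing the 0/1/2 return codes and the default fallback (again a hypothetical snippet, not part of the change):

    # illustrative example only
    from rhodecode.lib.utils2 import detect_mode

    assert detect_mode('some text\r\n', 0) == 2   # DOS
    assert detect_mode('some text\n', 2) == 0     # Unix
    assert detect_mode('some text\r', 0) == 1     # Mac
    assert detect_mode('no line break', 0) == 0   # falls back to the given default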
150 def remove_suffix(s, suffix):
144 def remove_suffix(s, suffix):
151 if s.endswith(suffix):
145 if s.endswith(suffix):
152 s = s[:-1 * len(suffix)]
146 s = s[:-1 * len(suffix)]
153 return s
147 return s
154
148
155
149
156 def remove_prefix(s, prefix):
150 def remove_prefix(s, prefix):
157 if s.startswith(prefix):
151 if s.startswith(prefix):
158 s = s[len(prefix):]
152 s = s[len(prefix):]
159 return s
153 return s
160
154
161
155
162 def find_calling_context(ignore_modules=None):
156 def find_calling_context(ignore_modules=None, depth=4, output_writer=None, indent=True):
163 """
157 """
164 Look through the calling stack and return the frame which called
158 Look through the calling stack and return the frame which called
165 this function and is part of core module ( ie. rhodecode.* )
159 this function and is part of core module ( ie. rhodecode.* )
166
160
167 :param ignore_modules: list of modules to ignore eg. ['rhodecode.lib']
161 :param ignore_modules: list of modules to ignore eg. ['rhodecode.lib']
162 :param depth: number of caller frames to inspect
163 :param output_writer: callable used to emit the collected call context (defaults to rich print, or print)
164 :param indent: indent each reported frame according to its position in the stack
168
165
169 usage::
166 usage::
167
170 from rhodecode.lib.utils2 import find_calling_context
168 from rhodecode.lib.utils2 import find_calling_context
171
169
172 calling_context = find_calling_context(ignore_modules=[
170 calling_context = find_calling_context(ignore_modules=[
173 'rhodecode.lib.caching_query',
171 'rhodecode.lib.caching_query',
174 'rhodecode.model.settings',
172 'rhodecode.model.settings',
175 ])
173 ])
176
174
177 if calling_context:
178 cc_str = 'call context %s:%s' % (
179 calling_context.f_code.co_filename,
180 calling_context.f_lineno,
181 )
182 print(cc_str)
183 """
175 """
176 import inspect
177 if not output_writer:
178 try:
179 from rich import print as pprint
180 except ImportError:
181 pprint = print
182 output_writer = pprint
184
183
185 ignore_modules = ignore_modules or []
184 frame = inspect.currentframe()
185 cc = []
186 try:
187 for i in range(depth): # current frame + 3 callers
188 frame = frame.f_back
189 if not frame:
190 break
186
191
187 f = sys._getframe(2)
192 info = inspect.getframeinfo(frame)
188 while f.f_back is not None:
193 name = frame.f_globals.get('__name__')
189 name = f.f_globals.get('__name__')
190 if name and name.startswith(__name__.split('.')[0]):
191 if name not in ignore_modules:
194 if name not in ignore_modules:
192 return f
195 cc.insert(0, f'CALL_CONTEXT:{i}: file {info.filename}:{info.lineno} -> {info.function}')
193 f = f.f_back
196 finally:
194 return None
197 # Avoids a reference cycle
198 del frame
199
200 output_writer('* INFO: This code was called from: *')
201 for cnt, frm_info in enumerate(cc):
202 if not indent:
203 cnt = 1
204 output_writer(' ' * cnt + frm_info)
195
205
196
206
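The ported find_calling_context no longer returns a frame object; it walks up to `depth` caller frames and emits them through `output_writer` (rich.print when available, plain print otherwise). A hedged sketch of the new call style (logger name is just an example):

    # illustrative example only
    import logging
    from rhodecode.lib.utils2 import find_calling_context

    log = logging.getLogger(__name__)

    # emits lines such as "CALL_CONTEXT:0: file /path/to/app.py:10 -> main"
    find_calling_context(
        ignore_modules=['rhodecode.lib.caching_query'],
        depth=4,
        output_writer=log.debug,
    )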
197 def ping_connection(connection, branch):
207 def ping_connection(connection, branch):
198 if branch:
208 if branch:
199 # "branch" refers to a sub-connection of a connection,
209 # "branch" refers to a sub-connection of a connection,
200 # we don't want to bother pinging on these.
210 # we don't want to bother pinging on these.
201 return
211 return
202
212
203 # turn off "close with result". This flag is only used with
213 # turn off "close with result". This flag is only used with
204 # "connectionless" execution, otherwise will be False in any case
214 # "connectionless" execution, otherwise will be False in any case
205 save_should_close_with_result = connection.should_close_with_result
215 save_should_close_with_result = connection.should_close_with_result
206 connection.should_close_with_result = False
216 connection.should_close_with_result = False
207
217
208 try:
218 try:
209 # run a SELECT 1. use a core select() so that
219 # run a SELECT 1. use a core select() so that
210 # the SELECT of a scalar value without a table is
220 # the SELECT of a scalar value without a table is
211 # appropriately formatted for the backend
221 # appropriately formatted for the backend
212 connection.scalar(sqlalchemy.sql.select([1]))
222 connection.scalar(sqlalchemy.sql.select([1]))
213 except sqlalchemy.exc.DBAPIError as err:
223 except sqlalchemy.exc.DBAPIError as err:
214 # catch SQLAlchemy's DBAPIError, which is a wrapper
224 # catch SQLAlchemy's DBAPIError, which is a wrapper
215 # for the DBAPI's exception. It includes a .connection_invalidated
225 # for the DBAPI's exception. It includes a .connection_invalidated
216 # attribute which specifies if this connection is a "disconnect"
226 # attribute which specifies if this connection is a "disconnect"
217 # condition, which is based on inspection of the original exception
227 # condition, which is based on inspection of the original exception
218 # by the dialect in use.
228 # by the dialect in use.
219 if err.connection_invalidated:
229 if err.connection_invalidated:
220 # run the same SELECT again - the connection will re-validate
230 # run the same SELECT again - the connection will re-validate
221 # itself and establish a new connection. The disconnect detection
231 # itself and establish a new connection. The disconnect detection
222 # here also causes the whole connection pool to be invalidated
232 # here also causes the whole connection pool to be invalidated
223 # so that all stale connections are discarded.
233 # so that all stale connections are discarded.
224 connection.scalar(sqlalchemy.sql.select([1]))
234 connection.scalar(sqlalchemy.sql.select([1]))
225 else:
235 else:
226 raise
236 raise
227 finally:
237 finally:
228 # restore "close with result"
238 # restore "close with result"
229 connection.should_close_with_result = save_should_close_with_result
239 connection.should_close_with_result = save_should_close_with_result
230
240
231
241
232 def engine_from_config(configuration, prefix='sqlalchemy.', **kwargs):
242 def engine_from_config(configuration, prefix='sqlalchemy.', **kwargs):
233 """Custom engine_from_config functions."""
243 """Custom engine_from_config functions."""
234 log = logging.getLogger('sqlalchemy.engine')
244 log = logging.getLogger('sqlalchemy.engine')
235 use_ping_connection = asbool(configuration.pop('sqlalchemy.db1.ping_connection', None))
245 use_ping_connection = asbool(configuration.pop('sqlalchemy.db1.ping_connection', None))
236 debug = asbool(configuration.pop('sqlalchemy.db1.debug_query', None))
246 debug = asbool(configuration.pop('sqlalchemy.db1.debug_query', None))
237
247
238 engine = sqlalchemy.engine_from_config(configuration, prefix, **kwargs)
248 engine = sqlalchemy.engine_from_config(configuration, prefix, **kwargs)
239
249
240 def color_sql(sql):
250 def color_sql(sql):
241 color_seq = '\033[1;33m' # This is yellow: code 33
251 color_seq = '\033[1;33m' # This is yellow: code 33
242 normal = '\x1b[0m'
252 normal = '\x1b[0m'
243 return ''.join([color_seq, sql, normal])
253 return ''.join([color_seq, sql, normal])
244
254
245 if use_ping_connection:
255 if use_ping_connection:
246 log.debug('Adding ping_connection on the engine config.')
256 log.debug('Adding ping_connection on the engine config.')
247 sqlalchemy.event.listen(engine, "engine_connect", ping_connection)
257 sqlalchemy.event.listen(engine, "engine_connect", ping_connection)
248
258
249 if debug:
259 if debug:
250 # attach events only for debug configuration
260 # attach events only for debug configuration
251 def before_cursor_execute(conn, cursor, statement,
261 def before_cursor_execute(conn, cursor, statement,
252 parameters, context, executemany):
262 parameters, context, executemany):
253 setattr(conn, 'query_start_time', time.time())
263 setattr(conn, 'query_start_time', time.time())
254 log.info(color_sql(">>>>> STARTING QUERY >>>>>"))
264 log.info(color_sql(">>>>> STARTING QUERY >>>>>"))
255 calling_context = find_calling_context(ignore_modules=[
265 find_calling_context(ignore_modules=[
256 'rhodecode.lib.caching_query',
266 'rhodecode.lib.caching_query',
257 'rhodecode.model.settings',
267 'rhodecode.model.settings',
258 ])
268 ], output_writer=log.info)
259 if calling_context:
260 log.info(color_sql('call context %s:%s' % (
261 calling_context.f_code.co_filename,
262 calling_context.f_lineno,
263 )))
264
269
265 def after_cursor_execute(conn, cursor, statement,
270 def after_cursor_execute(conn, cursor, statement,
266 parameters, context, executemany):
271 parameters, context, executemany):
267 delattr(conn, 'query_start_time')
272 delattr(conn, 'query_start_time')
268
273
269 sqlalchemy.event.listen(engine, "before_cursor_execute", before_cursor_execute)
274 sqlalchemy.event.listen(engine, "before_cursor_execute", before_cursor_execute)
270 sqlalchemy.event.listen(engine, "after_cursor_execute", after_cursor_execute)
275 sqlalchemy.event.listen(engine, "after_cursor_execute", after_cursor_execute)
271
276
272 return engine
277 return engine
273
278
274
279
275 def get_encryption_key(config):
280 def get_encryption_key(config) -> bytes:
276 secret = config.get('rhodecode.encrypted_values.secret')
281 secret = config.get('rhodecode.encrypted_values.secret')
277 default = config['beaker.session.secret']
282 default = config['beaker.session.secret']
278 return secret or default
283 enc_key = secret or default
284
285 return safe_bytes(enc_key)
279
286
280
287
281 def age(prevdate, now=None, show_short_version=False, show_suffix=True, short_format=False):
288 def age(prevdate, now=None, show_short_version=False, show_suffix=True, short_format=False):
282 """
289 """
283 Turns a datetime into an age string.
290 Turns a datetime into an age string.
284 If show_short_version is True, this generates a shorter string with
291 If show_short_version is True, this generates a shorter string with
285 an approximate age; ex. '1 day ago', rather than '1 day and 23 hours ago'.
292 an approximate age; ex. '1 day ago', rather than '1 day and 23 hours ago'.
286
293
287 *IMPORTANT*
294 *IMPORTANT*
288 Code of this function is written in a special way so it's easier to
295 Code of this function is written in a special way so it's easier to
289 backport it to javascript. If you mean to update it, please also update
296 backport it to javascript. If you mean to update it, please also update
290 `jquery.timeago-extension.js` file
297 `jquery.timeago-extension.js` file
291
298
292 :param prevdate: datetime object
299 :param prevdate: datetime object
293 :param now: get current time, if not define we use
300 :param now: get current time, if not define we use
294 `datetime.datetime.now()`
301 `datetime.datetime.now()`
295 :param show_short_version: if it should approximate the date and
302 :param show_short_version: if it should approximate the date and
296 return a shorter string
303 return a shorter string
297 :param show_suffix:
304 :param show_suffix:
298 :param short_format: show short format, eg 2D instead of 2 days
305 :param short_format: show short format, eg 2D instead of 2 days
299 :rtype: unicode
306 :rtype: unicode
300 :returns: unicode words describing age
307 :returns: unicode words describing age
301 """
308 """
302
309
303 def _get_relative_delta(now, prevdate):
310 def _get_relative_delta(now, prevdate):
304 base = dateutil.relativedelta.relativedelta(now, prevdate)
311 base = dateutil.relativedelta.relativedelta(now, prevdate)
305 return {
312 return {
306 'year': base.years,
313 'year': base.years,
307 'month': base.months,
314 'month': base.months,
308 'day': base.days,
315 'day': base.days,
309 'hour': base.hours,
316 'hour': base.hours,
310 'minute': base.minutes,
317 'minute': base.minutes,
311 'second': base.seconds,
318 'second': base.seconds,
312 }
319 }
313
320
314 def _is_leap_year(year):
321 def _is_leap_year(year):
315 return year % 4 == 0 and (year % 100 != 0 or year % 400 == 0)
322 return year % 4 == 0 and (year % 100 != 0 or year % 400 == 0)
316
323
317 def get_month(prevdate):
324 def get_month(prevdate):
318 return prevdate.month
325 return prevdate.month
319
326
320 def get_year(prevdate):
327 def get_year(prevdate):
321 return prevdate.year
328 return prevdate.year
322
329
323 now = now or datetime.datetime.now()
330 now = now or datetime.datetime.now()
324 order = ['year', 'month', 'day', 'hour', 'minute', 'second']
331 order = ['year', 'month', 'day', 'hour', 'minute', 'second']
325 deltas = {}
332 deltas = {}
326 future = False
333 future = False
327
334
328 if prevdate > now:
335 if prevdate > now:
329 now_old = now
336 now_old = now
330 now = prevdate
337 now = prevdate
331 prevdate = now_old
338 prevdate = now_old
332 future = True
339 future = True
333 if future:
340 if future:
334 prevdate = prevdate.replace(microsecond=0)
341 prevdate = prevdate.replace(microsecond=0)
335 # Get date parts deltas
342 # Get date parts deltas
336 for part in order:
343 for part in order:
337 rel_delta = _get_relative_delta(now, prevdate)
344 rel_delta = _get_relative_delta(now, prevdate)
338 deltas[part] = rel_delta[part]
345 deltas[part] = rel_delta[part]
339
346
340 # Fix negative offsets (there is 1 second between 10:59:59 and 11:00:00,
347 # Fix negative offsets (there is 1 second between 10:59:59 and 11:00:00,
341 # not 1 hour, -59 minutes and -59 seconds)
348 # not 1 hour, -59 minutes and -59 seconds)
342 offsets = [[5, 60], [4, 60], [3, 24]]
349 offsets = [[5, 60], [4, 60], [3, 24]]
343 for element in offsets: # seconds, minutes, hours
350 for element in offsets: # seconds, minutes, hours
344 num = element[0]
351 num = element[0]
345 length = element[1]
352 length = element[1]
346
353
347 part = order[num]
354 part = order[num]
348 carry_part = order[num - 1]
355 carry_part = order[num - 1]
349
356
350 if deltas[part] < 0:
357 if deltas[part] < 0:
351 deltas[part] += length
358 deltas[part] += length
352 deltas[carry_part] -= 1
359 deltas[carry_part] -= 1
353
360
354 # Same thing for days except that the increment depends on the (variable)
361 # Same thing for days except that the increment depends on the (variable)
355 # number of days in the month
362 # number of days in the month
356 month_lengths = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
363 month_lengths = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
357 if deltas['day'] < 0:
364 if deltas['day'] < 0:
358 if get_month(prevdate) == 2 and _is_leap_year(get_year(prevdate)):
365 if get_month(prevdate) == 2 and _is_leap_year(get_year(prevdate)):
359 deltas['day'] += 29
366 deltas['day'] += 29
360 else:
367 else:
361 deltas['day'] += month_lengths[get_month(prevdate) - 1]
368 deltas['day'] += month_lengths[get_month(prevdate) - 1]
362
369
363 deltas['month'] -= 1
370 deltas['month'] -= 1
364
371
365 if deltas['month'] < 0:
372 if deltas['month'] < 0:
366 deltas['month'] += 12
373 deltas['month'] += 12
367 deltas['year'] -= 1
374 deltas['year'] -= 1
368
375
369 # Format the result
376 # Format the result
370 if short_format:
377 if short_format:
371 fmt_funcs = {
378 fmt_funcs = {
372 'year': lambda d: '%dy' % d,
379 'year': lambda d: '%dy' % d,
373 'month': lambda d: '%dm' % d,
380 'month': lambda d: '%dm' % d,
374 'day': lambda d: '%dd' % d,
381 'day': lambda d: '%dd' % d,
375 'hour': lambda d: '%dh' % d,
382 'hour': lambda d: '%dh' % d,
376 'minute': lambda d: '%dmin' % d,
383 'minute': lambda d: '%dmin' % d,
377 'second': lambda d: '%dsec' % d,
384 'second': lambda d: '%dsec' % d,
378 }
385 }
379 else:
386 else:
380 fmt_funcs = {
387 fmt_funcs = {
381 'year': lambda d: _pluralize('${num} year', '${num} years', d, mapping={'num': d}).interpolate(),
388 'year': lambda d: _pluralize('${num} year', '${num} years', d, mapping={'num': d}).interpolate(),
382 'month': lambda d: _pluralize('${num} month', '${num} months', d, mapping={'num': d}).interpolate(),
389 'month': lambda d: _pluralize('${num} month', '${num} months', d, mapping={'num': d}).interpolate(),
383 'day': lambda d: _pluralize('${num} day', '${num} days', d, mapping={'num': d}).interpolate(),
390 'day': lambda d: _pluralize('${num} day', '${num} days', d, mapping={'num': d}).interpolate(),
384 'hour': lambda d: _pluralize('${num} hour', '${num} hours', d, mapping={'num': d}).interpolate(),
391 'hour': lambda d: _pluralize('${num} hour', '${num} hours', d, mapping={'num': d}).interpolate(),
385 'minute': lambda d: _pluralize('${num} minute', '${num} minutes', d, mapping={'num': d}).interpolate(),
392 'minute': lambda d: _pluralize('${num} minute', '${num} minutes', d, mapping={'num': d}).interpolate(),
386 'second': lambda d: _pluralize('${num} second', '${num} seconds', d, mapping={'num': d}).interpolate(),
393 'second': lambda d: _pluralize('${num} second', '${num} seconds', d, mapping={'num': d}).interpolate(),
387 }
394 }
388
395
389 i = 0
396 i = 0
390 for part in order:
397 for part in order:
391 value = deltas[part]
398 value = deltas[part]
392 if value != 0:
399 if value != 0:
393
400
394 if i < 5:
401 if i < 5:
395 sub_part = order[i + 1]
402 sub_part = order[i + 1]
396 sub_value = deltas[sub_part]
403 sub_value = deltas[sub_part]
397 else:
404 else:
398 sub_value = 0
405 sub_value = 0
399
406
400 if sub_value == 0 or show_short_version:
407 if sub_value == 0 or show_short_version:
401 _val = fmt_funcs[part](value)
408 _val = fmt_funcs[part](value)
402 if future:
409 if future:
403 if show_suffix:
410 if show_suffix:
404 return _('in ${ago}', mapping={'ago': _val})
411 return _('in ${ago}', mapping={'ago': _val})
405 else:
412 else:
406 return _(_val)
413 return _(_val)
407
414
408 else:
415 else:
409 if show_suffix:
416 if show_suffix:
410 return _('${ago} ago', mapping={'ago': _val})
417 return _('${ago} ago', mapping={'ago': _val})
411 else:
418 else:
412 return _(_val)
419 return _(_val)
413
420
414 val = fmt_funcs[part](value)
421 val = fmt_funcs[part](value)
415 val_detail = fmt_funcs[sub_part](sub_value)
422 val_detail = fmt_funcs[sub_part](sub_value)
416 mapping = {'val': val, 'detail': val_detail}
423 mapping = {'val': val, 'detail': val_detail}
417
424
418 if short_format:
425 if short_format:
419 datetime_tmpl = _('${val}, ${detail}', mapping=mapping)
426 datetime_tmpl = _('${val}, ${detail}', mapping=mapping)
420 if show_suffix:
427 if show_suffix:
421 datetime_tmpl = _('${val}, ${detail} ago', mapping=mapping)
428 datetime_tmpl = _('${val}, ${detail} ago', mapping=mapping)
422 if future:
429 if future:
423 datetime_tmpl = _('in ${val}, ${detail}', mapping=mapping)
430 datetime_tmpl = _('in ${val}, ${detail}', mapping=mapping)
424 else:
431 else:
425 datetime_tmpl = _('${val} and ${detail}', mapping=mapping)
432 datetime_tmpl = _('${val} and ${detail}', mapping=mapping)
426 if show_suffix:
433 if show_suffix:
427 datetime_tmpl = _('${val} and ${detail} ago', mapping=mapping)
434 datetime_tmpl = _('${val} and ${detail} ago', mapping=mapping)
428 if future:
435 if future:
429 datetime_tmpl = _('in ${val} and ${detail}', mapping=mapping)
436 datetime_tmpl = _('in ${val} and ${detail}', mapping=mapping)
430
437
431 return datetime_tmpl
438 return datetime_tmpl
432 i += 1
439 i += 1
433 return _('just now')
440 return _('just now')
434
441
435
442
436 def age_from_seconds(seconds):
443 def age_from_seconds(seconds):
437 seconds = safe_int(seconds) or 0
444 seconds = safe_int(seconds) or 0
438 prevdate = time_to_datetime(time.time() + seconds)
445 prevdate = time_to_datetime(time.time() + seconds)
439 return age(prevdate, show_suffix=False, show_short_version=True)
446 return age(prevdate, show_suffix=False, show_short_version=True)
440
447
441
448
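A small sketch of how age() and age_from_seconds() are typically called; the return values are translation strings, so the exact wording depends on the active locale (the outputs below are approximate):

    # illustrative example only
    import datetime
    from rhodecode.lib.utils2 import age, age_from_seconds

    prevdate = datetime.datetime.now() - datetime.timedelta(hours=2, minutes=15)
    age(prevdate)                           # roughly: '2 hours and 15 minutes ago'
    age(prevdate, show_short_version=True)  # roughly: '2 hours ago'
    age(prevdate, short_format=True)        # roughly: '2h, 15min ago'
    age_from_seconds(90)                    # roughly: '1 minute'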
442 def cleaned_uri(uri):
449 def cleaned_uri(uri):
443 """
450 """
444 Quotes '[' and ']' from uri if there is only one of them.
451 Quotes '[' and ']' from uri if there is only one of them.
445 According to RFC3986 we cannot use such chars in a uri.
452 According to RFC3986 we cannot use such chars in a uri.
446 :param uri:
453 :param uri:
447 :return: uri without these chars
454 :return: uri without these chars
448 """
455 """
449 return urllib.parse.quote(uri, safe='@$:/')
456 return urllib.parse.quote(uri, safe='@$:/')
450
457
451
458
452 def credentials_filter(uri):
459 def credentials_filter(uri):
453 """
460 """
454 Returns a url with removed credentials
461 Returns a url with removed credentials
455
462
456 :param uri:
463 :param uri:
457 """
464 """
458 import urlobject
465 import urlobject
459 if isinstance(uri, rhodecode.lib.encrypt.InvalidDecryptedValue):
466 if isinstance(uri, rhodecode.lib.encrypt.InvalidDecryptedValue):
460 return 'InvalidDecryptionKey'
467 return 'InvalidDecryptionKey'
461
468
462 url_obj = urlobject.URLObject(cleaned_uri(uri))
469 url_obj = urlobject.URLObject(cleaned_uri(uri))
463 url_obj = url_obj.without_password().without_username()
470 url_obj = url_obj.without_password().without_username()
464
471
465 return url_obj
472 return url_obj
466
473
467
474
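credentials_filter is what keeps usernames and passwords out of logs and UI output; a minimal sketch with a made-up host:

    # illustrative example only
    from rhodecode.lib.utils2 import credentials_filter

    clean = credentials_filter('https://bob:s3cret@code.example.com/repos/project')
    # str(clean) -> 'https://code.example.com/repos/project'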
468 def get_host_info(request):
475 def get_host_info(request):
469 """
476 """
470 Generate host info, to obtain full url e.g https://server.com
477 Generate host info, to obtain full url e.g https://server.com
471 use this
478 use this
472 `{scheme}://{netloc}`
479 `{scheme}://{netloc}`
473 """
480 """
474 if not request:
481 if not request:
475 return {}
482 return {}
476
483
477 qualified_home_url = request.route_url('home')
484 qualified_home_url = request.route_url('home')
478 parsed_url = urlobject.URLObject(qualified_home_url)
485 parsed_url = urlobject.URLObject(qualified_home_url)
479 decoded_path = safe_unicode(urllib.parse.unquote(parsed_url.path.rstrip('/')))
486 decoded_path = safe_str(urllib.parse.unquote(parsed_url.path.rstrip('/')))
480
487
481 return {
488 return {
482 'scheme': parsed_url.scheme,
489 'scheme': parsed_url.scheme,
483 'netloc': parsed_url.netloc+decoded_path,
490 'netloc': parsed_url.netloc+decoded_path,
484 'hostname': parsed_url.hostname,
491 'hostname': parsed_url.hostname,
485 }
492 }
486
493
487
494
488 def get_clone_url(request, uri_tmpl, repo_name, repo_id, repo_type, **override):
495 def get_clone_url(request, uri_tmpl, repo_name, repo_id, repo_type, **override):
489 qualified_home_url = request.route_url('home')
496 qualified_home_url = request.route_url('home')
490 parsed_url = urlobject.URLObject(qualified_home_url)
497 parsed_url = urlobject.URLObject(qualified_home_url)
491 decoded_path = safe_unicode(urllib.parse.unquote(parsed_url.path.rstrip('/')))
498 decoded_path = safe_str(urllib.parse.unquote(parsed_url.path.rstrip('/')))
492
499
493 args = {
500 args = {
494 'scheme': parsed_url.scheme,
501 'scheme': parsed_url.scheme,
495 'user': '',
502 'user': '',
496 'sys_user': getpass.getuser(),
503 'sys_user': getpass.getuser(),
497 # path if we use proxy-prefix
504 # path if we use proxy-prefix
498 'netloc': parsed_url.netloc+decoded_path,
505 'netloc': parsed_url.netloc+decoded_path,
499 'hostname': parsed_url.hostname,
506 'hostname': parsed_url.hostname,
500 'prefix': decoded_path,
507 'prefix': decoded_path,
501 'repo': repo_name,
508 'repo': repo_name,
502 'repoid': str(repo_id),
509 'repoid': str(repo_id),
503 'repo_type': repo_type
510 'repo_type': repo_type
504 }
511 }
505 args.update(override)
512 args.update(override)
506 args['user'] = urllib.parse.quote(safe_str(args['user']))
513 args['user'] = urllib.parse.quote(safe_str(args['user']))
507
514
508 for k, v in args.items():
515 for k, v in list(args.items()):
509 uri_tmpl = uri_tmpl.replace('{%s}' % k, v)
516 tmpl_key = '{%s}' % k
517 uri_tmpl = uri_tmpl.replace(tmpl_key, v)
510
518
511 # special case for SVN clone url
519 # special case for SVN clone url
512 if repo_type == 'svn':
520 if repo_type == 'svn':
513 uri_tmpl = uri_tmpl.replace('ssh://', 'svn+ssh://')
521 uri_tmpl = uri_tmpl.replace('ssh://', 'svn+ssh://')
514
522
515 # remove leading @ sign if it's present. Case of empty user
523 # remove leading @ sign if it's present. Case of empty user
516 url_obj = urlobject.URLObject(uri_tmpl)
524 url_obj = urlobject.URLObject(uri_tmpl)
517 url = url_obj.with_netloc(url_obj.netloc.lstrip('@'))
525 url = url_obj.with_netloc(url_obj.netloc.lstrip('@'))
518
526
519 return safe_unicode(url)
527 return safe_str(url)
520
528
521
529
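get_clone_url fills the `{scheme}`, `{user}`, `{netloc}`, `{repo}`, ... placeholders from the request and strips the dangling `@` left by an empty user. A rough sketch with a stub standing in for a real Pyramid request (class and host names are invented for illustration):

    # illustrative example only
    from rhodecode.lib.utils2 import get_clone_url

    class FakeRequest:
        def route_url(self, name):
            return 'https://code.example.com/'

    url = get_clone_url(
        FakeRequest(), '{scheme}://{user}@{netloc}/{repo}',
        repo_name='group/project', repo_id=42, repo_type='git')
    # url -> 'https://code.example.com/group/project'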
522 def get_commit_safe(repo, commit_id=None, commit_idx=None, pre_load=None,
530 def get_commit_safe(repo, commit_id=None, commit_idx=None, pre_load=None,
523 maybe_unreachable=False, reference_obj=None):
531 maybe_unreachable=False, reference_obj=None):
524 """
532 """
525 Safe version of get_commit if this commit doesn't exists for a
533 Safe version of get_commit if this commit doesn't exists for a
526 repository it returns a Dummy one instead
534 repository it returns a Dummy one instead
527
535
528 :param repo: repository instance
536 :param repo: repository instance
529 :param commit_id: commit id as str
537 :param commit_id: commit id as str
530 :param commit_idx: numeric commit index
538 :param commit_idx: numeric commit index
531 :param pre_load: optional list of commit attributes to load
539 :param pre_load: optional list of commit attributes to load
532 :param maybe_unreachable: translate unreachable commits on git repos
540 :param maybe_unreachable: translate unreachable commits on git repos
533 :param reference_obj: explicitly search via a reference obj in git. E.g "branch:123" would mean branch "123"
541 :param reference_obj: explicitly search via a reference obj in git. E.g "branch:123" would mean branch "123"
534 """
542 """
535 # TODO(skreft): remove these circular imports
543 # TODO(skreft): remove these circular imports
536 from rhodecode.lib.vcs.backends.base import BaseRepository, EmptyCommit
544 from rhodecode.lib.vcs.backends.base import BaseRepository, EmptyCommit
537 from rhodecode.lib.vcs.exceptions import RepositoryError
545 from rhodecode.lib.vcs.exceptions import RepositoryError
538 if not isinstance(repo, BaseRepository):
546 if not isinstance(repo, BaseRepository):
539 raise Exception('You must pass a Repository '
547 raise Exception('You must pass a Repository '
540 'object as first argument, got %s' % type(repo))
548 'object as first argument, got %s' % type(repo))
541
549
542 try:
550 try:
543 commit = repo.get_commit(
551 commit = repo.get_commit(
544 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load,
552 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load,
545 maybe_unreachable=maybe_unreachable, reference_obj=reference_obj)
553 maybe_unreachable=maybe_unreachable, reference_obj=reference_obj)
546 except (RepositoryError, LookupError):
554 except (RepositoryError, LookupError):
547 commit = EmptyCommit()
555 commit = EmptyCommit()
548 return commit
556 return commit
549
557
550
558
551 def datetime_to_time(dt):
559 def datetime_to_time(dt):
552 if dt:
560 if dt:
553 return time.mktime(dt.timetuple())
561 return time.mktime(dt.timetuple())
554
562
555
563
556 def time_to_datetime(tm):
564 def time_to_datetime(tm):
557 if tm:
565 if tm:
558 if isinstance(tm, str):
566 if isinstance(tm, str):
559 try:
567 try:
560 tm = float(tm)
568 tm = float(tm)
561 except ValueError:
569 except ValueError:
562 return
570 return
563 return datetime.datetime.fromtimestamp(tm)
571 return datetime.datetime.fromtimestamp(tm)
564
572
565
573
566 def time_to_utcdatetime(tm):
574 def time_to_utcdatetime(tm):
567 if tm:
575 if tm:
568 if isinstance(tm, str):
576 if isinstance(tm, str):
569 try:
577 try:
570 tm = float(tm)
578 tm = float(tm)
571 except ValueError:
579 except ValueError:
572 return
580 return
573 return datetime.datetime.utcfromtimestamp(tm)
581 return datetime.datetime.utcfromtimestamp(tm)
574
582
575
583
576 MENTIONS_REGEX = re.compile(
584 MENTIONS_REGEX = re.compile(
577 # ^@ or @ without any special chars in front
585 # ^@ or @ without any special chars in front
578 r'(?:^@|[^a-zA-Z0-9\-\_\.]@)'
586 r'(?:^@|[^a-zA-Z0-9\-\_\.]@)'
579 # main body starts with letter, then can be . - _
587 # main body starts with letter, then can be . - _
580 r'([a-zA-Z0-9]{1}[a-zA-Z0-9\-\_\.]+)',
588 r'([a-zA-Z0-9]{1}[a-zA-Z0-9\-\_\.]+)',
581 re.VERBOSE | re.MULTILINE)
589 re.VERBOSE | re.MULTILINE)
582
590
583
591
584 def extract_mentioned_users(s):
592 def extract_mentioned_users(s):
585 """
593 """
586 Returns unique usernames from given string s that have @mention
594 Returns unique usernames from given string s that have @mention
587
595
588 :param s: string to get mentions
596 :param s: string to get mentions
589 """
597 """
590 usrs = set()
598 usrs = set()
591 for username in MENTIONS_REGEX.findall(s):
599 for username in MENTIONS_REGEX.findall(s):
592 usrs.add(username)
600 usrs.add(username)
593
601
594 return sorted(list(usrs), key=lambda k: k.lower())
602 return sorted(list(usrs), key=lambda k: k.lower())
595
603
596
604
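For reference, MENTIONS_REGEX and extract_mentioned_users in action (usernames are invented):

    # illustrative example only
    from rhodecode.lib.utils2 import extract_mentioned_users

    text = 'ping @alice and @bob-dev about this, thanks @alice'
    assert extract_mentioned_users(text) == ['alice', 'bob-dev']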
597 class AttributeDictBase(dict):
598 def __getstate__(self):
599 odict = self.__dict__ # get attribute dictionary
600 return odict
601
602 def __setstate__(self, dict):
603 self.__dict__ = dict
604
605 __setattr__ = dict.__setitem__
606 __delattr__ = dict.__delitem__
607
608
609 class StrictAttributeDict(AttributeDictBase):
610 """
611 Strict Version of Attribute dict which raises an Attribute error when
612 requested attribute is not set
613 """
614 def __getattr__(self, attr):
615 try:
616 return self[attr]
617 except KeyError:
618 raise AttributeError('%s object has no attribute %s' % (
619 self.__class__, attr))
620
621
622 class AttributeDict(AttributeDictBase):
623 def __getattr__(self, attr):
624 return self.get(attr, None)
625
626
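These dict subclasses now come from rhodecode.lib.type_utils (see the import block at the top of the new file); assuming the moved classes keep the behaviour shown in the removed block above, AttributeDict is lenient about missing keys while StrictAttributeDict raises:

    # illustrative example only, assuming the type_utils import path
    from rhodecode.lib.type_utils import AttributeDict, StrictAttributeDict

    cfg = AttributeDict(host='localhost', port=5432)
    assert cfg.host == 'localhost'
    assert cfg.missing is None       # lenient lookup returns None

    strict = StrictAttributeDict(host='localhost')
    strict.missing                   # raises AttributeError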
627 def fix_PATH(os_=None):
605 def fix_PATH(os_=None):
628 """
606 """
629 Get current active python path, and append it to PATH variable to fix
607 Get current active python path, and append it to PATH variable to fix
630 issues of subprocess calls and different python versions
608 issues of subprocess calls and different python versions
631 """
609 """
632 if os_ is None:
610 if os_ is None:
633 import os
611 import os
634 else:
612 else:
635 os = os_
613 os = os_
636
614
637 cur_path = os.path.split(sys.executable)[0]
615 cur_path = os.path.split(sys.executable)[0]
616 os_path = os.environ['PATH']
638 if not os.environ['PATH'].startswith(cur_path):
617 if not os.environ['PATH'].startswith(cur_path):
639 os.environ['PATH'] = '%s:%s' % (cur_path, os.environ['PATH'])
618 os.environ['PATH'] = f'{cur_path}:{os_path}'
640
619
641
620
642 def obfuscate_url_pw(engine):
621 def obfuscate_url_pw(engine):
643 _url = engine or ''
622 _url = engine or ''
644 try:
623 try:
645 _url = sqlalchemy.engine.url.make_url(engine)
624 _url = sqlalchemy.engine.url.make_url(engine)
646 if _url.password:
647 _url.password = 'XXXXX'
648 except Exception:
625 except Exception:
649 pass
626 pass
650 return str(_url)
627 return repr(_url)
651
628
652
629
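The ported obfuscate_url_pw relies on SQLAlchemy's URL repr, which on the 1.4+ series renders the password as `***`; a hedged sketch:

    # illustrative example only; exact output depends on the SQLAlchemy version
    from rhodecode.lib.utils2 import obfuscate_url_pw

    dsn = 'postgresql://rhodecode:s3cret@localhost/rhodecode'
    print(obfuscate_url_pw(dsn))   # e.g. postgresql://rhodecode:***@localhost/rhodecode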
653 def get_server_url(environ):
630 def get_server_url(environ):
654 req = webob.Request(environ)
631 req = webob.Request(environ)
655 return req.host_url + req.script_name
632 return req.host_url + req.script_name
656
633
657
634
658 def unique_id(hexlen=32):
635 def unique_id(hexlen=32):
659 alphabet = "23456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghjklmnpqrstuvwxyz"
636 alphabet = "23456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghjklmnpqrstuvwxyz"
660 return suuid(truncate_to=hexlen, alphabet=alphabet)
637 return suuid(truncate_to=hexlen, alphabet=alphabet)
661
638
662
639
663 def suuid(url=None, truncate_to=22, alphabet=None):
640 def suuid(url=None, truncate_to=22, alphabet=None):
664 """
641 """
665 Generate and return a short URL safe UUID.
642 Generate and return a short URL safe UUID.
666
643
667 If the url parameter is provided, set the namespace to the provided
644 If the url parameter is provided, set the namespace to the provided
668 URL and generate a UUID.
645 URL and generate a UUID.
669
646
670 :param url: url to get the uuid for
647 :param url: url to get the uuid for
671 :param truncate_to: truncate the basic 22 UUID to a shorter version
648 :param truncate_to: truncate the basic 22 UUID to a shorter version
672
649
673 The IDs won't be universally unique any longer, but the probability of
650 The IDs won't be universally unique any longer, but the probability of
674 a collision will still be very low.
651 a collision will still be very low.
675 """
652 """
676 # Define our alphabet.
653 # Define our alphabet.
677 _ALPHABET = alphabet or "23456789ABCDEFGHJKLMNPQRSTUVWXYZ"
654 _ALPHABET = alphabet or "23456789ABCDEFGHJKLMNPQRSTUVWXYZ"
678
655
679 # If no URL is given, generate a random UUID.
656 # If no URL is given, generate a random UUID.
680 if url is None:
657 if url is None:
681 unique_id = uuid.uuid4().int
658 unique_id = uuid.uuid4().int
682 else:
659 else:
683 unique_id = uuid.uuid3(uuid.NAMESPACE_URL, url).int
660 unique_id = uuid.uuid3(uuid.NAMESPACE_URL, url).int
684
661
685 alphabet_length = len(_ALPHABET)
662 alphabet_length = len(_ALPHABET)
686 output = []
663 output = []
687 while unique_id > 0:
664 while unique_id > 0:
688 digit = unique_id % alphabet_length
665 digit = unique_id % alphabet_length
689 output.append(_ALPHABET[digit])
666 output.append(_ALPHABET[digit])
690 unique_id = int(unique_id / alphabet_length)
667 unique_id = int(unique_id / alphabet_length)
691 return "".join(output)[:truncate_to]
668 return "".join(output)[:truncate_to]
692
669
693
670
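unique_id and suuid produce short, URL-safe identifiers; when a url is given the result is deterministic because it is derived via uuid3 from the URL namespace. A sketch:

    # illustrative example only
    from rhodecode.lib.utils2 import suuid, unique_id

    token = unique_id(hexlen=16)               # random id from the custom alphabet
    assert len(token) <= 16

    stable = suuid(url='https://example.com')  # same input -> same id
    assert stable == suuid(url='https://example.com')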
694 def get_current_rhodecode_user(request=None):
671 def get_current_rhodecode_user(request=None):
695 """
672 """
696 Gets rhodecode user from request
673 Gets rhodecode user from request
697 """
674 """
675 import pyramid.threadlocal
698 pyramid_request = request or pyramid.threadlocal.get_current_request()
676 pyramid_request = request or pyramid.threadlocal.get_current_request()
699
677
700 # web case
678 # web case
701 if pyramid_request and hasattr(pyramid_request, 'user'):
679 if pyramid_request and hasattr(pyramid_request, 'user'):
702 return pyramid_request.user
680 return pyramid_request.user
703
681
704 # api case
682 # api case
705 if pyramid_request and hasattr(pyramid_request, 'rpc_user'):
683 if pyramid_request and hasattr(pyramid_request, 'rpc_user'):
706 return pyramid_request.rpc_user
684 return pyramid_request.rpc_user
707
685
708 return None
686 return None
709
687
710
688
711 def action_logger_generic(action, namespace=''):
689 def action_logger_generic(action, namespace=''):
712 """
690 """
713 A generic logger for actions useful to the system overview, tries to find
691 A generic logger for actions useful to the system overview, tries to find
714 an acting user for the context of the call otherwise reports unknown user
692 an acting user for the context of the call otherwise reports unknown user
715
693
716 :param action: logging message eg 'comment 5 deleted'
694 :param action: logging message eg 'comment 5 deleted'
717 :param type: string
695 :param type: string
718
696
719 :param namespace: namespace of the logging message eg. 'repo.comments'
697 :param namespace: namespace of the logging message eg. 'repo.comments'
720 :param type: string
698 :param type: string
721
699
722 """
700 """
723
701
724 logger_name = 'rhodecode.actions'
702 logger_name = 'rhodecode.actions'
725
703
726 if namespace:
704 if namespace:
727 logger_name += '.' + namespace
705 logger_name += '.' + namespace
728
706
729 log = logging.getLogger(logger_name)
707 log = logging.getLogger(logger_name)
730
708
731 # get a user if we can
709 # get a user if we can
732 user = get_current_rhodecode_user()
710 user = get_current_rhodecode_user()
733
711
734 logfunc = log.info
712 logfunc = log.info
735
713
736 if not user:
714 if not user:
737 user = '<unknown user>'
715 user = '<unknown user>'
738 logfunc = log.warning
716 logfunc = log.warning
739
717
740 logfunc('Logging action by {}: {}'.format(user, action))
718 logfunc('Logging action by {}: {}'.format(user, action))
741
719
742
720
743 def escape_split(text, sep=',', maxsplit=-1):
721 def escape_split(text, sep=',', maxsplit=-1):
744 r"""
722 r"""
745 Allows for escaping of the separator: e.g. arg='foo\, bar'
723 Allows for escaping of the separator: e.g. arg='foo\, bar'
746
724
747 It should be noted that the way bash et. al. do command line parsing, those
725 It should be noted that the way bash et. al. do command line parsing, those
748 single quotes are required.
726 single quotes are required.
749 """
727 """
750 escaped_sep = r'\%s' % sep
728 escaped_sep = r'\%s' % sep
751
729
752 if escaped_sep not in text:
730 if escaped_sep not in text:
753 return text.split(sep, maxsplit)
731 return text.split(sep, maxsplit)
754
732
755 before, _mid, after = text.partition(escaped_sep)
733 before, _mid, after = text.partition(escaped_sep)
756 startlist = before.split(sep, maxsplit) # a regular split is fine here
734 startlist = before.split(sep, maxsplit) # a regular split is fine here
757 unfinished = startlist[-1]
735 unfinished = startlist[-1]
758 startlist = startlist[:-1]
736 startlist = startlist[:-1]
759
737
760 # recurse because there may be more escaped separators
738 # recurse because there may be more escaped separators
761 endlist = escape_split(after, sep, maxsplit)
739 endlist = escape_split(after, sep, maxsplit)
762
740
763 # finish building the escaped value. we use endlist[0] because the first
741 # finish building the escaped value. we use endlist[0] because the first
764 # part of the string sent in recursion is the rest of the escaped value.
742 # part of the string sent in recursion is the rest of the escaped value.
765 unfinished += sep + endlist[0]
743 unfinished += sep + endlist[0]
766
744
767 return startlist + [unfinished] + endlist[1:] # put together all the parts
745 return startlist + [unfinished] + endlist[1:] # put together all the parts
768
746
769
747
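escape_split in action; the escaped comma survives the split:

    # illustrative example only
    from rhodecode.lib.utils2 import escape_split

    assert escape_split('foo, bar, baz') == ['foo', ' bar', ' baz']
    assert escape_split(r'foo\, bar, baz') == ['foo, bar', ' baz']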
770 class OptionalAttr(object):
748 class OptionalAttr(object):
771 """
749 """
772 Special Optional option that defines another attribute. Example::
750 Special Optional option that defines another attribute. Example::
773
751
774 def test(apiuser, userid=Optional(OAttr('apiuser'))):
752 def test(apiuser, userid=Optional(OAttr('apiuser'))):
775 user = Optional.extract(userid)
753 user = Optional.extract(userid)
776 # calls
754 # calls
777
755
778 """
756 """
779
757
780 def __init__(self, attr_name):
758 def __init__(self, attr_name):
781 self.attr_name = attr_name
759 self.attr_name = attr_name
782
760
783 def __repr__(self):
761 def __repr__(self):
784 return '<OptionalAttr:%s>' % self.attr_name
762 return '<OptionalAttr:%s>' % self.attr_name
785
763
786 def __call__(self):
764 def __call__(self):
787 return self
765 return self
788
766
789
767
790 # alias
768 # alias
791 OAttr = OptionalAttr
769 OAttr = OptionalAttr
792
770
793
771
794 class Optional(object):
772 class Optional(object):
795 """
773 """
796 Defines an optional parameter::
774 Defines an optional parameter::
797
775
798 param = param.getval() if isinstance(param, Optional) else param
776 param = param.getval() if isinstance(param, Optional) else param
799 param = param() if isinstance(param, Optional) else param
777 param = param() if isinstance(param, Optional) else param
800
778
801 is equivalent to::
779 is equivalent to::
802
780
803 param = Optional.extract(param)
781 param = Optional.extract(param)
804
782
805 """
783 """
806
784
807 def __init__(self, type_):
785 def __init__(self, type_):
808 self.type_ = type_
786 self.type_ = type_
809
787
810 def __repr__(self):
788 def __repr__(self):
811 return '<Optional:%s>' % self.type_.__repr__()
789 return '<Optional:%s>' % self.type_.__repr__()
812
790
813 def __call__(self):
791 def __call__(self):
814 return self.getval()
792 return self.getval()
815
793
816 def getval(self):
794 def getval(self):
817 """
795 """
818 returns value from this Optional instance
796 returns value from this Optional instance
819 """
797 """
820 if isinstance(self.type_, OAttr):
798 if isinstance(self.type_, OAttr):
821 # use params name
799 # use params name
822 return self.type_.attr_name
800 return self.type_.attr_name
823 return self.type_
801 return self.type_
824
802
825 @classmethod
803 @classmethod
826 def extract(cls, val):
804 def extract(cls, val):
827 """
805 """
828 Extracts value from Optional() instance
806 Extracts value from Optional() instance
829
807
830 :param val:
808 :param val:
831 :return: original value if it's not Optional instance else
809 :return: original value if it's not Optional instance else
832 value of instance
810 value of instance
833 """
811 """
834 if isinstance(val, cls):
812 if isinstance(val, cls):
835 return val.getval()
813 return val.getval()
836 return val
814 return val
837
815
838
816
839 def glob2re(pat):
817 def glob2re(pat):
840 """
818 import fnmatch
841 Translate a shell PATTERN to a regular expression.
819 return fnmatch.translate(pat)
842
843 There is no way to quote meta-characters.
844 """
845
846 i, n = 0, len(pat)
847 res = ''
848 while i < n:
849 c = pat[i]
850 i = i+1
851 if c == '*':
852 #res = res + '.*'
853 res = res + '[^/]*'
854 elif c == '?':
855 #res = res + '.'
856 res = res + '[^/]'
857 elif c == '[':
858 j = i
859 if j < n and pat[j] == '!':
860 j = j+1
861 if j < n and pat[j] == ']':
862 j = j+1
863 while j < n and pat[j] != ']':
864 j = j+1
865 if j >= n:
866 res = res + '\\['
867 else:
868 stuff = pat[i:j].replace('\\','\\\\')
869 i = j+1
870 if stuff[0] == '!':
871 stuff = '^' + stuff[1:]
872 elif stuff[0] == '^':
873 stuff = '\\' + stuff
874 res = '%s[%s]' % (res, stuff)
875 else:
876 res = res + re.escape(c)
877 return res + '\Z(?ms)'
878
820
879
821
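The new glob2re simply delegates to fnmatch.translate, so the result is a ready-to-compile regular expression; a quick sketch:

    # illustrative example only
    import re
    from rhodecode.lib.utils2 import glob2re

    pattern = re.compile(glob2re('*.py'))
    assert pattern.match('setup.py')
    assert not pattern.match('setup.txt')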
880 def parse_byte_string(size_str):
822 def parse_byte_string(size_str):
881 match = re.match(r'(\d+)(MB|KB)', size_str, re.IGNORECASE)
823 match = re.match(r'(\d+)(MB|KB)', size_str, re.IGNORECASE)
882 if not match:
824 if not match:
883 raise ValueError('Given size:%s is invalid, please make sure '
825 raise ValueError(f'Given size:{size_str} is invalid, please make sure '
884 'to use format of <num>(MB|KB)' % size_str)
826 f'to use format of <num>(MB|KB)')
885
827
886 _parts = match.groups()
828 _parts = match.groups()
887 num, type_ = _parts
829 num, type_ = _parts
888 return int(num) * {'mb': 1024*1024, 'kb': 1024}[type_.lower()]
830 return int(num) * {'mb': 1024*1024, 'kb': 1024}[type_.lower()]
889
831
890
832
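parse_byte_string accepts only the <num>(MB|KB) form referenced in its error message:

    # illustrative example only
    from rhodecode.lib.utils2 import parse_byte_string

    assert parse_byte_string('64kb') == 64 * 1024
    assert parse_byte_string('16MB') == 16 * 1024 * 1024
    parse_byte_string('1GB')   # raises ValueError: only MB/KB are supported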
891 class CachedProperty(object):
833 class CachedProperty(object):
892 """
834 """
893 Lazy Attributes. With option to invalidate the cache by running a method
835 Lazy Attributes. With option to invalidate the cache by running a method
894
836
895 >>> class Foo(object):
837 >>> class Foo(object):
896 ...
838 ...
897 ... @CachedProperty
839 ... @CachedProperty
898 ... def heavy_func(self):
840 ... def heavy_func(self):
899 ... return 'super-calculation'
841 ... return 'super-calculation'
900 ...
842 ...
901 ... foo = Foo()
843 ... foo = Foo()
902 ... foo.heavy_func # first computation
844 ... foo.heavy_func # first computation
903 ... foo.heavy_func # fetch from cache
845 ... foo.heavy_func # fetch from cache
904 ... foo._invalidate_prop_cache('heavy_func')
846 ... foo._invalidate_prop_cache('heavy_func')
905
847
906 # at this point calling foo.heavy_func() will be re-computed
848 # at this point calling foo.heavy_func() will be re-computed
907 """
849 """
908
850
909 def __init__(self, func, func_name=None):
851 def __init__(self, func, func_name=None):
910
852
911 if func_name is None:
853 if func_name is None:
912 func_name = func.__name__
854 func_name = func.__name__
913 self.data = (func, func_name)
855 self.data = (func, func_name)
914 update_wrapper(self, func)
856 functools.update_wrapper(self, func)
915
857
916 def __get__(self, inst, class_):
858 def __get__(self, inst, class_):
917 if inst is None:
859 if inst is None:
918 return self
860 return self
919
861
920 func, func_name = self.data
862 func, func_name = self.data
921 value = func(inst)
863 value = func(inst)
922 inst.__dict__[func_name] = value
864 inst.__dict__[func_name] = value
923 if '_invalidate_prop_cache' not in inst.__dict__:
865 if '_invalidate_prop_cache' not in inst.__dict__:
924 inst.__dict__['_invalidate_prop_cache'] = partial(
866 inst.__dict__['_invalidate_prop_cache'] = functools.partial(
925 self._invalidate_prop_cache, inst)
867 self._invalidate_prop_cache, inst)
926 return value
868 return value
927
869
928 def _invalidate_prop_cache(self, inst, name):
870 def _invalidate_prop_cache(self, inst, name):
929 inst.__dict__.pop(name, None)
871 inst.__dict__.pop(name, None)
930
872
931
873
932 def retry(func=None, exception=Exception, n_tries=5, delay=5, backoff=1, logger=True):
874 def retry(func=None, exception=Exception, n_tries=5, delay=5, backoff=1, logger=True):
933 """
875 """
934 Retry decorator with exponential backoff.
876 Retry decorator with exponential backoff.
935
877
936 Parameters
878 Parameters
937 ----------
879 ----------
938 func : typing.Callable, optional
880 func : typing.Callable, optional
939 Callable on which the decorator is applied, by default None
881 Callable on which the decorator is applied, by default None
940 exception : Exception or tuple of Exceptions, optional
882 exception : Exception or tuple of Exceptions, optional
941 Exception(s) that invoke retry, by default Exception
883 Exception(s) that invoke retry, by default Exception
942 n_tries : int, optional
884 n_tries : int, optional
943 Number of tries before giving up, by default 5
885 Number of tries before giving up, by default 5
944 delay : int, optional
886 delay : int, optional
945 Initial delay between retries in seconds, by default 5
887 Initial delay between retries in seconds, by default 5
946 backoff : int, optional
888 backoff : int, optional
947 Backoff multiplier e.g. value of 2 will double the delay, by default 1
889 Backoff multiplier e.g. value of 2 will double the delay, by default 1
948 logger : bool, optional
890 logger : bool, optional
949 Option to log or print, by default True
891 Option to log or print, by default True
950
892
951 Returns
893 Returns
952 -------
894 -------
953 typing.Callable
895 typing.Callable
954 Decorated callable that calls itself when exception(s) occur.
896 Decorated callable that calls itself when exception(s) occur.
955
897
956 Examples
898 Examples
957 --------
899 --------
958 >>> import random
900 >>> import random
959 >>> @retry(exception=Exception, n_tries=3)
901 >>> @retry(exception=Exception, n_tries=3)
960 ... def test_random(text):
902 ... def test_random(text):
961 ... x = random.random()
903 ... x = random.random()
962 ... if x < 0.5:
904 ... if x < 0.5:
963 ... raise Exception("Fail")
905 ... raise Exception("Fail")
964 ... else:
906 ... else:
965 ... print("Success: ", text)
907 ... print("Success: ", text)
966 >>> test_random("It works!")
908 >>> test_random("It works!")
967 """
909 """
968
910
969 if func is None:
911 if func is None:
970 return partial(
912 return functools.partial(
971 retry,
913 retry,
972 exception=exception,
914 exception=exception,
973 n_tries=n_tries,
915 n_tries=n_tries,
974 delay=delay,
916 delay=delay,
975 backoff=backoff,
917 backoff=backoff,
976 logger=logger,
918 logger=logger,
977 )
919 )
978
920
979 @wraps(func)
921 @functools.wraps(func)
980 def wrapper(*args, **kwargs):
922 def wrapper(*args, **kwargs):
981 _n_tries, n_delay = n_tries, delay
923 _n_tries, n_delay = n_tries, delay
982 log = logging.getLogger('rhodecode.retry')
924 log = logging.getLogger('rhodecode.retry')
983
925
984 while _n_tries > 1:
926 while _n_tries > 1:
985 try:
927 try:
986 return func(*args, **kwargs)
928 return func(*args, **kwargs)
987 except exception as e:
929 except exception as e:
988 e_details = repr(e)
930 e_details = repr(e)
989 msg = "Exception on calling func {func}: {e}, " \
931 msg = "Exception on calling func {func}: {e}, " \
990 "Retrying in {n_delay} seconds..."\
932 "Retrying in {n_delay} seconds..."\
991 .format(func=func, e=e_details, n_delay=n_delay)
933 .format(func=func, e=e_details, n_delay=n_delay)
992 if logger:
934 if logger:
993 log.warning(msg)
935 log.warning(msg)
994 else:
936 else:
995 print(msg)
937 print(msg)
996 time.sleep(n_delay)
938 time.sleep(n_delay)
997 _n_tries -= 1
939 _n_tries -= 1
998 n_delay *= backoff
940 n_delay *= backoff
999
941
1000 return func(*args, **kwargs)
942 return func(*args, **kwargs)
1001
943
1002 return wrapper
944 return wrapper
1003
945
1004
946
1005 def user_agent_normalizer(user_agent_raw, safe=True):
947 def user_agent_normalizer(user_agent_raw, safe=True):
1006 log = logging.getLogger('rhodecode.user_agent_normalizer')
948 log = logging.getLogger('rhodecode.user_agent_normalizer')
1007 ua = (user_agent_raw or '').strip().lower()
949 ua = (user_agent_raw or '').strip().lower()
1008 ua = ua.replace('"', '')
950 ua = ua.replace('"', '')
1009
951
1010 try:
952 try:
1011 if 'mercurial/proto-1.0' in ua:
953 if 'mercurial/proto-1.0' in ua:
1012 ua = ua.replace('mercurial/proto-1.0', '')
954 ua = ua.replace('mercurial/proto-1.0', '')
1013 ua = ua.replace('(', '').replace(')', '').strip()
955 ua = ua.replace('(', '').replace(')', '').strip()
1014 ua = ua.replace('mercurial ', 'mercurial/')
956 ua = ua.replace('mercurial ', 'mercurial/')
1015 elif ua.startswith('git'):
957 elif ua.startswith('git'):
1016 parts = ua.split(' ')
958 parts = ua.split(' ')
1017 if parts:
959 if parts:
1018 ua = parts[0]
960 ua = parts[0]
1019 ua = re.sub('\.windows\.\d', '', ua).strip()
961 ua = re.sub(r'\.windows\.\d', '', ua).strip()
1020
962
1021 return ua
963 return ua
1022 except Exception:
964 except Exception:
1023 log.exception('Failed to parse scm user-agent')
965 log.exception('Failed to parse scm user-agent')
1024 if not safe:
966 if not safe:
1025 raise
967 raise
1026
968
1027 return ua
969 return ua
1028
970
1029
971
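user_agent_normalizer reduces raw SCM user-agent headers to a compact scm/version form; a sketch with made-up header values:

    # illustrative example only
    from rhodecode.lib.utils2 import user_agent_normalizer

    assert user_agent_normalizer('git/2.38.1.windows.1') == 'git/2.38.1'
    assert user_agent_normalizer('mercurial/proto-1.0 (Mercurial 6.1)') == 'mercurial/6.1'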
1030 def get_available_port(min_port=40000, max_port=55555, use_range=False):
972 def get_available_port(min_port=40000, max_port=55555, use_range=False):
1031 hostname = ''
973 hostname = ''
1032 for _ in range(min_port, max_port):
974 for _ in range(min_port, max_port):
1033 pick_port = 0
975 pick_port = 0
1034 if use_range:
976 if use_range:
1035 pick_port = random.randint(min_port, max_port)
977 pick_port = random.randint(min_port, max_port)
1036
978
1037 with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as s:
979 with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as s:
1038 try:
980 try:
1039 s.bind((hostname, pick_port))
981 s.bind((hostname, pick_port))
1040 s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
982 s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
1041 return s.getsockname()[1]
983 return s.getsockname()[1]
1042 except OSError:
984 except OSError:
1043 continue
985 continue
1044 except socket.error as e:
986 except socket.error as e:
1045 if e.args[0] in [errno.EADDRINUSE, errno.ECONNREFUSED]:
987 if e.args[0] in [errno.EADDRINUSE, errno.ECONNREFUSED]:
1046 continue
988 continue
1047 raise
989 raise