audit-logs: improve user-agent normalizer, and added tests
super-admin
r4862:15c2a8b8 default
@@ -1,1165 +1,1173 b''
# -*- coding: utf-8 -*-

# Copyright (C) 2011-2020 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/


"""
Some simple helper functions
"""

import collections
import datetime
import dateutil.relativedelta
import hashlib
import logging
import re
import sys
import time
import urllib
import urlobject
import uuid
import getpass
from functools import update_wrapper, partial, wraps

import pygments.lexers
import sqlalchemy
import sqlalchemy.engine.url
import sqlalchemy.exc
import sqlalchemy.sql
import webob
import pyramid.threadlocal
from pyramid import compat
from pyramid.settings import asbool

import rhodecode
from rhodecode.translation import _, _pluralize

def md5(s):
    return hashlib.md5(s).hexdigest()


def md5_safe(s):
    return md5(safe_str(s))


def sha1(s):
    return hashlib.sha1(s).hexdigest()


def sha1_safe(s):
    return sha1(safe_str(s))
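
# Illustrative usage (examples added for clarity, not part of the original
# module); the *_safe variants coerce their input with safe_str() first:
#
#   >>> md5_safe(u'foo')
#   'acbd18db4cc2f85cedef654fccc4a4d8'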


def __get_lem(extra_mapping=None):
    """
    Get language extension map based on what's inside pygments lexers
    """
    d = collections.defaultdict(lambda: [])

    def __clean(s):
        s = s.lstrip('*')
        s = s.lstrip('.')

        if s.find('[') != -1:
            exts = []
            start, stop = s.find('['), s.find(']')

            for suffix in s[start + 1:stop]:
                exts.append(s[:s.find('[')] + suffix)
            return [e.lower() for e in exts]
        else:
            return [s.lower()]

    for lx, t in sorted(pygments.lexers.LEXERS.items()):
        m = map(__clean, t[-2])
        if m:
            m = reduce(lambda x, y: x + y, m)
            for ext in m:
                desc = lx.replace('Lexer', '')
                d[ext].append(desc)

    data = dict(d)

    extra_mapping = extra_mapping or {}
    if extra_mapping:
        for k, v in extra_mapping.items():
            if k not in data:
                # register new mapping2lexer
                data[k] = [v]

    return data


def str2bool(_str):
    """
    Returns a True/False value from the given string; it tries to translate
    the string into a boolean.

    :param _str: string value to translate into boolean
    :rtype: boolean
    :returns: boolean from given string
    """
    if _str is None:
        return False
    if _str in (True, False):
        return _str
    _str = str(_str).strip().lower()
    return _str in ('t', 'true', 'y', 'yes', 'on', '1')
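
# Illustrative examples (added, not part of the original module):
#
#   >>> str2bool('Yes'), str2bool('0'), str2bool(None)
#   (True, False, False)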


def aslist(obj, sep=None, strip=True):
    """
    Returns the given string separated by sep as a list

    :param obj:
    :param sep:
    :param strip:
    """
    if isinstance(obj, (basestring,)):
        lst = obj.split(sep)
        if strip:
            lst = [v.strip() for v in lst]
        return lst
    elif isinstance(obj, (list, tuple)):
        return obj
    elif obj is None:
        return []
    else:
        return [obj]
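
# Illustrative examples (added, not part of the original module):
#
#   >>> aslist('a, b ,c', sep=',')
#   ['a', 'b', 'c']
#   >>> aslist(None), aslist(42)
#   ([], [42])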


def convert_line_endings(line, mode):
    """
    Converts the line endings of the given line according to the given mode

    Available modes are::
        0 - Unix
        1 - Mac
        2 - DOS

    :param line: given line to convert
    :param mode: mode to convert to
    :rtype: str
    :return: converted line according to mode
    """
    if mode == 0:
        line = line.replace('\r\n', '\n')
        line = line.replace('\r', '\n')
    elif mode == 1:
        line = line.replace('\r\n', '\r')
        line = line.replace('\n', '\r')
    elif mode == 2:
        line = re.sub('\r(?!\n)|(?<!\r)\n', '\r\n', line)
    return line


def detect_mode(line, default):
    """
    Detects the line ending of the given line; if it cannot be detected,
    the given default value is returned.

    :param line: str line
    :param default: default
    :rtype: int
    :return: value of line end, one of 0 - Unix, 1 - Mac, 2 - DOS
    """
    if line.endswith('\r\n'):
        return 2
    elif line.endswith('\n'):
        return 0
    elif line.endswith('\r'):
        return 1
    else:
        return default
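
# Illustrative examples (added, not part of the original module):
#
#   >>> detect_mode('foo\r\n', 0)
#   2
#   >>> convert_line_endings('a\r\nb\r', 0)
#   'a\nb\n'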


def safe_int(val, default=None):
    """
    Returns int() of val; if val is not convertible to int, the given
    default is returned instead.

    :param val:
    :param default:
    """

    try:
        val = int(val)
    except (ValueError, TypeError):
        val = default

    return val
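
# Illustrative examples (added, not part of the original module):
#
#   >>> safe_int('12'), safe_int('12.5', default=0), safe_int(None)
#   (12, 0, None)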


def safe_unicode(str_, from_encoding=None, use_chardet=False):
    """
    safe unicode function. Does a few tricks to turn str_ into unicode

    In case of UnicodeDecodeError, we try to return the value decoded with the
    encoding detected by the chardet library; if that fails, we fall back to
    unicode with errors replaced.

    :param str_: string to decode
    :rtype: unicode
    :returns: unicode object
    """
    if isinstance(str_, unicode):
        return str_

    if not from_encoding:
        DEFAULT_ENCODINGS = aslist(rhodecode.CONFIG.get('default_encoding',
                                                        'utf8'), sep=',')
        from_encoding = DEFAULT_ENCODINGS

    if not isinstance(from_encoding, (list, tuple)):
        from_encoding = [from_encoding]

    try:
        return unicode(str_)
    except UnicodeDecodeError:
        pass

    for enc in from_encoding:
        try:
            return unicode(str_, enc)
        except UnicodeDecodeError:
            pass

    if use_chardet:
        try:
            import chardet
            encoding = chardet.detect(str_)['encoding']
            if encoding is None:
                raise Exception()
            return str_.decode(encoding)
        except (ImportError, UnicodeDecodeError, Exception):
            return unicode(str_, from_encoding[0], 'replace')
    else:
        return unicode(str_, from_encoding[0], 'replace')


def safe_str(unicode_, to_encoding=None, use_chardet=False):
    """
    safe str function. Does a few tricks to turn unicode_ into a string

    In case of UnicodeEncodeError, we try to return the value encoded with the
    encoding detected by the chardet library; if that fails, we fall back to
    a string with errors replaced.

    :param unicode_: unicode to encode
    :rtype: str
    :returns: str object
    """

    # if it's not basestr cast to str
    if not isinstance(unicode_, compat.string_types):
        return str(unicode_)

    if isinstance(unicode_, str):
        return unicode_

    if not to_encoding:
        DEFAULT_ENCODINGS = aslist(rhodecode.CONFIG.get('default_encoding',
                                                        'utf8'), sep=',')
        to_encoding = DEFAULT_ENCODINGS

    if not isinstance(to_encoding, (list, tuple)):
        to_encoding = [to_encoding]

    for enc in to_encoding:
        try:
            return unicode_.encode(enc)
        except UnicodeEncodeError:
            pass

    if use_chardet:
        try:
            import chardet
            encoding = chardet.detect(unicode_)['encoding']
            if encoding is None:
                raise UnicodeEncodeError()

            return unicode_.encode(encoding)
        except (ImportError, UnicodeEncodeError):
            return unicode_.encode(to_encoding[0], 'replace')
    else:
        return unicode_.encode(to_encoding[0], 'replace')
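
# Illustrative examples (added, not part of the original module; the default
# target encoding comes from rhodecode.CONFIG, typically utf8):
#
#   >>> safe_str(u'ascii text')
#   'ascii text'
#   >>> safe_unicode('ascii text')
#   u'ascii text'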


def remove_suffix(s, suffix):
    if s.endswith(suffix):
        s = s[:-1 * len(suffix)]
    return s


def remove_prefix(s, prefix):
    if s.startswith(prefix):
        s = s[len(prefix):]
    return s


def find_calling_context(ignore_modules=None):
    """
    Look through the calling stack and return the frame which called
    this function and is part of the core module (i.e. rhodecode.*)

    :param ignore_modules: list of modules to ignore eg. ['rhodecode.lib']
    """

    ignore_modules = ignore_modules or []

    f = sys._getframe(2)
    while f.f_back is not None:
        name = f.f_globals.get('__name__')
        if name and name.startswith(__name__.split('.')[0]):
            if name not in ignore_modules:
                return f
        f = f.f_back
    return None


def ping_connection(connection, branch):
    if branch:
        # "branch" refers to a sub-connection of a connection,
        # we don't want to bother pinging on these.
        return

    # turn off "close with result". This flag is only used with
    # "connectionless" execution, otherwise will be False in any case
    save_should_close_with_result = connection.should_close_with_result
    connection.should_close_with_result = False

    try:
        # run a SELECT 1. use a core select() so that
        # the SELECT of a scalar value without a table is
        # appropriately formatted for the backend
        connection.scalar(sqlalchemy.sql.select([1]))
    except sqlalchemy.exc.DBAPIError as err:
        # catch SQLAlchemy's DBAPIError, which is a wrapper
        # for the DBAPI's exception. It includes a .connection_invalidated
        # attribute which specifies if this connection is a "disconnect"
        # condition, which is based on inspection of the original exception
        # by the dialect in use.
        if err.connection_invalidated:
            # run the same SELECT again - the connection will re-validate
            # itself and establish a new connection. The disconnect detection
            # here also causes the whole connection pool to be invalidated
            # so that all stale connections are discarded.
            connection.scalar(sqlalchemy.sql.select([1]))
        else:
            raise
    finally:
        # restore "close with result"
        connection.should_close_with_result = save_should_close_with_result


def engine_from_config(configuration, prefix='sqlalchemy.', **kwargs):
    """Custom engine_from_config function."""
    log = logging.getLogger('sqlalchemy.engine')
    use_ping_connection = asbool(configuration.pop('sqlalchemy.db1.ping_connection', None))
    debug = asbool(configuration.pop('sqlalchemy.db1.debug_query', None))

    engine = sqlalchemy.engine_from_config(configuration, prefix, **kwargs)

    def color_sql(sql):
        color_seq = '\033[1;33m'  # This is yellow: code 33
        normal = '\x1b[0m'
        return ''.join([color_seq, sql, normal])

    if use_ping_connection:
        log.debug('Adding ping_connection on the engine config.')
        sqlalchemy.event.listen(engine, "engine_connect", ping_connection)

    if debug:
        # attach events only for debug configuration
        def before_cursor_execute(conn, cursor, statement,
                                  parameters, context, executemany):
            setattr(conn, 'query_start_time', time.time())
            log.info(color_sql(">>>>> STARTING QUERY >>>>>"))
            calling_context = find_calling_context(ignore_modules=[
                'rhodecode.lib.caching_query',
                'rhodecode.model.settings',
            ])
            if calling_context:
                log.info(color_sql('call context %s:%s' % (
                    calling_context.f_code.co_filename,
                    calling_context.f_lineno,
                )))

        def after_cursor_execute(conn, cursor, statement,
                                 parameters, context, executemany):
            delattr(conn, 'query_start_time')

        sqlalchemy.event.listen(engine, "before_cursor_execute", before_cursor_execute)
        sqlalchemy.event.listen(engine, "after_cursor_execute", after_cursor_execute)

    return engine


def get_encryption_key(config):
    secret = config.get('rhodecode.encrypted_values.secret')
    default = config['beaker.session.secret']
    return secret or default


def age(prevdate, now=None, show_short_version=False, show_suffix=True,
        short_format=False):
    """
    Turns a datetime into an age string.
    If show_short_version is True, this generates a shorter string with
    an approximate age; ex. '1 day ago', rather than '1 day and 23 hours ago'.

    *IMPORTANT*
    The code of this function is written in a special way so it's easier to
    backport it to javascript. If you mean to update it, please also update
    the `jquery.timeago-extension.js` file

    :param prevdate: datetime object
    :param now: get current time, if not defined we use
        `datetime.datetime.now()`
    :param show_short_version: if it should approximate the date and
        return a shorter string
    :param show_suffix:
    :param short_format: show short format, eg 2D instead of 2 days
    :rtype: unicode
    :returns: unicode words describing age
    """

    def _get_relative_delta(now, prevdate):
        base = dateutil.relativedelta.relativedelta(now, prevdate)
        return {
            'year': base.years,
            'month': base.months,
            'day': base.days,
            'hour': base.hours,
            'minute': base.minutes,
            'second': base.seconds,
        }

    def _is_leap_year(year):
        return year % 4 == 0 and (year % 100 != 0 or year % 400 == 0)

    def get_month(prevdate):
        return prevdate.month

    def get_year(prevdate):
        return prevdate.year

    now = now or datetime.datetime.now()
    order = ['year', 'month', 'day', 'hour', 'minute', 'second']
    deltas = {}
    future = False

    if prevdate > now:
        now_old = now
        now = prevdate
        prevdate = now_old
        future = True
    if future:
        prevdate = prevdate.replace(microsecond=0)
    # Get date parts deltas
    for part in order:
        rel_delta = _get_relative_delta(now, prevdate)
        deltas[part] = rel_delta[part]

    # Fix negative offsets (there is 1 second between 10:59:59 and 11:00:00,
    # not 1 hour, -59 minutes and -59 seconds)
    offsets = [[5, 60], [4, 60], [3, 24]]
    for element in offsets:  # seconds, minutes, hours
        num = element[0]
        length = element[1]

        part = order[num]
        carry_part = order[num - 1]

        if deltas[part] < 0:
            deltas[part] += length
            deltas[carry_part] -= 1

    # Same thing for days except that the increment depends on the (variable)
    # number of days in the month
    month_lengths = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
    if deltas['day'] < 0:
        if get_month(prevdate) == 2 and _is_leap_year(get_year(prevdate)):
            deltas['day'] += 29
        else:
            deltas['day'] += month_lengths[get_month(prevdate) - 1]

        deltas['month'] -= 1

    if deltas['month'] < 0:
        deltas['month'] += 12
        deltas['year'] -= 1

    # Format the result
    if short_format:
        fmt_funcs = {
            'year': lambda d: u'%dy' % d,
            'month': lambda d: u'%dm' % d,
            'day': lambda d: u'%dd' % d,
            'hour': lambda d: u'%dh' % d,
            'minute': lambda d: u'%dmin' % d,
            'second': lambda d: u'%dsec' % d,
        }
    else:
        fmt_funcs = {
            'year': lambda d: _pluralize(u'${num} year', u'${num} years', d, mapping={'num': d}).interpolate(),
            'month': lambda d: _pluralize(u'${num} month', u'${num} months', d, mapping={'num': d}).interpolate(),
            'day': lambda d: _pluralize(u'${num} day', u'${num} days', d, mapping={'num': d}).interpolate(),
            'hour': lambda d: _pluralize(u'${num} hour', u'${num} hours', d, mapping={'num': d}).interpolate(),
            'minute': lambda d: _pluralize(u'${num} minute', u'${num} minutes', d, mapping={'num': d}).interpolate(),
            'second': lambda d: _pluralize(u'${num} second', u'${num} seconds', d, mapping={'num': d}).interpolate(),
        }

    i = 0
    for part in order:
        value = deltas[part]
        if value != 0:

            if i < 5:
                sub_part = order[i + 1]
                sub_value = deltas[sub_part]
            else:
                sub_value = 0

            if sub_value == 0 or show_short_version:
                _val = fmt_funcs[part](value)
                if future:
                    if show_suffix:
                        return _(u'in ${ago}', mapping={'ago': _val})
                    else:
                        return _(_val)

                else:
                    if show_suffix:
                        return _(u'${ago} ago', mapping={'ago': _val})
                    else:
                        return _(_val)

            val = fmt_funcs[part](value)
            val_detail = fmt_funcs[sub_part](sub_value)
            mapping = {'val': val, 'detail': val_detail}

            if short_format:
                datetime_tmpl = _(u'${val}, ${detail}', mapping=mapping)
                if show_suffix:
                    datetime_tmpl = _(u'${val}, ${detail} ago', mapping=mapping)
                    if future:
                        datetime_tmpl = _(u'in ${val}, ${detail}', mapping=mapping)
            else:
                datetime_tmpl = _(u'${val} and ${detail}', mapping=mapping)
                if show_suffix:
                    datetime_tmpl = _(u'${val} and ${detail} ago', mapping=mapping)
                    if future:
                        datetime_tmpl = _(u'in ${val} and ${detail}', mapping=mapping)

            return datetime_tmpl
        i += 1
    return _(u'just now')
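
# Illustrative example (added, not part of the original module):
#
#   age(datetime.datetime.now() - datetime.timedelta(days=1, hours=2))
#   renders roughly as '1 day and 2 hours ago' (a lazy translation string).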


def age_from_seconds(seconds):
    seconds = safe_int(seconds) or 0
    prevdate = time_to_datetime(time.time() + seconds)
    return age(prevdate, show_suffix=False, show_short_version=True)


def cleaned_uri(uri):
    """
    Quotes '[' and ']' from the uri if there is only one of them.
    According to RFC 3986 we cannot use such chars in a uri.
    :param uri:
    :return: uri without these chars
    """
    return urllib.quote(uri, safe='@$:/')


def credentials_filter(uri):
    """
    Returns a url with the credentials removed

    :param uri:
    """
    import urlobject
    if isinstance(uri, rhodecode.lib.encrypt.InvalidDecryptedValue):
        return 'InvalidDecryptionKey'

    url_obj = urlobject.URLObject(cleaned_uri(uri))
    url_obj = url_obj.without_password().without_username()

    return url_obj
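
# Illustrative example (added, not part of the original module); the result
# is a urlobject.URLObject:
#
#   >>> str(credentials_filter('http://user:secret@example.com/repo'))
#   'http://example.com/repo'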


def get_host_info(request):
    """
    Generate host info; to obtain the full url, e.g. https://server.com,
    use `{scheme}://{netloc}` on the returned data
    """
    if not request:
        return {}

    qualified_home_url = request.route_url('home')
    parsed_url = urlobject.URLObject(qualified_home_url)
    decoded_path = safe_unicode(urllib.unquote(parsed_url.path.rstrip('/')))

    return {
        'scheme': parsed_url.scheme,
        'netloc': parsed_url.netloc+decoded_path,
        'hostname': parsed_url.hostname,
    }


def get_clone_url(request, uri_tmpl, repo_name, repo_id, repo_type, **override):
    qualified_home_url = request.route_url('home')
    parsed_url = urlobject.URLObject(qualified_home_url)
    decoded_path = safe_unicode(urllib.unquote(parsed_url.path.rstrip('/')))

    args = {
        'scheme': parsed_url.scheme,
        'user': '',
        'sys_user': getpass.getuser(),
        # path if we use proxy-prefix
        'netloc': parsed_url.netloc+decoded_path,
        'hostname': parsed_url.hostname,
        'prefix': decoded_path,
        'repo': repo_name,
        'repoid': str(repo_id),
        'repo_type': repo_type
    }
    args.update(override)
    args['user'] = urllib.quote(safe_str(args['user']))

    for k, v in args.items():
        uri_tmpl = uri_tmpl.replace('{%s}' % k, v)

    # special case for SVN clone url
    if repo_type == 'svn':
        uri_tmpl = uri_tmpl.replace('ssh://', 'svn+ssh://')

    # remove leading @ sign if it's present. Case of empty user
    url_obj = urlobject.URLObject(uri_tmpl)
    url = url_obj.with_netloc(url_obj.netloc.lstrip('@'))

    return safe_unicode(url)


def get_commit_safe(repo, commit_id=None, commit_idx=None, pre_load=None,
                    maybe_unreachable=False, reference_obj=None):
    """
    Safe version of get_commit; if the commit doesn't exist for a
    repository it returns a Dummy one instead.

    :param repo: repository instance
    :param commit_id: commit id as str
    :param commit_idx: numeric commit index
    :param pre_load: optional list of commit attributes to load
    :param maybe_unreachable: translate unreachable commits on git repos
    :param reference_obj: explicitly search via a reference obj in git. E.g "branch:123" would mean branch "123"
    """
    # TODO(skreft): remove these circular imports
    from rhodecode.lib.vcs.backends.base import BaseRepository, EmptyCommit
    from rhodecode.lib.vcs.exceptions import RepositoryError
    if not isinstance(repo, BaseRepository):
        raise Exception('You must pass a Repository '
                        'object as the first argument, got %s' % type(repo))

    try:
        commit = repo.get_commit(
            commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load,
            maybe_unreachable=maybe_unreachable, reference_obj=reference_obj)
    except (RepositoryError, LookupError):
        commit = EmptyCommit()
    return commit


def datetime_to_time(dt):
    if dt:
        return time.mktime(dt.timetuple())


def time_to_datetime(tm):
    if tm:
        if isinstance(tm, compat.string_types):
            try:
                tm = float(tm)
            except ValueError:
                return
        return datetime.datetime.fromtimestamp(tm)


def time_to_utcdatetime(tm):
    if tm:
        if isinstance(tm, compat.string_types):
            try:
                tm = float(tm)
            except ValueError:
                return
        return datetime.datetime.utcfromtimestamp(tm)


MENTIONS_REGEX = re.compile(
    # ^@ or @ without any special chars in front
    r'(?:^@|[^a-zA-Z0-9\-\_\.]@)'
    # main body starts with letter, then can be . - _
    r'([a-zA-Z0-9]{1}[a-zA-Z0-9\-\_\.]+)',
    re.VERBOSE | re.MULTILINE)


def extract_mentioned_users(s):
    """
    Returns unique usernames from given string s that have @mention

    :param s: string to get mentions
    """
    usrs = set()
    for username in MENTIONS_REGEX.findall(s):
        usrs.add(username)

    return sorted(list(usrs), key=lambda k: k.lower())
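
# Illustrative example (added, not part of the original module):
#
#   >>> extract_mentioned_users(u'@adam please review, cc @Beta.user')
#   [u'adam', u'Beta.user']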


class AttributeDictBase(dict):
    def __getstate__(self):
        odict = self.__dict__  # get attribute dictionary
        return odict

    def __setstate__(self, dict):
        self.__dict__ = dict

    __setattr__ = dict.__setitem__
    __delattr__ = dict.__delitem__


class StrictAttributeDict(AttributeDictBase):
    """
    Strict version of AttributeDict which raises an AttributeError when
    the requested attribute is not set
    """
    def __getattr__(self, attr):
        try:
            return self[attr]
        except KeyError:
            raise AttributeError('%s object has no attribute %s' % (
                self.__class__, attr))


class AttributeDict(AttributeDictBase):
    def __getattr__(self, attr):
        return self.get(attr, None)
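
# Illustrative examples (added, not part of the original module):
#
#   >>> d = AttributeDict(greeting='hello')
#   >>> d.greeting, d.missing
#   ('hello', None)
#
# StrictAttributeDict raises AttributeError for the same `.missing` access.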



class OrderedDefaultDict(collections.OrderedDict, collections.defaultdict):
    def __init__(self, default_factory=None, *args, **kwargs):
        # in python3 you can omit the args to super
        super(OrderedDefaultDict, self).__init__(*args, **kwargs)
        self.default_factory = default_factory


def fix_PATH(os_=None):
    """
    Get the path of the currently active python interpreter and prepend it to
    the PATH variable, to fix issues with subprocess calls and different
    python versions
    """
    if os_ is None:
        import os
    else:
        os = os_

    cur_path = os.path.split(sys.executable)[0]
    if not os.environ['PATH'].startswith(cur_path):
        os.environ['PATH'] = '%s:%s' % (cur_path, os.environ['PATH'])


def obfuscate_url_pw(engine):
    _url = engine or ''
    try:
        _url = sqlalchemy.engine.url.make_url(engine)
        if _url.password:
            _url.password = 'XXXXX'
    except Exception:
        pass
    return unicode(_url)


def get_server_url(environ):
    req = webob.Request(environ)
    return req.host_url + req.script_name


def unique_id(hexlen=32):
    alphabet = "23456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghjklmnpqrstuvwxyz"
    return suuid(truncate_to=hexlen, alphabet=alphabet)


def suuid(url=None, truncate_to=22, alphabet=None):
    """
    Generate and return a short URL safe UUID.

    If the url parameter is provided, set the namespace to the provided
    URL and generate a UUID.

    :param url: url to get the uuid for
    :param truncate_to: truncate the basic 22 char UUID to a shorter version

    The IDs won't be universally unique any longer, but the probability of
    a collision will still be very low.
    """
    # Define our alphabet.
    _ALPHABET = alphabet or "23456789ABCDEFGHJKLMNPQRSTUVWXYZ"

    # If no URL is given, generate a random UUID.
    if url is None:
        unique_id = uuid.uuid4().int
    else:
        unique_id = uuid.uuid3(uuid.NAMESPACE_URL, url).int

    alphabet_length = len(_ALPHABET)
    output = []
    while unique_id > 0:
        digit = unique_id % alphabet_length
        output.append(_ALPHABET[digit])
        unique_id = int(unique_id / alphabet_length)
    return "".join(output)[:truncate_to]


def get_current_rhodecode_user(request=None):
    """
    Gets rhodecode user from request
    """
    pyramid_request = request or pyramid.threadlocal.get_current_request()

    # web case
    if pyramid_request and hasattr(pyramid_request, 'user'):
        return pyramid_request.user

    # api case
    if pyramid_request and hasattr(pyramid_request, 'rpc_user'):
        return pyramid_request.rpc_user

    return None


def action_logger_generic(action, namespace=''):
    """
    A generic logger for actions useful to the system overview; tries to find
    an acting user for the context of the call, otherwise reports an unknown user

    :param action: logging message eg 'comment 5 deleted'
    :type action: string

    :param namespace: namespace of the logging message eg. 'repo.comments'
    :type namespace: string

    """

    logger_name = 'rhodecode.actions'

    if namespace:
        logger_name += '.' + namespace

    log = logging.getLogger(logger_name)

    # get a user if we can
    user = get_current_rhodecode_user()

    logfunc = log.info

    if not user:
        user = '<unknown user>'
        logfunc = log.warning

    logfunc('Logging action by {}: {}'.format(user, action))


def escape_split(text, sep=',', maxsplit=-1):
    r"""
    Allows for escaping of the separator: e.g. arg='foo\, bar'

    It should be noted that the way bash et al. do command line parsing, those
    single quotes are required.
    """
    escaped_sep = r'\%s' % sep

    if escaped_sep not in text:
        return text.split(sep, maxsplit)

    before, _mid, after = text.partition(escaped_sep)
    startlist = before.split(sep, maxsplit)  # a regular split is fine here
    unfinished = startlist[-1]
    startlist = startlist[:-1]

    # recurse because there may be more escaped separators
    endlist = escape_split(after, sep, maxsplit)

    # finish building the escaped value. we use endlist[0] because the first
    # part of the string sent in recursion is the rest of the escaped value.
    unfinished += sep + endlist[0]

    return startlist + [unfinished] + endlist[1:]  # put together all the parts
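
# Illustrative example (added, not part of the original module):
#
#   >>> escape_split('foo\\, bar,baz')
#   ['foo, bar', 'baz']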


class OptionalAttr(object):
    """
    Special Optional Option that defines another attribute. Example::

        def test(apiuser, userid=Optional(OAttr('apiuser'))):
            user = Optional.extract(userid)
            # calls

    """

    def __init__(self, attr_name):
        self.attr_name = attr_name

    def __repr__(self):
        return '<OptionalAttr:%s>' % self.attr_name

    def __call__(self):
        return self


# alias
OAttr = OptionalAttr


class Optional(object):
    """
    Defines an optional parameter::

        param = param.getval() if isinstance(param, Optional) else param
        param = param() if isinstance(param, Optional) else param

    is equivalent of::

        param = Optional.extract(param)

    """

    def __init__(self, type_):
        self.type_ = type_

    def __repr__(self):
        return '<Optional:%s>' % self.type_.__repr__()

    def __call__(self):
        return self.getval()

    def getval(self):
        """
        returns value from this Optional instance
        """
        if isinstance(self.type_, OAttr):
            # use params name
            return self.type_.attr_name
        return self.type_

    @classmethod
    def extract(cls, val):
        """
        Extracts value from Optional() instance

        :param val:
        :return: original value if it's not an Optional instance, else the
            value of the instance
        """
        if isinstance(val, cls):
            return val.getval()
        return val
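
# Illustrative examples (added, not part of the original module):
#
#   >>> Optional.extract(Optional([]))
#   []
#   >>> Optional.extract('given')
#   'given'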


def glob2re(pat):
    """
    Translate a shell PATTERN to a regular expression.

    There is no way to quote meta-characters.
    """

    i, n = 0, len(pat)
    res = ''
    while i < n:
        c = pat[i]
        i = i+1
        if c == '*':
            #res = res + '.*'
            res = res + '[^/]*'
        elif c == '?':
            #res = res + '.'
            res = res + '[^/]'
        elif c == '[':
            j = i
            if j < n and pat[j] == '!':
                j = j+1
            if j < n and pat[j] == ']':
                j = j+1
            while j < n and pat[j] != ']':
                j = j+1
            if j >= n:
                res = res + '\\['
            else:
                stuff = pat[i:j].replace('\\','\\\\')
                i = j+1
                if stuff[0] == '!':
                    stuff = '^' + stuff[1:]
                elif stuff[0] == '^':
                    stuff = '\\' + stuff
                res = '%s[%s]' % (res, stuff)
        else:
            res = res + re.escape(c)
    return res + '\Z(?ms)'
1024
1024
1025
1025
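A minimal usage sketch, assuming the resulting pattern is compiled with the standard re module (the glob strings are illustrative):

import re

pattern = re.compile(glob2re('docs/*.rst'))
assert pattern.match('docs/index.rst')
# '*' is translated to '[^/]*', so it does not cross directory separators
assert not pattern.match('docs/nested/index.rst')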
1026 def parse_byte_string(size_str):
1026 def parse_byte_string(size_str):
1027 match = re.match(r'(\d+)(MB|KB)', size_str, re.IGNORECASE)
1027 match = re.match(r'(\d+)(MB|KB)', size_str, re.IGNORECASE)
1028 if not match:
1028 if not match:
1029 raise ValueError('Given size: %s is invalid, please use '
1029 raise ValueError('Given size: %s is invalid, please use '
1030 'the format <num>(MB|KB)' % size_str)
1030 'the format <num>(MB|KB)' % size_str)
1031
1031
1032 _parts = match.groups()
1032 _parts = match.groups()
1033 num, type_ = _parts
1033 num, type_ = _parts
1034 return long(num) * {'mb': 1024*1024, 'kb': 1024}[type_.lower()]
1034 return long(num) * {'mb': 1024*1024, 'kb': 1024}[type_.lower()]
1035
1035
1036
1036
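For illustration, a few hedged examples of the accepted size format (the values are arbitrary):

assert parse_byte_string('10MB') == 10 * 1024 * 1024
assert parse_byte_string('512kb') == 512 * 1024   # the suffix is case-insensitive
try:
    parse_byte_string('10GB')  # only MB and KB suffixes are recognized
except ValueError:
    pass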
1037 class CachedProperty(object):
1037 class CachedProperty(object):
1038 """
1038 """
1039 Lazy attribute, with an option to invalidate the cache by running a method
1039 Lazy attribute, with an option to invalidate the cache by running a method
1040
1040
1041 >>> class Foo(object):
1041 >>> class Foo(object):
1042 ...
1042 ...
1043 ... @CachedProperty
1043 ... @CachedProperty
1044 ... def heavy_func(self):
1044 ... def heavy_func(self):
1045 ... return 'super-calculation'
1045 ... return 'super-calculation'
1046 ...
1046 ...
1047 ... foo = Foo()
1047 ... foo = Foo()
1048 ... foo.heavy_func # first computation
1048 ... foo.heavy_func # first computation
1049 ... foo.heavy_func # fetch from cache
1049 ... foo.heavy_func # fetch from cache
1050 ... foo._invalidate_prop_cache('heavy_func')
1050 ... foo._invalidate_prop_cache('heavy_func')
1051
1051
1052 # at this point accessing foo.heavy_func will be re-computed
1052 # at this point accessing foo.heavy_func will be re-computed
1053 """
1053 """
1054
1054
1055 def __init__(self, func, func_name=None):
1055 def __init__(self, func, func_name=None):
1056
1056
1057 if func_name is None:
1057 if func_name is None:
1058 func_name = func.__name__
1058 func_name = func.__name__
1059 self.data = (func, func_name)
1059 self.data = (func, func_name)
1060 update_wrapper(self, func)
1060 update_wrapper(self, func)
1061
1061
1062 def __get__(self, inst, class_):
1062 def __get__(self, inst, class_):
1063 if inst is None:
1063 if inst is None:
1064 return self
1064 return self
1065
1065
1066 func, func_name = self.data
1066 func, func_name = self.data
1067 value = func(inst)
1067 value = func(inst)
1068 inst.__dict__[func_name] = value
1068 inst.__dict__[func_name] = value
1069 if '_invalidate_prop_cache' not in inst.__dict__:
1069 if '_invalidate_prop_cache' not in inst.__dict__:
1070 inst.__dict__['_invalidate_prop_cache'] = partial(
1070 inst.__dict__['_invalidate_prop_cache'] = partial(
1071 self._invalidate_prop_cache, inst)
1071 self._invalidate_prop_cache, inst)
1072 return value
1072 return value
1073
1073
1074 def _invalidate_prop_cache(self, inst, name):
1074 def _invalidate_prop_cache(self, inst, name):
1075 inst.__dict__.pop(name, None)
1075 inst.__dict__.pop(name, None)
1076
1076
1077
1077
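A small sketch of CachedProperty in use (the Repo class and commit_count attribute below are illustrative); note that the value is read as an attribute, not called:

class Repo(object):

    @CachedProperty
    def commit_count(self):
        print('computing...')
        return 42

repo = Repo()
repo.commit_count                            # computes and caches 42
repo.commit_count                            # served from the instance __dict__
repo._invalidate_prop_cache('commit_count')
repo.commit_count                            # recomputed on the next access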
1078 def retry(func=None, exception=Exception, n_tries=5, delay=5, backoff=1, logger=True):
1078 def retry(func=None, exception=Exception, n_tries=5, delay=5, backoff=1, logger=True):
1079 """
1079 """
1080 Retry decorator with exponential backoff.
1080 Retry decorator with exponential backoff.
1081
1081
1082 Parameters
1082 Parameters
1083 ----------
1083 ----------
1084 func : typing.Callable, optional
1084 func : typing.Callable, optional
1085 Callable on which the decorator is applied, by default None
1085 Callable on which the decorator is applied, by default None
1086 exception : Exception or tuple of Exceptions, optional
1086 exception : Exception or tuple of Exceptions, optional
1087 Exception(s) that invoke retry, by default Exception
1087 Exception(s) that invoke retry, by default Exception
1088 n_tries : int, optional
1088 n_tries : int, optional
1089 Number of tries before giving up, by default 5
1089 Number of tries before giving up, by default 5
1090 delay : int, optional
1090 delay : int, optional
1091 Initial delay between retries in seconds, by default 5
1091 Initial delay between retries in seconds, by default 5
1092 backoff : int, optional
1092 backoff : int, optional
1093 Backoff multiplier, e.g. a value of 2 doubles the delay on each retry, by default 1
1093 Backoff multiplier, e.g. a value of 2 doubles the delay on each retry, by default 1
1094 logger : bool, optional
1094 logger : bool, optional
1095 Option to log (True) or print (False), by default True
1095 Option to log (True) or print (False), by default True
1096
1096
1097 Returns
1097 Returns
1098 -------
1098 -------
1099 typing.Callable
1099 typing.Callable
1100 Decorated callable that retries the call when the given exception(s) occur.
1100 Decorated callable that retries the call when the given exception(s) occur.
1101
1101
1102 Examples
1102 Examples
1103 --------
1103 --------
1104 >>> import random
1104 >>> import random
1105 >>> @retry(exception=Exception, n_tries=3)
1105 >>> @retry(exception=Exception, n_tries=3)
1106 ... def test_random(text):
1106 ... def test_random(text):
1107 ... x = random.random()
1107 ... x = random.random()
1108 ... if x < 0.5:
1108 ... if x < 0.5:
1109 ... raise Exception("Fail")
1109 ... raise Exception("Fail")
1110 ... else:
1110 ... else:
1111 ... print("Success: ", text)
1111 ... print("Success: ", text)
1112 >>> test_random("It works!")
1112 >>> test_random("It works!")
1113 """
1113 """
1114
1114
1115 if func is None:
1115 if func is None:
1116 return partial(
1116 return partial(
1117 retry,
1117 retry,
1118 exception=exception,
1118 exception=exception,
1119 n_tries=n_tries,
1119 n_tries=n_tries,
1120 delay=delay,
1120 delay=delay,
1121 backoff=backoff,
1121 backoff=backoff,
1122 logger=logger,
1122 logger=logger,
1123 )
1123 )
1124
1124
1125 @wraps(func)
1125 @wraps(func)
1126 def wrapper(*args, **kwargs):
1126 def wrapper(*args, **kwargs):
1127 _n_tries, n_delay = n_tries, delay
1127 _n_tries, n_delay = n_tries, delay
1128 log = logging.getLogger('rhodecode.retry')
1128 log = logging.getLogger('rhodecode.retry')
1129
1129
1130 while _n_tries > 1:
1130 while _n_tries > 1:
1131 try:
1131 try:
1132 return func(*args, **kwargs)
1132 return func(*args, **kwargs)
1133 except exception as e:
1133 except exception as e:
1134 e_details = repr(e)
1134 e_details = repr(e)
1135 msg = "Exception on calling func {func}: {e}, " \
1135 msg = "Exception on calling func {func}: {e}, " \
1136 "Retrying in {n_delay} seconds..."\
1136 "Retrying in {n_delay} seconds..."\
1137 .format(func=func, e=e_details, n_delay=n_delay)
1137 .format(func=func, e=e_details, n_delay=n_delay)
1138 if logger:
1138 if logger:
1139 log.warning(msg)
1139 log.warning(msg)
1140 else:
1140 else:
1141 print(msg)
1141 print(msg)
1142 time.sleep(n_delay)
1142 time.sleep(n_delay)
1143 _n_tries -= 1
1143 _n_tries -= 1
1144 n_delay *= backoff
1144 n_delay *= backoff
1145
1145
1146 return func(*args, **kwargs)
1146 return func(*args, **kwargs)
1147
1147
1148 return wrapper
1148 return wrapper
1149
1149
1150
1150
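Beyond the docstring example above, a sketch of the decorator applied with a tuple of exception types and exponential backoff (read_flaky_file and its failure mode are illustrative):

@retry(exception=(IOError, OSError), n_tries=4, delay=2, backoff=2)
def read_flaky_file(path):
    # retried after 2s, 4s and 8s; the final attempt is not caught
    with open(path) as f:
        return f.read()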
1151 def user_agent_normalizer(user_agent_raw):
1151 def user_agent_normalizer(user_agent_raw, safe=True):
1152 log = logging.getLogger('rhodecode.user_agent_normalizer')
1152 log = logging.getLogger('rhodecode.user_agent_normalizer')
1153 ua = (user_agent_raw or '').strip().lower()
1153 ua = (user_agent_raw or '').strip().lower()
1154 ua = ua.replace('"', '')
1154
1155
1155 try:
1156 try:
1156 if 'mercurial/proto-1.0' in ua:
1157 if 'mercurial/proto-1.0' in ua:
1157 ua = ua.replace('mercurial/proto-1.0', '')
1158 ua = ua.replace('mercurial/proto-1.0', '')
1158 ua = ua.replace('(', '').replace(')', '').strip()
1159 ua = ua.replace('(', '').replace(')', '').strip()
1159 ua = ua.replace('mercurial ', 'mercurial/')
1160 ua = ua.replace('mercurial ', 'mercurial/')
1160 elif ua.startswith('git'):
1161 elif ua.startswith('git'):
1161 pass
1162 parts = ua.split(' ')
1163 if parts:
1164 ua = parts[0]
1165 ua = re.sub(r'\.windows\.\d', '', ua).strip()
1166
1167 return ua
1162 except Exception:
1168 except Exception:
1163 log.exception('Failed to parse scm user-agent')
1169 log.exception('Failed to parse scm user-agent')
1170 if not safe:
1171 raise
1164
1172
1165 return ua
1173 return ua
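For reference, a short sketch of the normalizer's behaviour on raw user-agent values; the inputs mirror the parametrized test added below:

user_agent_normalizer('mercurial/proto-1.0 (Mercurial 4.2)')  # 'mercurial/4.2'
user_agent_normalizer('git/2.10.1 (Apple Git-78)')            # 'git/2.10.1'
user_agent_normalizer('GiT/2.37.2.windows.2')                 # 'git/2.37.2'
user_agent_normalizer(None)                                   # '' (None is treated as an empty agent)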
@@ -1,449 +1,478 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2020 RhodeCode GmbH
3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import json
21 import json
22 import multiprocessing
22 import multiprocessing
23 import os
23 import os
24
24
25 import mock
25 import mock
26 import py
26 import py
27 import pytest
27 import pytest
28
28
29 from rhodecode.lib import caching_query
29 from rhodecode.lib import caching_query
30 from rhodecode.lib import utils
30 from rhodecode.lib import utils
31 from rhodecode.lib.utils2 import md5
31 from rhodecode.lib.utils2 import md5
32 from rhodecode.model import settings
32 from rhodecode.model import settings
33 from rhodecode.model import db
33 from rhodecode.model import db
34 from rhodecode.model import meta
34 from rhodecode.model import meta
35 from rhodecode.model.repo import RepoModel
35 from rhodecode.model.repo import RepoModel
36 from rhodecode.model.repo_group import RepoGroupModel
36 from rhodecode.model.repo_group import RepoGroupModel
37 from rhodecode.model.scm import ScmModel
37 from rhodecode.model.scm import ScmModel
38 from rhodecode.model.settings import UiSetting, SettingsModel
38 from rhodecode.model.settings import UiSetting, SettingsModel
39 from rhodecode.tests.fixture import Fixture
39 from rhodecode.tests.fixture import Fixture
40
40
41
41
42 fixture = Fixture()
42 fixture = Fixture()
43
43
44
44
45 def extract_hooks(config):
45 def extract_hooks(config):
46 """Return a dictionary with the hook entries of the given config."""
46 """Return a dictionary with the hook entries of the given config."""
47 hooks = {}
47 hooks = {}
48 config_items = config.serialize()
48 config_items = config.serialize()
49 for section, name, value in config_items:
49 for section, name, value in config_items:
50 if section != 'hooks':
50 if section != 'hooks':
51 continue
51 continue
52 hooks[name] = value
52 hooks[name] = value
53
53
54 return hooks
54 return hooks
55
55
56
56
57 def disable_hooks(request, hooks):
57 def disable_hooks(request, hooks):
58 """Disables the given hooks from the UI settings."""
58 """Disables the given hooks from the UI settings."""
59 session = meta.Session()
59 session = meta.Session()
60
60
61 model = SettingsModel()
61 model = SettingsModel()
62 for hook_key in hooks:
62 for hook_key in hooks:
63 sett = model.get_ui_by_key(hook_key)
63 sett = model.get_ui_by_key(hook_key)
64 sett.ui_active = False
64 sett.ui_active = False
65 session.add(sett)
65 session.add(sett)
66
66
67 # Invalidate cache
67 # Invalidate cache
68 ui_settings = session.query(db.RhodeCodeUi).options(
68 ui_settings = session.query(db.RhodeCodeUi).options(
69 caching_query.FromCache('sql_cache_short', 'get_hg_ui_settings'))
69 caching_query.FromCache('sql_cache_short', 'get_hg_ui_settings'))
70 ui_settings.invalidate()
70 ui_settings.invalidate()
71
71
72 ui_settings = session.query(db.RhodeCodeUi).options(
72 ui_settings = session.query(db.RhodeCodeUi).options(
73 caching_query.FromCache('sql_cache_short', 'get_hook_settings'))
73 caching_query.FromCache('sql_cache_short', 'get_hook_settings'))
74 ui_settings.invalidate()
74 ui_settings.invalidate()
75
75
76 @request.addfinalizer
76 @request.addfinalizer
77 def rollback():
77 def rollback():
78 session.rollback()
78 session.rollback()
79
79
80
80
81 HOOK_PRE_PUSH = db.RhodeCodeUi.HOOK_PRE_PUSH
81 HOOK_PRE_PUSH = db.RhodeCodeUi.HOOK_PRE_PUSH
82 HOOK_PRETX_PUSH = db.RhodeCodeUi.HOOK_PRETX_PUSH
82 HOOK_PRETX_PUSH = db.RhodeCodeUi.HOOK_PRETX_PUSH
83 HOOK_PUSH = db.RhodeCodeUi.HOOK_PUSH
83 HOOK_PUSH = db.RhodeCodeUi.HOOK_PUSH
84 HOOK_PRE_PULL = db.RhodeCodeUi.HOOK_PRE_PULL
84 HOOK_PRE_PULL = db.RhodeCodeUi.HOOK_PRE_PULL
85 HOOK_PULL = db.RhodeCodeUi.HOOK_PULL
85 HOOK_PULL = db.RhodeCodeUi.HOOK_PULL
86 HOOK_REPO_SIZE = db.RhodeCodeUi.HOOK_REPO_SIZE
86 HOOK_REPO_SIZE = db.RhodeCodeUi.HOOK_REPO_SIZE
87 HOOK_PUSH_KEY = db.RhodeCodeUi.HOOK_PUSH_KEY
87 HOOK_PUSH_KEY = db.RhodeCodeUi.HOOK_PUSH_KEY
88
88
89 HG_HOOKS = frozenset(
89 HG_HOOKS = frozenset(
90 (HOOK_PRE_PULL, HOOK_PULL, HOOK_PRE_PUSH, HOOK_PRETX_PUSH, HOOK_PUSH,
90 (HOOK_PRE_PULL, HOOK_PULL, HOOK_PRE_PUSH, HOOK_PRETX_PUSH, HOOK_PUSH,
91 HOOK_REPO_SIZE, HOOK_PUSH_KEY))
91 HOOK_REPO_SIZE, HOOK_PUSH_KEY))
92
92
93
93
94 @pytest.mark.parametrize('disabled_hooks,expected_hooks', [
94 @pytest.mark.parametrize('disabled_hooks,expected_hooks', [
95 ([], HG_HOOKS),
95 ([], HG_HOOKS),
96 (HG_HOOKS, []),
96 (HG_HOOKS, []),
97
97
98 ([HOOK_PRE_PUSH, HOOK_PRETX_PUSH, HOOK_REPO_SIZE, HOOK_PUSH_KEY], [HOOK_PRE_PULL, HOOK_PULL, HOOK_PUSH]),
98 ([HOOK_PRE_PUSH, HOOK_PRETX_PUSH, HOOK_REPO_SIZE, HOOK_PUSH_KEY], [HOOK_PRE_PULL, HOOK_PULL, HOOK_PUSH]),
99
99
100 # When a pull/push hook is disabled, its pre-pull/push counterpart should
100 # When a pull/push hook is disabled, its pre-pull/push counterpart should
101 # be disabled too.
101 # be disabled too.
102 ([HOOK_PUSH], [HOOK_PRE_PULL, HOOK_PULL, HOOK_REPO_SIZE]),
102 ([HOOK_PUSH], [HOOK_PRE_PULL, HOOK_PULL, HOOK_REPO_SIZE]),
103 ([HOOK_PULL], [HOOK_PRE_PUSH, HOOK_PRETX_PUSH, HOOK_PUSH, HOOK_REPO_SIZE,
103 ([HOOK_PULL], [HOOK_PRE_PUSH, HOOK_PRETX_PUSH, HOOK_PUSH, HOOK_REPO_SIZE,
104 HOOK_PUSH_KEY]),
104 HOOK_PUSH_KEY]),
105 ])
105 ])
106 def test_make_db_config_hg_hooks(baseapp, request, disabled_hooks,
106 def test_make_db_config_hg_hooks(baseapp, request, disabled_hooks,
107 expected_hooks):
107 expected_hooks):
108 disable_hooks(request, disabled_hooks)
108 disable_hooks(request, disabled_hooks)
109
109
110 config = utils.make_db_config()
110 config = utils.make_db_config()
111 hooks = extract_hooks(config)
111 hooks = extract_hooks(config)
112
112
113 assert set(hooks.iterkeys()).intersection(HG_HOOKS) == set(expected_hooks)
113 assert set(hooks.iterkeys()).intersection(HG_HOOKS) == set(expected_hooks)
114
114
115
115
116 @pytest.mark.parametrize('disabled_hooks,expected_hooks', [
116 @pytest.mark.parametrize('disabled_hooks,expected_hooks', [
117 ([], ['pull', 'push']),
117 ([], ['pull', 'push']),
118 ([HOOK_PUSH], ['pull']),
118 ([HOOK_PUSH], ['pull']),
119 ([HOOK_PULL], ['push']),
119 ([HOOK_PULL], ['push']),
120 ([HOOK_PULL, HOOK_PUSH], []),
120 ([HOOK_PULL, HOOK_PUSH], []),
121 ])
121 ])
122 def test_get_enabled_hook_classes(disabled_hooks, expected_hooks):
122 def test_get_enabled_hook_classes(disabled_hooks, expected_hooks):
123 hook_keys = (HOOK_PUSH, HOOK_PULL)
123 hook_keys = (HOOK_PUSH, HOOK_PULL)
124 ui_settings = [
124 ui_settings = [
125 ('hooks', key, 'some value', key not in disabled_hooks)
125 ('hooks', key, 'some value', key not in disabled_hooks)
126 for key in hook_keys]
126 for key in hook_keys]
127
127
128 result = utils.get_enabled_hook_classes(ui_settings)
128 result = utils.get_enabled_hook_classes(ui_settings)
129 assert sorted(result) == expected_hooks
129 assert sorted(result) == expected_hooks
130
130
131
131
132 def test_get_filesystem_repos_finds_repos(tmpdir, baseapp):
132 def test_get_filesystem_repos_finds_repos(tmpdir, baseapp):
133 _stub_git_repo(tmpdir.ensure('repo', dir=True))
133 _stub_git_repo(tmpdir.ensure('repo', dir=True))
134 repos = list(utils.get_filesystem_repos(str(tmpdir)))
134 repos = list(utils.get_filesystem_repos(str(tmpdir)))
135 assert repos == [('repo', ('git', tmpdir.join('repo')))]
135 assert repos == [('repo', ('git', tmpdir.join('repo')))]
136
136
137
137
138 def test_get_filesystem_repos_skips_directories(tmpdir, baseapp):
138 def test_get_filesystem_repos_skips_directories(tmpdir, baseapp):
139 tmpdir.ensure('not-a-repo', dir=True)
139 tmpdir.ensure('not-a-repo', dir=True)
140 repos = list(utils.get_filesystem_repos(str(tmpdir)))
140 repos = list(utils.get_filesystem_repos(str(tmpdir)))
141 assert repos == []
141 assert repos == []
142
142
143
143
144 def test_get_filesystem_repos_skips_directories_with_repos(tmpdir, baseapp):
144 def test_get_filesystem_repos_skips_directories_with_repos(tmpdir, baseapp):
145 _stub_git_repo(tmpdir.ensure('subdir/repo', dir=True))
145 _stub_git_repo(tmpdir.ensure('subdir/repo', dir=True))
146 repos = list(utils.get_filesystem_repos(str(tmpdir)))
146 repos = list(utils.get_filesystem_repos(str(tmpdir)))
147 assert repos == []
147 assert repos == []
148
148
149
149
150 def test_get_filesystem_repos_finds_repos_in_subdirectories(tmpdir, baseapp):
150 def test_get_filesystem_repos_finds_repos_in_subdirectories(tmpdir, baseapp):
151 _stub_git_repo(tmpdir.ensure('subdir/repo', dir=True))
151 _stub_git_repo(tmpdir.ensure('subdir/repo', dir=True))
152 repos = list(utils.get_filesystem_repos(str(tmpdir), recursive=True))
152 repos = list(utils.get_filesystem_repos(str(tmpdir), recursive=True))
153 assert repos == [('subdir/repo', ('git', tmpdir.join('subdir', 'repo')))]
153 assert repos == [('subdir/repo', ('git', tmpdir.join('subdir', 'repo')))]
154
154
155
155
156 def test_get_filesystem_repos_skips_names_starting_with_dot(tmpdir):
156 def test_get_filesystem_repos_skips_names_starting_with_dot(tmpdir):
157 _stub_git_repo(tmpdir.ensure('.repo', dir=True))
157 _stub_git_repo(tmpdir.ensure('.repo', dir=True))
158 repos = list(utils.get_filesystem_repos(str(tmpdir)))
158 repos = list(utils.get_filesystem_repos(str(tmpdir)))
159 assert repos == []
159 assert repos == []
160
160
161
161
162 def test_get_filesystem_repos_skips_files(tmpdir):
162 def test_get_filesystem_repos_skips_files(tmpdir):
163 tmpdir.ensure('test-file')
163 tmpdir.ensure('test-file')
164 repos = list(utils.get_filesystem_repos(str(tmpdir)))
164 repos = list(utils.get_filesystem_repos(str(tmpdir)))
165 assert repos == []
165 assert repos == []
166
166
167
167
168 def test_get_filesystem_repos_skips_removed_repositories(tmpdir):
168 def test_get_filesystem_repos_skips_removed_repositories(tmpdir):
169 removed_repo_name = 'rm__00000000_000000_000000__.stub'
169 removed_repo_name = 'rm__00000000_000000_000000__.stub'
170 assert utils.REMOVED_REPO_PAT.match(removed_repo_name)
170 assert utils.REMOVED_REPO_PAT.match(removed_repo_name)
171 _stub_git_repo(tmpdir.ensure(removed_repo_name, dir=True))
171 _stub_git_repo(tmpdir.ensure(removed_repo_name, dir=True))
172 repos = list(utils.get_filesystem_repos(str(tmpdir)))
172 repos = list(utils.get_filesystem_repos(str(tmpdir)))
173 assert repos == []
173 assert repos == []
174
174
175
175
176 def _stub_git_repo(repo_path):
176 def _stub_git_repo(repo_path):
177 """
177 """
178 Make `repo_path` look like a Git repository.
178 Make `repo_path` look like a Git repository.
179 """
179 """
180 repo_path.ensure('.git', dir=True)
180 repo_path.ensure('.git', dir=True)
181
181
182
182
183 @pytest.mark.parametrize('str_class', [str, unicode], ids=['str', 'unicode'])
183 @pytest.mark.parametrize('str_class', [str, unicode], ids=['str', 'unicode'])
184 def test_get_dirpaths_returns_all_paths(tmpdir, str_class):
184 def test_get_dirpaths_returns_all_paths(tmpdir, str_class):
185 tmpdir.ensure('test-file')
185 tmpdir.ensure('test-file')
186 dirpaths = utils._get_dirpaths(str_class(tmpdir))
186 dirpaths = utils._get_dirpaths(str_class(tmpdir))
187 assert dirpaths == ['test-file']
187 assert dirpaths == ['test-file']
188
188
189
189
190 def test_get_dirpaths_returns_all_paths_bytes(
190 def test_get_dirpaths_returns_all_paths_bytes(
191 tmpdir, platform_encodes_filenames):
191 tmpdir, platform_encodes_filenames):
192 if platform_encodes_filenames:
192 if platform_encodes_filenames:
193 pytest.skip("This platform seems to encode filenames.")
193 pytest.skip("This platform seems to encode filenames.")
194 tmpdir.ensure('repo-a-umlaut-\xe4')
194 tmpdir.ensure('repo-a-umlaut-\xe4')
195 dirpaths = utils._get_dirpaths(str(tmpdir))
195 dirpaths = utils._get_dirpaths(str(tmpdir))
196 assert dirpaths == ['repo-a-umlaut-\xe4']
196 assert dirpaths == ['repo-a-umlaut-\xe4']
197
197
198
198
199 def test_get_dirpaths_skips_paths_it_cannot_decode(
199 def test_get_dirpaths_skips_paths_it_cannot_decode(
200 tmpdir, platform_encodes_filenames):
200 tmpdir, platform_encodes_filenames):
201 if platform_encodes_filenames:
201 if platform_encodes_filenames:
202 pytest.skip("This platform seems to encode filenames.")
202 pytest.skip("This platform seems to encode filenames.")
203 path_with_latin1 = 'repo-a-umlaut-\xe4'
203 path_with_latin1 = 'repo-a-umlaut-\xe4'
204 tmpdir.ensure(path_with_latin1)
204 tmpdir.ensure(path_with_latin1)
205 dirpaths = utils._get_dirpaths(unicode(tmpdir))
205 dirpaths = utils._get_dirpaths(unicode(tmpdir))
206 assert dirpaths == []
206 assert dirpaths == []
207
207
208
208
209 @pytest.fixture(scope='session')
209 @pytest.fixture(scope='session')
210 def platform_encodes_filenames():
210 def platform_encodes_filenames():
211 """
211 """
212 Boolean indicator if the current platform changes filename encodings.
212 Boolean indicator if the current platform changes filename encodings.
213 """
213 """
214 path_with_latin1 = 'repo-a-umlaut-\xe4'
214 path_with_latin1 = 'repo-a-umlaut-\xe4'
215 tmpdir = py.path.local.mkdtemp()
215 tmpdir = py.path.local.mkdtemp()
216 tmpdir.ensure(path_with_latin1)
216 tmpdir.ensure(path_with_latin1)
217 read_path = tmpdir.listdir()[0].basename
217 read_path = tmpdir.listdir()[0].basename
218 tmpdir.remove()
218 tmpdir.remove()
219 return path_with_latin1 != read_path
219 return path_with_latin1 != read_path
220
220
221
221
222
222
223
223
224 def test_repo2db_mapper_groups(repo_groups):
224 def test_repo2db_mapper_groups(repo_groups):
225 session = meta.Session()
225 session = meta.Session()
226 zombie_group, parent_group, child_group = repo_groups
226 zombie_group, parent_group, child_group = repo_groups
227 zombie_path = os.path.join(
227 zombie_path = os.path.join(
228 RepoGroupModel().repos_path, zombie_group.full_path)
228 RepoGroupModel().repos_path, zombie_group.full_path)
229 os.rmdir(zombie_path)
229 os.rmdir(zombie_path)
230
230
231 # Avoid removing test repos when calling repo2db_mapper
231 # Avoid removing test repos when calling repo2db_mapper
232 repo_list = {
232 repo_list = {
233 repo.repo_name: 'test' for repo in session.query(db.Repository).all()
233 repo.repo_name: 'test' for repo in session.query(db.Repository).all()
234 }
234 }
235 utils.repo2db_mapper(repo_list, remove_obsolete=True)
235 utils.repo2db_mapper(repo_list, remove_obsolete=True)
236
236
237 groups_in_db = session.query(db.RepoGroup).all()
237 groups_in_db = session.query(db.RepoGroup).all()
238 assert child_group in groups_in_db
238 assert child_group in groups_in_db
239 assert parent_group in groups_in_db
239 assert parent_group in groups_in_db
240 assert zombie_path not in groups_in_db
240 assert zombie_path not in groups_in_db
241
241
242
242
243 def test_repo2db_mapper_enables_largefiles(backend):
243 def test_repo2db_mapper_enables_largefiles(backend):
244 repo = backend.create_repo()
244 repo = backend.create_repo()
245 repo_list = {repo.repo_name: 'test'}
245 repo_list = {repo.repo_name: 'test'}
246 with mock.patch('rhodecode.model.db.Repository.scm_instance') as scm_mock:
246 with mock.patch('rhodecode.model.db.Repository.scm_instance') as scm_mock:
247 utils.repo2db_mapper(repo_list, remove_obsolete=False)
247 utils.repo2db_mapper(repo_list, remove_obsolete=False)
248 _, kwargs = scm_mock.call_args
248 _, kwargs = scm_mock.call_args
249 assert kwargs['config'].get('extensions', 'largefiles') == ''
249 assert kwargs['config'].get('extensions', 'largefiles') == ''
250
250
251
251
252 @pytest.mark.backends("git", "svn")
252 @pytest.mark.backends("git", "svn")
253 def test_repo2db_mapper_installs_hooks_for_repos_in_db(backend):
253 def test_repo2db_mapper_installs_hooks_for_repos_in_db(backend):
254 repo = backend.create_repo()
254 repo = backend.create_repo()
255 repo_list = {repo.repo_name: 'test'}
255 repo_list = {repo.repo_name: 'test'}
256 utils.repo2db_mapper(repo_list, remove_obsolete=False)
256 utils.repo2db_mapper(repo_list, remove_obsolete=False)
257
257
258
258
259 @pytest.mark.backends("git", "svn")
259 @pytest.mark.backends("git", "svn")
260 def test_repo2db_mapper_installs_hooks_for_newly_added_repos(backend):
260 def test_repo2db_mapper_installs_hooks_for_newly_added_repos(backend):
261 repo = backend.create_repo()
261 repo = backend.create_repo()
262 RepoModel().delete(repo, fs_remove=False)
262 RepoModel().delete(repo, fs_remove=False)
263 meta.Session().commit()
263 meta.Session().commit()
264 repo_list = {repo.repo_name: repo.scm_instance()}
264 repo_list = {repo.repo_name: repo.scm_instance()}
265 utils.repo2db_mapper(repo_list, remove_obsolete=False)
265 utils.repo2db_mapper(repo_list, remove_obsolete=False)
266
266
267
267
268 class TestPasswordChanged(object):
268 class TestPasswordChanged(object):
269 def setup(self):
269 def setup(self):
270 self.session = {
270 self.session = {
271 'rhodecode_user': {
271 'rhodecode_user': {
272 'password': '0cc175b9c0f1b6a831c399e269772661'
272 'password': '0cc175b9c0f1b6a831c399e269772661'
273 }
273 }
274 }
274 }
275 self.auth_user = mock.Mock()
275 self.auth_user = mock.Mock()
276 self.auth_user.username = 'test'
276 self.auth_user.username = 'test'
277 self.auth_user.password = 'abc123'
277 self.auth_user.password = 'abc123'
278
278
279 def test_returns_false_for_default_user(self):
279 def test_returns_false_for_default_user(self):
280 self.auth_user.username = db.User.DEFAULT_USER
280 self.auth_user.username = db.User.DEFAULT_USER
281 result = utils.password_changed(self.auth_user, self.session)
281 result = utils.password_changed(self.auth_user, self.session)
282 assert result is False
282 assert result is False
283
283
284 def test_returns_false_if_password_was_not_changed(self):
284 def test_returns_false_if_password_was_not_changed(self):
285 self.session['rhodecode_user']['password'] = md5(
285 self.session['rhodecode_user']['password'] = md5(
286 self.auth_user.password)
286 self.auth_user.password)
287 result = utils.password_changed(self.auth_user, self.session)
287 result = utils.password_changed(self.auth_user, self.session)
288 assert result is False
288 assert result is False
289
289
290 def test_returns_true_if_password_was_changed(self):
290 def test_returns_true_if_password_was_changed(self):
291 result = utils.password_changed(self.auth_user, self.session)
291 result = utils.password_changed(self.auth_user, self.session)
292 assert result is True
292 assert result is True
293
293
294 def test_returns_true_if_auth_user_password_is_empty(self):
294 def test_returns_true_if_auth_user_password_is_empty(self):
295 self.auth_user.password = None
295 self.auth_user.password = None
296 result = utils.password_changed(self.auth_user, self.session)
296 result = utils.password_changed(self.auth_user, self.session)
297 assert result is True
297 assert result is True
298
298
299 def test_returns_true_if_session_password_is_empty(self):
299 def test_returns_true_if_session_password_is_empty(self):
300 self.session['rhodecode_user'].pop('password')
300 self.session['rhodecode_user'].pop('password')
301 result = utils.password_changed(self.auth_user, self.session)
301 result = utils.password_changed(self.auth_user, self.session)
302 assert result is True
302 assert result is True
303
303
304
304
305 class TestReadOpenSourceLicenses(object):
305 class TestReadOpenSourceLicenses(object):
306 def test_success(self):
306 def test_success(self):
307 utils._license_cache = None
307 utils._license_cache = None
308 json_data = '''
308 json_data = '''
309 {
309 {
310 "python2.7-pytest-2.7.1": {"UNKNOWN": null},
310 "python2.7-pytest-2.7.1": {"UNKNOWN": null},
311 "python2.7-Markdown-2.6.2": {
311 "python2.7-Markdown-2.6.2": {
312 "BSD-3-Clause": "http://spdx.org/licenses/BSD-3-Clause"
312 "BSD-3-Clause": "http://spdx.org/licenses/BSD-3-Clause"
313 }
313 }
314 }
314 }
315 '''
315 '''
316 resource_string_patch = mock.patch.object(
316 resource_string_patch = mock.patch.object(
317 utils.pkg_resources, 'resource_string', return_value=json_data)
317 utils.pkg_resources, 'resource_string', return_value=json_data)
318 with resource_string_patch:
318 with resource_string_patch:
319 result = utils.read_opensource_licenses()
319 result = utils.read_opensource_licenses()
320 assert result == json.loads(json_data)
320 assert result == json.loads(json_data)
321
321
322 def test_caching(self):
322 def test_caching(self):
323 utils._license_cache = {
323 utils._license_cache = {
324 "python2.7-pytest-2.7.1": {
324 "python2.7-pytest-2.7.1": {
325 "UNKNOWN": None
325 "UNKNOWN": None
326 },
326 },
327 "python2.7-Markdown-2.6.2": {
327 "python2.7-Markdown-2.6.2": {
328 "BSD-3-Clause": "http://spdx.org/licenses/BSD-3-Clause"
328 "BSD-3-Clause": "http://spdx.org/licenses/BSD-3-Clause"
329 }
329 }
330 }
330 }
331 resource_patch = mock.patch.object(
331 resource_patch = mock.patch.object(
332 utils.pkg_resources, 'resource_string', side_effect=Exception)
332 utils.pkg_resources, 'resource_string', side_effect=Exception)
333 json_patch = mock.patch.object(
333 json_patch = mock.patch.object(
334 utils.json, 'loads', side_effect=Exception)
334 utils.json, 'loads', side_effect=Exception)
335
335
336 with resource_patch as resource_mock, json_patch as json_mock:
336 with resource_patch as resource_mock, json_patch as json_mock:
337 result = utils.read_opensource_licenses()
337 result = utils.read_opensource_licenses()
338
338
339 assert resource_mock.call_count == 0
339 assert resource_mock.call_count == 0
340 assert json_mock.call_count == 0
340 assert json_mock.call_count == 0
341 assert result == utils._license_cache
341 assert result == utils._license_cache
342
342
343 def test_licenses_file_contains_no_unknown_licenses(self):
343 def test_licenses_file_contains_no_unknown_licenses(self):
344 utils._license_cache = None
344 utils._license_cache = None
345 result = utils.read_opensource_licenses()
345 result = utils.read_opensource_licenses()
346
346
347 for license_data in result:
347 for license_data in result:
348 if isinstance(license_data["license"], list):
348 if isinstance(license_data["license"], list):
349 for lic_data in license_data["license"]:
349 for lic_data in license_data["license"]:
350 assert 'UNKNOWN' not in lic_data["fullName"]
350 assert 'UNKNOWN' not in lic_data["fullName"]
351 else:
351 else:
352 full_name = license_data.get("fullName") or license_data
352 full_name = license_data.get("fullName") or license_data
353 assert 'UNKNOWN' not in full_name
353 assert 'UNKNOWN' not in full_name
354
354
355
355
356 class TestMakeDbConfig(object):
356 class TestMakeDbConfig(object):
357 def test_data_from_config_data_from_db_returned(self):
357 def test_data_from_config_data_from_db_returned(self):
358 test_data = [
358 test_data = [
359 ('section1', 'option1', 'value1'),
359 ('section1', 'option1', 'value1'),
360 ('section2', 'option2', 'value2'),
360 ('section2', 'option2', 'value2'),
361 ('section3', 'option3', 'value3'),
361 ('section3', 'option3', 'value3'),
362 ]
362 ]
363 with mock.patch.object(utils, 'config_data_from_db') as config_mock:
363 with mock.patch.object(utils, 'config_data_from_db') as config_mock:
364 config_mock.return_value = test_data
364 config_mock.return_value = test_data
365 kwargs = {'clear_session': False, 'repo': 'test_repo'}
365 kwargs = {'clear_session': False, 'repo': 'test_repo'}
366 result = utils.make_db_config(**kwargs)
366 result = utils.make_db_config(**kwargs)
367 config_mock.assert_called_once_with(**kwargs)
367 config_mock.assert_called_once_with(**kwargs)
368 for section, option, expected_value in test_data:
368 for section, option, expected_value in test_data:
369 value = result.get(section, option)
369 value = result.get(section, option)
370 assert value == expected_value
370 assert value == expected_value
371
371
372
372
373 class TestConfigDataFromDb(object):
373 class TestConfigDataFromDb(object):
374 def test_config_data_from_db_returns_active_settings(self):
374 def test_config_data_from_db_returns_active_settings(self):
375 test_data = [
375 test_data = [
376 UiSetting('section1', 'option1', 'value1', True),
376 UiSetting('section1', 'option1', 'value1', True),
377 UiSetting('section2', 'option2', 'value2', True),
377 UiSetting('section2', 'option2', 'value2', True),
378 UiSetting('section3', 'option3', 'value3', False),
378 UiSetting('section3', 'option3', 'value3', False),
379 ]
379 ]
380 repo_name = 'test_repo'
380 repo_name = 'test_repo'
381
381
382 model_patch = mock.patch.object(settings, 'VcsSettingsModel')
382 model_patch = mock.patch.object(settings, 'VcsSettingsModel')
383 hooks_patch = mock.patch.object(
383 hooks_patch = mock.patch.object(
384 utils, 'get_enabled_hook_classes',
384 utils, 'get_enabled_hook_classes',
385 return_value=['pull', 'push', 'repo_size'])
385 return_value=['pull', 'push', 'repo_size'])
386 with model_patch as model_mock, hooks_patch:
386 with model_patch as model_mock, hooks_patch:
387 instance_mock = mock.Mock()
387 instance_mock = mock.Mock()
388 model_mock.return_value = instance_mock
388 model_mock.return_value = instance_mock
389 instance_mock.get_ui_settings.return_value = test_data
389 instance_mock.get_ui_settings.return_value = test_data
390 result = utils.config_data_from_db(
390 result = utils.config_data_from_db(
391 clear_session=False, repo=repo_name)
391 clear_session=False, repo=repo_name)
392
392
393 self._assert_repo_name_passed(model_mock, repo_name)
393 self._assert_repo_name_passed(model_mock, repo_name)
394
394
395 expected_result = [
395 expected_result = [
396 ('section1', 'option1', 'value1'),
396 ('section1', 'option1', 'value1'),
397 ('section2', 'option2', 'value2'),
397 ('section2', 'option2', 'value2'),
398 ]
398 ]
399 assert result == expected_result
399 assert result == expected_result
400
400
401 def _assert_repo_name_passed(self, model_mock, repo_name):
401 def _assert_repo_name_passed(self, model_mock, repo_name):
402 assert model_mock.call_count == 1
402 assert model_mock.call_count == 1
403 call_args, call_kwargs = model_mock.call_args
403 call_args, call_kwargs = model_mock.call_args
404 assert call_kwargs['repo'] == repo_name
404 assert call_kwargs['repo'] == repo_name
405
405
406
406
407 class TestIsDirWritable(object):
407 class TestIsDirWritable(object):
408 def test_returns_false_when_not_writable(self):
408 def test_returns_false_when_not_writable(self):
409 with mock.patch('__builtin__.open', side_effect=OSError):
409 with mock.patch('__builtin__.open', side_effect=OSError):
410 assert not utils._is_dir_writable('/stub-path')
410 assert not utils._is_dir_writable('/stub-path')
411
411
412 def test_returns_true_when_writable(self, tmpdir):
412 def test_returns_true_when_writable(self, tmpdir):
413 assert utils._is_dir_writable(str(tmpdir))
413 assert utils._is_dir_writable(str(tmpdir))
414
414
415 def test_is_safe_against_race_conditions(self, tmpdir):
415 def test_is_safe_against_race_conditions(self, tmpdir):
416 workers = multiprocessing.Pool()
416 workers = multiprocessing.Pool()
417 directories = [str(tmpdir)] * 10
417 directories = [str(tmpdir)] * 10
418 workers.map(utils._is_dir_writable, directories)
418 workers.map(utils._is_dir_writable, directories)
419
419
420
420
421 class TestGetEnabledHooks(object):
421 class TestGetEnabledHooks(object):
422 def test_only_active_hooks_are_enabled(self):
422 def test_only_active_hooks_are_enabled(self):
423 ui_settings = [
423 ui_settings = [
424 UiSetting('hooks', db.RhodeCodeUi.HOOK_PUSH, 'value', True),
424 UiSetting('hooks', db.RhodeCodeUi.HOOK_PUSH, 'value', True),
425 UiSetting('hooks', db.RhodeCodeUi.HOOK_REPO_SIZE, 'value', True),
425 UiSetting('hooks', db.RhodeCodeUi.HOOK_REPO_SIZE, 'value', True),
426 UiSetting('hooks', db.RhodeCodeUi.HOOK_PULL, 'value', False)
426 UiSetting('hooks', db.RhodeCodeUi.HOOK_PULL, 'value', False)
427 ]
427 ]
428 result = utils.get_enabled_hook_classes(ui_settings)
428 result = utils.get_enabled_hook_classes(ui_settings)
429 assert result == ['push', 'repo_size']
429 assert result == ['push', 'repo_size']
430
430
431 def test_all_hooks_are_enabled(self):
431 def test_all_hooks_are_enabled(self):
432 ui_settings = [
432 ui_settings = [
433 UiSetting('hooks', db.RhodeCodeUi.HOOK_PUSH, 'value', True),
433 UiSetting('hooks', db.RhodeCodeUi.HOOK_PUSH, 'value', True),
434 UiSetting('hooks', db.RhodeCodeUi.HOOK_REPO_SIZE, 'value', True),
434 UiSetting('hooks', db.RhodeCodeUi.HOOK_REPO_SIZE, 'value', True),
435 UiSetting('hooks', db.RhodeCodeUi.HOOK_PULL, 'value', True)
435 UiSetting('hooks', db.RhodeCodeUi.HOOK_PULL, 'value', True)
436 ]
436 ]
437 result = utils.get_enabled_hook_classes(ui_settings)
437 result = utils.get_enabled_hook_classes(ui_settings)
438 assert result == ['push', 'repo_size', 'pull']
438 assert result == ['push', 'repo_size', 'pull']
439
439
440 def test_no_enabled_hooks_when_no_hook_settings_are_found(self):
440 def test_no_enabled_hooks_when_no_hook_settings_are_found(self):
441 ui_settings = []
441 ui_settings = []
442 result = utils.get_enabled_hook_classes(ui_settings)
442 result = utils.get_enabled_hook_classes(ui_settings)
443 assert result == []
443 assert result == []
444
444
445
445
446 def test_obfuscate_url_pw():
446 def test_obfuscate_url_pw():
447 from rhodecode.lib.utils2 import obfuscate_url_pw
447 from rhodecode.lib.utils2 import obfuscate_url_pw
448 engine = u'/home/repos/malmö'
448 engine = u'/home/repos/malmö'
449 assert obfuscate_url_pw(engine) No newline at end of file
449 assert obfuscate_url_pw(engine)
450
451
452 @pytest.mark.parametrize("test_ua, expected", [
453 ("", ""),
454 ('"quoted"', 'quoted'),
455 ('internal-merge', 'internal-merge'),
456 ('hg/internal-merge', 'hg/internal-merge'),
457 ('git/internal-merge', 'git/internal-merge'),
458
459 # git
460 ('git/2.10.1 (Apple Git-78)', 'git/2.10.1'),
461 ('GiT/2.37.2.windows.2', 'git/2.37.2'),
462 ('git/2.35.1 (Microsoft Windows NT 10.0.19044.0; Win32NT x64) CLR/4.0.30319 VS16/16.0.0', 'git/2.35.1'),
463 ('ssh-user-agent', 'ssh-user-agent'),
464 ('git/ssh-user-agent', 'git/ssh-user-agent'),
465
466
467 # hg
468 ('mercurial/proto-1.0 (Mercurial 4.2)', 'mercurial/4.2'),
469 ('mercurial/proto-1.0', ''),
470 ('mercurial/proto-1.0 (Mercurial 3.9.2)', 'mercurial/3.9.2'),
471 ('mercurial/ssh-user-agent', 'mercurial/ssh-user-agent'),
472 ('mercurial/proto-1.0 (Mercurial 5.8rc0)', 'mercurial/5.8rc0'),
473
474
475 ])
476 def test_user_agent_normalizer(test_ua, expected):
477 from rhodecode.lib.utils2 import user_agent_normalizer
478 assert user_agent_normalizer(test_ua, safe=False) == expected