##// END OF EJS Templates
python3: fix use of basestring
super-admin -
r4917:39e9fabf default
parent child Browse files
Show More
@@ -1,199 +1,199 b''
1 # Copyright (C) 2016-2020 RhodeCode GmbH
1 # Copyright (C) 2016-2020 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 import logging
19 import logging
20 import os
20 import os
21 import string
21 import string
22 import functools
22 import functools
23 import collections
23 import collections
24 import urllib.request, urllib.parse, urllib.error
24 import urllib.request, urllib.parse, urllib.error
25
25
26 log = logging.getLogger('rhodecode.' + __name__)
26 log = logging.getLogger('rhodecode.' + __name__)
27
27
28
28
29 class HookResponse(object):
29 class HookResponse(object):
30 def __init__(self, status, output):
30 def __init__(self, status, output):
31 self.status = status
31 self.status = status
32 self.output = output
32 self.output = output
33
33
34 def __add__(self, other):
34 def __add__(self, other):
35 other_status = getattr(other, 'status', 0)
35 other_status = getattr(other, 'status', 0)
36 new_status = max(self.status, other_status)
36 new_status = max(self.status, other_status)
37 other_output = getattr(other, 'output', '')
37 other_output = getattr(other, 'output', '')
38 new_output = self.output + other_output
38 new_output = self.output + other_output
39
39
40 return HookResponse(new_status, new_output)
40 return HookResponse(new_status, new_output)
41
41
42 def __bool__(self):
42 def __bool__(self):
43 return self.status == 0
43 return self.status == 0
44
44
45
45
46 class DotDict(dict):
46 class DotDict(dict):
47
47
48 def __contains__(self, k):
48 def __contains__(self, k):
49 try:
49 try:
50 return dict.__contains__(self, k) or hasattr(self, k)
50 return dict.__contains__(self, k) or hasattr(self, k)
51 except:
51 except:
52 return False
52 return False
53
53
54 # only called if k not found in normal places
54 # only called if k not found in normal places
55 def __getattr__(self, k):
55 def __getattr__(self, k):
56 try:
56 try:
57 return object.__getattribute__(self, k)
57 return object.__getattribute__(self, k)
58 except AttributeError:
58 except AttributeError:
59 try:
59 try:
60 return self[k]
60 return self[k]
61 except KeyError:
61 except KeyError:
62 raise AttributeError(k)
62 raise AttributeError(k)
63
63
64 def __setattr__(self, k, v):
64 def __setattr__(self, k, v):
65 try:
65 try:
66 object.__getattribute__(self, k)
66 object.__getattribute__(self, k)
67 except AttributeError:
67 except AttributeError:
68 try:
68 try:
69 self[k] = v
69 self[k] = v
70 except:
70 except:
71 raise AttributeError(k)
71 raise AttributeError(k)
72 else:
72 else:
73 object.__setattr__(self, k, v)
73 object.__setattr__(self, k, v)
74
74
75 def __delattr__(self, k):
75 def __delattr__(self, k):
76 try:
76 try:
77 object.__getattribute__(self, k)
77 object.__getattribute__(self, k)
78 except AttributeError:
78 except AttributeError:
79 try:
79 try:
80 del self[k]
80 del self[k]
81 except KeyError:
81 except KeyError:
82 raise AttributeError(k)
82 raise AttributeError(k)
83 else:
83 else:
84 object.__delattr__(self, k)
84 object.__delattr__(self, k)
85
85
86 def toDict(self):
86 def toDict(self):
87 return unserialize(self)
87 return unserialize(self)
88
88
89 def __repr__(self):
89 def __repr__(self):
90 keys = list(self.keys())
90 keys = list(self.keys())
91 keys.sort()
91 keys.sort()
92 args = ', '.join(['%s=%r' % (key, self[key]) for key in keys])
92 args = ', '.join(['%s=%r' % (key, self[key]) for key in keys])
93 return '%s(%s)' % (self.__class__.__name__, args)
93 return '%s(%s)' % (self.__class__.__name__, args)
94
94
95 @staticmethod
95 @staticmethod
96 def fromDict(d):
96 def fromDict(d):
97 return serialize(d)
97 return serialize(d)
98
98
99
99
100 def serialize(x):
100 def serialize(x):
101 if isinstance(x, dict):
101 if isinstance(x, dict):
102 return DotDict((k, serialize(v)) for k, v in x.items())
102 return DotDict((k, serialize(v)) for k, v in x.items())
103 elif isinstance(x, (list, tuple)):
103 elif isinstance(x, (list, tuple)):
104 return type(x)(serialize(v) for v in x)
104 return type(x)(serialize(v) for v in x)
105 else:
105 else:
106 return x
106 return x
107
107
108
108
109 def unserialize(x):
109 def unserialize(x):
110 if isinstance(x, dict):
110 if isinstance(x, dict):
111 return dict((k, unserialize(v)) for k, v in x.items())
111 return dict((k, unserialize(v)) for k, v in x.items())
112 elif isinstance(x, (list, tuple)):
112 elif isinstance(x, (list, tuple)):
113 return type(x)(unserialize(v) for v in x)
113 return type(x)(unserialize(v) for v in x)
114 else:
114 else:
115 return x
115 return x
116
116
117
117
118 def _verify_kwargs(func_name, expected_parameters, kwargs):
118 def _verify_kwargs(func_name, expected_parameters, kwargs):
119 """
119 """
120 Verify that exactly `expected_parameters` are passed in as `kwargs`.
120 Verify that exactly `expected_parameters` are passed in as `kwargs`.
121 """
121 """
122 expected_parameters = set(expected_parameters)
122 expected_parameters = set(expected_parameters)
123 kwargs_keys = set(kwargs.keys())
123 kwargs_keys = set(kwargs.keys())
124 if kwargs_keys != expected_parameters:
124 if kwargs_keys != expected_parameters:
125 missing_kwargs = expected_parameters - kwargs_keys
125 missing_kwargs = expected_parameters - kwargs_keys
126 unexpected_kwargs = kwargs_keys - expected_parameters
126 unexpected_kwargs = kwargs_keys - expected_parameters
127 raise AssertionError(
127 raise AssertionError(
128 "func:%s: missing parameters: %r, unexpected parameters: %s" %
128 "func:%s: missing parameters: %r, unexpected parameters: %s" %
129 (func_name, missing_kwargs, unexpected_kwargs))
129 (func_name, missing_kwargs, unexpected_kwargs))
130
130
131
131
132 def has_kwargs(required_args):
132 def has_kwargs(required_args):
133 """
133 """
134 decorator to verify extension calls arguments.
134 decorator to verify extension calls arguments.
135
135
136 :param required_args:
136 :param required_args:
137 """
137 """
138 def wrap(func):
138 def wrap(func):
139 def wrapper(*args, **kwargs):
139 def wrapper(*args, **kwargs):
140 _verify_kwargs(func.func_name, required_args.keys(), kwargs)
140 _verify_kwargs(func.func_name, required_args.keys(), kwargs)
141 # in case there's `calls` defined on module we store the data
141 # in case there's `calls` defined on module we store the data
142 maybe_log_call(func.func_name, args, kwargs)
142 maybe_log_call(func.func_name, args, kwargs)
143 log.debug('Calling rcextensions function %s', func.func_name)
143 log.debug('Calling rcextensions function %s', func.func_name)
144 return func(*args, **kwargs)
144 return func(*args, **kwargs)
145 return wrapper
145 return wrapper
146 return wrap
146 return wrap
147
147
148
148
149 def maybe_log_call(name, args, kwargs):
149 def maybe_log_call(name, args, kwargs):
150 from rhodecode.config import rcextensions
150 from rhodecode.config import rcextensions
151 if hasattr(rcextensions, 'calls'):
151 if hasattr(rcextensions, 'calls'):
152 calls = rcextensions.calls
152 calls = rcextensions.calls
153 calls[name].append((args, kwargs))
153 calls[name].append((args, kwargs))
154
154
155
155
156 def str2bool(_str):
156 def str2bool(_str):
157 """
157 """
158 returns True/False value from given string, it tries to translate the
158 returns True/False value from given string, it tries to translate the
159 string into boolean
159 string into boolean
160
160
161 :param _str: string value to translate into boolean
161 :param _str: string value to translate into boolean
162 :rtype: boolean
162 :rtype: boolean
163 :returns: boolean from given string
163 :returns: boolean from given string
164 """
164 """
165 if _str is None:
165 if _str is None:
166 return False
166 return False
167 if _str in (True, False):
167 if _str in (True, False):
168 return _str
168 return _str
169 _str = str(_str).strip().lower()
169 _str = str(_str).strip().lower()
170 return _str in ('t', 'true', 'y', 'yes', 'on', '1')
170 return _str in ('t', 'true', 'y', 'yes', 'on', '1')
171
171
172
172
173 def aslist(obj, sep=None, strip=True):
173 def aslist(obj, sep=None, strip=True):
174 """
174 """
175 Returns given string separated by sep as list
175 Returns given string separated by sep as list
176
176
177 :param obj:
177 :param obj:
178 :param sep:
178 :param sep:
179 :param strip:
179 :param strip:
180 """
180 """
181 if isinstance(obj, (basestring,)):
181 if isinstance(obj, (str,)):
182 lst = obj.split(sep)
182 lst = obj.split(sep)
183 if strip:
183 if strip:
184 lst = [v.strip() for v in lst]
184 lst = [v.strip() for v in lst]
185 return lst
185 return lst
186 elif isinstance(obj, (list, tuple)):
186 elif isinstance(obj, (list, tuple)):
187 return obj
187 return obj
188 elif obj is None:
188 elif obj is None:
189 return []
189 return []
190 else:
190 else:
191 return [obj]
191 return [obj]
192
192
193
193
194 class UrlTemplate(string.Template):
194 class UrlTemplate(string.Template):
195
195
196 def safe_substitute(self, **kws):
196 def safe_substitute(self, **kws):
197 # url encode the kw for usage in url
197 # url encode the kw for usage in url
198 kws = {k: urllib.parse.quote(str(v)) for k, v in kws.items()}
198 kws = {k: urllib.parse.quote(str(v)) for k, v in kws.items()}
199 return super(UrlTemplate, self).safe_substitute(**kws)
199 return super(UrlTemplate, self).safe_substitute(**kws)
@@ -1,207 +1,207 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2020 RhodeCode GmbH
3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import os
21 import os
22 import textwrap
22 import textwrap
23 import string
23 import string
24 import functools
24 import functools
25 import logging
25 import logging
26 import tempfile
26 import tempfile
27 import logging.config
27 import logging.config
28 log = logging.getLogger(__name__)
28 log = logging.getLogger(__name__)
29
29
30 # skip keys, that are set here, so we don't double process those
30 # skip keys, that are set here, so we don't double process those
31 set_keys = {
31 set_keys = {
32 '__file__': ''
32 '__file__': ''
33 }
33 }
34
34
35
35
36 def str2bool(_str):
36 def str2bool(_str):
37 """
37 """
38 returns True/False value from given string, it tries to translate the
38 returns True/False value from given string, it tries to translate the
39 string into boolean
39 string into boolean
40
40
41 :param _str: string value to translate into boolean
41 :param _str: string value to translate into boolean
42 :rtype: boolean
42 :rtype: boolean
43 :returns: boolean from given string
43 :returns: boolean from given string
44 """
44 """
45 if _str is None:
45 if _str is None:
46 return False
46 return False
47 if _str in (True, False):
47 if _str in (True, False):
48 return _str
48 return _str
49 _str = str(_str).strip().lower()
49 _str = str(_str).strip().lower()
50 return _str in ('t', 'true', 'y', 'yes', 'on', '1')
50 return _str in ('t', 'true', 'y', 'yes', 'on', '1')
51
51
52
52
53 def aslist(obj, sep=None, strip=True):
53 def aslist(obj, sep=None, strip=True):
54 """
54 """
55 Returns given string separated by sep as list
55 Returns given string separated by sep as list
56
56
57 :param obj:
57 :param obj:
58 :param sep:
58 :param sep:
59 :param strip:
59 :param strip:
60 """
60 """
61 if isinstance(obj, (basestring,)):
61 if isinstance(obj, (str,)):
62 if obj in ['', ""]:
62 if obj in ['', ""]:
63 return []
63 return []
64
64
65 lst = obj.split(sep)
65 lst = obj.split(sep)
66 if strip:
66 if strip:
67 lst = [v.strip() for v in lst]
67 lst = [v.strip() for v in lst]
68 return lst
68 return lst
69 elif isinstance(obj, (list, tuple)):
69 elif isinstance(obj, (list, tuple)):
70 return obj
70 return obj
71 elif obj is None:
71 elif obj is None:
72 return []
72 return []
73 else:
73 else:
74 return [obj]
74 return [obj]
75
75
76
76
77 class SettingsMaker(object):
77 class SettingsMaker(object):
78
78
79 def __init__(self, app_settings):
79 def __init__(self, app_settings):
80 self.settings = app_settings
80 self.settings = app_settings
81
81
82 @classmethod
82 @classmethod
83 def _bool_func(cls, input_val):
83 def _bool_func(cls, input_val):
84 if isinstance(input_val, unicode):
84 if isinstance(input_val, unicode):
85 input_val = input_val.encode('utf8')
85 input_val = input_val.encode('utf8')
86 return str2bool(input_val)
86 return str2bool(input_val)
87
87
88 @classmethod
88 @classmethod
89 def _int_func(cls, input_val):
89 def _int_func(cls, input_val):
90 return int(input_val)
90 return int(input_val)
91
91
92 @classmethod
92 @classmethod
93 def _list_func(cls, input_val, sep=','):
93 def _list_func(cls, input_val, sep=','):
94 return aslist(input_val, sep=sep)
94 return aslist(input_val, sep=sep)
95
95
96 @classmethod
96 @classmethod
97 def _string_func(cls, input_val, lower=True):
97 def _string_func(cls, input_val, lower=True):
98 if lower:
98 if lower:
99 input_val = input_val.lower()
99 input_val = input_val.lower()
100 return input_val
100 return input_val
101
101
102 @classmethod
102 @classmethod
103 def _float_func(cls, input_val):
103 def _float_func(cls, input_val):
104 return float(input_val)
104 return float(input_val)
105
105
106 @classmethod
106 @classmethod
107 def _dir_func(cls, input_val, ensure_dir=False, mode=0o755):
107 def _dir_func(cls, input_val, ensure_dir=False, mode=0o755):
108
108
109 # ensure we have our dir created
109 # ensure we have our dir created
110 if not os.path.isdir(input_val) and ensure_dir:
110 if not os.path.isdir(input_val) and ensure_dir:
111 os.makedirs(input_val, mode=mode)
111 os.makedirs(input_val, mode=mode)
112
112
113 if not os.path.isdir(input_val):
113 if not os.path.isdir(input_val):
114 raise Exception('Dir at {} does not exist'.format(input_val))
114 raise Exception('Dir at {} does not exist'.format(input_val))
115 return input_val
115 return input_val
116
116
117 @classmethod
117 @classmethod
118 def _file_path_func(cls, input_val, ensure_dir=False, mode=0o755):
118 def _file_path_func(cls, input_val, ensure_dir=False, mode=0o755):
119 dirname = os.path.dirname(input_val)
119 dirname = os.path.dirname(input_val)
120 cls._dir_func(dirname, ensure_dir=ensure_dir)
120 cls._dir_func(dirname, ensure_dir=ensure_dir)
121 return input_val
121 return input_val
122
122
123 @classmethod
123 @classmethod
124 def _key_transformator(cls, key):
124 def _key_transformator(cls, key):
125 return "{}_{}".format('RC'.upper(), key.upper().replace('.', '_').replace('-', '_'))
125 return "{}_{}".format('RC'.upper(), key.upper().replace('.', '_').replace('-', '_'))
126
126
127 def maybe_env_key(self, key):
127 def maybe_env_key(self, key):
128 # now maybe we have this KEY in env, search and use the value with higher priority.
128 # now maybe we have this KEY in env, search and use the value with higher priority.
129 transformed_key = self._key_transformator(key)
129 transformed_key = self._key_transformator(key)
130 envvar_value = os.environ.get(transformed_key)
130 envvar_value = os.environ.get(transformed_key)
131 if envvar_value:
131 if envvar_value:
132 log.debug('using `%s` key instead of `%s` key for config', transformed_key, key)
132 log.debug('using `%s` key instead of `%s` key for config', transformed_key, key)
133
133
134 return envvar_value
134 return envvar_value
135
135
136 def env_expand(self):
136 def env_expand(self):
137 replaced = {}
137 replaced = {}
138 for k, v in self.settings.items():
138 for k, v in self.settings.items():
139 if k not in set_keys:
139 if k not in set_keys:
140 envvar_value = self.maybe_env_key(k)
140 envvar_value = self.maybe_env_key(k)
141 if envvar_value:
141 if envvar_value:
142 replaced[k] = envvar_value
142 replaced[k] = envvar_value
143 set_keys[k] = envvar_value
143 set_keys[k] = envvar_value
144
144
145 # replace ALL keys updated
145 # replace ALL keys updated
146 self.settings.update(replaced)
146 self.settings.update(replaced)
147
147
148 def enable_logging(self, logging_conf=None, level='INFO', formatter='generic'):
148 def enable_logging(self, logging_conf=None, level='INFO', formatter='generic'):
149 """
149 """
150 Helper to enable debug on running instance
150 Helper to enable debug on running instance
151 :return:
151 :return:
152 """
152 """
153
153
154 if not str2bool(self.settings.get('logging.autoconfigure')):
154 if not str2bool(self.settings.get('logging.autoconfigure')):
155 log.info('logging configuration based on main .ini file')
155 log.info('logging configuration based on main .ini file')
156 return
156 return
157
157
158 if logging_conf is None:
158 if logging_conf is None:
159 logging_conf = self.settings.get('logging.logging_conf_file') or ''
159 logging_conf = self.settings.get('logging.logging_conf_file') or ''
160
160
161 if not os.path.isfile(logging_conf):
161 if not os.path.isfile(logging_conf):
162 log.error('Unable to setup logging based on %s, '
162 log.error('Unable to setup logging based on %s, '
163 'file does not exist.... specify path using logging.logging_conf_file= config setting. ', logging_conf)
163 'file does not exist.... specify path using logging.logging_conf_file= config setting. ', logging_conf)
164 return
164 return
165
165
166 with open(logging_conf, 'rb') as f:
166 with open(logging_conf, 'rb') as f:
167 ini_template = textwrap.dedent(f.read())
167 ini_template = textwrap.dedent(f.read())
168 ini_template = string.Template(ini_template).safe_substitute(
168 ini_template = string.Template(ini_template).safe_substitute(
169 RC_LOGGING_LEVEL=os.environ.get('RC_LOGGING_LEVEL', '') or level,
169 RC_LOGGING_LEVEL=os.environ.get('RC_LOGGING_LEVEL', '') or level,
170 RC_LOGGING_FORMATTER=os.environ.get('RC_LOGGING_FORMATTER', '') or formatter
170 RC_LOGGING_FORMATTER=os.environ.get('RC_LOGGING_FORMATTER', '') or formatter
171 )
171 )
172
172
173 with tempfile.NamedTemporaryFile(prefix='rc_logging_', suffix='.ini', delete=False) as f:
173 with tempfile.NamedTemporaryFile(prefix='rc_logging_', suffix='.ini', delete=False) as f:
174 log.info('Saved Temporary LOGGING config at %s', f.name)
174 log.info('Saved Temporary LOGGING config at %s', f.name)
175 f.write(ini_template)
175 f.write(ini_template)
176
176
177 logging.config.fileConfig(f.name)
177 logging.config.fileConfig(f.name)
178 os.remove(f.name)
178 os.remove(f.name)
179
179
180 def make_setting(self, key, default, lower=False, default_when_empty=False, parser=None):
180 def make_setting(self, key, default, lower=False, default_when_empty=False, parser=None):
181 input_val = self.settings.get(key, default)
181 input_val = self.settings.get(key, default)
182
182
183 if default_when_empty and not input_val:
183 if default_when_empty and not input_val:
184 # use default value when value is set in the config but it is empty
184 # use default value when value is set in the config but it is empty
185 input_val = default
185 input_val = default
186
186
187 parser_func = {
187 parser_func = {
188 'bool': self._bool_func,
188 'bool': self._bool_func,
189 'int': self._int_func,
189 'int': self._int_func,
190 'list': self._list_func,
190 'list': self._list_func,
191 'list:newline': functools.partial(self._list_func, sep='/n'),
191 'list:newline': functools.partial(self._list_func, sep='/n'),
192 'list:spacesep': functools.partial(self._list_func, sep=' '),
192 'list:spacesep': functools.partial(self._list_func, sep=' '),
193 'string': functools.partial(self._string_func, lower=lower),
193 'string': functools.partial(self._string_func, lower=lower),
194 'dir': self._dir_func,
194 'dir': self._dir_func,
195 'dir:ensured': functools.partial(self._dir_func, ensure_dir=True),
195 'dir:ensured': functools.partial(self._dir_func, ensure_dir=True),
196 'file': self._file_path_func,
196 'file': self._file_path_func,
197 'file:ensured': functools.partial(self._file_path_func, ensure_dir=True),
197 'file:ensured': functools.partial(self._file_path_func, ensure_dir=True),
198 None: lambda i: i
198 None: lambda i: i
199 }[parser]
199 }[parser]
200
200
201 envvar_value = self.maybe_env_key(key)
201 envvar_value = self.maybe_env_key(key)
202 if envvar_value:
202 if envvar_value:
203 input_val = envvar_value
203 input_val = envvar_value
204 set_keys[key] = input_val
204 set_keys[key] = input_val
205
205
206 self.settings[key] = parser_func(input_val)
206 self.settings[key] = parser_func(input_val)
207 return self.settings[key]
207 return self.settings[key]
@@ -1,839 +1,839 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2 """Utilities for writing code that runs on Python 2 and 3"""
2 """Utilities for writing code that runs on Python 2 and 3"""
3
3
4 # Copyright (c) 2010-2015 Benjamin Peterson
4 # Copyright (c) 2010-2015 Benjamin Peterson
5 #
5 #
6 # Permission is hereby granted, free of charge, to any person obtaining a copy
6 # Permission is hereby granted, free of charge, to any person obtaining a copy
7 # of this software and associated documentation files (the "Software"), to deal
7 # of this software and associated documentation files (the "Software"), to deal
8 # in the Software without restriction, including without limitation the rights
8 # in the Software without restriction, including without limitation the rights
9 # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
10 # copies of the Software, and to permit persons to whom the Software is
10 # copies of the Software, and to permit persons to whom the Software is
11 # furnished to do so, subject to the following conditions:
11 # furnished to do so, subject to the following conditions:
12 #
12 #
13 # The above copyright notice and this permission notice shall be included in all
13 # The above copyright notice and this permission notice shall be included in all
14 # copies or substantial portions of the Software.
14 # copies or substantial portions of the Software.
15 #
15 #
16 # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
19 # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21 # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
22 # SOFTWARE.
22 # SOFTWARE.
23
23
24
24
25
25
26 import functools
26 import functools
27 import itertools
27 import itertools
28 import operator
28 import operator
29 import sys
29 import sys
30 import types
30 import types
31
31
32 __author__ = "Benjamin Peterson <benjamin@python.org>"
32 __author__ = "Benjamin Peterson <benjamin@python.org>"
33 __version__ = "1.9.0"
33 __version__ = "1.9.0"
34
34
35
35
36 # Useful for very coarse version differentiation.
36 # Useful for very coarse version differentiation.
37 PY2 = sys.version_info[0] == 2
37 PY2 = sys.version_info[0] == 2
38 PY3 = sys.version_info[0] == 3
38 PY3 = sys.version_info[0] == 3
39
39
40 if PY3:
40 if PY3:
41 string_types = str,
41 string_types = str,
42 integer_types = int,
42 integer_types = int,
43 class_types = type,
43 class_types = type,
44 text_type = str
44 text_type = str
45 binary_type = bytes
45 binary_type = bytes
46
46
47 MAXSIZE = sys.maxsize
47 MAXSIZE = sys.maxsize
48 else:
48 else:
49 string_types = basestring,
49 string_types = str,
50 integer_types = (int, long)
50 integer_types = (int, long)
51 class_types = (type, types.ClassType)
51 class_types = (type, types.ClassType)
52 text_type = unicode
52 text_type = unicode
53 binary_type = str
53 binary_type = str
54
54
55 if sys.platform.startswith("java"):
55 if sys.platform.startswith("java"):
56 # Jython always uses 32 bits.
56 # Jython always uses 32 bits.
57 MAXSIZE = int((1 << 31) - 1)
57 MAXSIZE = int((1 << 31) - 1)
58 else:
58 else:
59 # It's possible to have sizeof(long) != sizeof(Py_ssize_t).
59 # It's possible to have sizeof(long) != sizeof(Py_ssize_t).
60 class X(object):
60 class X(object):
61 def __len__(self):
61 def __len__(self):
62 return 1 << 31
62 return 1 << 31
63 try:
63 try:
64 len(X())
64 len(X())
65 except OverflowError:
65 except OverflowError:
66 # 32-bit
66 # 32-bit
67 MAXSIZE = int((1 << 31) - 1)
67 MAXSIZE = int((1 << 31) - 1)
68 else:
68 else:
69 # 64-bit
69 # 64-bit
70 MAXSIZE = int((1 << 63) - 1)
70 MAXSIZE = int((1 << 63) - 1)
71 del X
71 del X
72
72
73
73
74 def _add_doc(func, doc):
74 def _add_doc(func, doc):
75 """Add documentation to a function."""
75 """Add documentation to a function."""
76 func.__doc__ = doc
76 func.__doc__ = doc
77
77
78
78
79 def _import_module(name):
79 def _import_module(name):
80 """Import module, returning the module after the last dot."""
80 """Import module, returning the module after the last dot."""
81 __import__(name)
81 __import__(name)
82 return sys.modules[name]
82 return sys.modules[name]
83
83
84
84
85 class _LazyDescr(object):
85 class _LazyDescr(object):
86
86
87 def __init__(self, name):
87 def __init__(self, name):
88 self.name = name
88 self.name = name
89
89
90 def __get__(self, obj, tp):
90 def __get__(self, obj, tp):
91 result = self._resolve()
91 result = self._resolve()
92 setattr(obj, self.name, result) # Invokes __set__.
92 setattr(obj, self.name, result) # Invokes __set__.
93 try:
93 try:
94 # This is a bit ugly, but it avoids running this again by
94 # This is a bit ugly, but it avoids running this again by
95 # removing this descriptor.
95 # removing this descriptor.
96 delattr(obj.__class__, self.name)
96 delattr(obj.__class__, self.name)
97 except AttributeError:
97 except AttributeError:
98 pass
98 pass
99 return result
99 return result
100
100
101
101
102 class MovedModule(_LazyDescr):
102 class MovedModule(_LazyDescr):
103
103
104 def __init__(self, name, old, new=None):
104 def __init__(self, name, old, new=None):
105 super(MovedModule, self).__init__(name)
105 super(MovedModule, self).__init__(name)
106 if PY3:
106 if PY3:
107 if new is None:
107 if new is None:
108 new = name
108 new = name
109 self.mod = new
109 self.mod = new
110 else:
110 else:
111 self.mod = old
111 self.mod = old
112
112
113 def _resolve(self):
113 def _resolve(self):
114 return _import_module(self.mod)
114 return _import_module(self.mod)
115
115
116 def __getattr__(self, attr):
116 def __getattr__(self, attr):
117 _module = self._resolve()
117 _module = self._resolve()
118 value = getattr(_module, attr)
118 value = getattr(_module, attr)
119 setattr(self, attr, value)
119 setattr(self, attr, value)
120 return value
120 return value
121
121
122
122
123 class _LazyModule(types.ModuleType):
123 class _LazyModule(types.ModuleType):
124
124
125 def __init__(self, name):
125 def __init__(self, name):
126 super(_LazyModule, self).__init__(name)
126 super(_LazyModule, self).__init__(name)
127 self.__doc__ = self.__class__.__doc__
127 self.__doc__ = self.__class__.__doc__
128
128
129 def __dir__(self):
129 def __dir__(self):
130 attrs = ["__doc__", "__name__"]
130 attrs = ["__doc__", "__name__"]
131 attrs += [attr.name for attr in self._moved_attributes]
131 attrs += [attr.name for attr in self._moved_attributes]
132 return attrs
132 return attrs
133
133
134 # Subclasses should override this
134 # Subclasses should override this
135 _moved_attributes = []
135 _moved_attributes = []
136
136
137
137
class MovedAttribute(_LazyDescr):
    # Descriptor for an object that lives in a different module (and possibly
    # under a different name) on Python 2 versus Python 3.

    def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
        super(MovedAttribute, self).__init__(name)
        if PY3:
            # Default both the module and attribute to the Python 3 spelling.
            self.mod = name if new_mod is None else new_mod
            if new_attr is not None:
                self.attr = new_attr
            elif old_attr is not None:
                self.attr = old_attr
            else:
                self.attr = name
        else:
            self.mod = old_mod
            self.attr = name if old_attr is None else old_attr

    def _resolve(self):
        """Import the hosting module and fetch the attribute from it."""
        host = _import_module(self.mod)
        return getattr(host, self.attr)
161
161
162
162
163 class _SixMetaPathImporter(object):
163 class _SixMetaPathImporter(object):
164 """
164 """
165 A meta path importer to import six.moves and its submodules.
165 A meta path importer to import six.moves and its submodules.
166
166
167 This class implements a PEP302 finder and loader. It should be compatible
167 This class implements a PEP302 finder and loader. It should be compatible
168 with Python 2.5 and all existing versions of Python3
168 with Python 2.5 and all existing versions of Python3
169 """
169 """
170 def __init__(self, six_module_name):
170 def __init__(self, six_module_name):
171 self.name = six_module_name
171 self.name = six_module_name
172 self.known_modules = {}
172 self.known_modules = {}
173
173
174 def _add_module(self, mod, *fullnames):
174 def _add_module(self, mod, *fullnames):
175 for fullname in fullnames:
175 for fullname in fullnames:
176 self.known_modules[self.name + "." + fullname] = mod
176 self.known_modules[self.name + "." + fullname] = mod
177
177
178 def _get_module(self, fullname):
178 def _get_module(self, fullname):
179 return self.known_modules[self.name + "." + fullname]
179 return self.known_modules[self.name + "." + fullname]
180
180
181 def find_module(self, fullname, path=None):
181 def find_module(self, fullname, path=None):
182 if fullname in self.known_modules:
182 if fullname in self.known_modules:
183 return self
183 return self
184 return None
184 return None
185
185
186 def __get_module(self, fullname):
186 def __get_module(self, fullname):
187 try:
187 try:
188 return self.known_modules[fullname]
188 return self.known_modules[fullname]
189 except KeyError:
189 except KeyError:
190 raise ImportError("This loader does not know module " + fullname)
190 raise ImportError("This loader does not know module " + fullname)
191
191
192 def load_module(self, fullname):
192 def load_module(self, fullname):
193 try:
193 try:
194 # in case of a reload
194 # in case of a reload
195 return sys.modules[fullname]
195 return sys.modules[fullname]
196 except KeyError:
196 except KeyError:
197 pass
197 pass
198 mod = self.__get_module(fullname)
198 mod = self.__get_module(fullname)
199 if isinstance(mod, MovedModule):
199 if isinstance(mod, MovedModule):
200 mod = mod._resolve()
200 mod = mod._resolve()
201 else:
201 else:
202 mod.__loader__ = self
202 mod.__loader__ = self
203 sys.modules[fullname] = mod
203 sys.modules[fullname] = mod
204 return mod
204 return mod
205
205
206 def is_package(self, fullname):
206 def is_package(self, fullname):
207 """
207 """
208 Return true, if the named module is a package.
208 Return true, if the named module is a package.
209
209
210 We need this method to get correct spec objects with
210 We need this method to get correct spec objects with
211 Python 3.4 (see PEP451)
211 Python 3.4 (see PEP451)
212 """
212 """
213 return hasattr(self.__get_module(fullname), "__path__")
213 return hasattr(self.__get_module(fullname), "__path__")
214
214
215 def get_code(self, fullname):
215 def get_code(self, fullname):
216 """Return None
216 """Return None
217
217
218 Required, if is_package is implemented"""
218 Required, if is_package is implemented"""
219 self.__get_module(fullname) # eventually raises ImportError
219 self.__get_module(fullname) # eventually raises ImportError
220 return None
220 return None
221 get_source = get_code # same as get_code
221 get_source = get_code # same as get_code
222
222
# Single importer instance shared by all of the registration calls below.
_importer = _SixMetaPathImporter(__name__)
224
224
225
225
# Namespace backing ``six.moves``; the MovedAttribute/MovedModule
# descriptors are attached onto this class further down.
class _MovedItems(_LazyModule):
    """Lazy loading of moved objects"""
    __path__ = []  # mark as package
229
229
230
230
# Catalogue of objects relocated between Python 2 and 3.  Each entry maps a
# ``six.moves`` name to its Python 2 location (old module/attr) and Python 3
# location (new module/attr).
_moved_attributes = [
    # Individual attributes that moved (old module, new module, old/new name).
    MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
    MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
    MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"),
    MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
    MovedAttribute("intern", "__builtin__", "sys"),
    MovedAttribute("map", "itertools", "builtins", "imap", "map"),
    MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
    MovedAttribute("reload_module", "__builtin__", "imp", "reload"),
    MovedAttribute("reduce", "__builtin__", "functools"),
    MovedAttribute("shlex_quote", "pipes", "shlex", "quote"),
    MovedAttribute("StringIO", "StringIO", "io"),
    MovedAttribute("UserDict", "UserDict", "collections"),
    MovedAttribute("UserList", "UserList", "collections"),
    MovedAttribute("UserString", "UserString", "collections"),
    MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
    MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
    MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),

    # Whole modules that were renamed or relocated.
    MovedModule("builtins", "__builtin__"),
    MovedModule("configparser", "ConfigParser"),
    MovedModule("copyreg", "copy_reg"),
    MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
    MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"),
    MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
    MovedModule("http_cookies", "Cookie", "http.cookies"),
    MovedModule("html_entities", "htmlentitydefs", "html.entities"),
    MovedModule("html_parser", "HTMLParser", "html.parser"),
    MovedModule("http_client", "httplib", "http.client"),
    MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
    MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"),
    MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
    MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
    MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
    MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
    MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
    MovedModule("cPickle", "cPickle", "pickle"),
    MovedModule("queue", "Queue"),
    MovedModule("reprlib", "repr"),
    MovedModule("socketserver", "SocketServer"),
    MovedModule("_thread", "thread", "_thread"),
    MovedModule("tkinter", "Tkinter"),
    MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
    MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
    MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
    MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
    MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
    MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"),
    MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
    MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
    MovedModule("tkinter_colorchooser", "tkColorChooser",
                "tkinter.colorchooser"),
    MovedModule("tkinter_commondialog", "tkCommonDialog",
                "tkinter.commondialog"),
    MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
    MovedModule("tkinter_font", "tkFont", "tkinter.font"),
    MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
    MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
                "tkinter.simpledialog"),
    # The urllib entries point back into this package's own lazy submodules
    # (defined below) rather than at stdlib modules directly.
    MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"),
    MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),
    MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
    MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
    MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
    MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"),
    MovedModule("winreg", "_winreg"),
]
# Attach every descriptor to _MovedItems; moved *modules* are additionally
# registered with the meta path importer so they can be imported directly.
for attr in _moved_attributes:
    setattr(_MovedItems, attr.name, attr)
    if isinstance(attr, MovedModule):
        _importer._add_module(attr, "moves." + attr.name)
del attr

_MovedItems._moved_attributes = _moved_attributes

# The ``six.moves`` module object itself, registered with the importer.
moves = _MovedItems(__name__ + ".moves")
_importer._add_module(moves, "moves")
308
308
309
309
class Module_six_moves_urllib_parse(_LazyModule):
    """Lazy loading of moved objects in six.moves.urllib_parse"""
    # Attribute descriptors are attached below from
    # _urllib_parse_moved_attributes.
312
312
313
313
# Python 2 split URL handling across ``urlparse`` and ``urllib``; Python 3
# unifies everything under ``urllib.parse``.
_urllib_parse_moved_attributes = [
    MovedAttribute("ParseResult", "urlparse", "urllib.parse"),
    MovedAttribute("SplitResult", "urlparse", "urllib.parse"),
    MovedAttribute("parse_qs", "urlparse", "urllib.parse"),
    MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),
    MovedAttribute("urldefrag", "urlparse", "urllib.parse"),
    MovedAttribute("urljoin", "urlparse", "urllib.parse"),
    MovedAttribute("urlparse", "urlparse", "urllib.parse"),
    MovedAttribute("urlsplit", "urlparse", "urllib.parse"),
    MovedAttribute("urlunparse", "urlparse", "urllib.parse"),
    MovedAttribute("urlunsplit", "urlparse", "urllib.parse"),
    MovedAttribute("quote", "urllib", "urllib.parse"),
    MovedAttribute("quote_plus", "urllib", "urllib.parse"),
    MovedAttribute("unquote", "urllib", "urllib.parse"),
    MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
    MovedAttribute("urlencode", "urllib", "urllib.parse"),
    MovedAttribute("splitquery", "urllib", "urllib.parse"),
    MovedAttribute("splittag", "urllib", "urllib.parse"),
    MovedAttribute("splituser", "urllib", "urllib.parse"),
    MovedAttribute("uses_fragment", "urlparse", "urllib.parse"),
    MovedAttribute("uses_netloc", "urlparse", "urllib.parse"),
    MovedAttribute("uses_params", "urlparse", "urllib.parse"),
    MovedAttribute("uses_query", "urlparse", "urllib.parse"),
    MovedAttribute("uses_relative", "urlparse", "urllib.parse"),
]
for attr in _urllib_parse_moved_attributes:
    setattr(Module_six_moves_urllib_parse, attr.name, attr)
del attr

Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes

# Reachable both as six.moves.urllib_parse and six.moves.urllib.parse.
_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"),
                      "moves.urllib_parse", "moves.urllib.parse")
347
347
348
348
class Module_six_moves_urllib_error(_LazyModule):
    """Lazy loading of moved objects in six.moves.urllib_error"""
    # Attribute descriptors are attached below from
    # _urllib_error_moved_attributes.
351
351
352
352
# Exceptions that moved from urllib2/urllib into urllib.error on Python 3.
_urllib_error_moved_attributes = [
    MovedAttribute("URLError", "urllib2", "urllib.error"),
    MovedAttribute("HTTPError", "urllib2", "urllib.error"),
    MovedAttribute("ContentTooShortError", "urllib", "urllib.error"),
]
for attr in _urllib_error_moved_attributes:
    setattr(Module_six_moves_urllib_error, attr.name, attr)
del attr

Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes

# Reachable both as six.moves.urllib_error and six.moves.urllib.error.
_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"),
                      "moves.urllib_error", "moves.urllib.error")
366
366
367
367
class Module_six_moves_urllib_request(_LazyModule):
    """Lazy loading of moved objects in six.moves.urllib_request"""
    # Attribute descriptors are attached below from
    # _urllib_request_moved_attributes.
370
370
371
371
# Request machinery that moved from urllib2/urllib into urllib.request.
_urllib_request_moved_attributes = [
    MovedAttribute("urlopen", "urllib2", "urllib.request"),
    MovedAttribute("install_opener", "urllib2", "urllib.request"),
    MovedAttribute("build_opener", "urllib2", "urllib.request"),
    MovedAttribute("pathname2url", "urllib", "urllib.request"),
    MovedAttribute("url2pathname", "urllib", "urllib.request"),
    MovedAttribute("getproxies", "urllib", "urllib.request"),
    MovedAttribute("Request", "urllib2", "urllib.request"),
    MovedAttribute("OpenerDirector", "urllib2", "urllib.request"),
    MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"),
    MovedAttribute("ProxyHandler", "urllib2", "urllib.request"),
    MovedAttribute("BaseHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"),
    MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"),
    MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"),
    MovedAttribute("FileHandler", "urllib2", "urllib.request"),
    MovedAttribute("FTPHandler", "urllib2", "urllib.request"),
    MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"),
    MovedAttribute("UnknownHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"),
    MovedAttribute("urlretrieve", "urllib", "urllib.request"),
    MovedAttribute("urlcleanup", "urllib", "urllib.request"),
    MovedAttribute("URLopener", "urllib", "urllib.request"),
    MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
    MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
]
for attr in _urllib_request_moved_attributes:
    setattr(Module_six_moves_urllib_request, attr.name, attr)
del attr

Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes

# Reachable both as six.moves.urllib_request and six.moves.urllib.request.
_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"),
                      "moves.urllib_request", "moves.urllib.request")
415
415
416
416
class Module_six_moves_urllib_response(_LazyModule):
    """Lazy loading of moved objects in six.moves.urllib_response"""
    # Attribute descriptors are attached below from
    # _urllib_response_moved_attributes.
419
419
420
420
# Response wrapper classes that moved from urllib into urllib.response.
_urllib_response_moved_attributes = [
    MovedAttribute("addbase", "urllib", "urllib.response"),
    MovedAttribute("addclosehook", "urllib", "urllib.response"),
    MovedAttribute("addinfo", "urllib", "urllib.response"),
    MovedAttribute("addinfourl", "urllib", "urllib.response"),
]
for attr in _urllib_response_moved_attributes:
    setattr(Module_six_moves_urllib_response, attr.name, attr)
del attr

Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes

# Reachable both as six.moves.urllib_response and six.moves.urllib.response.
_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"),
                      "moves.urllib_response", "moves.urllib.response")
435
435
436
436
class Module_six_moves_urllib_robotparser(_LazyModule):
    """Lazy loading of moved objects in six.moves.urllib_robotparser"""
    # Attribute descriptors are attached below from
    # _urllib_robotparser_moved_attributes.
439
439
440
440
# robotparser became urllib.robotparser on Python 3.
_urllib_robotparser_moved_attributes = [
    MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),
]
for attr in _urllib_robotparser_moved_attributes:
    setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
del attr

Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes

# Reachable both as six.moves.urllib_robotparser and six.moves.urllib.robotparser.
_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"),
                      "moves.urllib_robotparser", "moves.urllib.robotparser")
452
452
453
453
class Module_six_moves_urllib(types.ModuleType):
    """Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
    __path__ = []  # mark as package
    # Submodules are pulled from the importer's registry at class-definition
    # time, so all five must already be registered when this class executes.
    parse = _importer._get_module("moves.urllib_parse")
    error = _importer._get_module("moves.urllib_error")
    request = _importer._get_module("moves.urllib_request")
    response = _importer._get_module("moves.urllib_response")
    robotparser = _importer._get_module("moves.urllib_robotparser")

    def __dir__(self):
        return ['parse', 'error', 'request', 'response', 'robotparser']
465
465
# Register the aggregate namespace so ``import six.moves.urllib`` works.
_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"),
                      "moves.urllib")
468
468
469
469
def add_move(move):
    """Add an item to six.moves.

    ``move`` is expected to be a MovedAttribute/MovedModule descriptor
    (only ``move.name`` is read); it becomes available as
    ``six.moves.<move.name>``.
    """
    setattr(_MovedItems, move.name, move)
473
473
474
474
def remove_move(name):
    """Remove item from six.moves.

    Raises AttributeError if *name* is neither a registered descriptor
    nor a cached value on the ``moves`` module instance.
    """
    try:
        delattr(_MovedItems, name)
    except AttributeError:
        # Fall back to the instance dict of ``moves``, where values
        # resolved via _LazyModule.__getattr__ get cached by setattr.
        try:
            del moves.__dict__[name]
        except KeyError:
            raise AttributeError("no such move, %r" % (name,))
484
484
485
485
# Attribute names of function/method internals differ between Python 2 and
# Python 3; these strings feed the operator.attrgetter accessors defined
# further down (get_method_function, get_function_code, etc.).
if PY3:
    _meth_func = "__func__"
    _meth_self = "__self__"

    _func_closure = "__closure__"
    _func_code = "__code__"
    _func_defaults = "__defaults__"
    _func_globals = "__globals__"
else:
    _meth_func = "im_func"
    _meth_self = "im_self"

    _func_closure = "func_closure"
    _func_code = "func_code"
    _func_defaults = "func_defaults"
    _func_globals = "func_globals"
502
502
503
503
try:
    # Use the ``next`` builtin where it exists.
    advance_iterator = next
except NameError:
    # Interpreters without the builtin: emulate via the iterator's
    # Python 2 ``.next()`` method.
    def advance_iterator(it):
        return it.next()
next = advance_iterator
510
510
511
511
try:
    # Use the ``callable`` builtin where it exists.
    callable = callable
except NameError:
    # Interpreters without the builtin: an object is callable iff any
    # class in its MRO defines __call__.
    def callable(obj):
        return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
517
517
518
518
if PY3:
    def get_unbound_function(unbound):
        # Python 3 has no unbound methods; the function is returned as-is.
        return unbound

    create_bound_method = types.MethodType

    Iterator = object
else:
    def get_unbound_function(unbound):
        return unbound.im_func

    def create_bound_method(func, obj):
        # Python 2's MethodType takes the class as a third argument.
        return types.MethodType(func, obj, obj.__class__)

    class Iterator(object):
        # Base class bridging Python 2's .next() to __next__ so subclasses
        # only implement the Python 3 iterator protocol.

        def next(self):
            return type(self).__next__(self)

    callable = callable
_add_doc(get_unbound_function,
         """Get the function out of a possibly unbound function""")
541
541
542
542
# Accessors for method/function internals, built from the version-specific
# attribute names chosen above (``_meth_*`` / ``_func_*``).
get_method_function = operator.attrgetter(_meth_func)
get_method_self = operator.attrgetter(_meth_self)
get_function_closure = operator.attrgetter(_func_closure)
get_function_code = operator.attrgetter(_func_code)
get_function_defaults = operator.attrgetter(_func_defaults)
get_function_globals = operator.attrgetter(_func_globals)
549
549
550
550
551 if PY3:
551 if PY3:
552 def iterkeys(d, **kw):
552 def iterkeys(d, **kw):
553 return iter(d.keys(**kw))
553 return iter(d.keys(**kw))
554
554
555 def itervalues(d, **kw):
555 def itervalues(d, **kw):
556 return iter(d.values(**kw))
556 return iter(d.values(**kw))
557
557
558 def iteritems(d, **kw):
558 def iteritems(d, **kw):
559 return iter(d.items(**kw))
559 return iter(d.items(**kw))
560
560
561 def iterlists(d, **kw):
561 def iterlists(d, **kw):
562 return iter(d.lists(**kw))
562 return iter(d.lists(**kw))
563
563
564 viewkeys = operator.methodcaller("keys")
564 viewkeys = operator.methodcaller("keys")
565
565
566 viewvalues = operator.methodcaller("values")
566 viewvalues = operator.methodcaller("values")
567
567
568 viewitems = operator.methodcaller("items")
568 viewitems = operator.methodcaller("items")
569 else:
569 else:
570 def iterkeys(d, **kw):
570 def iterkeys(d, **kw):
571 return iter(d.iterkeys(**kw))
571 return iter(d.iterkeys(**kw))
572
572
573 def itervalues(d, **kw):
573 def itervalues(d, **kw):
574 return iter(d.itervalues(**kw))
574 return iter(d.itervalues(**kw))
575
575
576 def iteritems(d, **kw):
576 def iteritems(d, **kw):
577 return iter(d.iteritems(**kw))
577 return iter(d.iteritems(**kw))
578
578
579 def iterlists(d, **kw):
579 def iterlists(d, **kw):
580 return iter(d.iterlists(**kw))
580 return iter(d.iterlists(**kw))
581
581
582 viewkeys = operator.methodcaller("viewkeys")
582 viewkeys = operator.methodcaller("viewkeys")
583
583
584 viewvalues = operator.methodcaller("viewvalues")
584 viewvalues = operator.methodcaller("viewvalues")
585
585
586 viewitems = operator.methodcaller("viewitems")
586 viewitems = operator.methodcaller("viewitems")
587
587
588 _add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")
588 _add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")
589 _add_doc(itervalues, "Return an iterator over the values of a dictionary.")
589 _add_doc(itervalues, "Return an iterator over the values of a dictionary.")
590 _add_doc(iteritems,
590 _add_doc(iteritems,
591 "Return an iterator over the (key, value) pairs of a dictionary.")
591 "Return an iterator over the (key, value) pairs of a dictionary.")
592 _add_doc(iterlists,
592 _add_doc(iterlists,
593 "Return an iterator over the (key, [values]) pairs of a dictionary.")
593 "Return an iterator over the (key, [values]) pairs of a dictionary.")
594
594
595
595
596 if PY3:
596 if PY3:
597 def b(s):
597 def b(s):
598 return s.encode("latin-1")
598 return s.encode("latin-1")
599 def u(s):
599 def u(s):
600 return s
600 return s
601 unichr = chr
601 unichr = chr
602 if sys.version_info[1] <= 1:
602 if sys.version_info[1] <= 1:
603 def int2byte(i):
603 def int2byte(i):
604 return bytes((i,))
604 return bytes((i,))
605 else:
605 else:
606 # This is about 2x faster than the implementation above on 3.2+
606 # This is about 2x faster than the implementation above on 3.2+
607 int2byte = operator.methodcaller("to_bytes", 1, "big")
607 int2byte = operator.methodcaller("to_bytes", 1, "big")
608 byte2int = operator.itemgetter(0)
608 byte2int = operator.itemgetter(0)
609 indexbytes = operator.getitem
609 indexbytes = operator.getitem
610 iterbytes = iter
610 iterbytes = iter
611 import io
611 import io
612 StringIO = io.StringIO
612 StringIO = io.StringIO
613 BytesIO = io.BytesIO
613 BytesIO = io.BytesIO
614 _assertCountEqual = "assertCountEqual"
614 _assertCountEqual = "assertCountEqual"
615 _assertRaisesRegex = "assertRaisesRegex"
615 _assertRaisesRegex = "assertRaisesRegex"
616 _assertRegex = "assertRegex"
616 _assertRegex = "assertRegex"
617 else:
617 else:
618 def b(s):
618 def b(s):
619 return s
619 return s
620 # Workaround for standalone backslash
620 # Workaround for standalone backslash
621 def u(s):
621 def u(s):
622 return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")
622 return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")
623 unichr = unichr
623 unichr = unichr
624 int2byte = chr
624 int2byte = chr
625 def byte2int(bs):
625 def byte2int(bs):
626 return ord(bs[0])
626 return ord(bs[0])
627 def indexbytes(buf, i):
627 def indexbytes(buf, i):
628 return ord(buf[i])
628 return ord(buf[i])
629 iterbytes = functools.partial(itertools.imap, ord)
629 iterbytes = functools.partial(itertools.imap, ord)
630 import StringIO
630 import StringIO
631 StringIO = BytesIO = StringIO.StringIO
631 StringIO = BytesIO = StringIO.StringIO
632 _assertCountEqual = "assertItemsEqual"
632 _assertCountEqual = "assertItemsEqual"
633 _assertRaisesRegex = "assertRaisesRegexp"
633 _assertRaisesRegex = "assertRaisesRegexp"
634 _assertRegex = "assertRegexpMatches"
634 _assertRegex = "assertRegexpMatches"
635 _add_doc(b, """Byte literal""")
635 _add_doc(b, """Byte literal""")
636 _add_doc(u, """Text literal""")
636 _add_doc(u, """Text literal""")
637
637
638
638
639 def assertCountEqual(self, *args, **kwargs):
639 def assertCountEqual(self, *args, **kwargs):
640 return getattr(self, _assertCountEqual)(*args, **kwargs)
640 return getattr(self, _assertCountEqual)(*args, **kwargs)
641
641
642
642
643 def assertRaisesRegex(self, *args, **kwargs):
643 def assertRaisesRegex(self, *args, **kwargs):
644 return getattr(self, _assertRaisesRegex)(*args, **kwargs)
644 return getattr(self, _assertRaisesRegex)(*args, **kwargs)
645
645
646
646
647 def assertRegex(self, *args, **kwargs):
647 def assertRegex(self, *args, **kwargs):
648 return getattr(self, _assertRegex)(*args, **kwargs)
648 return getattr(self, _assertRegex)(*args, **kwargs)
649
649
650
650
651 if PY3:
651 if PY3:
652 exec_ = getattr(moves.builtins, "exec")
652 exec_ = getattr(moves.builtins, "exec")
653
653
654
654
655 def reraise(tp, value, tb=None):
655 def reraise(tp, value, tb=None):
656 if value is None:
656 if value is None:
657 value = tp()
657 value = tp()
658 if value.__traceback__ is not tb:
658 if value.__traceback__ is not tb:
659 raise value.with_traceback(tb)
659 raise value.with_traceback(tb)
660 raise value
660 raise value
661
661
662 else:
662 else:
663 def exec_(_code_, _globs_=None, _locs_=None):
663 def exec_(_code_, _globs_=None, _locs_=None):
664 """Execute code in a namespace."""
664 """Execute code in a namespace."""
665 if _globs_ is None:
665 if _globs_ is None:
666 frame = sys._getframe(1)
666 frame = sys._getframe(1)
667 _globs_ = frame.f_globals
667 _globs_ = frame.f_globals
668 if _locs_ is None:
668 if _locs_ is None:
669 _locs_ = frame.f_locals
669 _locs_ = frame.f_locals
670 del frame
670 del frame
671 elif _locs_ is None:
671 elif _locs_ is None:
672 _locs_ = _globs_
672 _locs_ = _globs_
673 exec("""exec _code_ in _globs_, _locs_""")
673 exec("""exec _code_ in _globs_, _locs_""")
674
674
675
675
676 exec_("""def reraise(tp, value, tb=None):
676 exec_("""def reraise(tp, value, tb=None):
677 raise tp, value, tb
677 raise tp, value, tb
678 """)
678 """)
679
679
680
680
681 if sys.version_info[:2] == (3, 2):
681 if sys.version_info[:2] == (3, 2):
682 exec_("""def raise_from(value, from_value):
682 exec_("""def raise_from(value, from_value):
683 if from_value is None:
683 if from_value is None:
684 raise value
684 raise value
685 raise value from from_value
685 raise value from from_value
686 """)
686 """)
687 elif sys.version_info[:2] > (3, 2):
687 elif sys.version_info[:2] > (3, 2):
688 exec_("""def raise_from(value, from_value):
688 exec_("""def raise_from(value, from_value):
689 raise value from from_value
689 raise value from from_value
690 """)
690 """)
691 else:
691 else:
692 def raise_from(value, from_value):
692 def raise_from(value, from_value):
693 raise value
693 raise value
694
694
695
695
696 print_ = getattr(moves.builtins, "print", None)
696 print_ = getattr(moves.builtins, "print", None)
697 if print_ is None:
697 if print_ is None:
698 def print_(*args, **kwargs):
698 def print_(*args, **kwargs):
699 """The new-style print function for Python 2.4 and 2.5."""
699 """The new-style print function for Python 2.4 and 2.5."""
700 fp = kwargs.pop("file", sys.stdout)
700 fp = kwargs.pop("file", sys.stdout)
701 if fp is None:
701 if fp is None:
702 return
702 return
703 def write(data):
703 def write(data):
704 if not isinstance(data, basestring):
704 if not isinstance(data, str):
705 data = str(data)
705 data = str(data)
706 # If the file has an encoding, encode unicode with it.
706 # If the file has an encoding, encode unicode with it.
707 if (isinstance(fp, file) and
707 if (isinstance(fp, file) and
708 isinstance(data, unicode) and
708 isinstance(data, unicode) and
709 fp.encoding is not None):
709 fp.encoding is not None):
710 errors = getattr(fp, "errors", None)
710 errors = getattr(fp, "errors", None)
711 if errors is None:
711 if errors is None:
712 errors = "strict"
712 errors = "strict"
713 data = data.encode(fp.encoding, errors)
713 data = data.encode(fp.encoding, errors)
714 fp.write(data)
714 fp.write(data)
715 want_unicode = False
715 want_unicode = False
716 sep = kwargs.pop("sep", None)
716 sep = kwargs.pop("sep", None)
717 if sep is not None:
717 if sep is not None:
718 if isinstance(sep, unicode):
718 if isinstance(sep, unicode):
719 want_unicode = True
719 want_unicode = True
720 elif not isinstance(sep, str):
720 elif not isinstance(sep, str):
721 raise TypeError("sep must be None or a string")
721 raise TypeError("sep must be None or a string")
722 end = kwargs.pop("end", None)
722 end = kwargs.pop("end", None)
723 if end is not None:
723 if end is not None:
724 if isinstance(end, unicode):
724 if isinstance(end, unicode):
725 want_unicode = True
725 want_unicode = True
726 elif not isinstance(end, str):
726 elif not isinstance(end, str):
727 raise TypeError("end must be None or a string")
727 raise TypeError("end must be None or a string")
728 if kwargs:
728 if kwargs:
729 raise TypeError("invalid keyword arguments to print()")
729 raise TypeError("invalid keyword arguments to print()")
730 if not want_unicode:
730 if not want_unicode:
731 for arg in args:
731 for arg in args:
732 if isinstance(arg, unicode):
732 if isinstance(arg, unicode):
733 want_unicode = True
733 want_unicode = True
734 break
734 break
735 if want_unicode:
735 if want_unicode:
736 newline = unicode("\n")
736 newline = unicode("\n")
737 space = unicode(" ")
737 space = unicode(" ")
738 else:
738 else:
739 newline = "\n"
739 newline = "\n"
740 space = " "
740 space = " "
741 if sep is None:
741 if sep is None:
742 sep = space
742 sep = space
743 if end is None:
743 if end is None:
744 end = newline
744 end = newline
745 for i, arg in enumerate(args):
745 for i, arg in enumerate(args):
746 if i:
746 if i:
747 write(sep)
747 write(sep)
748 write(arg)
748 write(arg)
749 write(end)
749 write(end)
750 if sys.version_info[:2] < (3, 3):
750 if sys.version_info[:2] < (3, 3):
751 _print = print_
751 _print = print_
752 def print_(*args, **kwargs):
752 def print_(*args, **kwargs):
753 fp = kwargs.get("file", sys.stdout)
753 fp = kwargs.get("file", sys.stdout)
754 flush = kwargs.pop("flush", False)
754 flush = kwargs.pop("flush", False)
755 _print(*args, **kwargs)
755 _print(*args, **kwargs)
756 if flush and fp is not None:
756 if flush and fp is not None:
757 fp.flush()
757 fp.flush()
758
758
759 _add_doc(reraise, """Reraise an exception.""")
759 _add_doc(reraise, """Reraise an exception.""")
760
760
761 if sys.version_info[0:2] < (3, 4):
761 if sys.version_info[0:2] < (3, 4):
762 def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS,
762 def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS,
763 updated=functools.WRAPPER_UPDATES):
763 updated=functools.WRAPPER_UPDATES):
764 def wrapper(f):
764 def wrapper(f):
765 f = functools.wraps(wrapped, assigned, updated)(f)
765 f = functools.wraps(wrapped, assigned, updated)(f)
766 f.__wrapped__ = wrapped
766 f.__wrapped__ = wrapped
767 return f
767 return f
768 return wrapper
768 return wrapper
769 else:
769 else:
770 wraps = functools.wraps
770 wraps = functools.wraps
771
771
772 def with_metaclass(meta, *bases):
772 def with_metaclass(meta, *bases):
773 """Create a base class with a metaclass."""
773 """Create a base class with a metaclass."""
774 # This requires a bit of explanation: the basic idea is to make a dummy
774 # This requires a bit of explanation: the basic idea is to make a dummy
775 # metaclass for one level of class instantiation that replaces itself with
775 # metaclass for one level of class instantiation that replaces itself with
776 # the actual metaclass.
776 # the actual metaclass.
777 class metaclass(meta):
777 class metaclass(meta):
778 def __new__(cls, name, this_bases, d):
778 def __new__(cls, name, this_bases, d):
779 return meta(name, bases, d)
779 return meta(name, bases, d)
780 return type.__new__(metaclass, 'temporary_class', (), {})
780 return type.__new__(metaclass, 'temporary_class', (), {})
781
781
782
782
783 def add_metaclass(metaclass):
783 def add_metaclass(metaclass):
784 """Class decorator for creating a class with a metaclass."""
784 """Class decorator for creating a class with a metaclass."""
785 def wrapper(cls):
785 def wrapper(cls):
786 orig_vars = cls.__dict__.copy()
786 orig_vars = cls.__dict__.copy()
787 slots = orig_vars.get('__slots__')
787 slots = orig_vars.get('__slots__')
788 if slots is not None:
788 if slots is not None:
789 if isinstance(slots, str):
789 if isinstance(slots, str):
790 slots = [slots]
790 slots = [slots]
791 for slots_var in slots:
791 for slots_var in slots:
792 orig_vars.pop(slots_var)
792 orig_vars.pop(slots_var)
793 orig_vars.pop('__dict__', None)
793 orig_vars.pop('__dict__', None)
794 orig_vars.pop('__weakref__', None)
794 orig_vars.pop('__weakref__', None)
795 return metaclass(cls.__name__, cls.__bases__, orig_vars)
795 return metaclass(cls.__name__, cls.__bases__, orig_vars)
796 return wrapper
796 return wrapper
797
797
798
798
799 def python_2_unicode_compatible(klass):
799 def python_2_unicode_compatible(klass):
800 """
800 """
801 A decorator that defines __unicode__ and __str__ methods under Python 2.
801 A decorator that defines __unicode__ and __str__ methods under Python 2.
802 Under Python 3 it does nothing.
802 Under Python 3 it does nothing.
803
803
804 To support Python 2 and 3 with a single code base, define a __str__ method
804 To support Python 2 and 3 with a single code base, define a __str__ method
805 returning text and apply this decorator to the class.
805 returning text and apply this decorator to the class.
806 """
806 """
807 if PY2:
807 if PY2:
808 if '__str__' not in klass.__dict__:
808 if '__str__' not in klass.__dict__:
809 raise ValueError("@python_2_unicode_compatible cannot be applied "
809 raise ValueError("@python_2_unicode_compatible cannot be applied "
810 "to %s because it doesn't define __str__()." %
810 "to %s because it doesn't define __str__()." %
811 klass.__name__)
811 klass.__name__)
812 klass.__unicode__ = klass.__str__
812 klass.__unicode__ = klass.__str__
813 klass.__str__ = lambda self: self.__unicode__().encode('utf-8')
813 klass.__str__ = lambda self: self.__unicode__().encode('utf-8')
814 return klass
814 return klass
815
815
816
816
817 # Complete the moves implementation.
817 # Complete the moves implementation.
818 # This code is at the end of this module to speed up module loading.
818 # This code is at the end of this module to speed up module loading.
819 # Turn this module into a package.
819 # Turn this module into a package.
820 __path__ = [] # required for PEP 302 and PEP 451
820 __path__ = [] # required for PEP 302 and PEP 451
821 __package__ = __name__ # see PEP 366 @ReservedAssignment
821 __package__ = __name__ # see PEP 366 @ReservedAssignment
822 if globals().get("__spec__") is not None:
822 if globals().get("__spec__") is not None:
823 __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable
823 __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable
824 # Remove other six meta path importers, since they cause problems. This can
824 # Remove other six meta path importers, since they cause problems. This can
825 # happen if six is removed from sys.modules and then reloaded. (Setuptools does
825 # happen if six is removed from sys.modules and then reloaded. (Setuptools does
826 # this for some reason.)
826 # this for some reason.)
827 if sys.meta_path:
827 if sys.meta_path:
828 for i, importer in enumerate(sys.meta_path):
828 for i, importer in enumerate(sys.meta_path):
829 # Here's some real nastiness: Another "instance" of the six module might
829 # Here's some real nastiness: Another "instance" of the six module might
830 # be floating around. Therefore, we can't use isinstance() to check for
830 # be floating around. Therefore, we can't use isinstance() to check for
831 # the six meta path importer, since the other six instance will have
831 # the six meta path importer, since the other six instance will have
832 # inserted an importer with different class.
832 # inserted an importer with different class.
833 if (type(importer).__name__ == "_SixMetaPathImporter" and
833 if (type(importer).__name__ == "_SixMetaPathImporter" and
834 importer.name == __name__):
834 importer.name == __name__):
835 del sys.meta_path[i]
835 del sys.meta_path[i]
836 break
836 break
837 del i, importer
837 del i, importer
838 # Finally, add the importer to the meta path import hook.
838 # Finally, add the importer to the meta path import hook.
839 sys.meta_path.append(_importer)
839 sys.meta_path.append(_importer)
@@ -1,370 +1,370 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2016-2020 RhodeCode GmbH
3 # Copyright (C) 2016-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import os
21 import os
22 import hashlib
22 import hashlib
23 import itsdangerous
23 import itsdangerous
24 import logging
24 import logging
25 import requests
25 import requests
26 import datetime
26 import datetime
27
27
28 from dogpile.util.readwrite_lock import ReadWriteMutex
28 from dogpile.util.readwrite_lock import ReadWriteMutex
29 from pyramid.threadlocal import get_current_registry
29 from pyramid.threadlocal import get_current_registry
30
30
31 import rhodecode.lib.helpers as h
31 import rhodecode.lib.helpers as h
32 from rhodecode.lib.auth import HasRepoPermissionAny
32 from rhodecode.lib.auth import HasRepoPermissionAny
33 from rhodecode.lib.ext_json import json
33 from rhodecode.lib.ext_json import json
34 from rhodecode.model.db import User
34 from rhodecode.model.db import User
35
35
36 log = logging.getLogger(__name__)
36 log = logging.getLogger(__name__)
37
37
38 LOCK = ReadWriteMutex()
38 LOCK = ReadWriteMutex()
39
39
40 USER_STATE_PUBLIC_KEYS = [
40 USER_STATE_PUBLIC_KEYS = [
41 'id', 'username', 'first_name', 'last_name',
41 'id', 'username', 'first_name', 'last_name',
42 'icon_link', 'display_name', 'display_link']
42 'icon_link', 'display_name', 'display_link']
43
43
44
44
45 class ChannelstreamException(Exception):
45 class ChannelstreamException(Exception):
46 pass
46 pass
47
47
48
48
49 class ChannelstreamConnectionException(ChannelstreamException):
49 class ChannelstreamConnectionException(ChannelstreamException):
50 pass
50 pass
51
51
52
52
53 class ChannelstreamPermissionException(ChannelstreamException):
53 class ChannelstreamPermissionException(ChannelstreamException):
54 pass
54 pass
55
55
56
56
57 def get_channelstream_server_url(config, endpoint):
57 def get_channelstream_server_url(config, endpoint):
58 return 'http://{}{}'.format(config['server'], endpoint)
58 return 'http://{}{}'.format(config['server'], endpoint)
59
59
60
60
61 def channelstream_request(config, payload, endpoint, raise_exc=True):
61 def channelstream_request(config, payload, endpoint, raise_exc=True):
62 signer = itsdangerous.TimestampSigner(config['secret'])
62 signer = itsdangerous.TimestampSigner(config['secret'])
63 sig_for_server = signer.sign(endpoint)
63 sig_for_server = signer.sign(endpoint)
64 secret_headers = {'x-channelstream-secret': sig_for_server,
64 secret_headers = {'x-channelstream-secret': sig_for_server,
65 'x-channelstream-endpoint': endpoint,
65 'x-channelstream-endpoint': endpoint,
66 'Content-Type': 'application/json'}
66 'Content-Type': 'application/json'}
67 req_url = get_channelstream_server_url(config, endpoint)
67 req_url = get_channelstream_server_url(config, endpoint)
68
68
69 log.debug('Sending a channelstream request to endpoint: `%s`', req_url)
69 log.debug('Sending a channelstream request to endpoint: `%s`', req_url)
70 response = None
70 response = None
71 try:
71 try:
72 response = requests.post(req_url, data=json.dumps(payload),
72 response = requests.post(req_url, data=json.dumps(payload),
73 headers=secret_headers).json()
73 headers=secret_headers).json()
74 except requests.ConnectionError:
74 except requests.ConnectionError:
75 log.exception('ConnectionError occurred for endpoint %s', req_url)
75 log.exception('ConnectionError occurred for endpoint %s', req_url)
76 if raise_exc:
76 if raise_exc:
77 raise ChannelstreamConnectionException(req_url)
77 raise ChannelstreamConnectionException(req_url)
78 except Exception:
78 except Exception:
79 log.exception('Exception related to Channelstream happened')
79 log.exception('Exception related to Channelstream happened')
80 if raise_exc:
80 if raise_exc:
81 raise ChannelstreamConnectionException()
81 raise ChannelstreamConnectionException()
82 log.debug('Got channelstream response: %s', response)
82 log.debug('Got channelstream response: %s', response)
83 return response
83 return response
84
84
85
85
86 def get_user_data(user_id):
86 def get_user_data(user_id):
87 user = User.get(user_id)
87 user = User.get(user_id)
88 return {
88 return {
89 'id': user.user_id,
89 'id': user.user_id,
90 'username': user.username,
90 'username': user.username,
91 'first_name': user.first_name,
91 'first_name': user.first_name,
92 'last_name': user.last_name,
92 'last_name': user.last_name,
93 'icon_link': h.gravatar_url(user.email, 60),
93 'icon_link': h.gravatar_url(user.email, 60),
94 'display_name': h.person(user, 'username_or_name_or_email'),
94 'display_name': h.person(user, 'username_or_name_or_email'),
95 'display_link': h.link_to_user(user),
95 'display_link': h.link_to_user(user),
96 'notifications': user.user_data.get('notification_status', True)
96 'notifications': user.user_data.get('notification_status', True)
97 }
97 }
98
98
99
99
100 def broadcast_validator(channel_name):
100 def broadcast_validator(channel_name):
101 """ checks if user can access the broadcast channel """
101 """ checks if user can access the broadcast channel """
102 if channel_name == 'broadcast':
102 if channel_name == 'broadcast':
103 return True
103 return True
104
104
105
105
106 def repo_validator(channel_name):
106 def repo_validator(channel_name):
107 """ checks if user can access the broadcast channel """
107 """ checks if user can access the broadcast channel """
108 channel_prefix = '/repo$'
108 channel_prefix = '/repo$'
109 if channel_name.startswith(channel_prefix):
109 if channel_name.startswith(channel_prefix):
110 elements = channel_name[len(channel_prefix):].split('$')
110 elements = channel_name[len(channel_prefix):].split('$')
111 repo_name = elements[0]
111 repo_name = elements[0]
112 can_access = HasRepoPermissionAny(
112 can_access = HasRepoPermissionAny(
113 'repository.read',
113 'repository.read',
114 'repository.write',
114 'repository.write',
115 'repository.admin')(repo_name)
115 'repository.admin')(repo_name)
116 log.debug(
116 log.debug(
117 'permission check for %s channel resulted in %s',
117 'permission check for %s channel resulted in %s',
118 repo_name, can_access)
118 repo_name, can_access)
119 if can_access:
119 if can_access:
120 return True
120 return True
121 return False
121 return False
122
122
123
123
124 def check_channel_permissions(channels, plugin_validators, should_raise=True):
124 def check_channel_permissions(channels, plugin_validators, should_raise=True):
125 valid_channels = []
125 valid_channels = []
126
126
127 validators = [broadcast_validator, repo_validator]
127 validators = [broadcast_validator, repo_validator]
128 if plugin_validators:
128 if plugin_validators:
129 validators.extend(plugin_validators)
129 validators.extend(plugin_validators)
130 for channel_name in channels:
130 for channel_name in channels:
131 is_valid = False
131 is_valid = False
132 for validator in validators:
132 for validator in validators:
133 if validator(channel_name):
133 if validator(channel_name):
134 is_valid = True
134 is_valid = True
135 break
135 break
136 if is_valid:
136 if is_valid:
137 valid_channels.append(channel_name)
137 valid_channels.append(channel_name)
138 else:
138 else:
139 if should_raise:
139 if should_raise:
140 raise ChannelstreamPermissionException()
140 raise ChannelstreamPermissionException()
141 return valid_channels
141 return valid_channels
142
142
143
143
144 def get_channels_info(self, channels):
144 def get_channels_info(self, channels):
145 payload = {'channels': channels}
145 payload = {'channels': channels}
146 # gather persistence info
146 # gather persistence info
147 return channelstream_request(self._config(), payload, '/info')
147 return channelstream_request(self._config(), payload, '/info')
148
148
149
149
150 def parse_channels_info(info_result, include_channel_info=None):
150 def parse_channels_info(info_result, include_channel_info=None):
151 """
151 """
152 Returns data that contains only secure information that can be
152 Returns data that contains only secure information that can be
153 presented to clients
153 presented to clients
154 """
154 """
155 include_channel_info = include_channel_info or []
155 include_channel_info = include_channel_info or []
156
156
157 user_state_dict = {}
157 user_state_dict = {}
158 for userinfo in info_result['users']:
158 for userinfo in info_result['users']:
159 user_state_dict[userinfo['user']] = {
159 user_state_dict[userinfo['user']] = {
160 k: v for k, v in userinfo['state'].items()
160 k: v for k, v in userinfo['state'].items()
161 if k in USER_STATE_PUBLIC_KEYS
161 if k in USER_STATE_PUBLIC_KEYS
162 }
162 }
163
163
164 channels_info = {}
164 channels_info = {}
165
165
166 for c_name, c_info in info_result['channels'].items():
166 for c_name, c_info in info_result['channels'].items():
167 if c_name not in include_channel_info:
167 if c_name not in include_channel_info:
168 continue
168 continue
169 connected_list = []
169 connected_list = []
170 for username in c_info['users']:
170 for username in c_info['users']:
171 connected_list.append({
171 connected_list.append({
172 'user': username,
172 'user': username,
173 'state': user_state_dict[username]
173 'state': user_state_dict[username]
174 })
174 })
175 channels_info[c_name] = {'users': connected_list,
175 channels_info[c_name] = {'users': connected_list,
176 'history': c_info['history']}
176 'history': c_info['history']}
177
177
178 return channels_info
178 return channels_info
179
179
180
180
181 def log_filepath(history_location, channel_name):
181 def log_filepath(history_location, channel_name):
182 hasher = hashlib.sha256()
182 hasher = hashlib.sha256()
183 hasher.update(channel_name.encode('utf8'))
183 hasher.update(channel_name.encode('utf8'))
184 filename = '{}.log'.format(hasher.hexdigest())
184 filename = '{}.log'.format(hasher.hexdigest())
185 filepath = os.path.join(history_location, filename)
185 filepath = os.path.join(history_location, filename)
186 return filepath
186 return filepath
187
187
188
188
189 def read_history(history_location, channel_name):
189 def read_history(history_location, channel_name):
190 filepath = log_filepath(history_location, channel_name)
190 filepath = log_filepath(history_location, channel_name)
191 if not os.path.exists(filepath):
191 if not os.path.exists(filepath):
192 return []
192 return []
193 history_lines_limit = -100
193 history_lines_limit = -100
194 history = []
194 history = []
195 with open(filepath, 'rb') as f:
195 with open(filepath, 'rb') as f:
196 for line in f.readlines()[history_lines_limit:]:
196 for line in f.readlines()[history_lines_limit:]:
197 try:
197 try:
198 history.append(json.loads(line))
198 history.append(json.loads(line))
199 except Exception:
199 except Exception:
200 log.exception('Failed to load history')
200 log.exception('Failed to load history')
201 return history
201 return history
202
202
203
203
204 def update_history_from_logs(config, channels, payload):
204 def update_history_from_logs(config, channels, payload):
205 history_location = config.get('history.location')
205 history_location = config.get('history.location')
206 for channel in channels:
206 for channel in channels:
207 history = read_history(history_location, channel)
207 history = read_history(history_location, channel)
208 payload['channels_info'][channel]['history'] = history
208 payload['channels_info'][channel]['history'] = history
209
209
210
210
211 def write_history(config, message):
211 def write_history(config, message):
212 """ writes a messge to a base64encoded filename """
212 """ writes a messge to a base64encoded filename """
213 history_location = config.get('history.location')
213 history_location = config.get('history.location')
214 if not os.path.exists(history_location):
214 if not os.path.exists(history_location):
215 return
215 return
216 try:
216 try:
217 LOCK.acquire_write_lock()
217 LOCK.acquire_write_lock()
218 filepath = log_filepath(history_location, message['channel'])
218 filepath = log_filepath(history_location, message['channel'])
219 with open(filepath, 'ab') as f:
219 with open(filepath, 'ab') as f:
220 json.dump(message, f)
220 json.dump(message, f)
221 f.write('\n')
221 f.write('\n')
222 finally:
222 finally:
223 LOCK.release_write_lock()
223 LOCK.release_write_lock()
224
224
225
225
226 def get_connection_validators(registry):
226 def get_connection_validators(registry):
227 validators = []
227 validators = []
228 for k, config in registry.rhodecode_plugins.items():
228 for k, config in registry.rhodecode_plugins.items():
229 validator = config.get('channelstream', {}).get('connect_validator')
229 validator = config.get('channelstream', {}).get('connect_validator')
230 if validator:
230 if validator:
231 validators.append(validator)
231 validators.append(validator)
232 return validators
232 return validators
233
233
234
234
235 def get_channelstream_config(registry=None):
235 def get_channelstream_config(registry=None):
236 if not registry:
236 if not registry:
237 registry = get_current_registry()
237 registry = get_current_registry()
238
238
239 rhodecode_plugins = getattr(registry, 'rhodecode_plugins', {})
239 rhodecode_plugins = getattr(registry, 'rhodecode_plugins', {})
240 channelstream_config = rhodecode_plugins.get('channelstream', {})
240 channelstream_config = rhodecode_plugins.get('channelstream', {})
241 return channelstream_config
241 return channelstream_config
242
242
243
243
244 def post_message(channel, message, username, registry=None):
244 def post_message(channel, message, username, registry=None):
245 channelstream_config = get_channelstream_config(registry)
245 channelstream_config = get_channelstream_config(registry)
246 if not channelstream_config.get('enabled'):
246 if not channelstream_config.get('enabled'):
247 return
247 return
248
248
249 message_obj = message
249 message_obj = message
250 if isinstance(message, basestring):
250 if isinstance(message, str):
251 message_obj = {
251 message_obj = {
252 'message': message,
252 'message': message,
253 'level': 'success',
253 'level': 'success',
254 'topic': '/notifications'
254 'topic': '/notifications'
255 }
255 }
256
256
257 log.debug('Channelstream: sending notification to channel %s', channel)
257 log.debug('Channelstream: sending notification to channel %s', channel)
258 payload = {
258 payload = {
259 'type': 'message',
259 'type': 'message',
260 'timestamp': datetime.datetime.utcnow(),
260 'timestamp': datetime.datetime.utcnow(),
261 'user': 'system',
261 'user': 'system',
262 'exclude_users': [username],
262 'exclude_users': [username],
263 'channel': channel,
263 'channel': channel,
264 'message': message_obj
264 'message': message_obj
265 }
265 }
266
266
267 try:
267 try:
268 return channelstream_request(
268 return channelstream_request(
269 channelstream_config, [payload], '/message',
269 channelstream_config, [payload], '/message',
270 raise_exc=False)
270 raise_exc=False)
271 except ChannelstreamException:
271 except ChannelstreamException:
272 log.exception('Failed to send channelstream data')
272 log.exception('Failed to send channelstream data')
273 raise
273 raise
274
274
275
275
276 def _reload_link(label):
276 def _reload_link(label):
277 return (
277 return (
278 '<a onclick="window.location.reload()">'
278 '<a onclick="window.location.reload()">'
279 '<strong>{}</strong>'
279 '<strong>{}</strong>'
280 '</a>'.format(label)
280 '</a>'.format(label)
281 )
281 )
282
282
283
283
284 def pr_channel(pull_request):
284 def pr_channel(pull_request):
285 repo_name = pull_request.target_repo.repo_name
285 repo_name = pull_request.target_repo.repo_name
286 pull_request_id = pull_request.pull_request_id
286 pull_request_id = pull_request.pull_request_id
287 channel = '/repo${}$/pr/{}'.format(repo_name, pull_request_id)
287 channel = '/repo${}$/pr/{}'.format(repo_name, pull_request_id)
288 log.debug('Getting pull-request channelstream broadcast channel: %s', channel)
288 log.debug('Getting pull-request channelstream broadcast channel: %s', channel)
289 return channel
289 return channel
290
290
291
291
292 def comment_channel(repo_name, commit_obj=None, pull_request_obj=None):
292 def comment_channel(repo_name, commit_obj=None, pull_request_obj=None):
293 channel = None
293 channel = None
294 if commit_obj:
294 if commit_obj:
295 channel = u'/repo${}$/commit/{}'.format(
295 channel = u'/repo${}$/commit/{}'.format(
296 repo_name, commit_obj.raw_id
296 repo_name, commit_obj.raw_id
297 )
297 )
298 elif pull_request_obj:
298 elif pull_request_obj:
299 channel = u'/repo${}$/pr/{}'.format(
299 channel = u'/repo${}$/pr/{}'.format(
300 repo_name, pull_request_obj.pull_request_id
300 repo_name, pull_request_obj.pull_request_id
301 )
301 )
302 log.debug('Getting comment channelstream broadcast channel: %s', channel)
302 log.debug('Getting comment channelstream broadcast channel: %s', channel)
303
303
304 return channel
304 return channel
305
305
306
306
307 def pr_update_channelstream_push(request, pr_broadcast_channel, user, msg, **kwargs):
307 def pr_update_channelstream_push(request, pr_broadcast_channel, user, msg, **kwargs):
308 """
308 """
309 Channel push on pull request update
309 Channel push on pull request update
310 """
310 """
311 if not pr_broadcast_channel:
311 if not pr_broadcast_channel:
312 return
312 return
313
313
314 _ = request.translate
314 _ = request.translate
315
315
316 message = '{} {}'.format(
316 message = '{} {}'.format(
317 msg,
317 msg,
318 _reload_link(_(' Reload page to load changes')))
318 _reload_link(_(' Reload page to load changes')))
319
319
320 message_obj = {
320 message_obj = {
321 'message': message,
321 'message': message,
322 'level': 'success',
322 'level': 'success',
323 'topic': '/notifications'
323 'topic': '/notifications'
324 }
324 }
325
325
326 post_message(
326 post_message(
327 pr_broadcast_channel, message_obj, user.username,
327 pr_broadcast_channel, message_obj, user.username,
328 registry=request.registry)
328 registry=request.registry)
329
329
330
330
331 def comment_channelstream_push(request, comment_broadcast_channel, user, msg, **kwargs):
331 def comment_channelstream_push(request, comment_broadcast_channel, user, msg, **kwargs):
332 """
332 """
333 Channelstream push on comment action, on commit, or pull-request
333 Channelstream push on comment action, on commit, or pull-request
334 """
334 """
335 if not comment_broadcast_channel:
335 if not comment_broadcast_channel:
336 return
336 return
337
337
338 _ = request.translate
338 _ = request.translate
339
339
340 comment_data = kwargs.pop('comment_data', {})
340 comment_data = kwargs.pop('comment_data', {})
341 user_data = kwargs.pop('user_data', {})
341 user_data = kwargs.pop('user_data', {})
342 comment_id = comment_data.keys()[0] if comment_data else ''
342 comment_id = comment_data.keys()[0] if comment_data else ''
343
343
344 message = '<strong>{}</strong> {} #{}'.format(
344 message = '<strong>{}</strong> {} #{}'.format(
345 user.username,
345 user.username,
346 msg,
346 msg,
347 comment_id,
347 comment_id,
348 )
348 )
349
349
350 message_obj = {
350 message_obj = {
351 'message': message,
351 'message': message,
352 'level': 'success',
352 'level': 'success',
353 'topic': '/notifications'
353 'topic': '/notifications'
354 }
354 }
355
355
356 post_message(
356 post_message(
357 comment_broadcast_channel, message_obj, user.username,
357 comment_broadcast_channel, message_obj, user.username,
358 registry=request.registry)
358 registry=request.registry)
359
359
360 message_obj = {
360 message_obj = {
361 'message': None,
361 'message': None,
362 'user': user.username,
362 'user': user.username,
363 'comment_id': comment_id,
363 'comment_id': comment_id,
364 'comment_data': comment_data,
364 'comment_data': comment_data,
365 'user_data': user_data,
365 'user_data': user_data,
366 'topic': '/comment'
366 'topic': '/comment'
367 }
367 }
368 post_message(
368 post_message(
369 comment_broadcast_channel, message_obj, user.username,
369 comment_broadcast_channel, message_obj, user.username,
370 registry=request.registry)
370 registry=request.registry)
@@ -1,2155 +1,2155 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2020 RhodeCode GmbH
3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Helper functions
22 Helper functions
23
23
24 Consists of functions to typically be used within templates, but also
24 Consists of functions to typically be used within templates, but also
25 available to Controllers. This module is available to both as 'h'.
25 available to Controllers. This module is available to both as 'h'.
26 """
26 """
27 import base64
27 import base64
28 import collections
28 import collections
29
29
30 import os
30 import os
31 import random
31 import random
32 import hashlib
32 import hashlib
33 import StringIO
33 import StringIO
34 import textwrap
34 import textwrap
35 import urllib.request, urllib.parse, urllib.error
35 import urllib.request, urllib.parse, urllib.error
36 import math
36 import math
37 import logging
37 import logging
38 import re
38 import re
39 import time
39 import time
40 import string
40 import string
41 import hashlib
41 import hashlib
42 import regex
42 import regex
43 from collections import OrderedDict
43 from collections import OrderedDict
44
44
45 import pygments
45 import pygments
46 import itertools
46 import itertools
47 import fnmatch
47 import fnmatch
48 import bleach
48 import bleach
49
49
50 from datetime import datetime
50 from datetime import datetime
51 from functools import partial
51 from functools import partial
52 from pygments.formatters.html import HtmlFormatter
52 from pygments.formatters.html import HtmlFormatter
53 from pygments.lexers import (
53 from pygments.lexers import (
54 get_lexer_by_name, get_lexer_for_filename, get_lexer_for_mimetype)
54 get_lexer_by_name, get_lexer_for_filename, get_lexer_for_mimetype)
55
55
56 from pyramid.threadlocal import get_current_request
56 from pyramid.threadlocal import get_current_request
57 from tempita import looper
57 from tempita import looper
58 from webhelpers2.html import literal, HTML, escape
58 from webhelpers2.html import literal, HTML, escape
59 from webhelpers2.html._autolink import _auto_link_urls
59 from webhelpers2.html._autolink import _auto_link_urls
60 from webhelpers2.html.tools import (
60 from webhelpers2.html.tools import (
61 button_to, highlight, js_obfuscate, strip_links, strip_tags)
61 button_to, highlight, js_obfuscate, strip_links, strip_tags)
62
62
63 from webhelpers2.text import (
63 from webhelpers2.text import (
64 chop_at, collapse, convert_accented_entities,
64 chop_at, collapse, convert_accented_entities,
65 convert_misc_entities, lchop, plural, rchop, remove_formatting,
65 convert_misc_entities, lchop, plural, rchop, remove_formatting,
66 replace_whitespace, urlify, truncate, wrap_paragraphs)
66 replace_whitespace, urlify, truncate, wrap_paragraphs)
67 from webhelpers2.date import time_ago_in_words
67 from webhelpers2.date import time_ago_in_words
68
68
69 from webhelpers2.html.tags import (
69 from webhelpers2.html.tags import (
70 _input, NotGiven, _make_safe_id_component as safeid,
70 _input, NotGiven, _make_safe_id_component as safeid,
71 form as insecure_form,
71 form as insecure_form,
72 auto_discovery_link, checkbox, end_form, file,
72 auto_discovery_link, checkbox, end_form, file,
73 hidden, image, javascript_link, link_to, link_to_if, link_to_unless, ol,
73 hidden, image, javascript_link, link_to, link_to_if, link_to_unless, ol,
74 select as raw_select, stylesheet_link, submit, text, password, textarea,
74 select as raw_select, stylesheet_link, submit, text, password, textarea,
75 ul, radio, Options)
75 ul, radio, Options)
76
76
77 from webhelpers2.number import format_byte_size
77 from webhelpers2.number import format_byte_size
78
78
79 from rhodecode.lib.action_parser import action_parser
79 from rhodecode.lib.action_parser import action_parser
80 from rhodecode.lib.pagination import Page, RepoPage, SqlPage
80 from rhodecode.lib.pagination import Page, RepoPage, SqlPage
81 from rhodecode.lib.ext_json import json
81 from rhodecode.lib.ext_json import json
82 from rhodecode.lib.utils import repo_name_slug, get_custom_lexer
82 from rhodecode.lib.utils import repo_name_slug, get_custom_lexer
83 from rhodecode.lib.utils2 import (
83 from rhodecode.lib.utils2 import (
84 str2bool, safe_unicode, safe_str,
84 str2bool, safe_unicode, safe_str,
85 get_commit_safe, datetime_to_time, time_to_datetime, time_to_utcdatetime,
85 get_commit_safe, datetime_to_time, time_to_datetime, time_to_utcdatetime,
86 AttributeDict, safe_int, md5, md5_safe, get_host_info)
86 AttributeDict, safe_int, md5, md5_safe, get_host_info)
87 from rhodecode.lib.markup_renderer import MarkupRenderer, relative_links
87 from rhodecode.lib.markup_renderer import MarkupRenderer, relative_links
88 from rhodecode.lib.vcs.exceptions import CommitDoesNotExistError
88 from rhodecode.lib.vcs.exceptions import CommitDoesNotExistError
89 from rhodecode.lib.vcs.backends.base import BaseChangeset, EmptyCommit
89 from rhodecode.lib.vcs.backends.base import BaseChangeset, EmptyCommit
90 from rhodecode.lib.vcs.conf.settings import ARCHIVE_SPECS
90 from rhodecode.lib.vcs.conf.settings import ARCHIVE_SPECS
91 from rhodecode.lib.index.search_utils import get_matching_line_offsets
91 from rhodecode.lib.index.search_utils import get_matching_line_offsets
92 from rhodecode.config.conf import DATE_FORMAT, DATETIME_FORMAT
92 from rhodecode.config.conf import DATE_FORMAT, DATETIME_FORMAT
93 from rhodecode.model.changeset_status import ChangesetStatusModel
93 from rhodecode.model.changeset_status import ChangesetStatusModel
94 from rhodecode.model.db import Permission, User, Repository, UserApiKeys, FileStore
94 from rhodecode.model.db import Permission, User, Repository, UserApiKeys, FileStore
95 from rhodecode.model.repo_group import RepoGroupModel
95 from rhodecode.model.repo_group import RepoGroupModel
96 from rhodecode.model.settings import IssueTrackerSettingsModel
96 from rhodecode.model.settings import IssueTrackerSettingsModel
97
97
98
98
99 log = logging.getLogger(__name__)
99 log = logging.getLogger(__name__)
100
100
101
101
102 DEFAULT_USER = User.DEFAULT_USER
102 DEFAULT_USER = User.DEFAULT_USER
103 DEFAULT_USER_EMAIL = User.DEFAULT_USER_EMAIL
103 DEFAULT_USER_EMAIL = User.DEFAULT_USER_EMAIL
104
104
105
105
106 def asset(path, ver=None, **kwargs):
106 def asset(path, ver=None, **kwargs):
107 """
107 """
108 Helper to generate a static asset file path for rhodecode assets
108 Helper to generate a static asset file path for rhodecode assets
109
109
110 eg. h.asset('images/image.png', ver='3923')
110 eg. h.asset('images/image.png', ver='3923')
111
111
112 :param path: path of asset
112 :param path: path of asset
113 :param ver: optional version query param to append as ?ver=
113 :param ver: optional version query param to append as ?ver=
114 """
114 """
115 request = get_current_request()
115 request = get_current_request()
116 query = {}
116 query = {}
117 query.update(kwargs)
117 query.update(kwargs)
118 if ver:
118 if ver:
119 query = {'ver': ver}
119 query = {'ver': ver}
120 return request.static_path(
120 return request.static_path(
121 'rhodecode:public/{}'.format(path), _query=query)
121 'rhodecode:public/{}'.format(path), _query=query)
122
122
123
123
124 default_html_escape_table = {
124 default_html_escape_table = {
125 ord('&'): u'&amp;',
125 ord('&'): u'&amp;',
126 ord('<'): u'&lt;',
126 ord('<'): u'&lt;',
127 ord('>'): u'&gt;',
127 ord('>'): u'&gt;',
128 ord('"'): u'&quot;',
128 ord('"'): u'&quot;',
129 ord("'"): u'&#39;',
129 ord("'"): u'&#39;',
130 }
130 }
131
131
132
132
133 def html_escape(text, html_escape_table=default_html_escape_table):
133 def html_escape(text, html_escape_table=default_html_escape_table):
134 """Produce entities within text."""
134 """Produce entities within text."""
135 return text.translate(html_escape_table)
135 return text.translate(html_escape_table)
136
136
137
137
138 def chop_at_smart(s, sub, inclusive=False, suffix_if_chopped=None):
138 def chop_at_smart(s, sub, inclusive=False, suffix_if_chopped=None):
139 """
139 """
140 Truncate string ``s`` at the first occurrence of ``sub``.
140 Truncate string ``s`` at the first occurrence of ``sub``.
141
141
142 If ``inclusive`` is true, truncate just after ``sub`` rather than at it.
142 If ``inclusive`` is true, truncate just after ``sub`` rather than at it.
143 """
143 """
144 suffix_if_chopped = suffix_if_chopped or ''
144 suffix_if_chopped = suffix_if_chopped or ''
145 pos = s.find(sub)
145 pos = s.find(sub)
146 if pos == -1:
146 if pos == -1:
147 return s
147 return s
148
148
149 if inclusive:
149 if inclusive:
150 pos += len(sub)
150 pos += len(sub)
151
151
152 chopped = s[:pos]
152 chopped = s[:pos]
153 left = s[pos:].strip()
153 left = s[pos:].strip()
154
154
155 if left and suffix_if_chopped:
155 if left and suffix_if_chopped:
156 chopped += suffix_if_chopped
156 chopped += suffix_if_chopped
157
157
158 return chopped
158 return chopped
159
159
160
160
161 def shorter(text, size=20, prefix=False):
161 def shorter(text, size=20, prefix=False):
162 postfix = '...'
162 postfix = '...'
163 if len(text) > size:
163 if len(text) > size:
164 if prefix:
164 if prefix:
165 # shorten in front
165 # shorten in front
166 return postfix + text[-(size - len(postfix)):]
166 return postfix + text[-(size - len(postfix)):]
167 else:
167 else:
168 return text[:size - len(postfix)] + postfix
168 return text[:size - len(postfix)] + postfix
169 return text
169 return text
170
170
171
171
172 def reset(name, value=None, id=NotGiven, type="reset", **attrs):
172 def reset(name, value=None, id=NotGiven, type="reset", **attrs):
173 """
173 """
174 Reset button
174 Reset button
175 """
175 """
176 return _input(type, name, value, id, attrs)
176 return _input(type, name, value, id, attrs)
177
177
178
178
179 def select(name, selected_values, options, id=NotGiven, **attrs):
179 def select(name, selected_values, options, id=NotGiven, **attrs):
180
180
181 if isinstance(options, (list, tuple)):
181 if isinstance(options, (list, tuple)):
182 options_iter = options
182 options_iter = options
183 # Handle old value,label lists ... where value also can be value,label lists
183 # Handle old value,label lists ... where value also can be value,label lists
184 options = Options()
184 options = Options()
185 for opt in options_iter:
185 for opt in options_iter:
186 if isinstance(opt, tuple) and len(opt) == 2:
186 if isinstance(opt, tuple) and len(opt) == 2:
187 value, label = opt
187 value, label = opt
188 elif isinstance(opt, basestring):
188 elif isinstance(opt, str):
189 value = label = opt
189 value = label = opt
190 else:
190 else:
191 raise ValueError('invalid select option type %r' % type(opt))
191 raise ValueError('invalid select option type %r' % type(opt))
192
192
193 if isinstance(value, (list, tuple)):
193 if isinstance(value, (list, tuple)):
194 option_group = options.add_optgroup(label)
194 option_group = options.add_optgroup(label)
195 for opt2 in value:
195 for opt2 in value:
196 if isinstance(opt2, tuple) and len(opt2) == 2:
196 if isinstance(opt2, tuple) and len(opt2) == 2:
197 group_value, group_label = opt2
197 group_value, group_label = opt2
198 elif isinstance(opt2, basestring):
198 elif isinstance(opt2, str):
199 group_value = group_label = opt2
199 group_value = group_label = opt2
200 else:
200 else:
201 raise ValueError('invalid select option type %r' % type(opt2))
201 raise ValueError('invalid select option type %r' % type(opt2))
202
202
203 option_group.add_option(group_label, group_value)
203 option_group.add_option(group_label, group_value)
204 else:
204 else:
205 options.add_option(label, value)
205 options.add_option(label, value)
206
206
207 return raw_select(name, selected_values, options, id=id, **attrs)
207 return raw_select(name, selected_values, options, id=id, **attrs)
208
208
209
209
210 def branding(name, length=40):
210 def branding(name, length=40):
211 return truncate(name, length, indicator="")
211 return truncate(name, length, indicator="")
212
212
213
213
214 def FID(raw_id, path):
214 def FID(raw_id, path):
215 """
215 """
216 Creates a unique ID for filenode based on it's hash of path and commit
216 Creates a unique ID for filenode based on it's hash of path and commit
217 it's safe to use in urls
217 it's safe to use in urls
218
218
219 :param raw_id:
219 :param raw_id:
220 :param path:
220 :param path:
221 """
221 """
222
222
223 return 'c-%s-%s' % (short_id(raw_id), md5_safe(path)[:12])
223 return 'c-%s-%s' % (short_id(raw_id), md5_safe(path)[:12])
224
224
225
225
226 class _GetError(object):
226 class _GetError(object):
227 """Get error from form_errors, and represent it as span wrapped error
227 """Get error from form_errors, and represent it as span wrapped error
228 message
228 message
229
229
230 :param field_name: field to fetch errors for
230 :param field_name: field to fetch errors for
231 :param form_errors: form errors dict
231 :param form_errors: form errors dict
232 """
232 """
233
233
234 def __call__(self, field_name, form_errors):
234 def __call__(self, field_name, form_errors):
235 tmpl = """<span class="error_msg">%s</span>"""
235 tmpl = """<span class="error_msg">%s</span>"""
236 if form_errors and field_name in form_errors:
236 if form_errors and field_name in form_errors:
237 return literal(tmpl % form_errors.get(field_name))
237 return literal(tmpl % form_errors.get(field_name))
238
238
239
239
240 get_error = _GetError()
240 get_error = _GetError()
241
241
242
242
243 class _ToolTip(object):
243 class _ToolTip(object):
244
244
245 def __call__(self, tooltip_title, trim_at=50):
245 def __call__(self, tooltip_title, trim_at=50):
246 """
246 """
247 Special function just to wrap our text into nice formatted
247 Special function just to wrap our text into nice formatted
248 autowrapped text
248 autowrapped text
249
249
250 :param tooltip_title:
250 :param tooltip_title:
251 """
251 """
252 tooltip_title = escape(tooltip_title)
252 tooltip_title = escape(tooltip_title)
253 tooltip_title = tooltip_title.replace('<', '&lt;').replace('>', '&gt;')
253 tooltip_title = tooltip_title.replace('<', '&lt;').replace('>', '&gt;')
254 return tooltip_title
254 return tooltip_title
255
255
256
256
257 tooltip = _ToolTip()
257 tooltip = _ToolTip()
258
258
259 files_icon = u'<i class="file-breadcrumb-copy tooltip icon-clipboard clipboard-action" data-clipboard-text="{}" title="Copy file path"></i>'
259 files_icon = u'<i class="file-breadcrumb-copy tooltip icon-clipboard clipboard-action" data-clipboard-text="{}" title="Copy file path"></i>'
260
260
261
261
262 def files_breadcrumbs(repo_name, repo_type, commit_id, file_path, landing_ref_name=None, at_ref=None,
262 def files_breadcrumbs(repo_name, repo_type, commit_id, file_path, landing_ref_name=None, at_ref=None,
263 limit_items=False, linkify_last_item=False, hide_last_item=False,
263 limit_items=False, linkify_last_item=False, hide_last_item=False,
264 copy_path_icon=True):
264 copy_path_icon=True):
265 if isinstance(file_path, str):
265 if isinstance(file_path, str):
266 file_path = safe_unicode(file_path)
266 file_path = safe_unicode(file_path)
267
267
268 if at_ref:
268 if at_ref:
269 route_qry = {'at': at_ref}
269 route_qry = {'at': at_ref}
270 default_landing_ref = at_ref or landing_ref_name or commit_id
270 default_landing_ref = at_ref or landing_ref_name or commit_id
271 else:
271 else:
272 route_qry = None
272 route_qry = None
273 default_landing_ref = commit_id
273 default_landing_ref = commit_id
274
274
275 # first segment is a `HOME` link to repo files root location
275 # first segment is a `HOME` link to repo files root location
276 root_name = literal(u'<i class="icon-home"></i>')
276 root_name = literal(u'<i class="icon-home"></i>')
277
277
278 url_segments = [
278 url_segments = [
279 link_to(
279 link_to(
280 root_name,
280 root_name,
281 repo_files_by_ref_url(
281 repo_files_by_ref_url(
282 repo_name,
282 repo_name,
283 repo_type,
283 repo_type,
284 f_path=None, # None here is a special case for SVN repos,
284 f_path=None, # None here is a special case for SVN repos,
285 # that won't prefix with a ref
285 # that won't prefix with a ref
286 ref_name=default_landing_ref,
286 ref_name=default_landing_ref,
287 commit_id=commit_id,
287 commit_id=commit_id,
288 query=route_qry
288 query=route_qry
289 )
289 )
290 )]
290 )]
291
291
292 path_segments = file_path.split('/')
292 path_segments = file_path.split('/')
293 last_cnt = len(path_segments) - 1
293 last_cnt = len(path_segments) - 1
294 for cnt, segment in enumerate(path_segments):
294 for cnt, segment in enumerate(path_segments):
295 if not segment:
295 if not segment:
296 continue
296 continue
297 segment_html = escape(segment)
297 segment_html = escape(segment)
298
298
299 last_item = cnt == last_cnt
299 last_item = cnt == last_cnt
300
300
301 if last_item and hide_last_item:
301 if last_item and hide_last_item:
302 # iterate over and hide last element
302 # iterate over and hide last element
303 continue
303 continue
304
304
305 if last_item and linkify_last_item is False:
305 if last_item and linkify_last_item is False:
306 # plain version
306 # plain version
307 url_segments.append(segment_html)
307 url_segments.append(segment_html)
308 else:
308 else:
309 url_segments.append(
309 url_segments.append(
310 link_to(
310 link_to(
311 segment_html,
311 segment_html,
312 repo_files_by_ref_url(
312 repo_files_by_ref_url(
313 repo_name,
313 repo_name,
314 repo_type,
314 repo_type,
315 f_path='/'.join(path_segments[:cnt + 1]),
315 f_path='/'.join(path_segments[:cnt + 1]),
316 ref_name=default_landing_ref,
316 ref_name=default_landing_ref,
317 commit_id=commit_id,
317 commit_id=commit_id,
318 query=route_qry
318 query=route_qry
319 ),
319 ),
320 ))
320 ))
321
321
322 limited_url_segments = url_segments[:1] + ['...'] + url_segments[-5:]
322 limited_url_segments = url_segments[:1] + ['...'] + url_segments[-5:]
323 if limit_items and len(limited_url_segments) < len(url_segments):
323 if limit_items and len(limited_url_segments) < len(url_segments):
324 url_segments = limited_url_segments
324 url_segments = limited_url_segments
325
325
326 full_path = file_path
326 full_path = file_path
327 if copy_path_icon:
327 if copy_path_icon:
328 icon = files_icon.format(escape(full_path))
328 icon = files_icon.format(escape(full_path))
329 else:
329 else:
330 icon = ''
330 icon = ''
331
331
332 if file_path == '':
332 if file_path == '':
333 return root_name
333 return root_name
334 else:
334 else:
335 return literal(' / '.join(url_segments) + icon)
335 return literal(' / '.join(url_segments) + icon)
336
336
337
337
338 def files_url_data(request):
338 def files_url_data(request):
339 import urllib.request, urllib.parse, urllib.error
339 import urllib.request, urllib.parse, urllib.error
340 matchdict = request.matchdict
340 matchdict = request.matchdict
341
341
342 if 'f_path' not in matchdict:
342 if 'f_path' not in matchdict:
343 matchdict['f_path'] = ''
343 matchdict['f_path'] = ''
344 else:
344 else:
345 matchdict['f_path'] = urllib.parse.quote(safe_str(matchdict['f_path']))
345 matchdict['f_path'] = urllib.parse.quote(safe_str(matchdict['f_path']))
346 if 'commit_id' not in matchdict:
346 if 'commit_id' not in matchdict:
347 matchdict['commit_id'] = 'tip'
347 matchdict['commit_id'] = 'tip'
348
348
349 return json.dumps(matchdict)
349 return json.dumps(matchdict)
350
350
351
351
352 def repo_files_by_ref_url(db_repo_name, db_repo_type, f_path, ref_name, commit_id, query=None, ):
352 def repo_files_by_ref_url(db_repo_name, db_repo_type, f_path, ref_name, commit_id, query=None, ):
353 _is_svn = is_svn(db_repo_type)
353 _is_svn = is_svn(db_repo_type)
354 final_f_path = f_path
354 final_f_path = f_path
355
355
356 if _is_svn:
356 if _is_svn:
357 """
357 """
358 For SVN the ref_name cannot be used as a commit_id, it needs to be prefixed with
358 For SVN the ref_name cannot be used as a commit_id, it needs to be prefixed with
359 actually commit_id followed by the ref_name. This should be done only in case
359 actually commit_id followed by the ref_name. This should be done only in case
360 This is a initial landing url, without additional paths.
360 This is a initial landing url, without additional paths.
361
361
362 like: /1000/tags/1.0.0/?at=tags/1.0.0
362 like: /1000/tags/1.0.0/?at=tags/1.0.0
363 """
363 """
364
364
365 if ref_name and ref_name != 'tip':
365 if ref_name and ref_name != 'tip':
366 # NOTE(marcink): for svn the ref_name is actually the stored path, so we prefix it
366 # NOTE(marcink): for svn the ref_name is actually the stored path, so we prefix it
367 # for SVN we only do this magic prefix if it's root, .eg landing revision
367 # for SVN we only do this magic prefix if it's root, .eg landing revision
368 # of files link. If we are in the tree we don't need this since we traverse the url
368 # of files link. If we are in the tree we don't need this since we traverse the url
369 # that has everything stored
369 # that has everything stored
370 if f_path in ['', '/']:
370 if f_path in ['', '/']:
371 final_f_path = '/'.join([ref_name, f_path])
371 final_f_path = '/'.join([ref_name, f_path])
372
372
373 # SVN always needs a commit_id explicitly, without a named REF
373 # SVN always needs a commit_id explicitly, without a named REF
374 default_commit_id = commit_id
374 default_commit_id = commit_id
375 else:
375 else:
376 """
376 """
377 For git and mercurial we construct a new URL using the names instead of commit_id
377 For git and mercurial we construct a new URL using the names instead of commit_id
378 like: /master/some_path?at=master
378 like: /master/some_path?at=master
379 """
379 """
380 # We currently do not support branches with slashes
380 # We currently do not support branches with slashes
381 if '/' in ref_name:
381 if '/' in ref_name:
382 default_commit_id = commit_id
382 default_commit_id = commit_id
383 else:
383 else:
384 default_commit_id = ref_name
384 default_commit_id = ref_name
385
385
386 # sometimes we pass f_path as None, to indicate explicit no prefix,
386 # sometimes we pass f_path as None, to indicate explicit no prefix,
387 # we translate it to string to not have None
387 # we translate it to string to not have None
388 final_f_path = final_f_path or ''
388 final_f_path = final_f_path or ''
389
389
390 files_url = route_path(
390 files_url = route_path(
391 'repo_files',
391 'repo_files',
392 repo_name=db_repo_name,
392 repo_name=db_repo_name,
393 commit_id=default_commit_id,
393 commit_id=default_commit_id,
394 f_path=final_f_path,
394 f_path=final_f_path,
395 _query=query
395 _query=query
396 )
396 )
397 return files_url
397 return files_url
398
398
399
399
def code_highlight(code, lexer, formatter, use_hl_filter=False):
    """
    Lex ``code`` with ``lexer`` and format it with the formatter ``formatter``.

    If ``outfile`` is given and a valid file object (an object
    with a ``write`` method), the result will be written to it, otherwise
    it is returned as a string.
    """
    if use_hl_filter:
        # attach the ElasticSearch highlight filter to the lexer
        from rhodecode.lib.index import search_utils
        lexer.add_filter(search_utils.ElasticSearchHLFilter())

    token_stream = pygments.lex(code, lexer)
    return pygments.format(token_stream, formatter)
413
413
414
414
class CodeHtmlFormatter(HtmlFormatter):
    """
    My code Html Formatter for source codes.

    Wraps every source line in ``<div id="L<n>">`` so individual lines can be
    anchored/highlighted, and renders the line-number gutter as a two-column
    HTML table.
    """

    def wrap(self, source, outfile):
        return self._wrap_div(self._wrap_pre(self._wrap_code(source)))

    def _wrap_code(self, source):
        # give every emitted line a 1-based anchor div (L1, L2, ...)
        for cnt, it in enumerate(source):
            i, t = it
            t = '<div id="L%s">%s</div>' % (cnt + 1, t)
            yield i, t

    def _wrap_tablelinenos(self, inner):
        # py3 fix: the py2-only ``StringIO`` module no longer exists;
        # use ``io.StringIO`` instead (local import keeps this self-contained)
        import io
        dummyoutfile = io.StringIO()
        lncount = 0
        for t, line in inner:
            if t:
                lncount += 1
            dummyoutfile.write(line)

        fl = self.linenostart
        mw = len(str(lncount + fl - 1))  # width of the widest line number
        sp = self.linenospecial
        st = self.linenostep
        la = self.lineanchors
        aln = self.anchorlinenos
        nocls = self.noclasses
        if sp:
            lines = []

            for i in range(fl, fl + lncount):
                if i % st == 0:
                    if i % sp == 0:
                        if aln:
                            lines.append('<a href="#%s%d" class="special">%*d</a>' %
                                         (la, i, mw, i))
                        else:
                            lines.append('<span class="special">%*d</span>' % (mw, i))
                    else:
                        if aln:
                            lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
                        else:
                            lines.append('%*d' % (mw, i))
                else:
                    lines.append('')
            ls = '\n'.join(lines)
        else:
            lines = []
            for i in range(fl, fl + lncount):
                if i % st == 0:
                    if aln:
                        lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
                    else:
                        lines.append('%*d' % (mw, i))
                else:
                    lines.append('')
            ls = '\n'.join(lines)

        # in case you wonder about the seemingly redundant <div> here: since the
        # content in the other cell also is wrapped in a div, some browsers in
        # some configurations seem to mess up the formatting...
        if nocls:
            yield 0, ('<table class="%stable">' % self.cssclass +
                      '<tr><td><div class="linenodiv" '
                      'style="background-color: #f0f0f0; padding-right: 10px">'
                      '<pre style="line-height: 125%">' +
                      ls + '</pre></div></td><td id="hlcode" class="code">')
        else:
            yield 0, ('<table class="%stable">' % self.cssclass +
                      '<tr><td class="linenos"><div class="linenodiv"><pre>' +
                      ls + '</pre></div></td><td id="hlcode" class="code">')
        yield 0, dummyoutfile.getvalue()
        yield 0, '</td></tr></table>'
490
490
491
491
class SearchContentCodeHtmlFormatter(CodeHtmlFormatter):
    """Formatter for search results: renders only the matched source lines."""

    def __init__(self, **kw):
        # only show these line numbers if set
        self.only_lines = kw.pop('only_line_numbers', [])
        # terms that matched the search; kept for callers that highlight them
        self.query_terms = kw.pop('query_terms', [])
        self.max_lines = kw.pop('max_lines', 5)
        self.line_context = kw.pop('line_context', 3)
        # base URL used to link each shown line number (optional)
        self.url = kw.pop('url', None)

        # NOTE(review): starts the MRO search *after* CodeHtmlFormatter, so this
        # effectively calls HtmlFormatter.__init__; since CodeHtmlFormatter
        # defines no __init__ of its own the result is the same — confirm
        # intent before changing to super(SearchContentCodeHtmlFormatter, ...)
        super(CodeHtmlFormatter, self).__init__(**kw)

    def _wrap_code(self, source):
        # wrap each emitted line in a bare <pre>; no per-line anchor ids here
        for cnt, it in enumerate(source):
            i, t = it
            t = '<pre>%s</pre>' % t
            yield i, t

    def _wrap_tablelinenos(self, inner):
        """Emit a table containing only the lines listed in ``self.only_lines``."""
        yield 0, '<table class="code-highlight %stable">' % self.cssclass

        last_shown_line_number = 0
        current_line_number = 1

        for t, line in inner:
            if not t:
                # non-source output (markup) passes through untouched
                yield t, line
                continue

            if current_line_number in self.only_lines:
                # insert an ellipsis row whenever hidden lines were skipped
                if last_shown_line_number + 1 != current_line_number:
                    yield 0, '<tr>'
                    yield 0, '<td class="line">...</td>'
                    yield 0, '<td id="hlcode" class="code"></td>'
                    yield 0, '</tr>'

                yield 0, '<tr>'
                if self.url:
                    yield 0, '<td class="line"><a href="%s#L%i">%i</a></td>' % (
                        self.url, current_line_number, current_line_number)
                else:
                    yield 0, '<td class="line"><a href="">%i</a></td>' % (
                        current_line_number)
                yield 0, '<td id="hlcode" class="code">' + line + '</td>'
                yield 0, '</tr>'

                last_shown_line_number = current_line_number

            current_line_number += 1

        yield 0, '</table>'
542
542
543
543
def hsv_to_rgb(h, s, v):
    """Convert HSV color values to an (r, g, b) tuple."""

    if s == 0.0:
        # achromatic (grey): every channel equals the value
        return v, v, v

    sector = int(h * 6.0)  # XXX assume int() truncates!
    fract = (h * 6.0) - sector
    p = v * (1.0 - s)
    q = v * (1.0 - s * fract)
    t = v * (1.0 - s * (1.0 - fract))

    # channel ordering depends only on the hue sector (0..5)
    return (
        (v, t, p),
        (q, v, p),
        (p, v, t),
        (p, q, v),
        (t, p, v),
        (v, p, q),
    )[sector % 6]
567
567
568
568
def unique_color_generator(n=10000, saturation=0.10, lightness=0.95):
    """
    Generator for getting n of evenly distributed colors using
    hsv color and golden ratio. It always return same order of colors

    :param n: number of colors to generate
    :param saturation: saturation of returned colors
    :param lightness: lightness of returned colors
    :returns: list of three stringified RGB channel values per color
    """

    golden_ratio = 0.618033988749895
    h = 0.22717784590367374

    for _ in range(n):
        h += golden_ratio
        h %= 1
        HSV_tuple = [h, saturation, lightness]
        RGB_tuple = hsv_to_rgb(*HSV_tuple)
        # py3 fix: ``map`` now returns a lazy one-shot iterator; yield a
        # concrete list so callers can cache the value and join it repeatedly
        yield [str(int(x * 256)) for x in RGB_tuple]
589
589
590
590
def color_hasher(n=10000, saturation=0.10, lightness=0.95):
    """
    Returns a function which when called with an argument returns a unique
    color for that argument, eg.

    :param n: number of colors to generate
    :param saturation: saturation of returned colors
    :param lightness: lightness of returned colors
    :returns: css RGB string

    >>> color_hash = color_hasher()
    >>> color_hash('hello')
    'rgb(34, 12, 59)'
    >>> color_hash('hello')
    'rgb(34, 12, 59)'
    >>> color_hash('other')
    'rgb(90, 224, 159)'
    """

    # memoized color per argument, so repeated calls stay stable
    color_dict = {}
    # bugfix: ``n`` was previously dropped; forward it to the generator
    cgenerator = unique_color_generator(
        n=n, saturation=saturation, lightness=lightness)

    def get_color_string(thing):
        if thing in color_dict:
            col = color_dict[thing]
        else:
            # py3 fix: generators have no ``.next()`` method, use ``next()``;
            # materialize to a list so the cached value survives repeated joins
            col = color_dict[thing] = list(next(cgenerator))
        return "rgb(%s)" % (', '.join(col))

    return get_color_string
622
622
623
623
def get_lexer_safe(mimetype=None, filepath=None):
    """
    Try to resolve a pygments lexer from a mimetype and/or a file path,
    falling back to the plain ``text`` lexer when nothing matches.
    """
    found = None
    try:
        if mimetype:
            found = get_lexer_for_mimetype(mimetype)
        found = found or get_lexer_for_filename(filepath)
    except pygments.util.ClassNotFound:
        # unknown mimetype/extension: fall through to the default below
        pass

    return found or get_lexer_by_name('text')
642
642
643
643
def get_lexer_for_filenode(filenode):
    """Pick a lexer for *filenode*: extension override first, its own otherwise."""
    custom = get_custom_lexer(filenode.extension)
    return custom or filenode.lexer
647
647
648
648
def pygmentize(filenode, **kwargs):
    """
    Render ``filenode`` content as highlighted HTML markup using pygments.

    :param filenode: file node whose ``content`` will be highlighted
    """
    formatter = CodeHtmlFormatter(**kwargs)
    lexer = get_lexer_for_filenode(filenode)
    return literal(code_highlight(filenode.content, lexer, formatter))
658
658
659
659
def is_following_repo(repo_name, user_id):
    """Return whether the given user follows repository ``repo_name``."""
    from rhodecode.model.scm import ScmModel
    scm_model = ScmModel()
    return scm_model.is_following_repo(repo_name, user_id)
663
663
664
664
665 class _Message(object):
665 class _Message(object):
666 """A message returned by ``Flash.pop_messages()``.
666 """A message returned by ``Flash.pop_messages()``.
667
667
668 Converting the message to a string returns the message text. Instances
668 Converting the message to a string returns the message text. Instances
669 also have the following attributes:
669 also have the following attributes:
670
670
671 * ``message``: the message text.
671 * ``message``: the message text.
672 * ``category``: the category specified when the message was created.
672 * ``category``: the category specified when the message was created.
673 """
673 """
674
674
675 def __init__(self, category, message, sub_data=None):
675 def __init__(self, category, message, sub_data=None):
676 self.category = category
676 self.category = category
677 self.message = message
677 self.message = message
678 self.sub_data = sub_data or {}
678 self.sub_data = sub_data or {}
679
679
680 def __str__(self):
680 def __str__(self):
681 return self.message
681 return self.message
682
682
683 __unicode__ = __str__
683 __unicode__ = __str__
684
684
685 def __html__(self):
685 def __html__(self):
686 return escape(safe_unicode(self.message))
686 return escape(safe_unicode(self.message))
687
687
688
688
class Flash(object):
    """Session-backed flash message store, merging pylons and pyramid queues."""

    # List of allowed categories. If None, allow any category.
    categories = ["warning", "notice", "error", "success"]

    # Default category if none is specified.
    default_category = "notice"

    def __init__(self, session_key="flash", categories=None,
                 default_category=None):
        """
        Instantiate a ``Flash`` object.

        ``session_key`` is the key to save the messages under in the user's
        session.

        ``categories`` is an optional list which overrides the default list
        of categories.

        ``default_category`` overrides the default category used for messages
        when none is specified.
        """
        self.session_key = session_key
        if categories is not None:
            self.categories = categories
        if default_category is not None:
            self.default_category = default_category
        if self.categories and self.default_category not in self.categories:
            raise ValueError(
                "unrecognized default category %r" % (self.default_category,))

    def pop_messages(self, session=None, request=None):
        """
        Return all accumulated messages and delete them from the session.

        The return value is a list of ``Message`` objects.
        """
        messages = []

        # resolve the session from the (current) request when not supplied
        if not session:
            if not request:
                request = get_current_request()
            session = request.session

        # Pop the 'old' pylons flash messages. They are tuples of the form
        # (category, message)
        for cat, msg in session.pop(self.session_key, []):
            messages.append(_Message(cat, msg))

        # Pop the 'new' pyramid flash messages for each category as list
        # of strings.
        for cat in self.categories:
            for msg in session.pop_flash(queue=cat):
                sub_data = {}
                # messages may carry a JSON payload appended after '|DELIM|'
                if hasattr(msg, 'rsplit'):
                    flash_data = msg.rsplit('|DELIM|', 1)
                    org_message = flash_data[0]
                    if len(flash_data) > 1:
                        sub_data = json.loads(flash_data[1])
                else:
                    org_message = msg

                messages.append(_Message(cat, org_message, sub_data=sub_data))

        # Map messages from the default queue to the 'notice' category.
        for msg in session.pop_flash():
            messages.append(_Message('notice', msg))

        session.save()
        return messages

    def json_alerts(self, session=None, request=None):
        """Return pending messages serialized as a JSON list of alert payloads."""
        payloads = []
        # NOTE(review): uses the module-level ``flash`` singleton rather than
        # ``self``; equivalent for the singleton instance — confirm before reuse
        messages = flash.pop_messages(session=session, request=request) or []
        for message in messages:
            payloads.append({
                'message': {
                    'message': u'{}'.format(message.message),
                    'level': message.category,
                    'force': True,
                    'subdata': message.sub_data
                }
            })
        return json.dumps(payloads)

    def __call__(self, message, category=None, ignore_duplicate=True,
                 session=None, request=None):
        # Queue ``message`` under ``category`` in the (request) session.

        if not session:
            if not request:
                request = get_current_request()
            session = request.session

        session.flash(
            message, queue=category, allow_duplicate=not ignore_duplicate)
783
783
784
784
# module-level singleton used throughout the codebase (e.g. as ``h.flash``)
flash = Flash()
786
786
787 #==============================================================================
787 #==============================================================================
788 # SCM FILTERS available via h.
788 # SCM FILTERS available via h.
789 #==============================================================================
789 #==============================================================================
790 from rhodecode.lib.vcs.utils import author_name, author_email
790 from rhodecode.lib.vcs.utils import author_name, author_email
791 from rhodecode.lib.utils2 import age, age_from_seconds
791 from rhodecode.lib.utils2 import age, age_from_seconds
792 from rhodecode.model.db import User, ChangesetStatus
792 from rhodecode.model.db import User, ChangesetStatus
793
793
794
794
# alias: extracts the e-mail portion of a VCS author string
email = author_email
796
796
797
797
def capitalize(raw_text):
    """Upper-case the first character of *raw_text*, lower-case the rest."""
    capitalized = raw_text.capitalize()
    return capitalized
800
800
801
801
def short_id(long_id):
    """Return the first 12 characters of a (long) commit id."""
    return long_id[0:12]
804
804
805
805
def hide_credentials(url):
    """Return *url* with any embedded credentials masked out."""
    from rhodecode.lib.utils2 import credentials_filter
    filtered_url = credentials_filter(url)
    return filtered_url
809
809
810
810
import pytz
import tzlocal
# timezone of the machine RhodeCode runs on, resolved once at import time
local_timezone = tzlocal.get_localzone()
814
814
815
815
def get_timezone(datetime_iso, time_is_local=False):
    """
    Compute a ``+HH:MM`` style timezone suffix for *datetime_iso*.

    Defaults to UTC; only naive local datetimes get a real offset, taken
    from the ``RC_TIMEZONE`` env var or the server's local timezone.
    """
    needs_offset = (
        time_is_local
        and isinstance(datetime_iso, datetime)
        and not datetime_iso.tzinfo)
    if not needs_offset:
        return '+00:00'

    forced = os.environ.get('RC_TIMEZONE', '')
    if forced:
        forced = pytz.timezone(forced)
    zone = forced or local_timezone
    offset = zone.localize(datetime_iso).strftime('%z')
    return '{}:{}'.format(offset[:-2], offset[-2:])
828
828
829
829
def age_component(datetime_iso, value=None, time_is_local=False, tooltip=True):
    """Render a ``<time class="timeago">`` element for *datetime_iso*."""
    title = value or format_date(datetime_iso)
    tzinfo = get_timezone(datetime_iso, time_is_local=time_is_local)

    css_cls = ''
    tt_title = ''
    if tooltip:
        css_cls = 'tooltip'
        tt_title = '{title}{tzinfo}'.format(title=title, tzinfo=tzinfo)

    return literal(
        '<time class="timeago {cls}" title="{tt_title}" datetime="{dt}{tzinfo}">{title}</time>'.format(
            cls=css_cls, tt_title=tt_title, title=title,
            dt=datetime_iso, tzinfo=tzinfo))
840
840
841
841
842 def _shorten_commit_id(commit_id, commit_len=None):
842 def _shorten_commit_id(commit_id, commit_len=None):
843 if commit_len is None:
843 if commit_len is None:
844 request = get_current_request()
844 request = get_current_request()
845 commit_len = request.call_context.visual.show_sha_length
845 commit_len = request.call_context.visual.show_sha_length
846 return commit_id[:commit_len]
846 return commit_id[:commit_len]
847
847
848
848
def show_id(commit, show_idx=None, commit_len=None):
    """
    Configurable function that shows ID
    by default it's r123:fffeeefffeee

    :param commit: commit instance
    """
    if show_idx is None:
        # read the per-request visual setting when not explicitly given
        show_idx = get_current_request().call_context.visual.show_revision_number

    short = _shorten_commit_id(commit.raw_id, commit_len=commit_len)
    if not show_idx:
        return '%s' % (short, )
    return 'r%s:%s' % (commit.idx, short)
865
865
866
866
def format_date(date):
    """
    use a standardized formatting for dates used in RhodeCode

    :param date: date/datetime object
    :return: formatted date
    """
    if not date:
        return u""

    _fmt = "%a, %d %b %Y %H:%M:%S"
    return safe_unicode(date.strftime(_fmt))
880
880
881
881
882 class _RepoChecker(object):
882 class _RepoChecker(object):
883
883
884 def __init__(self, backend_alias):
884 def __init__(self, backend_alias):
885 self._backend_alias = backend_alias
885 self._backend_alias = backend_alias
886
886
887 def __call__(self, repository):
887 def __call__(self, repository):
888 if hasattr(repository, 'alias'):
888 if hasattr(repository, 'alias'):
889 _type = repository.alias
889 _type = repository.alias
890 elif hasattr(repository, 'repo_type'):
890 elif hasattr(repository, 'repo_type'):
891 _type = repository.repo_type
891 _type = repository.repo_type
892 else:
892 else:
893 _type = repository
893 _type = repository
894 return _type == self._backend_alias
894 return _type == self._backend_alias
895
895
896
896
# ready-made backend predicates, usable as e.g. ``h.is_git(repo)``
is_git = _RepoChecker('git')
is_hg = _RepoChecker('hg')
is_svn = _RepoChecker('svn')
900
900
901
901
def get_repo_type_by_name(repo_name):
    """Return the backend type ('git'/'hg'/'svn') for *repo_name*, or None."""
    repo = Repository.get_by_repo_name(repo_name)
    if not repo:
        return None
    return repo.repo_type
906
906
907
907
def is_svn_without_proxy(repository):
    """True when *repository* is SVN and the SVN HTTP proxy is disabled."""
    if not is_svn(repository):
        return False
    from rhodecode.model.settings import VcsSettingsModel
    conf = VcsSettingsModel().get_ui_settings_as_config_obj()
    return not str2bool(conf.get('vcs_svn_proxy', 'http_requests_enabled'))
914
914
915
915
def discover_user(author):
    """
    Tries to discover RhodeCode User based on the author string. Author string
    is typically `FirstName LastName <email@address.com>`
    """

    # an already-resolved User instance needs no lookup
    if isinstance(author, User):
        return author

    # e-mail lookup first: it is the most specific match
    _email = author_email(author)
    if _email:
        by_email = User.get_by_email(_email, case_insensitive=True, cache=True)
        if by_email is not None:
            return by_email

    # otherwise fall back to username extraction
    by_name = User.get_by_username(
        author_name(author), case_insensitive=True, cache=True)
    if by_name is not None:
        return by_name

    return None
940
940
941
941
def email_or_none(author):
    """Extract an e-mail for *author*: from the string itself, or via user lookup."""
    _email = author_email(author)
    if _email:
        return _email

    # no literal e-mail; maybe the author name maps to a known user
    user = User.get_by_username(
        author_name(author), case_insensitive=True, cache=True)
    if user is not None:
        return user.email

    # No valid email, not a valid user in the system, none!
    return None
959
959
960
960
def link_to_user(author, length=0, **kwargs):
    """Render a link to the author's profile page, or plain text for unknowns."""
    user = discover_user(author)
    # user can be None, but re-using a resolved user in person() saves
    # one intensive query
    if user:
        author = user

    display = person(author, 'username_or_name_or_email')
    if length:
        display = shorter(display, length)

    if not (user and user.username != user.DEFAULT_USER):
        return escape(display)
    return link_to(
        escape(display),
        route_path('user_profile', username=user.username),
        **kwargs)
979
979
980
980
def link_to_group(users_group_name, **kwargs):
    """Render an HTML link to the profile page of the given user group."""
    profile_url = route_path(
        'user_group_profile', user_group_name=users_group_name)
    return link_to(escape(users_group_name), profile_url, **kwargs)
986
986
987
987
def person(author, show_attr="username_and_name"):
    """
    Return a display string for *author*.

    If *author* resolves to a known user, returns that user's
    ``show_attr`` attribute; otherwise falls back to the raw author
    name, then the raw email.
    """
    found = discover_user(author)
    if found:
        return getattr(found, show_attr)

    name_part = author_name(author)
    mail_part = email(author)
    return name_part or mail_part
996
996
997
997
def author_string(email):
    """
    Build a display string for *email*.

    When a user with that email exists and has a first/last name, return
    ``First Last &lt;email&gt;`` (already HTML-escaped angle brackets);
    otherwise return the email itself, or ``None`` for falsy input.
    """
    if not email:
        return None

    user = User.get_by_email(email, case_insensitive=True, cache=True)
    if user and (user.first_name or user.last_name):
        return '%s %s &lt;%s&gt;' % (
            user.first_name, user.last_name, email)
    return email
1011
1011
1012
1012
def person_by_id(id_, show_attr="username_and_name"):
    """
    Return ``show_attr`` of the user with the given id.

    Falls back to returning *id_* itself (coerced to ``int`` when it
    looked numeric) if no such user exists.
    """
    def _extract(usr):
        # attr to return from fetched user
        return getattr(usr, show_attr)

    # maybe it's an ID ?
    if str(id_).isdigit() or isinstance(id_, int):
        id_ = int(id_)
        user = User.get(id_)
        if user is not None:
            return _extract(user)
    return id_
1024
1024
1025
1025
def gravatar_with_user(request, author, show_disabled=False, tooltip=False):
    """Render the ``gravatar_with_user`` mako def for the given author."""
    renderer = request.get_partial_renderer(
        'rhodecode:templates/base/base.mako')
    return renderer(
        'gravatar_with_user', author,
        show_disabled=show_disabled, tooltip=tooltip)
1029
1029
1030
1030
# Supported meta-tags: name -> (compiled regex, replacement html).
# NOTE: the patterns run against text that has already been HTML-escaped,
# which is why "=>" appears as `\=\&gt;` inside the expressions.
# Order is significant: 'label' and the catch-all 'generic' must stay last
# so the more specific tags are matched/stripped first.
tags_paterns = OrderedDict((
    ('lang', (re.compile(r'\[(lang|language)\ \=\&gt;\ *([a-zA-Z\-\/\#\+\.]*)\]'),
              '<div class="metatag" tag="lang">\\2</div>')),

    ('see', (re.compile(r'\[see\ \=\&gt;\ *([a-zA-Z0-9\/\=\?\&amp;\ \:\/\.\-]*)\]'),
             '<div class="metatag" tag="see">see: \\1 </div>')),

    ('url', (re.compile(r'\[url\ \=\&gt;\ \[([a-zA-Z0-9\ \.\-\_]+)\]\((http://|https://|/)(.*?)\)\]'),
             '<div class="metatag" tag="url"> <a href="\\2\\3">\\1</a> </div>')),

    ('license', (re.compile(r'\[license\ \=\&gt;\ *([a-zA-Z0-9\/\=\?\&amp;\ \:\/\.\-]*)\]'),
                 '<div class="metatag" tag="license"><a href="http:\/\/www.opensource.org/licenses/\\1">\\1</a></div>')),

    ('ref', (re.compile(r'\[(requires|recommends|conflicts|base)\ \=\&gt;\ *([a-zA-Z0-9\-\/]*)\]'),
             '<div class="metatag" tag="ref \\1">\\1: <a href="/\\2">\\2</a></div>')),

    ('state', (re.compile(r'\[(stable|featured|stale|dead|dev|deprecated)\]'),
               '<div class="metatag" tag="state \\1">\\1</div>')),

    # label in grey
    ('label', (re.compile(r'\[([a-z]+)\]'),
               '<div class="metatag" tag="label">\\1</div>')),

    # generic catch all in grey
    ('generic', (re.compile(r'\[([a-zA-Z0-9\.\-\_]+)\]'),
                 '<div class="metatag" tag="generic">\\1</div>')),
))
1058
1058
1059
1059
def extract_metatags(value):
    """
    Extract supported meta-tags from given text value
    """
    found = []
    if not value:
        return found, ''

    # strip each recognized tag out of the text, collecting it as we go
    for tag_name, (pattern, _replacement) in tags_paterns.items():
        found.extend(
            (tag_name, match.group()) for match in pattern.finditer(value))
        value = pattern.sub('', value)

    return found, value
1074
1074
1075
1075
def style_metatag(tag_type, value):
    """
    converts tags from value into html equivalent

    :param tag_type: key into ``tags_paterns`` selecting which tag to render
    :param value: the raw (already HTML-escaped) text to transform
    """
    if not value:
        return ''

    html_value = value
    tag_data = tags_paterns.get(tag_type)
    if tag_data:
        pat, replace_html = tag_data
        # convert to plain `str` instead of a markup tag to be used in
        # regex expressions. `unicode()` was the py2 builtin and is a
        # NameError on python3.
        html_value = pat.sub(replace_html, str(value))

    return html_value
1092
1092
1093
1093
def bool2icon(value, show_at_false=True):
    """
    Returns boolean value of a given value, represented as html element with
    classes that will represent icons

    :param value: given value to convert to html node
    """
    if value:  # does bool conversion
        return HTML.tag('i', class_="icon-true", title='True')

    # not true as bool
    if show_at_false:
        return HTML.tag('i', class_="icon-false", title='False')
    return HTML.tag('i')
1108
1108
1109
1109
def b64(inp):
    """
    Base64-encode *inp* and return the encoded bytes.

    On python3 ``base64.b64encode`` only accepts bytes-like input, so text
    input is transparently encoded as utf-8 first (backward compatible
    with callers passing bytes).
    """
    if isinstance(inp, str):
        inp = inp.encode('utf-8')
    return base64.b64encode(inp)
1112
1112
1113 #==============================================================================
1113 #==============================================================================
1114 # PERMS
1114 # PERMS
1115 #==============================================================================
1115 #==============================================================================
1116 from rhodecode.lib.auth import (
1116 from rhodecode.lib.auth import (
1117 HasPermissionAny, HasPermissionAll,
1117 HasPermissionAny, HasPermissionAll,
1118 HasRepoPermissionAny, HasRepoPermissionAll, HasRepoGroupPermissionAll,
1118 HasRepoPermissionAny, HasRepoPermissionAll, HasRepoGroupPermissionAll,
1119 HasRepoGroupPermissionAny, HasRepoPermissionAnyApi, get_csrf_token,
1119 HasRepoGroupPermissionAny, HasRepoPermissionAnyApi, get_csrf_token,
1120 csrf_token_key, AuthUser)
1120 csrf_token_key, AuthUser)
1121
1121
1122
1122
1123 #==============================================================================
1123 #==============================================================================
1124 # GRAVATAR URL
1124 # GRAVATAR URL
1125 #==============================================================================
1125 #==============================================================================
class InitialsGravatar(object):
    """
    Generates an "initials" avatar: an inline SVG with the user's two-letter
    initials on a background color deterministically derived from the email.
    """

    def __init__(self, email_address, first_name, last_name, size=30,
                 background=None, text_color='#fff'):
        self.size = size
        self.first_name = first_name
        self.last_name = last_name
        self.email_address = email_address
        # background is stable per email unless explicitly overridden
        self.background = background or self.str2color(email_address)
        self.text_color = text_color

    def get_color_bank(self):
        """
        returns a predefined list of colors that gravatars can use.
        Those are randomized distinct colors that guarantee readability and
        uniqueness.

        generated with: http://phrogz.net/css/distinct-colors.html
        """
        return [
            '#bf3030', '#a67f53', '#00ff00', '#5989b3', '#392040', '#d90000',
            '#402910', '#204020', '#79baf2', '#a700b3', '#bf6060', '#7f5320',
            '#008000', '#003059', '#ee00ff', '#ff0000', '#8c4b00', '#007300',
            '#005fb3', '#de73e6', '#ff4040', '#ffaa00', '#3df255', '#203140',
            '#47004d', '#591616', '#664400', '#59b365', '#0d2133', '#83008c',
            '#592d2d', '#bf9f60', '#73e682', '#1d3f73', '#73006b', '#402020',
            '#b2862d', '#397341', '#597db3', '#e600d6', '#a60000', '#736039',
            '#00b318', '#79aaf2', '#330d30', '#ff8080', '#403010', '#16591f',
            '#002459', '#8c4688', '#e50000', '#ffbf40', '#00732e', '#102340',
            '#bf60ac', '#8c4646', '#cc8800', '#00a642', '#1d3473', '#b32d98',
            '#660e00', '#ffd580', '#80ffb2', '#7391e6', '#733967', '#d97b6c',
            '#8c5e00', '#59b389', '#3967e6', '#590047', '#73281d', '#665200',
            '#00e67a', '#2d50b3', '#8c2377', '#734139', '#b2982d', '#16593a',
            '#001859', '#ff00aa', '#a65e53', '#ffcc00', '#0d3321', '#2d3959',
            '#731d56', '#401610', '#4c3d00', '#468c6c', '#002ca6', '#d936a3',
            '#d94c36', '#403920', '#36d9a3', '#0d1733', '#592d4a', '#993626',
            '#cca300', '#00734d', '#46598c', '#8c005e', '#7f1100', '#8c7000',
            '#00a66f', '#7382e6', '#b32d74', '#d9896c', '#ffe680', '#1d7362',
            '#364cd9', '#73003d', '#d93a00', '#998a4d', '#59b3a1', '#5965b3',
            '#e5007a', '#73341d', '#665f00', '#00b38f', '#0018b3', '#59163a',
            '#b2502d', '#bfb960', '#00ffcc', '#23318c', '#a6537f', '#734939',
            '#b2a700', '#104036', '#3d3df2', '#402031', '#e56739', '#736f39',
            '#79f2ea', '#000059', '#401029', '#4c1400', '#ffee00', '#005953',
            '#101040', '#990052', '#402820', '#403d10', '#00ffee', '#0000d9',
            '#ff80c4', '#a66953', '#eeff00', '#00ccbe', '#8080ff', '#e673a1',
            '#a62c00', '#474d00', '#1a3331', '#46468c', '#733950', '#662900',
            '#858c23', '#238c85', '#0f0073', '#b20047', '#d9986c', '#becc00',
            '#396f73', '#281d73', '#ff0066', '#ff6600', '#dee673', '#59adb3',
            '#6559b3', '#590024', '#b2622d', '#98b32d', '#36ced9', '#332d59',
            '#40001a', '#733f1d', '#526600', '#005359', '#242040', '#bf6079',
            '#735039', '#cef23d', '#007780', '#5630bf', '#66001b', '#b24700',
            '#acbf60', '#1d6273', '#25008c', '#731d34', '#a67453', '#50592d',
            '#00ccff', '#6600ff', '#ff0044', '#4c1f00', '#8a994d', '#79daf2',
            '#a173e6', '#d93662', '#402310', '#aaff00', '#2d98b3', '#8c40ff',
            '#592d39', '#ff8c40', '#354020', '#103640', '#1a0040', '#331a20',
            '#331400', '#334d00', '#1d5673', '#583973', '#7f0022', '#4c3626',
            '#88cc00', '#36a3d9', '#3d0073', '#d9364c', '#33241a', '#698c23',
            '#5995b3', '#300059', '#e57382', '#7f3300', '#366600', '#00aaff',
            '#3a1659', '#733941', '#663600', '#74b32d', '#003c59', '#7f53a6',
            '#73000f', '#ff8800', '#baf279', '#79caf2', '#291040', '#a6293a',
            '#b2742d', '#587339', '#0077b3', '#632699', '#400009', '#d9a66c',
            '#294010', '#2d4a59', '#aa00ff', '#4c131b', '#b25f00', '#5ce600',
            '#267399', '#a336d9', '#990014', '#664e33', '#86bf60', '#0088ff',
            '#7700b3', '#593a16', '#073300', '#1d4b73', '#ac60bf', '#e59539',
            '#4f8c46', '#368dd9', '#5c0073'
        ]

    def rgb_to_hex_color(self, rgb_tuple):
        """
        Converts an rgb_tuple passed to an hex color.

        :param rgb_tuple: tuple with 3 ints represents rgb color space
        """
        # py3 fix: `"".join(map(chr, t)).encode('hex')` was py2-only
        # (str.encode('hex') does not exist on python3); format each
        # channel as two hex digits instead.
        return '#' + ''.join('%02x' % channel for channel in rgb_tuple)

    def email_to_int_list(self, email_str):
        """
        Get every byte of the hex digest value of email and turn it to integer.
        It's going to be always between 0-255
        """
        digest = md5_safe(email_str.lower())
        return [int(digest[i * 2:i * 2 + 2], 16) for i in range(16)]

    def pick_color_bank_index(self, email_str, color_bank):
        # first digest byte modulo bank size gives a stable index
        return self.email_to_int_list(email_str)[0] % len(color_bank)

    def str2color(self, email_str):
        """
        Tries to map in a stable algorithm an email to color

        :param email_str:
        """
        color_bank = self.get_color_bank()
        # pick position (module it's length so we always find it in the
        # bank even if it's smaller than 256 values
        pos = self.pick_color_bank_index(email_str, color_bank)
        return color_bank[pos]

    def normalize_email(self, email_address):
        """
        Coerce *email_address* into a plain-ascii `user@host` text string,
        filling in defaults for missing user/host parts.
        """
        import unicodedata
        # default host used to fill in the fake/missing email
        default_host = u'localhost'

        if not email_address:
            email_address = u'%s@%s' % (User.DEFAULT_USER, default_host)

        email_address = safe_unicode(email_address)

        if u'@' not in email_address:
            email_address = u'%s@%s' % (email_address, default_host)

        if email_address.endswith(u'@'):
            email_address = u'%s%s' % (email_address, default_host)

        # py3 fix: .encode() returns bytes; decode back so callers keep
        # working with text (get_initials splits this with str '@')
        email_address = unicodedata.normalize('NFKD', email_address)\
            .encode('ascii', 'ignore').decode('ascii')
        return email_address

    def get_initials(self):
        """
        Returns 2 letter initials calculated based on the input.
        The algorithm picks first given email address, and takes first letter
        of part before @, and then the first letter of server name. In case
        the part before @ is in a format of `somestring.somestring2` it replaces
        the server letter with first letter of somestring2

        In case function was initialized with both first and lastname, this
        overrides the extraction from email by first letter of the first and
        last name. We add special logic to that functionality, In case Full name
        is compound, like Guido Von Rossum, we use last part of the last name
        (Von Rossum) picking `R`.

        Function also normalizes the non-ascii characters to they ascii
        representation, eg Δ„ => A
        """
        import unicodedata
        # replace non-ascii to ascii; py3 fix: decode back to text so
        # indexing yields 1-char strings, not ints
        first_name = unicodedata.normalize(
            'NFKD', safe_unicode(self.first_name))\
            .encode('ascii', 'ignore').decode('ascii')
        last_name = unicodedata.normalize(
            'NFKD', safe_unicode(self.last_name))\
            .encode('ascii', 'ignore').decode('ascii')

        # do NFKD encoding, and also make sure email has proper format
        email_address = self.normalize_email(self.email_address)

        # first push the email initials
        prefix, server = email_address.split('@', 1)

        # check if prefix is maybe a 'first_name.last_name' syntax
        _dot_split = prefix.rsplit('.', 1)
        if len(_dot_split) == 2 and _dot_split[1]:
            initials = [_dot_split[0][0], _dot_split[1][0]]
        else:
            initials = [prefix[0], server[0]]

        # then try to replace either first_name or last_name
        fn_letter = (first_name or " ")[0].strip()
        ln_letter = (last_name.split(' ', 1)[-1] or " ")[0].strip()

        if fn_letter:
            initials[0] = fn_letter

        if ln_letter:
            initials[1] = ln_letter

        return ''.join(initials).upper()

    def get_img_data_by_type(self, font_family, img_type):
        """Return a canned SVG body for the given ``img_type``."""
        default_user = """
        <svg xmlns="http://www.w3.org/2000/svg"
        version="1.1" x="0px" y="0px" width="{size}" height="{size}"
        viewBox="-15 -10 439.165 429.164"

        xml:space="preserve"
        style="background:{background};" >

        <path d="M204.583,216.671c50.664,0,91.74-48.075,
        91.74-107.378c0-82.237-41.074-107.377-91.74-107.377
        c-50.668,0-91.74,25.14-91.74,107.377C112.844,
        168.596,153.916,216.671,
        204.583,216.671z" fill="{text_color}"/>
        <path d="M407.164,374.717L360.88,
        270.454c-2.117-4.771-5.836-8.728-10.465-11.138l-71.83-37.392
        c-1.584-0.823-3.502-0.663-4.926,0.415c-20.316,
        15.366-44.203,23.488-69.076,23.488c-24.877,
        0-48.762-8.122-69.078-23.488
        c-1.428-1.078-3.346-1.238-4.93-0.415L58.75,
        259.316c-4.631,2.41-8.346,6.365-10.465,11.138L2.001,374.717
        c-3.191,7.188-2.537,15.412,1.75,22.005c4.285,
        6.592,11.537,10.526,19.4,10.526h362.861c7.863,0,15.117-3.936,
        19.402-10.527 C409.699,390.129,
        410.355,381.902,407.164,374.717z" fill="{text_color}"/>
        </svg>""".format(
            size=self.size,
            background='#979797',  # @grey4
            text_color=self.text_color,
            font_family=font_family)

        return {
            "default_user": default_user
        }[img_type]

    def get_img_data(self, svg_type=None):
        """
        generates the svg metadata for image
        """
        fonts = [
            '-apple-system',
            'BlinkMacSystemFont',
            'Segoe UI',
            'Roboto',
            'Oxygen-Sans',
            'Ubuntu',
            'Cantarell',
            'Helvetica Neue',
            'sans-serif'
        ]
        font_family = ','.join(fonts)
        if svg_type:
            return self.get_img_data_by_type(font_family, svg_type)

        initials = self.get_initials()
        img_data = """
        <svg xmlns="http://www.w3.org/2000/svg" pointer-events="none"
             width="{size}" height="{size}"
             style="width: 100%; height: 100%; background-color: {background}"
             viewBox="0 0 {size} {size}">
            <text text-anchor="middle" y="50%" x="50%" dy="0.35em"
                  pointer-events="auto" fill="{text_color}"
                  font-family="{font_family}"
                  style="font-weight: 400; font-size: {f_size}px;">{text}
            </text>
        </svg>""".format(
            size=self.size,
            f_size=self.size/2.05,  # scale the text inside the box nicely
            background=self.background,
            text_color=self.text_color,
            text=initials.upper(),
            font_family=font_family)

        return img_data

    def generate_svg(self, svg_type=None):
        """Return the avatar as a base64 `data:` URI string."""
        img_data = self.get_img_data(svg_type)
        # py3 fix: b64encode needs bytes and returns bytes; without the
        # encode/decode the "%s" interpolation would embed a b'...' repr
        b64_svg = base64.b64encode(img_data.encode('utf-8')).decode('ascii')
        return "data:image/svg+xml;base64,%s" % b64_svg
1370
1370
1371
1371
def initials_gravatar(request, email_address, first_name, last_name, size=30, store_on_disk=False):
    """
    Generate an "initials" SVG avatar for the given user data.

    :param request: pyramid request (used for registry settings and routes)
    :param email_address: user email; the default-user email selects the
        special ``default_user`` icon variant
    :param first_name: used for the initials and the on-disk cache key
    :param last_name: used for the initials and the on-disk cache key
    :param size: avatar size in pixels
    :param store_on_disk: when True, persist the SVG in the file store and
        return a ``download_file`` route path; otherwise return an inline
        ``data:`` URI with the SVG
    """

    svg_type = None
    if email_address == User.DEFAULT_USER_EMAIL:
        svg_type = 'default_user'

    klass = InitialsGravatar(email_address, first_name, last_name, size)

    if store_on_disk:
        from rhodecode.apps.file_store import utils as store_utils
        from rhodecode.apps.file_store.exceptions import FileNotAllowedException, \
            FileOverSizeException
        from rhodecode.model.db import Session

        # deterministic cache key derived from email + names, so the same
        # user data always maps to the same stored file
        image_key = md5_safe(email_address.lower()
                             + first_name.lower() + last_name.lower())

        storage = store_utils.get_file_storage(request.registry.settings)
        filename = '{}.svg'.format(image_key)
        subdir = 'gravatars'
        # since final name has a counter, we apply the 0
        uid = storage.apply_counter(0, store_utils.uid_filename(filename, randomized=False))
        store_uid = os.path.join(subdir, uid)

        # already stored once: reuse the existing file-store entry
        db_entry = FileStore.get_by_store_uid(store_uid)
        if db_entry:
            return request.route_path('download_file', fid=store_uid)

        img_data = klass.get_img_data(svg_type=svg_type)
        img_file = store_utils.bytes_to_file_obj(img_data)

        # NOTE(review): this except clause only re-raises, so it is a no-op;
        # presumably kept as a marker for expected failure modes
        try:
            store_uid, metadata = storage.save_file(
                img_file, filename, directory=subdir,
                extensions=['.svg'], randomized_name=False)
        except (FileNotAllowedException, FileOverSizeException):
            raise

        try:
            entry = FileStore.create(
                file_uid=store_uid, filename=metadata["filename"],
                file_hash=metadata["sha256"], file_size=metadata["size"],
                file_display_name=filename,
                file_description=u'user gravatar `{}`'.format(safe_unicode(filename)),
                # NOTE(review): hardcoded user_id=1 — presumably the
                # default/admin user owns generated gravatars; confirm
                hidden=True, check_acl=False, user_id=1
            )
            Session().add(entry)
            Session().commit()
            log.debug('Stored upload in DB as %s', entry)
        except Exception:
            raise

        return request.route_path('download_file', fid=store_uid)

    else:
        return klass.generate_svg(svg_type=svg_type)
1428
1428
1429
1429
def gravatar_external(request, gravatar_url_tmpl, email_address, size=30):
    """
    Build an external gravatar URL by substituting the known placeholders
    of *gravatar_url_tmpl* with request/user specific values.
    """
    substitutions = (
        ('{email}', email_address),
        ('{md5email}', md5_safe(email_address.lower())),
        ('{netloc}', request.host),
        ('{scheme}', request.scheme),
        ('{size}', safe_str(size)),
    )
    rendered_url = safe_str(gravatar_url_tmpl)
    for placeholder, value in substitutions:
        rendered_url = rendered_url.replace(placeholder, value)
    return rendered_url
1437
1437
1438
1438
def gravatar_url(email_address, size=30, request=None):
    """
    Return an avatar URL for *email_address*.

    Depending on the ``use_gravatar`` visual setting this is either an
    external gravatar URL built from the configured template, or a locally
    generated initials avatar. Empty/default emails always get the default
    initials avatar.

    :param email_address: email to resolve an avatar for; falls back to the
        default user email when falsy
    :param size: avatar size in pixels
    :param request: pyramid request; resolved via get_current_request()
        when not given
    """
    request = request or get_current_request()
    _use_gravatar = request.call_context.visual.use_gravatar

    email_address = email_address or User.DEFAULT_USER_EMAIL
    if isinstance(email_address, str):
        # hashlib crashes on unicode items; normalize to a safe str.
        # Python 3 fix: the old check used the removed `unicode` builtin,
        # which raised NameError here on py3.
        email_address = safe_str(email_address)

    # empty email or default user
    if not email_address or email_address == User.DEFAULT_USER_EMAIL:
        return initials_gravatar(request, User.DEFAULT_USER_EMAIL, '', '', size=size)

    if _use_gravatar:
        gravatar_url_tmpl = request.call_context.visual.gravatar_url \
            or User.DEFAULT_GRAVATAR_URL
        return gravatar_external(request, gravatar_url_tmpl, email_address, size=size)

    else:
        return initials_gravatar(request, email_address, '', '', size=size)
1459
1459
1460
1460
def breadcrumb_repo_link(repo):
    """
    Makes a breadcrumbs path link to repo

    ex::
        group >> subgroup >> repo

    :param repo: a Repository instance
    """

    def _group_segment(group):
        # one breadcrumb segment per parent group
        return link_to(
            group.name,
            route_path('repo_group_home', repo_group_name=group.group_name),
            title='last change:{}'.format(format_date(group.last_commit_change)))

    segments = [_group_segment(parent) for parent in repo.groups_with_parents]
    segments.append(
        link_to(
            repo.just_name,
            route_path('repo_summary', repo_name=repo.repo_name),
            title='last change:{}'.format(format_date(repo.last_commit_change))))

    return literal(' &raquo; '.join(segments))
1481
1481
1482
1482
def breadcrumb_repo_group_link(repo_group):
    """
    Makes a breadcrumbs path link to repo

    ex::
        group >> subgroup

    :param repo_group: a Repository Group instance
    """

    def _segment(group):
        # a single breadcrumb link for one repo group
        return link_to(
            group.name,
            route_path('repo_group_home', repo_group_name=group.group_name),
            title='last change:{}'.format(format_date(group.last_commit_change)))

    # all parent groups first, then the group itself as the last segment
    segments = [_segment(parent) for parent in repo_group.parents]
    segments.append(_segment(repo_group))

    return literal(' &raquo; '.join(segments))
1505
1505
1506
1506
def format_byte_size_binary(file_size):
    """
    Formats file/folder sizes to standard, treating ``None`` as zero bytes.
    """
    size = 0 if file_size is None else file_size
    return format_byte_size(size, binary=True)
1516
1516
1517
1517
def urlify_text(text_, safe=True, **href_attrs):
    """
    Extract urls from text and make html links out of them
    """

    url_pat = re.compile(r'''(http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@#.&+]'''
                         '''|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+)''')

    def _make_link(match_obj):
        # wrap the matched URL in an <a> tag, merging any caller attributes;
        # href always points at the matched URL itself
        matched_url = match_obj.groups()[0]
        anchor_attrs = dict(href_attrs, href=matched_url)
        return HTML.tag("a", matched_url, **anchor_attrs)

    linkified = url_pat.sub(_make_link, text_)

    return literal(linkified) if safe else linkified
1538
1538
1539
1539
def urlify_commits(text_, repo_name):
    """
    Extract commit ids from text and make link from them

    :param text_:
    :param repo_name: repo name to build the URL with
    """

    url_pat = re.compile(r'(^|\s)([0-9a-fA-F]{12,40})($|\s)')

    def _commit_link(match_obj):
        # groups: (leading whitespace, commit hash, trailing whitespace)
        prefix, commit_id, suffix = match_obj.groups()

        link_tmpl = (
            '%(pref)s<a class="tooltip-hovercard %(cls)s" href="%(url)s" data-hovercard-alt="%(hovercard_alt)s" data-hovercard-url="%(hovercard_url)s">'
            '%(commit_id)s</a>%(suf)s'
        )
        return link_tmpl % {
            'pref': prefix,
            'cls': 'revision-link',
            'url': route_url(
                'repo_commit', repo_name=repo_name, commit_id=commit_id),
            'commit_id': commit_id,
            'suf': suffix,
            'hovercard_alt': 'Commit: {}'.format(commit_id),
            'hovercard_url': route_url(
                'hovercard_repo_commit', repo_name=repo_name, commit_id=commit_id)
        }

    return url_pat.sub(_commit_link, text_)
1574
1574
1575
1575
def _process_url_func(match_obj, repo_name, uid, entry,
                      return_raw_data=False, link_format='html'):
    """
    Turn a single issue-tracker pattern match into a rendered link.

    :param match_obj: regex match object for one issue reference
    :param repo_name: full repo name the text belongs to
    :param uid: issue tracker entry uid (``None`` for the builtin !PR rule)
    :param entry: issue tracker settings dict; uses keys 'url', 'desc',
        'pref' and optionally 'hovercard_url'
    :param return_raw_data: when True, return ``{'id', 'url'}`` instead of
        markup
    :param link_format: html/rst/markdown, optionally with '+hovercard'
    :raises ValueError: on an unknown ``link_format``
    """
    # keep a single leading space if the pattern consumed one
    pref = ''
    if match_obj.group().startswith(' '):
        pref = ' '

    issue_id = ''.join(match_obj.groups())

    # pick the output template for the requested markup flavour
    if link_format == 'html':
        tmpl = (
            '%(pref)s<a class="tooltip %(cls)s" href="%(url)s" title="%(title)s">'
            '%(issue-prefix)s%(id-repr)s'
            '</a>')
    elif link_format == 'html+hovercard':
        tmpl = (
            '%(pref)s<a class="tooltip-hovercard %(cls)s" href="%(url)s" data-hovercard-url="%(hovercard_url)s">'
            '%(issue-prefix)s%(id-repr)s'
            '</a>')
    elif link_format in ['rst', 'rst+hovercard']:
        tmpl = '`%(issue-prefix)s%(id-repr)s <%(url)s>`_'
    elif link_format in ['markdown', 'markdown+hovercard']:
        tmpl = '[%(pref)s%(issue-prefix)s%(id-repr)s](%(url)s)'
    else:
        raise ValueError('Bad link_format:{}'.format(link_format))

    (repo_name_cleaned,
     parent_group_name) = RepoGroupModel()._get_group_name_and_parent(repo_name)

    # variables replacement
    named_vars = {
        'id': issue_id,
        'repo': repo_name,
        'repo_name': repo_name_cleaned,
        'group_name': parent_group_name,
        # set dummy keys so we always have them
        'hostname': '',
        'netloc': '',
        'scheme': ''
    }

    request = get_current_request()
    if request:
        # exposes, hostname, netloc, scheme
        host_data = get_host_info(request)
        named_vars.update(host_data)

    # named regex variables
    named_vars.update(match_obj.groupdict())
    # safe_substitute leaves unknown ${...} placeholders untouched instead
    # of raising, so partially-filled templates still render
    _url = string.Template(entry['url']).safe_substitute(**named_vars)
    desc = string.Template(escape(entry['desc'])).safe_substitute(**named_vars)
    hovercard_url = string.Template(entry.get('hovercard_url', '')).safe_substitute(**named_vars)

    def quote_cleaner(input_str):
        """Remove quotes as it's HTML"""
        return input_str.replace('"', '')

    data = {
        'pref': pref,
        'cls': quote_cleaner('issue-tracker-link'),
        'url': quote_cleaner(_url),
        'id-repr': issue_id,
        'issue-prefix': entry['pref'],
        'serv': entry['url'],
        'title': bleach.clean(desc, strip=True),
        'hovercard_url': hovercard_url
    }

    if return_raw_data:
        return {
            'id': issue_id,
            'url': _url
        }
    return tmpl % data
1649
1649
1650
1650
def get_active_pattern_entries(repo_name):
    """
    Fetch the issue-tracker pattern settings, scoped to *repo_name* when
    a repository with that name exists.
    """
    repo = None
    if repo_name:
        # Retrieving repo_name to avoid invalid repo_name to explode on
        # IssueTrackerSettingsModel but still passing invalid name further down
        repo = Repository.get_by_repo_name(repo_name, cache=True)

    return IssueTrackerSettingsModel(repo=repo).get_settings(cache=True)
1661
1661
1662
1662
# matches pull-request references like "!123", either at the start of the
# text or preceded by a space; group(1) captures the numeric id
pr_pattern_re = regex.compile(r'(?:(?:^!)|(?: !))(\d+)')

# markup flavours accepted by process_patterns()
allowed_link_formats = [
    'html', 'rst', 'markdown', 'html+hovercard', 'rst+hovercard', 'markdown+hovercard']

# module-level memo of user-defined issue patterns compiled by
# process_patterns(); keyed by the raw pattern string
compile_cache = {

}
1671
1671
1672
1672
def process_patterns(text_string, repo_name, link_format='html', active_entries=None):
    """
    Replace issue-tracker pattern matches in *text_string* with links.

    :param text_string: text to scan for issue references
    :param repo_name: repo the text belongs to; used for pattern scoping
        and URL variable substitution
    :param link_format: output markup flavour, one of ``allowed_link_formats``
    :param active_entries: pre-fetched pattern entries; looked up via
        :func:`get_active_pattern_entries` when ``None``
    :return: tuple of ``(new_text, issues_data, errors)`` — the linkified
        text, raw data for every matched issue, and pattern compile errors
    :raises ValueError: if ``link_format`` is not an allowed value
    """

    if link_format not in allowed_link_formats:
        raise ValueError('Link format can be only one of:{} got {}'.format(
            allowed_link_formats, link_format))
    issues_data = []
    errors = []
    new_text = text_string

    if active_entries is None:
        log.debug('Fetch active issue tracker patterns for repo: %s', repo_name)
        active_entries = get_active_pattern_entries(repo_name)

    log.debug('Got %s pattern entries to process', len(active_entries))

    for uid, entry in active_entries.items():

        if not (entry['pat'] and entry['url']):
            log.debug('skipping due to missing data')
            continue

        log.debug('issue tracker entry: uid: `%s` PAT:%s URL:%s PREFIX:%s',
                  uid, entry['pat'], entry['url'], entry['pref'])

        # prefer a pre-compiled pattern, then the module-level cache,
        # compiling (and caching) only as a last resort
        if entry.get('pat_compiled'):
            pattern = entry['pat_compiled']
        elif entry['pat'] in compile_cache:
            pattern = compile_cache[entry['pat']]
        else:
            try:
                pattern = regex.compile(r'%s' % entry['pat'])
            except regex.error as e:
                # a broken user pattern is collected as an error, not fatal
                regex_err = ValueError('{}:{}'.format(entry['pat'], e))
                log.exception('issue tracker pattern: `%s` failed to compile', regex_err)
                errors.append(regex_err)
                continue
            compile_cache[entry['pat']] = pattern

        # raw issue data is extracted from the ORIGINAL text, while the
        # link substitution runs on the progressively rewritten new_text
        data_func = partial(
            _process_url_func, repo_name=repo_name, entry=entry, uid=uid,
            return_raw_data=True)

        for match_obj in pattern.finditer(text_string):
            issues_data.append(data_func(match_obj))

        url_func = partial(
            _process_url_func, repo_name=repo_name, entry=entry, uid=uid,
            link_format=link_format)

        new_text = pattern.sub(url_func, new_text)
        log.debug('processed prefix:uid `%s`', uid)

    # finally use global replace, eg !123 -> pr-link, those will not catch
    # if already similar pattern exists
    server_url = '${scheme}://${netloc}'
    pr_entry = {
        'pref': '!',
        'url': server_url + '/_admin/pull-requests/${id}',
        'desc': 'Pull Request !${id}',
        'hovercard_url': server_url + '/_hovercard/pull_request/${id}'
    }
    pr_url_func = partial(
        _process_url_func, repo_name=repo_name, entry=pr_entry, uid=None,
        link_format=link_format+'+hovercard')
    new_text = pr_pattern_re.sub(pr_url_func, new_text)
    log.debug('processed !pr pattern')

    return new_text, issues_data, errors
1741
1741
1742
1742
def urlify_commit_message(commit_text, repository=None, active_pattern_entries=None,
                          issues_container_callback=None, error_container=None):
    """
    Parses given text message and makes proper links.
    issues are linked to given issue-server, and rest is a commit link
    """

    # escape HTML brackets first so raw markup in the message stays inert
    linked = commit_text.replace('<', '&lt;').replace('>', '&gt;')

    # extract http/https links and make them real urls
    linked = urlify_text(linked, safe=False)

    # urlify commits - extract commit ids and make link out of them, if we have
    # the scope of repository present.
    if repository:
        linked = urlify_commits(linked, repository)

    # process issue tracker patterns
    linked, found_issues, pattern_errors = process_patterns(
        linked, repository or '', active_entries=active_pattern_entries)

    if issues_container_callback is not None:
        for found_issue in found_issues:
            issues_container_callback(found_issue)

    if error_container is not None:
        error_container.extend(pattern_errors)

    return literal(linked)
1775
1775
1776
1776
def render_binary(repo_name, file_obj):
    """
    Choose how to render a binary file
    """

    # unicode
    filename = file_obj.name

    # images: render inline via the raw-file route; anything else yields None
    image_patterns = ('*.png', '*.jpeg', '*.jpg', '*.ico', '*.gif')
    if any(fnmatch.fnmatch(filename, pat=pattern) for pattern in image_patterns):
        src = route_path(
            'repo_file_raw', repo_name=repo_name,
            commit_id=file_obj.commit.raw_id,
            f_path=file_obj.path)

        return literal(
            '<img class="rendered-binary" alt="rendered-image" src="{}">'.format(src))
1795
1795
1796
1796
def renderer_from_filename(filename, exclude=None):
    """
    choose a renderer based on filename, this works only for text based files
    """

    # ipython notebooks get their dedicated renderer
    if fnmatch.fnmatch(filename, pat='*.ipynb'):
        return 'jupyter'

    # otherwise defer to the markup renderer lookup; normalize falsy to None
    markup_renderer = MarkupRenderer.renderer_from_filename(filename, exclude=exclude)
    return markup_renderer if markup_renderer else None
1811
1811
1812
1812
def render(source, renderer='rst', mentions=False, relative_urls=None,
           repo_name=None, active_pattern_entries=None, issues_container_callback=None):
    """
    Render *source* with the chosen renderer ('plain', 'rst', 'markdown'
    or 'jupyter') and return wrapped HTML, or ``None`` to indicate the raw
    file source should be shown instead.
    """

    def _maybe_relative(html_source):
        # rewrite links relative to relative_urls when configured
        if relative_urls:
            return relative_links(html_source, relative_urls)
        return html_source

    def _link_issues(text, fmt):
        # process patterns on comments if we pass in repo name
        processed, found_issues, _errors = process_patterns(
            text, repo_name, link_format=fmt,
            active_entries=active_pattern_entries)
        if issues_container_callback is not None:
            for found_issue in found_issues:
                issues_container_callback(found_issue)
        return processed

    if renderer == 'plain':
        return literal(
            MarkupRenderer.plain(source, leading_newline=False))

    if renderer == 'rst':
        if repo_name:
            source = _link_issues(source, 'rst')
        return literal(
            '<div class="rst-block">%s</div>' %
            _maybe_relative(
                MarkupRenderer.rst(source, mentions=mentions)))

    if renderer == 'markdown':
        if repo_name:
            source = _link_issues(source, 'markdown')
        return literal(
            '<div class="markdown-block">%s</div>' %
            _maybe_relative(
                MarkupRenderer.markdown(source, flavored=True,
                                        mentions=mentions)))

    if renderer == 'jupyter':
        return literal(
            '<div class="ipynb">%s</div>' %
            _maybe_relative(
                MarkupRenderer.jupyter(source)))

    # None means just show the file-source
    return None
1865
1865
1866
1866
def commit_status(repo, commit_id):
    """Return the recorded changeset status for *commit_id* of *repo*."""
    status_model = ChangesetStatusModel()
    return status_model.get_status(repo, commit_id)
1869
1869
1870
1870
def commit_status_lbl(commit_status):
    """Translate a changeset status code into its display label."""
    status_labels = dict(ChangesetStatus.STATUSES)
    return status_labels.get(commit_status)
1873
1873
1874
1874
def commit_time(repo_name, commit_id):
    """Return the date of *commit_id* inside repository *repo_name*."""
    repository = Repository.get_by_repo_name(repo_name)
    return repository.get_commit(commit_id=commit_id).date
1879
1879
1880
1880
def get_permission_name(key):
    """Return the human readable name for permission *key*."""
    permission_names = dict(Permission.PERMS)
    return permission_names.get(key)
1883
1883
1884
1884
def journal_filter_help(request):
    """
    Build a translated help text describing the journal filter syntax.

    :param request: pyramid request, used for its ``translate`` callable
    """
    _ = request.translate
    from rhodecode.lib.audit_logger import ACTIONS
    # wrap the sorted action names so the help text stays readable
    action_names = ', '.join(sorted(ACTIONS.keys()))
    actions = '\n'.join(textwrap.wrap(action_names, 80))

    # NOTE: the literal below is the translation msgid — keep it stable
    help_text = _(
        'Example filter terms:\n'
        ' repository:vcs\n'
        ' username:marcin\n'
        ' username:(NOT marcin)\n'
        ' action:*push*\n'
        ' ip:127.0.0.1\n'
        ' date:20120101\n'
        ' date:[20120101100000 TO 20120102]\n'
        '\n'
        'Actions: {actions}\n'
        '\n'
        'Generate wildcards using \'*\' character:\n'
        ' "repository:vcs*" - search everything starting with \'vcs\'\n'
        ' "repository:*vcs*" - search for repository containing \'vcs\'\n'
        '\n'
        'Optional AND / OR operators in queries\n'
        ' "repository:vcs OR repository:test"\n'
        ' "username:test AND repository:test*"\n'
    )
    return help_text.format(actions=actions)
1910
1910
1911
1911
def not_mapped_error(repo_name):
    """Flash an error saying ``repo_name`` exists on disk but not in the db."""
    from rhodecode.translation import _
    msg = _('%s repository is not mapped to db perhaps'
            ' it was created or renamed from the filesystem'
            ' please run the application again'
            ' in order to rescan repositories') % repo_name
    flash(msg, category='error')
1918
1918
1919
1919
def ip_range(ip_addr):
    """Render the start/end addresses covered by ``ip_addr`` as 'start - end'."""
    from rhodecode.model.db import UserIpMap
    start_ip, end_ip = UserIpMap._get_ip_range(ip_addr)
    return '%s - %s' % (start_ip, end_ip)
1924
1924
1925
1925
def form(url, method='post', needs_csrf_token=True, **attrs):
    """Wrapper around webhelpers.tags.form to prevent CSRF attacks."""
    # non-GET forms must explicitly opt out of CSRF protection; otherwise
    # developers are forced to use secure_form() instead
    is_get = method.lower() == 'get'
    if needs_csrf_token and not is_get:
        raise Exception(
            'Forms to POST/PUT/DELETE endpoints should have (in general) a '
            'CSRF token. If the endpoint does not require such token you can '
            'explicitly set the parameter needs_csrf_token to false.')

    return insecure_form(url, method=method, **attrs)
1935
1935
1936
1936
def secure_form(form_url, method="POST", multipart=False, **attrs):
    """Start a form tag that points the action to an url. This
    form tag will also include the hidden field containing
    the auth token.

    The url options should be given either as a string, or as a
    ``url()`` function. The method for the form defaults to POST.

    Options:

    ``multipart``
        If set to True, the enctype is set to "multipart/form-data".
    ``method``
        The method to use when submitting the form, usually either
        "GET" or "POST". If "PUT", "DELETE", or another verb is used, a
        hidden input with name _method is added to simulate the verb
        over POST.

    """
    # a pyramid request must be supplied so we can reach the session
    # that holds the CSRF token
    if 'request' not in attrs:
        raise ValueError(
            'Calling this form requires request= to be passed as argument')

    request = attrs.pop('request')
    session = request.session

    _form = insecure_form(form_url, method, multipart, **attrs)
    token_input = '<input type="hidden" name="{}" value="{}">'.format(
        csrf_token_key, get_csrf_token(session))

    return literal("%s\n%s" % (_form, literal(token_input)))
1970
1970
1971
1971
def dropdownmenu(name, selected, options, enable_filter=False, **attrs):
    """
    Render a ``<select>`` element plus the select2 activation script for it.

    :param name: input name (also the default element id)
    :param selected: currently selected value
    :param options: option list passed through to ``select``
    :param enable_filter: when True, keep select2's search box enabled
    """
    select_html = select(name, selected, options, **attrs)

    select2 = """
    <script>
        $(document).ready(function() {
            $('#%s').select2({
                containerCssClass: 'drop-menu %s',
                dropdownCssClass: 'drop-menu-dropdown',
                dropdownAutoWidth: true%s
            });
        });
    </script>
    """

    filter_option = """,
            minimumResultsForSearch: -1
    """
    element_id = attrs.get('id') or name
    extra_classes = ' '.join(attrs.pop('extra_classes', []))
    # an empty string keeps the search box; the extra option disables it
    filter_enabled = "" if enable_filter else filter_option
    select_script = literal(
        select2 % (element_id, extra_classes, filter_enabled))

    return literal(select_html + select_script)
1996
1996
1997
1997
def get_visual_attr(tmpl_context_var, attr_name):
    """
    A safe way to get a variable from visual variable of template context

    :param tmpl_context_var: instance of tmpl_context, usually present as `c`
    :param attr_name: name of the attribute we fetch from the c.visual
    """
    visual = getattr(tmpl_context_var, 'visual', None)
    if visual:
        return getattr(visual, attr_name, None)
    return None
2010
2010
2011
2011
def get_last_path_part(file_node):
    """Return ``../<basename>`` for a file node, or ``/`` for the repo root."""
    node_path = file_node.path
    if not node_path:
        return u'/'

    basename = safe_unicode(node_path.rsplit('/', 1)[-1])
    return u'../' + basename
2018
2018
2019
2019
def route_url(*args, **kwargs):
    """
    Wrapper around pyramids `route_url` (fully qualified url) function.
    """
    return get_current_request().route_url(*args, **kwargs)
2026
2026
2027
2027
def route_path(*args, **kwargs):
    """
    Wrapper around pyramids `route_path` function.
    """
    return get_current_request().route_path(*args, **kwargs)
2034
2034
2035
2035
def route_path_or_none(*args, **kwargs):
    """Like :func:`route_path`, but returns None for unknown routes."""
    try:
        path = route_path(*args, **kwargs)
    except KeyError:
        return None
    return path
2041
2041
2042
2042
def current_route_path(request, **kw):
    """Return the current route path with its query params updated by ``kw``."""
    query_args = request.GET.mixed()
    query_args.update(kw)
    return request.current_route_path(_query=query_args)
2047
2047
2048
2048
def curl_api_example(method, args):
    """Build an example curl command line for calling ``method`` via the API."""
    payload = json.dumps(OrderedDict([
        ('id', 1),
        ('auth_token', 'SECRET'),
        ('method', method),
        ('args', args),
    ]))

    template = ("curl {api_url} -X POST -H 'content-type:text/plain' "
                "--data-binary '{args_json}'")
    return template.format(api_url=route_url('apiv2'), args_json=payload)
2061
2061
2062
2062
def api_call_example(method, args):
    """
    Generates an API call example via CURL
    """
    curl_call = curl_api_example(method, args)
    hint = (
        "<br/><br/>SECRET can be found in <a href=\"{token_url}\">auth-tokens</a> page, "
        "and needs to be of `api calls` role."
        .format(token_url=route_url('my_account_auth_tokens')))
    return literal(curl_call + hint)
2074
2074
2075
2075
def notification_description(notification, request):
    """
    Generate notification human readable description based on notification type
    """
    from rhodecode.model.notification import NotificationModel
    model = NotificationModel()
    return model.make_description(notification, translate=request.translate)
2083
2083
2084
2084
def go_import_header(request, db_repo=None):
    """
    Creates a header for go-import functionality in Go Lang

    Returns None unless a repository is given and the request carries the
    ``go-get`` flag that the Go toolchain sends.
    """
    if not db_repo:
        return
    if 'go-get' not in request.GET:
        return

    clone_url = db_repo.clone_url()
    # strip the scheme: go-import wants "host/path" as the import prefix
    prefix = re.split(r'^https?:\/\/', clone_url)[-1]
    meta_tag = '<meta name="go-import" content="{} {} {}">'.format(
        prefix, db_repo.repo_type, clone_url)
    return literal(meta_tag)
2100
2100
2101
2101
def reviewer_as_json(*args, **kwargs):
    """Thin proxy to the repository app's ``reviewer_as_json`` helper."""
    from rhodecode.apps.repository.utils import reviewer_as_json as impl
    return impl(*args, **kwargs)
2105
2105
2106
2106
def get_repo_view_type(request):
    """Map the matched route name to a coarse repo view type label, or None."""
    view_type_by_route = {
        'repo_changelog': 'commits',
        'repo_commits': 'commits',
        'repo_files': 'files',
        'repo_summary': 'summary',
        'repo_commit': 'commit',
    }
    return view_type_by_route.get(request.matched_route.name)
2118
2118
2119
2119
def is_active(menu_entry, selected):
    """
    Returns active class for selecting menus in templates
    <li class=${h.is_active('settings', current_active)}></li>
    """
    entries = menu_entry if isinstance(menu_entry, list) else [menu_entry]
    if selected in entries:
        return "active"
2130
2130
2131
2131
class IssuesRegistry(object):
    """
    Collects issue-tracker references reported by renderers.

    issue_registry = IssuesRegistry()
    some_func(issues_callback=issues_registry(...))
    """

    def __init__(self):
        # every reported issue, in arrival order
        self.issues = []
        # the same issues grouped by their 'id' key
        self.unique_issues = collections.defaultdict(list)

    def __call__(self, commit_dict=None):
        """Return a callback that records issues, tagging them with the commit."""
        def record(issue):
            if commit_dict and issue:
                issue['commit'] = commit_dict
            self.issues.append(issue)
            self.unique_issues[issue['id']].append(issue)
        return record

    def get_issues(self):
        return self.issues

    @property
    def issues_unique_count(self):
        return len({entry['id'] for entry in self.issues})
@@ -1,1028 +1,1028 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2020 RhodeCode GmbH
3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Scm model for RhodeCode
22 Scm model for RhodeCode
23 """
23 """
24
24
25 import os.path
25 import os.path
26 import traceback
26 import traceback
27 import logging
27 import logging
28 import cStringIO
28 import cStringIO
29
29
30 from sqlalchemy import func
30 from sqlalchemy import func
31 from zope.cachedescriptors.property import Lazy as LazyProperty
31 from zope.cachedescriptors.property import Lazy as LazyProperty
32
32
33 import rhodecode
33 import rhodecode
34 from rhodecode.lib.vcs import get_backend
34 from rhodecode.lib.vcs import get_backend
35 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
35 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
36 from rhodecode.lib.vcs.nodes import FileNode
36 from rhodecode.lib.vcs.nodes import FileNode
37 from rhodecode.lib.vcs.backends.base import EmptyCommit
37 from rhodecode.lib.vcs.backends.base import EmptyCommit
38 from rhodecode.lib import helpers as h, rc_cache
38 from rhodecode.lib import helpers as h, rc_cache
39 from rhodecode.lib.auth import (
39 from rhodecode.lib.auth import (
40 HasRepoPermissionAny, HasRepoGroupPermissionAny,
40 HasRepoPermissionAny, HasRepoGroupPermissionAny,
41 HasUserGroupPermissionAny)
41 HasUserGroupPermissionAny)
42 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
42 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
43 from rhodecode.lib import hooks_utils
43 from rhodecode.lib import hooks_utils
44 from rhodecode.lib.utils import (
44 from rhodecode.lib.utils import (
45 get_filesystem_repos, make_db_config)
45 get_filesystem_repos, make_db_config)
46 from rhodecode.lib.utils2 import (safe_str, safe_unicode)
46 from rhodecode.lib.utils2 import (safe_str, safe_unicode)
47 from rhodecode.lib.system_info import get_system_info
47 from rhodecode.lib.system_info import get_system_info
48 from rhodecode.model import BaseModel
48 from rhodecode.model import BaseModel
49 from rhodecode.model.db import (
49 from rhodecode.model.db import (
50 or_, false,
50 or_, false,
51 Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
51 Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
52 PullRequest, FileStore)
52 PullRequest, FileStore)
53 from rhodecode.model.settings import VcsSettingsModel
53 from rhodecode.model.settings import VcsSettingsModel
54 from rhodecode.model.validation_schema.validators import url_validator, InvalidCloneUrl
54 from rhodecode.model.validation_schema.validators import url_validator, InvalidCloneUrl
55
55
56 log = logging.getLogger(__name__)
56 log = logging.getLogger(__name__)
57
57
58
58
class UserTemp(object):
    """Lightweight stand-in object carrying only a user id."""

    def __init__(self, user_id):
        self.user_id = user_id

    def __repr__(self):
        return "<{}('id:{}')>".format(self.__class__.__name__, self.user_id)
65
65
66
66
class RepoTemp(object):
    """Lightweight stand-in object carrying only a repository id."""

    def __init__(self, repo_id):
        self.repo_id = repo_id

    def __repr__(self):
        return "<{}('id:{}')>".format(self.__class__.__name__, self.repo_id)
73
73
74
74
class SimpleCachedRepoList(object):
    """
    Lighter version of iteration of repos without the scm initialisation,
    and with cache usage
    """
    def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
        self.db_repo_list = db_repo_list
        self.repos_path = repos_path
        self.order_by = order_by
        # a leading '-' in order_by means descending sort
        self.reversed = (order_by or '').startswith('-')
        if not perm_set:
            perm_set = ['repository.read', 'repository.write',
                        'repository.admin']
        self.perm_set = perm_set

    def __len__(self):
        return len(self.db_repo_list)

    def __repr__(self):
        return '<%s (%s)>' % (self.__class__.__name__, len(self))

    def __iter__(self):
        checker = HasRepoPermissionAny(*self.perm_set)
        for dbr in self.db_repo_list:
            # skip repos the current user is not allowed to see
            if not checker(dbr.repo_name, 'SimpleCachedRepoList check'):
                continue

            yield {
                'name': dbr.repo_name,
                'dbrepo': dbr.get_dict(),
                'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {},
            }
110
110
111
111
112 class _PermCheckIterator(object):
112 class _PermCheckIterator(object):
113
113
114 def __init__(
114 def __init__(
115 self, obj_list, obj_attr, perm_set, perm_checker,
115 self, obj_list, obj_attr, perm_set, perm_checker,
116 extra_kwargs=None):
116 extra_kwargs=None):
117 """
117 """
118 Creates iterator from given list of objects, additionally
118 Creates iterator from given list of objects, additionally
119 checking permission for them from perm_set var
119 checking permission for them from perm_set var
120
120
121 :param obj_list: list of db objects
121 :param obj_list: list of db objects
122 :param obj_attr: attribute of object to pass into perm_checker
122 :param obj_attr: attribute of object to pass into perm_checker
123 :param perm_set: list of permissions to check
123 :param perm_set: list of permissions to check
124 :param perm_checker: callable to check permissions against
124 :param perm_checker: callable to check permissions against
125 """
125 """
126 self.obj_list = obj_list
126 self.obj_list = obj_list
127 self.obj_attr = obj_attr
127 self.obj_attr = obj_attr
128 self.perm_set = perm_set
128 self.perm_set = perm_set
129 self.perm_checker = perm_checker(*self.perm_set)
129 self.perm_checker = perm_checker(*self.perm_set)
130 self.extra_kwargs = extra_kwargs or {}
130 self.extra_kwargs = extra_kwargs or {}
131
131
132 def __len__(self):
132 def __len__(self):
133 return len(self.obj_list)
133 return len(self.obj_list)
134
134
135 def __repr__(self):
135 def __repr__(self):
136 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
136 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
137
137
138 def __iter__(self):
138 def __iter__(self):
139 for db_obj in self.obj_list:
139 for db_obj in self.obj_list:
140 # check permission at this level
140 # check permission at this level
141 # NOTE(marcink): the __dict__.get() is ~4x faster then getattr()
141 # NOTE(marcink): the __dict__.get() is ~4x faster then getattr()
142 name = db_obj.__dict__.get(self.obj_attr, None)
142 name = db_obj.__dict__.get(self.obj_attr, None)
143 if not self.perm_checker(name, self.__class__.__name__, **self.extra_kwargs):
143 if not self.perm_checker(name, self.__class__.__name__, **self.extra_kwargs):
144 continue
144 continue
145
145
146 yield db_obj
146 yield db_obj
147
147
148
148
class RepoList(_PermCheckIterator):
    """Iterator over repositories filtered by repository-level permissions."""

    def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
        perm_set = perm_set or [
            'repository.read', 'repository.write', 'repository.admin']
        super(RepoList, self).__init__(
            obj_list=db_repo_list,
            obj_attr='_repo_name', perm_set=perm_set,
            perm_checker=HasRepoPermissionAny,
            extra_kwargs=extra_kwargs)
160
160
161
161
class RepoGroupList(_PermCheckIterator):
    """Iterator over repo groups filtered by group-level permissions."""

    def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
        perm_set = perm_set or ['group.read', 'group.write', 'group.admin']
        super(RepoGroupList, self).__init__(
            obj_list=db_repo_group_list,
            obj_attr='_group_name', perm_set=perm_set,
            perm_checker=HasRepoGroupPermissionAny,
            extra_kwargs=extra_kwargs)
173
173
174
174
class UserGroupList(_PermCheckIterator):
    """Iterator over user groups filtered by user-group permissions."""

    def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
        perm_set = perm_set or [
            'usergroup.read', 'usergroup.write', 'usergroup.admin']
        super(UserGroupList, self).__init__(
            obj_list=db_user_group_list,
            obj_attr='users_group_name', perm_set=perm_set,
            perm_checker=HasUserGroupPermissionAny,
            extra_kwargs=extra_kwargs)
186
186
187
187
188 class ScmModel(BaseModel):
188 class ScmModel(BaseModel):
189 """
189 """
190 Generic Scm Model
190 Generic Scm Model
191 """
191 """
192
192
193 @LazyProperty
193 @LazyProperty
194 def repos_path(self):
194 def repos_path(self):
195 """
195 """
196 Gets the repositories root path from database
196 Gets the repositories root path from database
197 """
197 """
198
198
199 settings_model = VcsSettingsModel(sa=self.sa)
199 settings_model = VcsSettingsModel(sa=self.sa)
200 return settings_model.get_repos_location()
200 return settings_model.get_repos_location()
201
201
202 def repo_scan(self, repos_path=None):
202 def repo_scan(self, repos_path=None):
203 """
203 """
204 Listing of repositories in given path. This path should not be a
204 Listing of repositories in given path. This path should not be a
205 repository itself. Return a dictionary of repository objects
205 repository itself. Return a dictionary of repository objects
206
206
207 :param repos_path: path to directory containing repositories
207 :param repos_path: path to directory containing repositories
208 """
208 """
209
209
210 if repos_path is None:
210 if repos_path is None:
211 repos_path = self.repos_path
211 repos_path = self.repos_path
212
212
213 log.info('scanning for repositories in %s', repos_path)
213 log.info('scanning for repositories in %s', repos_path)
214
214
215 config = make_db_config()
215 config = make_db_config()
216 config.set('extensions', 'largefiles', '')
216 config.set('extensions', 'largefiles', '')
217 repos = {}
217 repos = {}
218
218
219 for name, path in get_filesystem_repos(repos_path, recursive=True):
219 for name, path in get_filesystem_repos(repos_path, recursive=True):
220 # name need to be decomposed and put back together using the /
220 # name need to be decomposed and put back together using the /
221 # since this is internal storage separator for rhodecode
221 # since this is internal storage separator for rhodecode
222 name = Repository.normalize_repo_name(name)
222 name = Repository.normalize_repo_name(name)
223
223
224 try:
224 try:
225 if name in repos:
225 if name in repos:
226 raise RepositoryError('Duplicate repository name %s '
226 raise RepositoryError('Duplicate repository name %s '
227 'found in %s' % (name, path))
227 'found in %s' % (name, path))
228 elif path[0] in rhodecode.BACKENDS:
228 elif path[0] in rhodecode.BACKENDS:
229 backend = get_backend(path[0])
229 backend = get_backend(path[0])
230 repos[name] = backend(path[1], config=config,
230 repos[name] = backend(path[1], config=config,
231 with_wire={"cache": False})
231 with_wire={"cache": False})
232 except OSError:
232 except OSError:
233 continue
233 continue
234 except RepositoryError:
234 except RepositoryError:
235 log.exception('Failed to create a repo')
235 log.exception('Failed to create a repo')
236 continue
236 continue
237
237
238 log.debug('found %s paths with repositories', len(repos))
238 log.debug('found %s paths with repositories', len(repos))
239 return repos
239 return repos
240
240
    def get_repos(self, all_repos=None, sort_key=None):
        """
        Get all repositories from db and for each repo create it's
        backend instance and fill that backed with information from database

        :param all_repos: list of repository names as strings
            give specific repositories list, good for filtering

        :param sort_key: initial sorting of repositories
        """
        if all_repos is None:
            # only top-level repositories (no parent group); `== None` is
            # intentional — SQLAlchemy filters require `==`, not `is`
            all_repos = self.sa.query(Repository)\
                .filter(Repository.group_id == None)\
                .order_by(func.lower(Repository.repo_name)).all()
        # wrap in a lazy, permission-aware iterable instead of materializing
        repo_iter = SimpleCachedRepoList(
            all_repos, repos_path=self.repos_path, order_by=sort_key)
        return repo_iter
258
258
259 def get_repo_groups(self, all_groups=None):
259 def get_repo_groups(self, all_groups=None):
260 if all_groups is None:
260 if all_groups is None:
261 all_groups = RepoGroup.query()\
261 all_groups = RepoGroup.query()\
262 .filter(RepoGroup.group_parent_id == None).all()
262 .filter(RepoGroup.group_parent_id == None).all()
263 return [x for x in RepoGroupList(all_groups)]
263 return [x for x in RepoGroupList(all_groups)]
264
264
    def mark_for_invalidation(self, repo_name, delete=False):
        """
        Mark caches of this repo invalid in the database. `delete` flag
        removes the cache entries

        :param repo_name: the repo_name for which caches should be marked
            invalid, or deleted
        :param delete: delete the entry keys instead of setting bool
            flag on them, and also purge caches used by the dogpile
        """
        repo = Repository.get_by_repo_name(repo_name)

        # silently no-op for unknown repositories
        if repo:
            invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
                repo_id=repo.repo_id)
            CacheKey.set_invalidate(invalidation_namespace, delete=delete)

            repo_id = repo.repo_id
            config = repo._config
            # largefiles extension must be enabled for the cache refresh below
            config.set('extensions', 'largefiles', '')
            # refresh the stored commit metadata right away
            repo.update_commit_cache(config=config, cs_cache=None)
            if delete:
                # also purge the dogpile cache region for this repository
                cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
                rc_cache.clear_cache_namespace(
                    'cache_repo', cache_namespace_uid, invalidate=True)
290
290
    def toggle_following_repo(self, follow_repo_id, user_id):
        """
        Toggle the follow state of a repository for a user: an existing
        following row is deleted, otherwise a new one is created.

        :param follow_repo_id: id of the repository being (un)followed
        :param user_id: id of the following user
        """
        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_repo_id == follow_repo_id)\
            .filter(UserFollowing.user_id == user_id).scalar()

        if f is not None:
            # already following -> unfollow and stop
            try:
                self.sa.delete(f)
                return
            except Exception:
                log.error(traceback.format_exc())
                raise

        # not following yet -> create the association row
        try:
            f = UserFollowing()
            f.user_id = user_id
            f.follows_repo_id = follow_repo_id
            self.sa.add(f)
        except Exception:
            log.error(traceback.format_exc())
            raise
313
313
    def toggle_following_user(self, follow_user_id, user_id):
        """
        Toggle the follow state of a user for another user: an existing
        following row is deleted, otherwise a new one is created.

        :param follow_user_id: id of the user being (un)followed
        :param user_id: id of the following user
        """
        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_user_id == follow_user_id)\
            .filter(UserFollowing.user_id == user_id).scalar()

        if f is not None:
            # already following -> unfollow and stop
            try:
                self.sa.delete(f)
                return
            except Exception:
                log.error(traceback.format_exc())
                raise

        # not following yet -> create the association row
        try:
            f = UserFollowing()
            f.user_id = user_id
            f.follows_user_id = follow_user_id
            self.sa.add(f)
        except Exception:
            log.error(traceback.format_exc())
            raise
335
335
336 def is_following_repo(self, repo_name, user_id, cache=False):
336 def is_following_repo(self, repo_name, user_id, cache=False):
337 r = self.sa.query(Repository)\
337 r = self.sa.query(Repository)\
338 .filter(Repository.repo_name == repo_name).scalar()
338 .filter(Repository.repo_name == repo_name).scalar()
339
339
340 f = self.sa.query(UserFollowing)\
340 f = self.sa.query(UserFollowing)\
341 .filter(UserFollowing.follows_repository == r)\
341 .filter(UserFollowing.follows_repository == r)\
342 .filter(UserFollowing.user_id == user_id).scalar()
342 .filter(UserFollowing.user_id == user_id).scalar()
343
343
344 return f is not None
344 return f is not None
345
345
346 def is_following_user(self, username, user_id, cache=False):
346 def is_following_user(self, username, user_id, cache=False):
347 u = User.get_by_username(username)
347 u = User.get_by_username(username)
348
348
349 f = self.sa.query(UserFollowing)\
349 f = self.sa.query(UserFollowing)\
350 .filter(UserFollowing.follows_user == u)\
350 .filter(UserFollowing.follows_user == u)\
351 .filter(UserFollowing.user_id == user_id).scalar()
351 .filter(UserFollowing.user_id == user_id).scalar()
352
352
353 return f is not None
353 return f is not None
354
354
355 def get_followers(self, repo):
355 def get_followers(self, repo):
356 repo = self._get_repo(repo)
356 repo = self._get_repo(repo)
357
357
358 return self.sa.query(UserFollowing)\
358 return self.sa.query(UserFollowing)\
359 .filter(UserFollowing.follows_repository == repo).count()
359 .filter(UserFollowing.follows_repository == repo).count()
360
360
361 def get_forks(self, repo):
361 def get_forks(self, repo):
362 repo = self._get_repo(repo)
362 repo = self._get_repo(repo)
363 return self.sa.query(Repository)\
363 return self.sa.query(Repository)\
364 .filter(Repository.fork == repo).count()
364 .filter(Repository.fork == repo).count()
365
365
366 def get_pull_requests(self, repo):
366 def get_pull_requests(self, repo):
367 repo = self._get_repo(repo)
367 repo = self._get_repo(repo)
368 return self.sa.query(PullRequest)\
368 return self.sa.query(PullRequest)\
369 .filter(PullRequest.target_repo == repo)\
369 .filter(PullRequest.target_repo == repo)\
370 .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
370 .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
371
371
372 def get_artifacts(self, repo):
372 def get_artifacts(self, repo):
373 repo = self._get_repo(repo)
373 repo = self._get_repo(repo)
374 return self.sa.query(FileStore)\
374 return self.sa.query(FileStore)\
375 .filter(FileStore.repo == repo)\
375 .filter(FileStore.repo == repo)\
376 .filter(or_(FileStore.hidden == None, FileStore.hidden == false())).count()
376 .filter(or_(FileStore.hidden == None, FileStore.hidden == false())).count()
377
377
378 def mark_as_fork(self, repo, fork, user):
378 def mark_as_fork(self, repo, fork, user):
379 repo = self._get_repo(repo)
379 repo = self._get_repo(repo)
380 fork = self._get_repo(fork)
380 fork = self._get_repo(fork)
381 if fork and repo.repo_id == fork.repo_id:
381 if fork and repo.repo_id == fork.repo_id:
382 raise Exception("Cannot set repository as fork of itself")
382 raise Exception("Cannot set repository as fork of itself")
383
383
384 if fork and repo.repo_type != fork.repo_type:
384 if fork and repo.repo_type != fork.repo_type:
385 raise RepositoryError(
385 raise RepositoryError(
386 "Cannot set repository as fork of repository with other type")
386 "Cannot set repository as fork of repository with other type")
387
387
388 repo.fork = fork
388 repo.fork = fork
389 self.sa.add(repo)
389 self.sa.add(repo)
390 return repo
390 return repo
391
391
    def pull_changes(self, repo, username, remote_uri=None, validate_uri=True):
        """
        Fetch changes from a remote uri into the given repository and
        invalidate its caches afterwards.

        :param repo: database repository (or repo_name/id) to pull into
        :param username: user performing the pull (hooks are cleared before
            fetching, so this is informational)
        :param remote_uri: optional override of the stored ``clone_uri``
        :param validate_uri: validate the uri before fetching; scheduler
            tasks may carry unvalidated uris
        :raises Exception: when no remote uri is configured
        :raises InvalidCloneUrl: when validation of the uri fails
        """
        dbrepo = self._get_repo(repo)
        remote_uri = remote_uri or dbrepo.clone_uri
        if not remote_uri:
            raise Exception("This repository doesn't have a clone uri")

        repo = dbrepo.scm_instance(cache=False)
        # local hooks must not fire for a server-side pull
        repo.config.clear_section('hooks')

        try:
            # NOTE(marcink): add extra validation so we skip invalid urls
            # this is due this tasks can be executed via scheduler without
            # proper validation of remote_uri
            if validate_uri:
                config = make_db_config(clear_session=False)
                url_validator(remote_uri, dbrepo.repo_type, config)
        except InvalidCloneUrl:
            # re-raised explicitly to document that validation errors
            # propagate to the caller unchanged
            raise

        repo_name = dbrepo.repo_name
        try:
            # TODO: we need to make sure those operations call proper hooks !
            repo.fetch(remote_uri)

            self.mark_for_invalidation(repo_name)
        except Exception:
            log.error(traceback.format_exc())
            raise
420
420
421 def push_changes(self, repo, username, remote_uri=None, validate_uri=True):
421 def push_changes(self, repo, username, remote_uri=None, validate_uri=True):
422 dbrepo = self._get_repo(repo)
422 dbrepo = self._get_repo(repo)
423 remote_uri = remote_uri or dbrepo.push_uri
423 remote_uri = remote_uri or dbrepo.push_uri
424 if not remote_uri:
424 if not remote_uri:
425 raise Exception("This repository doesn't have a clone uri")
425 raise Exception("This repository doesn't have a clone uri")
426
426
427 repo = dbrepo.scm_instance(cache=False)
427 repo = dbrepo.scm_instance(cache=False)
428 repo.config.clear_section('hooks')
428 repo.config.clear_section('hooks')
429
429
430 try:
430 try:
431 # NOTE(marcink): add extra validation so we skip invalid urls
431 # NOTE(marcink): add extra validation so we skip invalid urls
432 # this is due this tasks can be executed via scheduler without
432 # this is due this tasks can be executed via scheduler without
433 # proper validation of remote_uri
433 # proper validation of remote_uri
434 if validate_uri:
434 if validate_uri:
435 config = make_db_config(clear_session=False)
435 config = make_db_config(clear_session=False)
436 url_validator(remote_uri, dbrepo.repo_type, config)
436 url_validator(remote_uri, dbrepo.repo_type, config)
437 except InvalidCloneUrl:
437 except InvalidCloneUrl:
438 raise
438 raise
439
439
440 try:
440 try:
441 repo.push(remote_uri)
441 repo.push(remote_uri)
442 except Exception:
442 except Exception:
443 log.error(traceback.format_exc())
443 log.error(traceback.format_exc())
444 raise
444 raise
445
445
    def commit_change(self, repo, repo_name, commit, user, author, message,
                      content, f_path):
        """
        Commits changes

        :param repo: SCM instance
        :param repo_name: repository name used for cache invalidation and
            the post-push hook
        :param commit: parent commit the change is based on
        :param user: RhodeCode user (or user_id) performing the commit
        :param author: author string of the new commit
        :param message: commit message
        :param content: new file content
        :param f_path: path of the changed file
        :returns: the newly created tip commit
        """
        user = self._get_user(user)

        # decoding here will force that we have proper encoded values
        # in any other case this will throw exceptions and deny commit
        content = safe_str(content)
        path = safe_str(f_path)
        # message and author needs to be unicode
        # proper backend should then translate that into required type
        message = safe_unicode(message)
        author = safe_unicode(author)
        imc = repo.in_memory_commit
        # preserve the file mode of the node being replaced
        imc.change(FileNode(path, content, mode=commit.get_file_mode(f_path)))
        try:
            # TODO: handle pre-push action !
            tip = imc.commit(
                message=message, author=author, parents=[commit],
                branch=commit.branch)
        except Exception as e:
            log.error(traceback.format_exc())
            raise IMCCommitError(str(e))
        finally:
            # always clear caches, if commit fails we want fresh object also
            self.mark_for_invalidation(repo_name)

        # We trigger the post-push action
        hooks_utils.trigger_post_push_hook(
            username=user.username, action='push_local', hook_type='post_push',
            repo_name=repo_name, repo_type=repo.alias, commit_ids=[tip.raw_id])
        return tip
483
483
484 def _sanitize_path(self, f_path):
484 def _sanitize_path(self, f_path):
485 if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path:
485 if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path:
486 raise NonRelativePathError('%s is not an relative path' % f_path)
486 raise NonRelativePathError('%s is not an relative path' % f_path)
487 if f_path:
487 if f_path:
488 f_path = os.path.normpath(f_path)
488 f_path = os.path.normpath(f_path)
489 return f_path
489 return f_path
490
490
    def get_dirnode_metadata(self, request, commit, dir_node):
        """
        Build per-entry metadata (size, last change, author...) for the
        direct children of a directory node.

        :param request: current request, used for gravatar rendering
        :param commit: commit the directory belongs to (unused directly;
            nodes carry their own last_commit)
        :param dir_node: directory node to describe
        :returns: list of dicts, one per child entry; empty list when
            ``dir_node`` is not a directory
        """
        if not dir_node.is_dir():
            return []

        data = []
        for node in dir_node:
            if not node.is_file():
                # we skip file-nodes
                continue

            last_commit = node.last_commit
            last_commit_date = last_commit.date
            data.append({
                'name': node.name,
                'size': h.format_byte_size_binary(node.size),
                'modified_at': h.format_date(last_commit_date),
                'modified_ts': last_commit_date.isoformat(),
                'revision': last_commit.revision,
                'short_id': last_commit.short_id,
                # escape user-controlled text before it reaches templates
                'message': h.escape(last_commit.message),
                'author': h.escape(last_commit.author),
                'user_profile': h.gravatar_with_user(
                    request, last_commit.author),
            })

        return data
517
517
    def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
                  extended_info=False, content=False, max_file_bytes=None):
        """
        recursive walk in root dir and return a set of all path in that dir
        based on repository walk function

        :param repo_name: name of repository
        :param commit_id: commit id for which to list nodes
        :param root_path: root path to list
        :param flat: return as a list, if False returns a dict with description
        :param extended_info: show additional info such as md5, binary, size etc
        :param content: add nodes content to the return data
        :param max_file_bytes: will not return file contents over this limit

        :returns: tuple of (dirs, files); each entry is a plain path when
            ``flat`` is True, otherwise a dict of attributes
        """
        _files = list()
        _dirs = list()
        try:
            _repo = self._get_repo(repo_name)
            commit = _repo.scm_instance().get_commit(commit_id=commit_id)
            root_path = root_path.lstrip('/')
            for __, dirs, files in commit.walk(root_path):

                for f in files:
                    _content = None
                    _data = f_name = f.unicode_path

                    if not flat:
                        _data = {
                            "name": h.escape(f_name),
                            "type": "file",
                        }
                        if extended_info:
                            _data.update({
                                "md5": f.md5,
                                "binary": f.is_binary,
                                "size": f.size,
                                "extension": f.extension,
                                "mimetype": f.mimetype,
                                "lines": f.lines()[0]
                            })

                        if content:
                            # content is only attached for text files under
                            # the size limit; otherwise it is set to None
                            over_size_limit = (max_file_bytes is not None
                                               and f.size > max_file_bytes)
                            full_content = None
                            if not f.is_binary and not over_size_limit:
                                full_content = safe_str(f.content)

                            _data.update({
                                "content": full_content,
                            })
                    _files.append(_data)

                for d in dirs:
                    _data = d_name = d.unicode_path
                    if not flat:
                        _data = {
                            "name": h.escape(d_name),
                            "type": "dir",
                        }
                    if extended_info:
                        # directories carry no file metadata; keys are kept
                        # for a uniform schema with file entries
                        _data.update({
                            "md5": None,
                            "binary": None,
                            "size": None,
                            "extension": None,
                        })
                    if content:
                        _data.update({
                            "content": None
                        })
                    _dirs.append(_data)
        except RepositoryError:
            log.exception("Exception in get_nodes")
            raise

        return _dirs, _files
596
596
597 def get_quick_filter_nodes(self, repo_name, commit_id, root_path='/'):
597 def get_quick_filter_nodes(self, repo_name, commit_id, root_path='/'):
598 """
598 """
599 Generate files for quick filter in files view
599 Generate files for quick filter in files view
600 """
600 """
601
601
602 _files = list()
602 _files = list()
603 _dirs = list()
603 _dirs = list()
604 try:
604 try:
605 _repo = self._get_repo(repo_name)
605 _repo = self._get_repo(repo_name)
606 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
606 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
607 root_path = root_path.lstrip('/')
607 root_path = root_path.lstrip('/')
608 for __, dirs, files in commit.walk(root_path):
608 for __, dirs, files in commit.walk(root_path):
609
609
610 for f in files:
610 for f in files:
611
611
612 _data = {
612 _data = {
613 "name": h.escape(f.unicode_path),
613 "name": h.escape(f.unicode_path),
614 "type": "file",
614 "type": "file",
615 }
615 }
616
616
617 _files.append(_data)
617 _files.append(_data)
618
618
619 for d in dirs:
619 for d in dirs:
620
620
621 _data = {
621 _data = {
622 "name": h.escape(d.unicode_path),
622 "name": h.escape(d.unicode_path),
623 "type": "dir",
623 "type": "dir",
624 }
624 }
625
625
626 _dirs.append(_data)
626 _dirs.append(_data)
627 except RepositoryError:
627 except RepositoryError:
628 log.exception("Exception in get_quick_filter_nodes")
628 log.exception("Exception in get_quick_filter_nodes")
629 raise
629 raise
630
630
631 return _dirs, _files
631 return _dirs, _files
632
632
    def get_node(self, repo_name, commit_id, file_path,
                 extended_info=False, content=False, max_file_bytes=None, cache=True):
        """
        retrieve single node from commit

        :param repo_name: name of repository
        :param commit_id: commit id to read the node from
        :param file_path: path of the file node
        :param extended_info: include extension/mimetype in the result
        :param content: include file content (subject to ``max_file_bytes``)
        :param max_file_bytes: skip content for files larger than this
        :param cache: read node metadata/content via the cached accessors;
            when False a single uncached metadata read is used instead
        :raises RepositoryError: when the path points at a directory
        :returns: dict describing the file node
        """
        try:

            _repo = self._get_repo(repo_name)
            commit = _repo.scm_instance().get_commit(commit_id=commit_id)

            file_node = commit.get_node(file_path)
            if file_node.is_dir():
                raise RepositoryError('The given path is a directory')

            _content = None
            f_name = file_node.unicode_path

            file_data = {
                "name": h.escape(f_name),
                "type": "file",
            }

            if extended_info:
                file_data.update({
                    "extension": file_node.extension,
                    "mimetype": file_node.mimetype,
                })

            if cache:
                # cached properties on the node itself
                md5 = file_node.md5
                is_binary = file_node.is_binary
                size = file_node.size
            else:
                # one uncached read; _content is kept for possible reuse below
                is_binary, md5, size, _content = file_node.metadata_uncached()

            file_data.update({
                "md5": md5,
                "binary": is_binary,
                "size": size,
            })

            if content and cache:
                # get content + cache
                size = file_node.size
                over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
                full_content = None
                all_lines = 0
                if not file_node.is_binary and not over_size_limit:
                    full_content = safe_unicode(file_node.content)
                    all_lines, empty_lines = file_node.count_lines(full_content)

                file_data.update({
                    "content": full_content,
                    "lines": all_lines
                })
            elif content:
                # get content *without* cache
                if _content is None:
                    is_binary, md5, size, _content = file_node.metadata_uncached()

                over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
                full_content = None
                all_lines = 0
                if not is_binary and not over_size_limit:
                    full_content = safe_unicode(_content)
                    all_lines, empty_lines = file_node.count_lines(full_content)

                file_data.update({
                    "content": full_content,
                    "lines": all_lines
                })

        except RepositoryError:
            log.exception("Exception in get_node")
            raise

        return file_data
710
710
711 def get_fts_data(self, repo_name, commit_id, root_path='/'):
711 def get_fts_data(self, repo_name, commit_id, root_path='/'):
712 """
712 """
713 Fetch node tree for usage in full text search
713 Fetch node tree for usage in full text search
714 """
714 """
715
715
716 tree_info = list()
716 tree_info = list()
717
717
718 try:
718 try:
719 _repo = self._get_repo(repo_name)
719 _repo = self._get_repo(repo_name)
720 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
720 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
721 root_path = root_path.lstrip('/')
721 root_path = root_path.lstrip('/')
722 for __, dirs, files in commit.walk(root_path):
722 for __, dirs, files in commit.walk(root_path):
723
723
724 for f in files:
724 for f in files:
725 is_binary, md5, size, _content = f.metadata_uncached()
725 is_binary, md5, size, _content = f.metadata_uncached()
726 _data = {
726 _data = {
727 "name": f.unicode_path,
727 "name": f.unicode_path,
728 "md5": md5,
728 "md5": md5,
729 "extension": f.extension,
729 "extension": f.extension,
730 "binary": is_binary,
730 "binary": is_binary,
731 "size": size
731 "size": size
732 }
732 }
733
733
734 tree_info.append(_data)
734 tree_info.append(_data)
735
735
736 except RepositoryError:
736 except RepositoryError:
737 log.exception("Exception in get_nodes")
737 log.exception("Exception in get_nodes")
738 raise
738 raise
739
739
740 return tree_info
740 return tree_info
741
741
742 def create_nodes(self, user, repo, message, nodes, parent_commit=None,
742 def create_nodes(self, user, repo, message, nodes, parent_commit=None,
743 author=None, trigger_push_hook=True):
743 author=None, trigger_push_hook=True):
744 """
744 """
745 Commits given multiple nodes into repo
745 Commits given multiple nodes into repo
746
746
747 :param user: RhodeCode User object or user_id, the commiter
747 :param user: RhodeCode User object or user_id, the commiter
748 :param repo: RhodeCode Repository object
748 :param repo: RhodeCode Repository object
749 :param message: commit message
749 :param message: commit message
750 :param nodes: mapping {filename:{'content':content},...}
750 :param nodes: mapping {filename:{'content':content},...}
751 :param parent_commit: parent commit, can be empty than it's
751 :param parent_commit: parent commit, can be empty than it's
752 initial commit
752 initial commit
753 :param author: author of commit, cna be different that commiter
753 :param author: author of commit, cna be different that commiter
754 only for git
754 only for git
755 :param trigger_push_hook: trigger push hooks
755 :param trigger_push_hook: trigger push hooks
756
756
757 :returns: new committed commit
757 :returns: new committed commit
758 """
758 """
759
759
760 user = self._get_user(user)
760 user = self._get_user(user)
761 scm_instance = repo.scm_instance(cache=False)
761 scm_instance = repo.scm_instance(cache=False)
762
762
763 processed_nodes = []
763 processed_nodes = []
764 for f_path in nodes:
764 for f_path in nodes:
765 f_path = self._sanitize_path(f_path)
765 f_path = self._sanitize_path(f_path)
766 content = nodes[f_path]['content']
766 content = nodes[f_path]['content']
767 f_path = safe_str(f_path)
767 f_path = safe_str(f_path)
768 # decoding here will force that we have proper encoded values
768 # decoding here will force that we have proper encoded values
769 # in any other case this will throw exceptions and deny commit
769 # in any other case this will throw exceptions and deny commit
770 if isinstance(content, (basestring,)):
770 if isinstance(content, (str,)):
771 content = safe_str(content)
771 content = safe_str(content)
772 elif isinstance(content, (file, cStringIO.OutputType,)):
772 elif isinstance(content, (file, cStringIO.OutputType,)):
773 content = content.read()
773 content = content.read()
774 else:
774 else:
775 raise Exception('Content is of unrecognized type %s' % (
775 raise Exception('Content is of unrecognized type %s' % (
776 type(content)
776 type(content)
777 ))
777 ))
778 processed_nodes.append((f_path, content))
778 processed_nodes.append((f_path, content))
779
779
780 message = safe_unicode(message)
780 message = safe_unicode(message)
781 commiter = user.full_contact
781 commiter = user.full_contact
782 author = safe_unicode(author) if author else commiter
782 author = safe_unicode(author) if author else commiter
783
783
784 imc = scm_instance.in_memory_commit
784 imc = scm_instance.in_memory_commit
785
785
786 if not parent_commit:
786 if not parent_commit:
787 parent_commit = EmptyCommit(alias=scm_instance.alias)
787 parent_commit = EmptyCommit(alias=scm_instance.alias)
788
788
789 if isinstance(parent_commit, EmptyCommit):
789 if isinstance(parent_commit, EmptyCommit):
790 # EmptyCommit means we we're editing empty repository
790 # EmptyCommit means we we're editing empty repository
791 parents = None
791 parents = None
792 else:
792 else:
793 parents = [parent_commit]
793 parents = [parent_commit]
794 # add multiple nodes
794 # add multiple nodes
795 for path, content in processed_nodes:
795 for path, content in processed_nodes:
796 imc.add(FileNode(path, content=content))
796 imc.add(FileNode(path, content=content))
797 # TODO: handle pre push scenario
797 # TODO: handle pre push scenario
798 tip = imc.commit(message=message,
798 tip = imc.commit(message=message,
799 author=author,
799 author=author,
800 parents=parents,
800 parents=parents,
801 branch=parent_commit.branch)
801 branch=parent_commit.branch)
802
802
803 self.mark_for_invalidation(repo.repo_name)
803 self.mark_for_invalidation(repo.repo_name)
804 if trigger_push_hook:
804 if trigger_push_hook:
805 hooks_utils.trigger_post_push_hook(
805 hooks_utils.trigger_post_push_hook(
806 username=user.username, action='push_local',
806 username=user.username, action='push_local',
807 repo_name=repo.repo_name, repo_type=scm_instance.alias,
807 repo_name=repo.repo_name, repo_type=scm_instance.alias,
808 hook_type='post_push',
808 hook_type='post_push',
809 commit_ids=[tip.raw_id])
809 commit_ids=[tip.raw_id])
810 return tip
810 return tip
811
811
812 def update_nodes(self, user, repo, message, nodes, parent_commit=None,
812 def update_nodes(self, user, repo, message, nodes, parent_commit=None,
813 author=None, trigger_push_hook=True):
813 author=None, trigger_push_hook=True):
814 user = self._get_user(user)
814 user = self._get_user(user)
815 scm_instance = repo.scm_instance(cache=False)
815 scm_instance = repo.scm_instance(cache=False)
816
816
817 message = safe_unicode(message)
817 message = safe_unicode(message)
818 commiter = user.full_contact
818 commiter = user.full_contact
819 author = safe_unicode(author) if author else commiter
819 author = safe_unicode(author) if author else commiter
820
820
821 imc = scm_instance.in_memory_commit
821 imc = scm_instance.in_memory_commit
822
822
823 if not parent_commit:
823 if not parent_commit:
824 parent_commit = EmptyCommit(alias=scm_instance.alias)
824 parent_commit = EmptyCommit(alias=scm_instance.alias)
825
825
826 if isinstance(parent_commit, EmptyCommit):
826 if isinstance(parent_commit, EmptyCommit):
827 # EmptyCommit means we we're editing empty repository
827 # EmptyCommit means we we're editing empty repository
828 parents = None
828 parents = None
829 else:
829 else:
830 parents = [parent_commit]
830 parents = [parent_commit]
831
831
832 # add multiple nodes
832 # add multiple nodes
833 for _filename, data in nodes.items():
833 for _filename, data in nodes.items():
834 # new filename, can be renamed from the old one, also sanitaze
834 # new filename, can be renamed from the old one, also sanitaze
835 # the path for any hack around relative paths like ../../ etc.
835 # the path for any hack around relative paths like ../../ etc.
836 filename = self._sanitize_path(data['filename'])
836 filename = self._sanitize_path(data['filename'])
837 old_filename = self._sanitize_path(_filename)
837 old_filename = self._sanitize_path(_filename)
838 content = data['content']
838 content = data['content']
839 file_mode = data.get('mode')
839 file_mode = data.get('mode')
840 filenode = FileNode(old_filename, content=content, mode=file_mode)
840 filenode = FileNode(old_filename, content=content, mode=file_mode)
841 op = data['op']
841 op = data['op']
842 if op == 'add':
842 if op == 'add':
843 imc.add(filenode)
843 imc.add(filenode)
844 elif op == 'del':
844 elif op == 'del':
845 imc.remove(filenode)
845 imc.remove(filenode)
846 elif op == 'mod':
846 elif op == 'mod':
847 if filename != old_filename:
847 if filename != old_filename:
848 # TODO: handle renames more efficient, needs vcs lib changes
848 # TODO: handle renames more efficient, needs vcs lib changes
849 imc.remove(filenode)
849 imc.remove(filenode)
850 imc.add(FileNode(filename, content=content, mode=file_mode))
850 imc.add(FileNode(filename, content=content, mode=file_mode))
851 else:
851 else:
852 imc.change(filenode)
852 imc.change(filenode)
853
853
854 try:
854 try:
855 # TODO: handle pre push scenario commit changes
855 # TODO: handle pre push scenario commit changes
856 tip = imc.commit(message=message,
856 tip = imc.commit(message=message,
857 author=author,
857 author=author,
858 parents=parents,
858 parents=parents,
859 branch=parent_commit.branch)
859 branch=parent_commit.branch)
860 except NodeNotChangedError:
860 except NodeNotChangedError:
861 raise
861 raise
862 except Exception as e:
862 except Exception as e:
863 log.exception("Unexpected exception during call to imc.commit")
863 log.exception("Unexpected exception during call to imc.commit")
864 raise IMCCommitError(str(e))
864 raise IMCCommitError(str(e))
865 finally:
865 finally:
866 # always clear caches, if commit fails we want fresh object also
866 # always clear caches, if commit fails we want fresh object also
867 self.mark_for_invalidation(repo.repo_name)
867 self.mark_for_invalidation(repo.repo_name)
868
868
869 if trigger_push_hook:
869 if trigger_push_hook:
870 hooks_utils.trigger_post_push_hook(
870 hooks_utils.trigger_post_push_hook(
871 username=user.username, action='push_local', hook_type='post_push',
871 username=user.username, action='push_local', hook_type='post_push',
872 repo_name=repo.repo_name, repo_type=scm_instance.alias,
872 repo_name=repo.repo_name, repo_type=scm_instance.alias,
873 commit_ids=[tip.raw_id])
873 commit_ids=[tip.raw_id])
874
874
875 return tip
875 return tip
876
876
877 def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
877 def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
878 author=None, trigger_push_hook=True):
878 author=None, trigger_push_hook=True):
879 """
879 """
880 Deletes given multiple nodes into `repo`
880 Deletes given multiple nodes into `repo`
881
881
882 :param user: RhodeCode User object or user_id, the committer
882 :param user: RhodeCode User object or user_id, the committer
883 :param repo: RhodeCode Repository object
883 :param repo: RhodeCode Repository object
884 :param message: commit message
884 :param message: commit message
885 :param nodes: mapping {filename:{'content':content},...}
885 :param nodes: mapping {filename:{'content':content},...}
886 :param parent_commit: parent commit, can be empty than it's initial
886 :param parent_commit: parent commit, can be empty than it's initial
887 commit
887 commit
888 :param author: author of commit, cna be different that commiter only
888 :param author: author of commit, cna be different that commiter only
889 for git
889 for git
890 :param trigger_push_hook: trigger push hooks
890 :param trigger_push_hook: trigger push hooks
891
891
892 :returns: new commit after deletion
892 :returns: new commit after deletion
893 """
893 """
894
894
895 user = self._get_user(user)
895 user = self._get_user(user)
896 scm_instance = repo.scm_instance(cache=False)
896 scm_instance = repo.scm_instance(cache=False)
897
897
898 processed_nodes = []
898 processed_nodes = []
899 for f_path in nodes:
899 for f_path in nodes:
900 f_path = self._sanitize_path(f_path)
900 f_path = self._sanitize_path(f_path)
901 # content can be empty but for compatabilty it allows same dicts
901 # content can be empty but for compatabilty it allows same dicts
902 # structure as add_nodes
902 # structure as add_nodes
903 content = nodes[f_path].get('content')
903 content = nodes[f_path].get('content')
904 processed_nodes.append((f_path, content))
904 processed_nodes.append((f_path, content))
905
905
906 message = safe_unicode(message)
906 message = safe_unicode(message)
907 commiter = user.full_contact
907 commiter = user.full_contact
908 author = safe_unicode(author) if author else commiter
908 author = safe_unicode(author) if author else commiter
909
909
910 imc = scm_instance.in_memory_commit
910 imc = scm_instance.in_memory_commit
911
911
912 if not parent_commit:
912 if not parent_commit:
913 parent_commit = EmptyCommit(alias=scm_instance.alias)
913 parent_commit = EmptyCommit(alias=scm_instance.alias)
914
914
915 if isinstance(parent_commit, EmptyCommit):
915 if isinstance(parent_commit, EmptyCommit):
916 # EmptyCommit means we we're editing empty repository
916 # EmptyCommit means we we're editing empty repository
917 parents = None
917 parents = None
918 else:
918 else:
919 parents = [parent_commit]
919 parents = [parent_commit]
920 # add multiple nodes
920 # add multiple nodes
921 for path, content in processed_nodes:
921 for path, content in processed_nodes:
922 imc.remove(FileNode(path, content=content))
922 imc.remove(FileNode(path, content=content))
923
923
924 # TODO: handle pre push scenario
924 # TODO: handle pre push scenario
925 tip = imc.commit(message=message,
925 tip = imc.commit(message=message,
926 author=author,
926 author=author,
927 parents=parents,
927 parents=parents,
928 branch=parent_commit.branch)
928 branch=parent_commit.branch)
929
929
930 self.mark_for_invalidation(repo.repo_name)
930 self.mark_for_invalidation(repo.repo_name)
931 if trigger_push_hook:
931 if trigger_push_hook:
932 hooks_utils.trigger_post_push_hook(
932 hooks_utils.trigger_post_push_hook(
933 username=user.username, action='push_local', hook_type='post_push',
933 username=user.username, action='push_local', hook_type='post_push',
934 repo_name=repo.repo_name, repo_type=scm_instance.alias,
934 repo_name=repo.repo_name, repo_type=scm_instance.alias,
935 commit_ids=[tip.raw_id])
935 commit_ids=[tip.raw_id])
936 return tip
936 return tip
937
937
938 def strip(self, repo, commit_id, branch):
938 def strip(self, repo, commit_id, branch):
939 scm_instance = repo.scm_instance(cache=False)
939 scm_instance = repo.scm_instance(cache=False)
940 scm_instance.config.clear_section('hooks')
940 scm_instance.config.clear_section('hooks')
941 scm_instance.strip(commit_id, branch)
941 scm_instance.strip(commit_id, branch)
942 self.mark_for_invalidation(repo.repo_name)
942 self.mark_for_invalidation(repo.repo_name)
943
943
944 def get_unread_journal(self):
944 def get_unread_journal(self):
945 return self.sa.query(UserLog).count()
945 return self.sa.query(UserLog).count()
946
946
947 @classmethod
947 @classmethod
948 def backend_landing_ref(cls, repo_type):
948 def backend_landing_ref(cls, repo_type):
949 """
949 """
950 Return a default landing ref based on a repository type.
950 Return a default landing ref based on a repository type.
951 """
951 """
952
952
953 landing_ref = {
953 landing_ref = {
954 'hg': ('branch:default', 'default'),
954 'hg': ('branch:default', 'default'),
955 'git': ('branch:master', 'master'),
955 'git': ('branch:master', 'master'),
956 'svn': ('rev:tip', 'latest tip'),
956 'svn': ('rev:tip', 'latest tip'),
957 'default': ('rev:tip', 'latest tip'),
957 'default': ('rev:tip', 'latest tip'),
958 }
958 }
959
959
960 return landing_ref.get(repo_type) or landing_ref['default']
960 return landing_ref.get(repo_type) or landing_ref['default']
961
961
962 def get_repo_landing_revs(self, translator, repo=None):
962 def get_repo_landing_revs(self, translator, repo=None):
963 """
963 """
964 Generates select option with tags branches and bookmarks (for hg only)
964 Generates select option with tags branches and bookmarks (for hg only)
965 grouped by type
965 grouped by type
966
966
967 :param repo:
967 :param repo:
968 """
968 """
969 from rhodecode.lib.vcs.backends.git import GitRepository
969 from rhodecode.lib.vcs.backends.git import GitRepository
970
970
971 _ = translator
971 _ = translator
972 repo = self._get_repo(repo)
972 repo = self._get_repo(repo)
973
973
974 if repo:
974 if repo:
975 repo_type = repo.repo_type
975 repo_type = repo.repo_type
976 else:
976 else:
977 repo_type = 'default'
977 repo_type = 'default'
978
978
979 default_landing_ref, landing_ref_lbl = self.backend_landing_ref(repo_type)
979 default_landing_ref, landing_ref_lbl = self.backend_landing_ref(repo_type)
980
980
981 default_ref_options = [
981 default_ref_options = [
982 [default_landing_ref, landing_ref_lbl]
982 [default_landing_ref, landing_ref_lbl]
983 ]
983 ]
984 default_choices = [
984 default_choices = [
985 default_landing_ref
985 default_landing_ref
986 ]
986 ]
987
987
988 if not repo:
988 if not repo:
989 # presented at NEW repo creation
989 # presented at NEW repo creation
990 return default_choices, default_ref_options
990 return default_choices, default_ref_options
991
991
992 repo = repo.scm_instance()
992 repo = repo.scm_instance()
993
993
994 ref_options = [(default_landing_ref, landing_ref_lbl)]
994 ref_options = [(default_landing_ref, landing_ref_lbl)]
995 choices = [default_landing_ref]
995 choices = [default_landing_ref]
996
996
997 # branches
997 # branches
998 branch_group = [(u'branch:%s' % safe_unicode(b), safe_unicode(b)) for b in repo.branches]
998 branch_group = [(u'branch:%s' % safe_unicode(b), safe_unicode(b)) for b in repo.branches]
999 if not branch_group:
999 if not branch_group:
1000 # new repo, or without maybe a branch?
1000 # new repo, or without maybe a branch?
1001 branch_group = default_ref_options
1001 branch_group = default_ref_options
1002
1002
1003 branches_group = (branch_group, _("Branches"))
1003 branches_group = (branch_group, _("Branches"))
1004 ref_options.append(branches_group)
1004 ref_options.append(branches_group)
1005 choices.extend([x[0] for x in branches_group[0]])
1005 choices.extend([x[0] for x in branches_group[0]])
1006
1006
1007 # bookmarks for HG
1007 # bookmarks for HG
1008 if repo.alias == 'hg':
1008 if repo.alias == 'hg':
1009 bookmarks_group = (
1009 bookmarks_group = (
1010 [(u'book:%s' % safe_unicode(b), safe_unicode(b))
1010 [(u'book:%s' % safe_unicode(b), safe_unicode(b))
1011 for b in repo.bookmarks],
1011 for b in repo.bookmarks],
1012 _("Bookmarks"))
1012 _("Bookmarks"))
1013 ref_options.append(bookmarks_group)
1013 ref_options.append(bookmarks_group)
1014 choices.extend([x[0] for x in bookmarks_group[0]])
1014 choices.extend([x[0] for x in bookmarks_group[0]])
1015
1015
1016 # tags
1016 # tags
1017 tags_group = (
1017 tags_group = (
1018 [(u'tag:%s' % safe_unicode(t), safe_unicode(t))
1018 [(u'tag:%s' % safe_unicode(t), safe_unicode(t))
1019 for t in repo.tags],
1019 for t in repo.tags],
1020 _("Tags"))
1020 _("Tags"))
1021 ref_options.append(tags_group)
1021 ref_options.append(tags_group)
1022 choices.extend([x[0] for x in tags_group[0]])
1022 choices.extend([x[0] for x in tags_group[0]])
1023
1023
1024 return choices, ref_options
1024 return choices, ref_options
1025
1025
1026 def get_server_info(self, environ=None):
1026 def get_server_info(self, environ=None):
1027 server_info = get_system_info(environ)
1027 server_info = get_system_info(environ)
1028 return server_info
1028 return server_info
General Comments 0
You need to be logged in to leave comments. Login now