@@ -1,199 +1,199 @@ | |||
|
1 | 1 | # Copyright (C) 2016-2020 RhodeCode GmbH |
|
2 | 2 | # |
|
3 | 3 | # This program is free software: you can redistribute it and/or modify |
|
4 | 4 | # it under the terms of the GNU Affero General Public License, version 3 |
|
5 | 5 | # (only), as published by the Free Software Foundation. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU Affero General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | # |
|
15 | 15 | # This program is dual-licensed. If you wish to learn more about the |
|
16 | 16 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
17 | 17 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
18 | 18 | |
|
19 | 19 | import logging |
|
20 | 20 | import os |
|
21 | 21 | import string |
|
22 | 22 | import functools |
|
23 | 23 | import collections |
|
24 | 24 | import urllib.request, urllib.parse, urllib.error |
|
25 | 25 | |
|
26 | 26 | log = logging.getLogger('rhodecode.' + __name__) |
|
27 | 27 | |
|
28 | 28 | |
|
29 | 29 | class HookResponse(object): |
|
30 | 30 | def __init__(self, status, output): |
|
31 | 31 | self.status = status |
|
32 | 32 | self.output = output |
|
33 | 33 | |
|
34 | 34 | def __add__(self, other): |
|
35 | 35 | other_status = getattr(other, 'status', 0) |
|
36 | 36 | new_status = max(self.status, other_status) |
|
37 | 37 | other_output = getattr(other, 'output', '') |
|
38 | 38 | new_output = self.output + other_output |
|
39 | 39 | |
|
40 | 40 | return HookResponse(new_status, new_output) |
|
41 | 41 | |
|
42 | 42 | def __bool__(self): |
|
43 | 43 | return self.status == 0 |
|
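The `__add__`/`__bool__` pair lets several hook results be folded together with `+` and then tested for overall success: statuses take the max, outputs concatenate, and the object is truthy only when the combined status is 0. A minimal usage sketch (not part of the diff):

```python
ok = HookResponse(0, 'pre-checks passed\n')
fail = HookResponse(1, 'commit message too short\n')

combined = ok + fail
assert combined.status == 1
assert combined.output == 'pre-checks passed\ncommit message too short\n'
assert bool(ok) and not bool(combined)   # truthy means the hook chain succeeded
```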
44 | 44 | |
|
45 | 45 | |
|
46 | 46 | class DotDict(dict): |
|
47 | 47 | |
|
48 | 48 | def __contains__(self, k): |
|
49 | 49 | try: |
|
50 | 50 | return dict.__contains__(self, k) or hasattr(self, k) |
|
51 | 51 | except: |
|
52 | 52 | return False |
|
53 | 53 | |
|
54 | 54 | # only called if k not found in normal places |
|
55 | 55 | def __getattr__(self, k): |
|
56 | 56 | try: |
|
57 | 57 | return object.__getattribute__(self, k) |
|
58 | 58 | except AttributeError: |
|
59 | 59 | try: |
|
60 | 60 | return self[k] |
|
61 | 61 | except KeyError: |
|
62 | 62 | raise AttributeError(k) |
|
63 | 63 | |
|
64 | 64 | def __setattr__(self, k, v): |
|
65 | 65 | try: |
|
66 | 66 | object.__getattribute__(self, k) |
|
67 | 67 | except AttributeError: |
|
68 | 68 | try: |
|
69 | 69 | self[k] = v |
|
70 | 70 | except: |
|
71 | 71 | raise AttributeError(k) |
|
72 | 72 | else: |
|
73 | 73 | object.__setattr__(self, k, v) |
|
74 | 74 | |
|
75 | 75 | def __delattr__(self, k): |
|
76 | 76 | try: |
|
77 | 77 | object.__getattribute__(self, k) |
|
78 | 78 | except AttributeError: |
|
79 | 79 | try: |
|
80 | 80 | del self[k] |
|
81 | 81 | except KeyError: |
|
82 | 82 | raise AttributeError(k) |
|
83 | 83 | else: |
|
84 | 84 | object.__delattr__(self, k) |
|
85 | 85 | |
|
86 | 86 | def toDict(self): |
|
87 | 87 | return unserialize(self) |
|
88 | 88 | |
|
89 | 89 | def __repr__(self): |
|
90 | 90 | keys = list(self.keys()) |
|
91 | 91 | keys.sort() |
|
92 | 92 | args = ', '.join(['%s=%r' % (key, self[key]) for key in keys]) |
|
93 | 93 | return '%s(%s)' % (self.__class__.__name__, args) |
|
94 | 94 | |
|
95 | 95 | @staticmethod |
|
96 | 96 | def fromDict(d): |
|
97 | 97 | return serialize(d) |
|
98 | 98 | |
|
99 | 99 | |
|
100 | 100 | def serialize(x): |
|
101 | 101 | if isinstance(x, dict): |
|
102 | 102 | return DotDict((k, serialize(v)) for k, v in x.items()) |
|
103 | 103 | elif isinstance(x, (list, tuple)): |
|
104 | 104 | return type(x)(serialize(v) for v in x) |
|
105 | 105 | else: |
|
106 | 106 | return x |
|
107 | 107 | |
|
108 | 108 | |
|
109 | 109 | def unserialize(x): |
|
110 | 110 | if isinstance(x, dict): |
|
111 | 111 | return dict((k, unserialize(v)) for k, v in x.items()) |
|
112 | 112 | elif isinstance(x, (list, tuple)): |
|
113 | 113 | return type(x)(unserialize(v) for v in x) |
|
114 | 114 | else: |
|
115 | 115 | return x |
|
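`serialize`/`unserialize` recursively convert between plain dicts and `DotDict`s, and are what `fromDict`/`toDict` delegate to. A small round-trip sketch:

```python
d = DotDict.fromDict({'repo': {'name': 'foo', 'tags': ['a', 'b']}})
assert d.repo.name == 'foo'        # nested dicts become DotDicts too
assert 'repo' in d                 # __contains__ checks keys and attributes

plain = d.toDict()                 # back to plain nested dicts
assert type(plain) is dict and type(plain['repo']) is dict
```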
116 | 116 | |
|
117 | 117 | |
|
118 | 118 | def _verify_kwargs(func_name, expected_parameters, kwargs): |
|
119 | 119 | """ |
|
120 | 120 | Verify that exactly `expected_parameters` are passed in as `kwargs`. |
|
121 | 121 | """ |
|
122 | 122 | expected_parameters = set(expected_parameters) |
|
123 | 123 | kwargs_keys = set(kwargs.keys()) |
|
124 | 124 | if kwargs_keys != expected_parameters: |
|
125 | 125 | missing_kwargs = expected_parameters - kwargs_keys |
|
126 | 126 | unexpected_kwargs = kwargs_keys - expected_parameters |
|
127 | 127 | raise AssertionError( |
|
128 | 128 | "func:%s: missing parameters: %r, unexpected parameters: %s" % |
|
129 | 129 | (func_name, missing_kwargs, unexpected_kwargs)) |
|
130 | 130 | |
|
131 | 131 | |
|
132 | 132 | def has_kwargs(required_args): |
|
133 | 133 | """ |
|
134 | 134 | decorator to verify extension calls arguments. |
|
135 | 135 | |
|
136 | 136 | :param required_args: |
|
137 | 137 | """ |
|
138 | 138 | def wrap(func): |
|
139 | 139 | def wrapper(*args, **kwargs): |
|
140 | 140 | _verify_kwargs(func.func_name, required_args.keys(), kwargs) |
|
141 | 141 | # in case there's `calls` defined on module we store the data |
|
142 | 142 | maybe_log_call(func.func_name, args, kwargs) |
|
143 | 143 | log.debug('Calling rcextensions function %s', func.func_name) |
|
144 | 144 | return func(*args, **kwargs) |
|
145 | 145 | return wrapper |
|
146 | 146 | return wrap |
|
147 | 147 | |
|
148 | 148 | |
|
149 | 149 | def maybe_log_call(name, args, kwargs): |
|
150 | 150 | from rhodecode.config import rcextensions |
|
151 | 151 | if hasattr(rcextensions, 'calls'): |
|
152 | 152 | calls = rcextensions.calls |
|
153 | 153 | calls[name].append((args, kwargs)) |
|
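One review note: the wrapper reads `func.func_name`, a Python 2-only attribute (`func.__name__` on Python 3), so this decorator appears to be a leftover in an otherwise migrated file. The verification itself is straightforward; a sketch with hypothetical hook parameters:

```python
_verify_kwargs('pre_push', ['repo_name', 'commit_ids'],
               {'repo_name': 'foo', 'commit_ids': ['abc123']})   # passes silently

_verify_kwargs('pre_push', ['repo_name', 'commit_ids'],
               {'repo_name': 'foo', 'extra': 1})
# AssertionError: func:pre_push: missing parameters: {'commit_ids'},
#                 unexpected parameters: {'extra'}
```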
154 | 154 | |
|
155 | 155 | |
|
156 | 156 | def str2bool(_str): |
|
157 | 157 | """ |
|
158 | 158 | returns True/False value from given string, it tries to translate the |
|
159 | 159 | string into boolean |
|
160 | 160 | |
|
161 | 161 | :param _str: string value to translate into boolean |
|
162 | 162 | :rtype: boolean |
|
163 | 163 | :returns: boolean from given string |
|
164 | 164 | """ |
|
165 | 165 | if _str is None: |
|
166 | 166 | return False |
|
167 | 167 | if _str in (True, False): |
|
168 | 168 | return _str |
|
169 | 169 | _str = str(_str).strip().lower() |
|
170 | 170 | return _str in ('t', 'true', 'y', 'yes', 'on', '1') |
|
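A few representative inputs for `str2bool`:

```python
assert str2bool('Yes') is True
assert str2bool(' ON ') is True     # stripped and lower-cased first
assert str2bool('0') is False       # anything outside the truthy set
assert str2bool(None) is False
assert str2bool(True) is True       # real booleans pass through unchanged
```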
171 | 171 | |
|
172 | 172 | |
|
173 | 173 | def aslist(obj, sep=None, strip=True): |
|
174 | 174 | """ |
|
175 | 175 | Returns given string separated by sep as list |
|
176 | 176 | |
|
177 | 177 | :param obj: |
|
178 | 178 | :param sep: |
|
179 | 179 | :param strip: |
|
180 | 180 | """ |
|
181 |  | if isinstance(obj, (basestring,)): |
|
 | 181 | if isinstance(obj, (str,)): |
|
182 | 182 | lst = obj.split(sep) |
|
183 | 183 | if strip: |
|
184 | 184 | lst = [v.strip() for v in lst] |
|
185 | 185 | return lst |
|
186 | 186 | elif isinstance(obj, (list, tuple)): |
|
187 | 187 | return obj |
|
188 | 188 | elif obj is None: |
|
189 | 189 | return [] |
|
190 | 190 | else: |
|
191 | 191 | return [obj] |
|
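Representative `aslist` conversions:

```python
assert aslist('a, b ,c', sep=',') == ['a', 'b', 'c']   # split and stripped
assert aslist(['x', 'y']) == ['x', 'y']                # sequences pass through
assert aslist(None) == []
assert aslist(42) == [42]                              # scalars get wrapped
```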
192 | 192 | |
|
193 | 193 | |
|
194 | 194 | class UrlTemplate(string.Template): |
|
195 | 195 | |
|
196 | 196 | def safe_substitute(self, **kws): |
|
197 | 197 | # url encode the kw for usage in url |
|
198 | 198 | kws = {k: urllib.parse.quote(str(v)) for k, v in kws.items()} |
|
199 | 199 | return super(UrlTemplate, self).safe_substitute(**kws) |
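`UrlTemplate` URL-encodes each value before substituting it into a `string.Template` pattern. A sketch with a hypothetical URL:

```python
tmpl = UrlTemplate('https://example.invalid/search?q=${query}')
assert tmpl.safe_substitute(query='a b&c') == \
    'https://example.invalid/search?q=a%20b%26c'
```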
@@ -1,207 +1,207 @@ | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import os |
|
22 | 22 | import textwrap |
|
23 | 23 | import string |
|
24 | 24 | import functools |
|
25 | 25 | import logging |
|
26 | 26 | import tempfile |
|
27 | 27 | import logging.config |
|
28 | 28 | log = logging.getLogger(__name__) |
|
29 | 29 | |
|
30 | 30 | # skip keys, that are set here, so we don't double process those |
|
31 | 31 | set_keys = { |
|
32 | 32 | '__file__': '' |
|
33 | 33 | } |
|
34 | 34 | |
|
35 | 35 | |
|
36 | 36 | def str2bool(_str): |
|
37 | 37 | """ |
|
38 | 38 | returns True/False value from given string, it tries to translate the |
|
39 | 39 | string into boolean |
|
40 | 40 | |
|
41 | 41 | :param _str: string value to translate into boolean |
|
42 | 42 | :rtype: boolean |
|
43 | 43 | :returns: boolean from given string |
|
44 | 44 | """ |
|
45 | 45 | if _str is None: |
|
46 | 46 | return False |
|
47 | 47 | if _str in (True, False): |
|
48 | 48 | return _str |
|
49 | 49 | _str = str(_str).strip().lower() |
|
50 | 50 | return _str in ('t', 'true', 'y', 'yes', 'on', '1') |
|
51 | 51 | |
|
52 | 52 | |
|
53 | 53 | def aslist(obj, sep=None, strip=True): |
|
54 | 54 | """ |
|
55 | 55 | Returns given string separated by sep as list |
|
56 | 56 | |
|
57 | 57 | :param obj: |
|
58 | 58 | :param sep: |
|
59 | 59 | :param strip: |
|
60 | 60 | """ |
|
61 |  | if isinstance(obj, (basestring,)): |
|
 | 61 | if isinstance(obj, (str,)): |
|
62 | 62 | if obj in ['', ""]: |
|
63 | 63 | return [] |
|
64 | 64 | |
|
65 | 65 | lst = obj.split(sep) |
|
66 | 66 | if strip: |
|
67 | 67 | lst = [v.strip() for v in lst] |
|
68 | 68 | return lst |
|
69 | 69 | elif isinstance(obj, (list, tuple)): |
|
70 | 70 | return obj |
|
71 | 71 | elif obj is None: |
|
72 | 72 | return [] |
|
73 | 73 | else: |
|
74 | 74 | return [obj] |
|
75 | 75 | |
|
76 | 76 | |
|
77 | 77 | class SettingsMaker(object): |
|
78 | 78 | |
|
79 | 79 | def __init__(self, app_settings): |
|
80 | 80 | self.settings = app_settings |
|
81 | 81 | |
|
82 | 82 | @classmethod |
|
83 | 83 | def _bool_func(cls, input_val): |
|
84 | 84 | if isinstance(input_val, unicode): |
|
85 | 85 | input_val = input_val.encode('utf8') |
|
86 | 86 | return str2bool(input_val) |
|
87 | 87 | |
|
88 | 88 | @classmethod |
|
89 | 89 | def _int_func(cls, input_val): |
|
90 | 90 | return int(input_val) |
|
91 | 91 | |
|
92 | 92 | @classmethod |
|
93 | 93 | def _list_func(cls, input_val, sep=','): |
|
94 | 94 | return aslist(input_val, sep=sep) |
|
95 | 95 | |
|
96 | 96 | @classmethod |
|
97 | 97 | def _string_func(cls, input_val, lower=True): |
|
98 | 98 | if lower: |
|
99 | 99 | input_val = input_val.lower() |
|
100 | 100 | return input_val |
|
101 | 101 | |
|
102 | 102 | @classmethod |
|
103 | 103 | def _float_func(cls, input_val): |
|
104 | 104 | return float(input_val) |
|
105 | 105 | |
|
106 | 106 | @classmethod |
|
107 | 107 | def _dir_func(cls, input_val, ensure_dir=False, mode=0o755): |
|
108 | 108 | |
|
109 | 109 | # ensure we have our dir created |
|
110 | 110 | if not os.path.isdir(input_val) and ensure_dir: |
|
111 | 111 | os.makedirs(input_val, mode=mode) |
|
112 | 112 | |
|
113 | 113 | if not os.path.isdir(input_val): |
|
114 | 114 | raise Exception('Dir at {} does not exist'.format(input_val)) |
|
115 | 115 | return input_val |
|
116 | 116 | |
|
117 | 117 | @classmethod |
|
118 | 118 | def _file_path_func(cls, input_val, ensure_dir=False, mode=0o755): |
|
119 | 119 | dirname = os.path.dirname(input_val) |
|
120 | 120 | cls._dir_func(dirname, ensure_dir=ensure_dir) |
|
121 | 121 | return input_val |
|
122 | 122 | |
|
123 | 123 | @classmethod |
|
124 | 124 | def _key_transformator(cls, key): |
|
125 | 125 | return "{}_{}".format('RC'.upper(), key.upper().replace('.', '_').replace('-', '_')) |
|
126 | 126 | |
|
127 | 127 | def maybe_env_key(self, key): |
|
128 | 128 | # now maybe we have this KEY in env, search and use the value with higher priority. |
|
129 | 129 | transformed_key = self._key_transformator(key) |
|
130 | 130 | envvar_value = os.environ.get(transformed_key) |
|
131 | 131 | if envvar_value: |
|
132 | 132 | log.debug('using `%s` key instead of `%s` key for config', transformed_key, key) |
|
133 | 133 | |
|
134 | 134 | return envvar_value |
|
135 | 135 | |
|
136 | 136 | def env_expand(self): |
|
137 | 137 | replaced = {} |
|
138 | 138 | for k, v in self.settings.items(): |
|
139 | 139 | if k not in set_keys: |
|
140 | 140 | envvar_value = self.maybe_env_key(k) |
|
141 | 141 | if envvar_value: |
|
142 | 142 | replaced[k] = envvar_value |
|
143 | 143 | set_keys[k] = envvar_value |
|
144 | 144 | |
|
145 | 145 | # replace ALL keys updated |
|
146 | 146 | self.settings.update(replaced) |
|
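`maybe_env_key` derives the environment-variable name from the ini key (dots and dashes become underscores, `RC_` is prefixed), and `env_expand` applies that lookup to every setting. A sketch with a hypothetical key:

```python
import os

os.environ['RC_DATABASE_URL'] = 'postgresql://db/rc'   # hypothetical override
maker = SettingsMaker({'database.url': 'sqlite:///rc.db'})
maker.env_expand()
assert maker.settings['database.url'] == 'postgresql://db/rc'
```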
147 | 147 | |
|
148 | 148 | def enable_logging(self, logging_conf=None, level='INFO', formatter='generic'): |
|
149 | 149 | """ |
|
150 | 150 | Helper to enable debug on running instance |
|
151 | 151 | :return: |
|
152 | 152 | """ |
|
153 | 153 | |
|
154 | 154 | if not str2bool(self.settings.get('logging.autoconfigure')): |
|
155 | 155 | log.info('logging configuration based on main .ini file') |
|
156 | 156 | return |
|
157 | 157 | |
|
158 | 158 | if logging_conf is None: |
|
159 | 159 | logging_conf = self.settings.get('logging.logging_conf_file') or '' |
|
160 | 160 | |
|
161 | 161 | if not os.path.isfile(logging_conf): |
|
162 | 162 | log.error('Unable to setup logging based on %s, ' |
|
163 | 163 | 'file does not exist.... specify path using logging.logging_conf_file= config setting. ', logging_conf) |
|
164 | 164 | return |
|
165 | 165 | |
|
166 | 166 | with open(logging_conf, 'rb') as f: |
|
167 | 167 | ini_template = textwrap.dedent(f.read()) |
|
168 | 168 | ini_template = string.Template(ini_template).safe_substitute( |
|
169 | 169 | RC_LOGGING_LEVEL=os.environ.get('RC_LOGGING_LEVEL', '') or level, |
|
170 | 170 | RC_LOGGING_FORMATTER=os.environ.get('RC_LOGGING_FORMATTER', '') or formatter |
|
171 | 171 | ) |
|
172 | 172 | |
|
173 | 173 | with tempfile.NamedTemporaryFile(prefix='rc_logging_', suffix='.ini', delete=False) as f: |
|
174 | 174 | log.info('Saved Temporary LOGGING config at %s', f.name) |
|
175 | 175 | f.write(ini_template) |
|
176 | 176 | |
|
177 | 177 | logging.config.fileConfig(f.name) |
|
178 | 178 | os.remove(f.name) |
|
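Another migration detail worth flagging: the logging config is opened in `'rb'` mode, so `f.read()` yields bytes, and on Python 3 `textwrap.dedent` (and the later `f.write` of the substituted text into the temp file) would appear to need an explicit decode/encode step. The substitution pattern itself is the standard one:

```python
import string

template = string.Template('level = ${RC_LOGGING_LEVEL}')
assert template.safe_substitute(RC_LOGGING_LEVEL='DEBUG') == 'level = DEBUG'
```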
179 | 179 | |
|
180 | 180 | def make_setting(self, key, default, lower=False, default_when_empty=False, parser=None): |
|
181 | 181 | input_val = self.settings.get(key, default) |
|
182 | 182 | |
|
183 | 183 | if default_when_empty and not input_val: |
|
184 | 184 | # use default value when value is set in the config but it is empty |
|
185 | 185 | input_val = default |
|
186 | 186 | |
|
187 | 187 | parser_func = { |
|
188 | 188 | 'bool': self._bool_func, |
|
189 | 189 | 'int': self._int_func, |
|
190 | 190 | 'list': self._list_func, |
|
191 | 191 | 'list:newline': functools.partial(self._list_func, sep='/n'), |
|
192 | 192 | 'list:spacesep': functools.partial(self._list_func, sep=' '), |
|
193 | 193 | 'string': functools.partial(self._string_func, lower=lower), |
|
194 | 194 | 'dir': self._dir_func, |
|
195 | 195 | 'dir:ensured': functools.partial(self._dir_func, ensure_dir=True), |
|
196 | 196 | 'file': self._file_path_func, |
|
197 | 197 | 'file:ensured': functools.partial(self._file_path_func, ensure_dir=True), |
|
198 | 198 | None: lambda i: i |
|
199 | 199 | }[parser] |
|
200 | 200 | |
|
201 | 201 | envvar_value = self.maybe_env_key(key) |
|
202 | 202 | if envvar_value: |
|
203 | 203 | input_val = envvar_value |
|
204 | 204 | set_keys[key] = input_val |
|
205 | 205 | |
|
206 | 206 | self.settings[key] = parser_func(input_val) |
|
207 | 207 | return self.settings[key] |
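`make_setting` resolves a value (config file first, then environment), parses it, and writes it back into `self.settings`. Two details stand out in review: `_bool_func` still references the Python 2 `unicode` name, and the `'list:newline'` separator is `'/n'`, which looks like a typo for `'\n'`. Assuming those are resolved, usage would look like:

```python
maker = SettingsMaker({'use_celery': 'true', 'worker.count': '4'})
assert maker.make_setting('use_celery', False, parser='bool') is True
assert maker.make_setting('worker.count', 1, parser='int') == 4
assert maker.make_setting('missing.key', 'fallback') == 'fallback'  # default used
```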
@@ -1,839 +1,839 @@ | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | """Utilities for writing code that runs on Python 2 and 3""" |
|
3 | 3 | |
|
4 | 4 | # Copyright (c) 2010-2015 Benjamin Peterson |
|
5 | 5 | # |
|
6 | 6 | # Permission is hereby granted, free of charge, to any person obtaining a copy |
|
7 | 7 | # of this software and associated documentation files (the "Software"), to deal |
|
8 | 8 | # in the Software without restriction, including without limitation the rights |
|
9 | 9 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell |
|
10 | 10 | # copies of the Software, and to permit persons to whom the Software is |
|
11 | 11 | # furnished to do so, subject to the following conditions: |
|
12 | 12 | # |
|
13 | 13 | # The above copyright notice and this permission notice shall be included in all |
|
14 | 14 | # copies or substantial portions of the Software. |
|
15 | 15 | # |
|
16 | 16 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR |
|
17 | 17 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, |
|
18 | 18 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE |
|
19 | 19 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER |
|
20 | 20 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, |
|
21 | 21 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE |
|
22 | 22 | # SOFTWARE. |
|
23 | 23 | |
|
24 | 24 | |
|
25 | 25 | |
|
26 | 26 | import functools |
|
27 | 27 | import itertools |
|
28 | 28 | import operator |
|
29 | 29 | import sys |
|
30 | 30 | import types |
|
31 | 31 | |
|
32 | 32 | __author__ = "Benjamin Peterson <benjamin@python.org>" |
|
33 | 33 | __version__ = "1.9.0" |
|
34 | 34 | |
|
35 | 35 | |
|
36 | 36 | # Useful for very coarse version differentiation. |
|
37 | 37 | PY2 = sys.version_info[0] == 2 |
|
38 | 38 | PY3 = sys.version_info[0] == 3 |
|
39 | 39 | |
|
40 | 40 | if PY3: |
|
41 | 41 | string_types = str, |
|
42 | 42 | integer_types = int, |
|
43 | 43 | class_types = type, |
|
44 | 44 | text_type = str |
|
45 | 45 | binary_type = bytes |
|
46 | 46 | |
|
47 | 47 | MAXSIZE = sys.maxsize |
|
48 | 48 | else: |
|
49 |  | string_types = basestring, |
|
 | 49 | string_types = str, |
|
50 | 50 | integer_types = (int, long) |
|
51 | 51 | class_types = (type, types.ClassType) |
|
52 | 52 | text_type = unicode |
|
53 | 53 | binary_type = str |
|
54 | 54 | |
|
55 | 55 | if sys.platform.startswith("java"): |
|
56 | 56 | # Jython always uses 32 bits. |
|
57 | 57 | MAXSIZE = int((1 << 31) - 1) |
|
58 | 58 | else: |
|
59 | 59 | # It's possible to have sizeof(long) != sizeof(Py_ssize_t). |
|
60 | 60 | class X(object): |
|
61 | 61 | def __len__(self): |
|
62 | 62 | return 1 << 31 |
|
63 | 63 | try: |
|
64 | 64 | len(X()) |
|
65 | 65 | except OverflowError: |
|
66 | 66 | # 32-bit |
|
67 | 67 | MAXSIZE = int((1 << 31) - 1) |
|
68 | 68 | else: |
|
69 | 69 | # 64-bit |
|
70 | 70 | MAXSIZE = int((1 << 63) - 1) |
|
71 | 71 | del X |
|
72 | 72 | |
|
73 | 73 | |
|
74 | 74 | def _add_doc(func, doc): |
|
75 | 75 | """Add documentation to a function.""" |
|
76 | 76 | func.__doc__ = doc |
|
77 | 77 | |
|
78 | 78 | |
|
79 | 79 | def _import_module(name): |
|
80 | 80 | """Import module, returning the module after the last dot.""" |
|
81 | 81 | __import__(name) |
|
82 | 82 | return sys.modules[name] |
|
83 | 83 | |
|
84 | 84 | |
|
85 | 85 | class _LazyDescr(object): |
|
86 | 86 | |
|
87 | 87 | def __init__(self, name): |
|
88 | 88 | self.name = name |
|
89 | 89 | |
|
90 | 90 | def __get__(self, obj, tp): |
|
91 | 91 | result = self._resolve() |
|
92 | 92 | setattr(obj, self.name, result) # Invokes __set__. |
|
93 | 93 | try: |
|
94 | 94 | # This is a bit ugly, but it avoids running this again by |
|
95 | 95 | # removing this descriptor. |
|
96 | 96 | delattr(obj.__class__, self.name) |
|
97 | 97 | except AttributeError: |
|
98 | 98 | pass |
|
99 | 99 | return result |
|
100 | 100 | |
|
101 | 101 | |
|
102 | 102 | class MovedModule(_LazyDescr): |
|
103 | 103 | |
|
104 | 104 | def __init__(self, name, old, new=None): |
|
105 | 105 | super(MovedModule, self).__init__(name) |
|
106 | 106 | if PY3: |
|
107 | 107 | if new is None: |
|
108 | 108 | new = name |
|
109 | 109 | self.mod = new |
|
110 | 110 | else: |
|
111 | 111 | self.mod = old |
|
112 | 112 | |
|
113 | 113 | def _resolve(self): |
|
114 | 114 | return _import_module(self.mod) |
|
115 | 115 | |
|
116 | 116 | def __getattr__(self, attr): |
|
117 | 117 | _module = self._resolve() |
|
118 | 118 | value = getattr(_module, attr) |
|
119 | 119 | setattr(self, attr, value) |
|
120 | 120 | return value |
|
121 | 121 | |
|
122 | 122 | |
|
123 | 123 | class _LazyModule(types.ModuleType): |
|
124 | 124 | |
|
125 | 125 | def __init__(self, name): |
|
126 | 126 | super(_LazyModule, self).__init__(name) |
|
127 | 127 | self.__doc__ = self.__class__.__doc__ |
|
128 | 128 | |
|
129 | 129 | def __dir__(self): |
|
130 | 130 | attrs = ["__doc__", "__name__"] |
|
131 | 131 | attrs += [attr.name for attr in self._moved_attributes] |
|
132 | 132 | return attrs |
|
133 | 133 | |
|
134 | 134 | # Subclasses should override this |
|
135 | 135 | _moved_attributes = [] |
|
136 | 136 | |
|
137 | 137 | |
|
138 | 138 | class MovedAttribute(_LazyDescr): |
|
139 | 139 | |
|
140 | 140 | def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): |
|
141 | 141 | super(MovedAttribute, self).__init__(name) |
|
142 | 142 | if PY3: |
|
143 | 143 | if new_mod is None: |
|
144 | 144 | new_mod = name |
|
145 | 145 | self.mod = new_mod |
|
146 | 146 | if new_attr is None: |
|
147 | 147 | if old_attr is None: |
|
148 | 148 | new_attr = name |
|
149 | 149 | else: |
|
150 | 150 | new_attr = old_attr |
|
151 | 151 | self.attr = new_attr |
|
152 | 152 | else: |
|
153 | 153 | self.mod = old_mod |
|
154 | 154 | if old_attr is None: |
|
155 | 155 | old_attr = name |
|
156 | 156 | self.attr = old_attr |
|
157 | 157 | |
|
158 | 158 | def _resolve(self): |
|
159 | 159 | module = _import_module(self.mod) |
|
160 | 160 | return getattr(module, self.attr) |
|
161 | 161 | |
|
162 | 162 | |
|
163 | 163 | class _SixMetaPathImporter(object): |
|
164 | 164 | """ |
|
165 | 165 | A meta path importer to import six.moves and its submodules. |
|
166 | 166 | |
|
167 | 167 | This class implements a PEP302 finder and loader. It should be compatible |
|
168 | 168 | with Python 2.5 and all existing versions of Python3 |
|
169 | 169 | """ |
|
170 | 170 | def __init__(self, six_module_name): |
|
171 | 171 | self.name = six_module_name |
|
172 | 172 | self.known_modules = {} |
|
173 | 173 | |
|
174 | 174 | def _add_module(self, mod, *fullnames): |
|
175 | 175 | for fullname in fullnames: |
|
176 | 176 | self.known_modules[self.name + "." + fullname] = mod |
|
177 | 177 | |
|
178 | 178 | def _get_module(self, fullname): |
|
179 | 179 | return self.known_modules[self.name + "." + fullname] |
|
180 | 180 | |
|
181 | 181 | def find_module(self, fullname, path=None): |
|
182 | 182 | if fullname in self.known_modules: |
|
183 | 183 | return self |
|
184 | 184 | return None |
|
185 | 185 | |
|
186 | 186 | def __get_module(self, fullname): |
|
187 | 187 | try: |
|
188 | 188 | return self.known_modules[fullname] |
|
189 | 189 | except KeyError: |
|
190 | 190 | raise ImportError("This loader does not know module " + fullname) |
|
191 | 191 | |
|
192 | 192 | def load_module(self, fullname): |
|
193 | 193 | try: |
|
194 | 194 | # in case of a reload |
|
195 | 195 | return sys.modules[fullname] |
|
196 | 196 | except KeyError: |
|
197 | 197 | pass |
|
198 | 198 | mod = self.__get_module(fullname) |
|
199 | 199 | if isinstance(mod, MovedModule): |
|
200 | 200 | mod = mod._resolve() |
|
201 | 201 | else: |
|
202 | 202 | mod.__loader__ = self |
|
203 | 203 | sys.modules[fullname] = mod |
|
204 | 204 | return mod |
|
205 | 205 | |
|
206 | 206 | def is_package(self, fullname): |
|
207 | 207 | """ |
|
208 | 208 | Return true, if the named module is a package. |
|
209 | 209 | |
|
210 | 210 | We need this method to get correct spec objects with |
|
211 | 211 | Python 3.4 (see PEP451) |
|
212 | 212 | """ |
|
213 | 213 | return hasattr(self.__get_module(fullname), "__path__") |
|
214 | 214 | |
|
215 | 215 | def get_code(self, fullname): |
|
216 | 216 | """Return None |
|
217 | 217 | |
|
218 | 218 | Required, if is_package is implemented""" |
|
219 | 219 | self.__get_module(fullname) # eventually raises ImportError |
|
220 | 220 | return None |
|
221 | 221 | get_source = get_code # same as get_code |
|
222 | 222 | |
|
223 | 223 | _importer = _SixMetaPathImporter(__name__) |
|
224 | 224 | |
|
225 | 225 | |
|
226 | 226 | class _MovedItems(_LazyModule): |
|
227 | 227 | """Lazy loading of moved objects""" |
|
228 | 228 | __path__ = [] # mark as package |
|
229 | 229 | |
|
230 | 230 | |
|
231 | 231 | _moved_attributes = [ |
|
232 | 232 | MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"), |
|
233 | 233 | MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"), |
|
234 | 234 | MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"), |
|
235 | 235 | MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"), |
|
236 | 236 | MovedAttribute("intern", "__builtin__", "sys"), |
|
237 | 237 | MovedAttribute("map", "itertools", "builtins", "imap", "map"), |
|
238 | 238 | MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"), |
|
239 | 239 | MovedAttribute("reload_module", "__builtin__", "imp", "reload"), |
|
240 | 240 | MovedAttribute("reduce", "__builtin__", "functools"), |
|
241 | 241 | MovedAttribute("shlex_quote", "pipes", "shlex", "quote"), |
|
242 | 242 | MovedAttribute("StringIO", "StringIO", "io"), |
|
243 | 243 | MovedAttribute("UserDict", "UserDict", "collections"), |
|
244 | 244 | MovedAttribute("UserList", "UserList", "collections"), |
|
245 | 245 | MovedAttribute("UserString", "UserString", "collections"), |
|
246 | 246 | MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"), |
|
247 | 247 | MovedAttribute("zip", "itertools", "builtins", "izip", "zip"), |
|
248 | 248 | MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"), |
|
249 | 249 | |
|
250 | 250 | MovedModule("builtins", "__builtin__"), |
|
251 | 251 | MovedModule("configparser", "ConfigParser"), |
|
252 | 252 | MovedModule("copyreg", "copy_reg"), |
|
253 | 253 | MovedModule("dbm_gnu", "gdbm", "dbm.gnu"), |
|
254 | 254 | MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"), |
|
255 | 255 | MovedModule("http_cookiejar", "cookielib", "http.cookiejar"), |
|
256 | 256 | MovedModule("http_cookies", "Cookie", "http.cookies"), |
|
257 | 257 | MovedModule("html_entities", "htmlentitydefs", "html.entities"), |
|
258 | 258 | MovedModule("html_parser", "HTMLParser", "html.parser"), |
|
259 | 259 | MovedModule("http_client", "httplib", "http.client"), |
|
260 | 260 | MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"), |
|
261 | 261 | MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"), |
|
262 | 262 | MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"), |
|
263 | 263 | MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"), |
|
264 | 264 | MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"), |
|
265 | 265 | MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"), |
|
266 | 266 | MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"), |
|
267 | 267 | MovedModule("cPickle", "cPickle", "pickle"), |
|
268 | 268 | MovedModule("queue", "Queue"), |
|
269 | 269 | MovedModule("reprlib", "repr"), |
|
270 | 270 | MovedModule("socketserver", "SocketServer"), |
|
271 | 271 | MovedModule("_thread", "thread", "_thread"), |
|
272 | 272 | MovedModule("tkinter", "Tkinter"), |
|
273 | 273 | MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"), |
|
274 | 274 | MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"), |
|
275 | 275 | MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"), |
|
276 | 276 | MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"), |
|
277 | 277 | MovedModule("tkinter_tix", "Tix", "tkinter.tix"), |
|
278 | 278 | MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"), |
|
279 | 279 | MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"), |
|
280 | 280 | MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"), |
|
281 | 281 | MovedModule("tkinter_colorchooser", "tkColorChooser", |
|
282 | 282 | "tkinter.colorchooser"), |
|
283 | 283 | MovedModule("tkinter_commondialog", "tkCommonDialog", |
|
284 | 284 | "tkinter.commondialog"), |
|
285 | 285 | MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"), |
|
286 | 286 | MovedModule("tkinter_font", "tkFont", "tkinter.font"), |
|
287 | 287 | MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"), |
|
288 | 288 | MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", |
|
289 | 289 | "tkinter.simpledialog"), |
|
290 | 290 | MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"), |
|
291 | 291 | MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"), |
|
292 | 292 | MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"), |
|
293 | 293 | MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"), |
|
294 | 294 | MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"), |
|
295 | 295 | MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"), |
|
296 | 296 | MovedModule("winreg", "_winreg"), |
|
297 | 297 | ] |
|
298 | 298 | for attr in _moved_attributes: |
|
299 | 299 | setattr(_MovedItems, attr.name, attr) |
|
300 | 300 | if isinstance(attr, MovedModule): |
|
301 | 301 | _importer._add_module(attr, "moves." + attr.name) |
|
302 | 302 | del attr |
|
303 | 303 | |
|
304 | 304 | _MovedItems._moved_attributes = _moved_attributes |
|
305 | 305 | |
|
306 | 306 | moves = _MovedItems(__name__ + ".moves") |
|
307 | 307 | _importer._add_module(moves, "moves") |
|
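With `_MovedItems` registered on `sys.meta_path` (via `_importer` at the bottom of the module), renamed stdlib modules import under one name on both majors:

```python
from six.moves import configparser            # ConfigParser on Python 2
from six.moves.urllib.parse import quote      # urllib.quote on Python 2

assert quote('a b') == 'a%20b'
```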
308 | 308 | |
|
309 | 309 | |
|
310 | 310 | class Module_six_moves_urllib_parse(_LazyModule): |
|
311 | 311 | """Lazy loading of moved objects in six.moves.urllib_parse""" |
|
312 | 312 | |
|
313 | 313 | |
|
314 | 314 | _urllib_parse_moved_attributes = [ |
|
315 | 315 | MovedAttribute("ParseResult", "urlparse", "urllib.parse"), |
|
316 | 316 | MovedAttribute("SplitResult", "urlparse", "urllib.parse"), |
|
317 | 317 | MovedAttribute("parse_qs", "urlparse", "urllib.parse"), |
|
318 | 318 | MovedAttribute("parse_qsl", "urlparse", "urllib.parse"), |
|
319 | 319 | MovedAttribute("urldefrag", "urlparse", "urllib.parse"), |
|
320 | 320 | MovedAttribute("urljoin", "urlparse", "urllib.parse"), |
|
321 | 321 | MovedAttribute("urlparse", "urlparse", "urllib.parse"), |
|
322 | 322 | MovedAttribute("urlsplit", "urlparse", "urllib.parse"), |
|
323 | 323 | MovedAttribute("urlunparse", "urlparse", "urllib.parse"), |
|
324 | 324 | MovedAttribute("urlunsplit", "urlparse", "urllib.parse"), |
|
325 | 325 | MovedAttribute("quote", "urllib", "urllib.parse"), |
|
326 | 326 | MovedAttribute("quote_plus", "urllib", "urllib.parse"), |
|
327 | 327 | MovedAttribute("unquote", "urllib", "urllib.parse"), |
|
328 | 328 | MovedAttribute("unquote_plus", "urllib", "urllib.parse"), |
|
329 | 329 | MovedAttribute("urlencode", "urllib", "urllib.parse"), |
|
330 | 330 | MovedAttribute("splitquery", "urllib", "urllib.parse"), |
|
331 | 331 | MovedAttribute("splittag", "urllib", "urllib.parse"), |
|
332 | 332 | MovedAttribute("splituser", "urllib", "urllib.parse"), |
|
333 | 333 | MovedAttribute("uses_fragment", "urlparse", "urllib.parse"), |
|
334 | 334 | MovedAttribute("uses_netloc", "urlparse", "urllib.parse"), |
|
335 | 335 | MovedAttribute("uses_params", "urlparse", "urllib.parse"), |
|
336 | 336 | MovedAttribute("uses_query", "urlparse", "urllib.parse"), |
|
337 | 337 | MovedAttribute("uses_relative", "urlparse", "urllib.parse"), |
|
338 | 338 | ] |
|
339 | 339 | for attr in _urllib_parse_moved_attributes: |
|
340 | 340 | setattr(Module_six_moves_urllib_parse, attr.name, attr) |
|
341 | 341 | del attr |
|
342 | 342 | |
|
343 | 343 | Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes |
|
344 | 344 | |
|
345 | 345 | _importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"), |
|
346 | 346 | "moves.urllib_parse", "moves.urllib.parse") |
|
347 | 347 | |
|
348 | 348 | |
|
349 | 349 | class Module_six_moves_urllib_error(_LazyModule): |
|
350 | 350 | """Lazy loading of moved objects in six.moves.urllib_error""" |
|
351 | 351 | |
|
352 | 352 | |
|
353 | 353 | _urllib_error_moved_attributes = [ |
|
354 | 354 | MovedAttribute("URLError", "urllib2", "urllib.error"), |
|
355 | 355 | MovedAttribute("HTTPError", "urllib2", "urllib.error"), |
|
356 | 356 | MovedAttribute("ContentTooShortError", "urllib", "urllib.error"), |
|
357 | 357 | ] |
|
358 | 358 | for attr in _urllib_error_moved_attributes: |
|
359 | 359 | setattr(Module_six_moves_urllib_error, attr.name, attr) |
|
360 | 360 | del attr |
|
361 | 361 | |
|
362 | 362 | Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes |
|
363 | 363 | |
|
364 | 364 | _importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"), |
|
365 | 365 | "moves.urllib_error", "moves.urllib.error") |
|
366 | 366 | |
|
367 | 367 | |
|
368 | 368 | class Module_six_moves_urllib_request(_LazyModule): |
|
369 | 369 | """Lazy loading of moved objects in six.moves.urllib_request""" |
|
370 | 370 | |
|
371 | 371 | |
|
372 | 372 | _urllib_request_moved_attributes = [ |
|
373 | 373 | MovedAttribute("urlopen", "urllib2", "urllib.request"), |
|
374 | 374 | MovedAttribute("install_opener", "urllib2", "urllib.request"), |
|
375 | 375 | MovedAttribute("build_opener", "urllib2", "urllib.request"), |
|
376 | 376 | MovedAttribute("pathname2url", "urllib", "urllib.request"), |
|
377 | 377 | MovedAttribute("url2pathname", "urllib", "urllib.request"), |
|
378 | 378 | MovedAttribute("getproxies", "urllib", "urllib.request"), |
|
379 | 379 | MovedAttribute("Request", "urllib2", "urllib.request"), |
|
380 | 380 | MovedAttribute("OpenerDirector", "urllib2", "urllib.request"), |
|
381 | 381 | MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"), |
|
382 | 382 | MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"), |
|
383 | 383 | MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"), |
|
384 | 384 | MovedAttribute("ProxyHandler", "urllib2", "urllib.request"), |
|
385 | 385 | MovedAttribute("BaseHandler", "urllib2", "urllib.request"), |
|
386 | 386 | MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"), |
|
387 | 387 | MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"), |
|
388 | 388 | MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"), |
|
389 | 389 | MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"), |
|
390 | 390 | MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"), |
|
391 | 391 | MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"), |
|
392 | 392 | MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"), |
|
393 | 393 | MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"), |
|
394 | 394 | MovedAttribute("HTTPHandler", "urllib2", "urllib.request"), |
|
395 | 395 | MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"), |
|
396 | 396 | MovedAttribute("FileHandler", "urllib2", "urllib.request"), |
|
397 | 397 | MovedAttribute("FTPHandler", "urllib2", "urllib.request"), |
|
398 | 398 | MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"), |
|
399 | 399 | MovedAttribute("UnknownHandler", "urllib2", "urllib.request"), |
|
400 | 400 | MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"), |
|
401 | 401 | MovedAttribute("urlretrieve", "urllib", "urllib.request"), |
|
402 | 402 | MovedAttribute("urlcleanup", "urllib", "urllib.request"), |
|
403 | 403 | MovedAttribute("URLopener", "urllib", "urllib.request"), |
|
404 | 404 | MovedAttribute("FancyURLopener", "urllib", "urllib.request"), |
|
405 | 405 | MovedAttribute("proxy_bypass", "urllib", "urllib.request"), |
|
406 | 406 | ] |
|
407 | 407 | for attr in _urllib_request_moved_attributes: |
|
408 | 408 | setattr(Module_six_moves_urllib_request, attr.name, attr) |
|
409 | 409 | del attr |
|
410 | 410 | |
|
411 | 411 | Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes |
|
412 | 412 | |
|
413 | 413 | _importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"), |
|
414 | 414 | "moves.urllib_request", "moves.urllib.request") |
|
415 | 415 | |
|
416 | 416 | |
|
417 | 417 | class Module_six_moves_urllib_response(_LazyModule): |
|
418 | 418 | """Lazy loading of moved objects in six.moves.urllib_response""" |
|
419 | 419 | |
|
420 | 420 | |
|
421 | 421 | _urllib_response_moved_attributes = [ |
|
422 | 422 | MovedAttribute("addbase", "urllib", "urllib.response"), |
|
423 | 423 | MovedAttribute("addclosehook", "urllib", "urllib.response"), |
|
424 | 424 | MovedAttribute("addinfo", "urllib", "urllib.response"), |
|
425 | 425 | MovedAttribute("addinfourl", "urllib", "urllib.response"), |
|
426 | 426 | ] |
|
427 | 427 | for attr in _urllib_response_moved_attributes: |
|
428 | 428 | setattr(Module_six_moves_urllib_response, attr.name, attr) |
|
429 | 429 | del attr |
|
430 | 430 | |
|
431 | 431 | Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes |
|
432 | 432 | |
|
433 | 433 | _importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"), |
|
434 | 434 | "moves.urllib_response", "moves.urllib.response") |
|
435 | 435 | |
|
436 | 436 | |
|
437 | 437 | class Module_six_moves_urllib_robotparser(_LazyModule): |
|
438 | 438 | """Lazy loading of moved objects in six.moves.urllib_robotparser""" |
|
439 | 439 | |
|
440 | 440 | |
|
441 | 441 | _urllib_robotparser_moved_attributes = [ |
|
442 | 442 | MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"), |
|
443 | 443 | ] |
|
444 | 444 | for attr in _urllib_robotparser_moved_attributes: |
|
445 | 445 | setattr(Module_six_moves_urllib_robotparser, attr.name, attr) |
|
446 | 446 | del attr |
|
447 | 447 | |
|
448 | 448 | Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes |
|
449 | 449 | |
|
450 | 450 | _importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"), |
|
451 | 451 | "moves.urllib_robotparser", "moves.urllib.robotparser") |
|
452 | 452 | |
|
453 | 453 | |
|
454 | 454 | class Module_six_moves_urllib(types.ModuleType): |
|
455 | 455 | """Create a six.moves.urllib namespace that resembles the Python 3 namespace""" |
|
456 | 456 | __path__ = [] # mark as package |
|
457 | 457 | parse = _importer._get_module("moves.urllib_parse") |
|
458 | 458 | error = _importer._get_module("moves.urllib_error") |
|
459 | 459 | request = _importer._get_module("moves.urllib_request") |
|
460 | 460 | response = _importer._get_module("moves.urllib_response") |
|
461 | 461 | robotparser = _importer._get_module("moves.urllib_robotparser") |
|
462 | 462 | |
|
463 | 463 | def __dir__(self): |
|
464 | 464 | return ['parse', 'error', 'request', 'response', 'robotparser'] |
|
465 | 465 | |
|
466 | 466 | _importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"), |
|
467 | 467 | "moves.urllib") |
|
468 | 468 | |
|
469 | 469 | |
|
470 | 470 | def add_move(move): |
|
471 | 471 | """Add an item to six.moves.""" |
|
472 | 472 | setattr(_MovedItems, move.name, move) |
|
473 | 473 | |
|
474 | 474 | |
|
475 | 475 | def remove_move(name): |
|
476 | 476 | """Remove item from six.moves.""" |
|
477 | 477 | try: |
|
478 | 478 | delattr(_MovedItems, name) |
|
479 | 479 | except AttributeError: |
|
480 | 480 | try: |
|
481 | 481 | del moves.__dict__[name] |
|
482 | 482 | except KeyError: |
|
483 | 483 | raise AttributeError("no such move, %r" % (name,)) |
|
484 | 484 | |
|
485 | 485 | |
|
486 | 486 | if PY3: |
|
487 | 487 | _meth_func = "__func__" |
|
488 | 488 | _meth_self = "__self__" |
|
489 | 489 | |
|
490 | 490 | _func_closure = "__closure__" |
|
491 | 491 | _func_code = "__code__" |
|
492 | 492 | _func_defaults = "__defaults__" |
|
493 | 493 | _func_globals = "__globals__" |
|
494 | 494 | else: |
|
495 | 495 | _meth_func = "im_func" |
|
496 | 496 | _meth_self = "im_self" |
|
497 | 497 | |
|
498 | 498 | _func_closure = "func_closure" |
|
499 | 499 | _func_code = "func_code" |
|
500 | 500 | _func_defaults = "func_defaults" |
|
501 | 501 | _func_globals = "func_globals" |
|
502 | 502 | |
|
503 | 503 | |
|
504 | 504 | try: |
|
505 | 505 | advance_iterator = next |
|
506 | 506 | except NameError: |
|
507 | 507 | def advance_iterator(it): |
|
508 | 508 | return it.next() |
|
509 | 509 | next = advance_iterator |
|
510 | 510 | |
|
511 | 511 | |
|
512 | 512 | try: |
|
513 | 513 | callable = callable |
|
514 | 514 | except NameError: |
|
515 | 515 | def callable(obj): |
|
516 | 516 | return any("__call__" in klass.__dict__ for klass in type(obj).__mro__) |
|
517 | 517 | |
|
518 | 518 | |
|
519 | 519 | if PY3: |
|
520 | 520 | def get_unbound_function(unbound): |
|
521 | 521 | return unbound |
|
522 | 522 | |
|
523 | 523 | create_bound_method = types.MethodType |
|
524 | 524 | |
|
525 | 525 | Iterator = object |
|
526 | 526 | else: |
|
527 | 527 | def get_unbound_function(unbound): |
|
528 | 528 | return unbound.im_func |
|
529 | 529 | |
|
530 | 530 | def create_bound_method(func, obj): |
|
531 | 531 | return types.MethodType(func, obj, obj.__class__) |
|
532 | 532 | |
|
533 | 533 | class Iterator(object): |
|
534 | 534 | |
|
535 | 535 | def next(self): |
|
536 | 536 | return type(self).__next__(self) |
|
537 | 537 | |
|
538 | 538 | callable = callable |
|
539 | 539 | _add_doc(get_unbound_function, |
|
540 | 540 | """Get the function out of a possibly unbound function""") |
|
541 | 541 | |
|
542 | 542 | |
|
543 | 543 | get_method_function = operator.attrgetter(_meth_func) |
|
544 | 544 | get_method_self = operator.attrgetter(_meth_self) |
|
545 | 545 | get_function_closure = operator.attrgetter(_func_closure) |
|
546 | 546 | get_function_code = operator.attrgetter(_func_code) |
|
547 | 547 | get_function_defaults = operator.attrgetter(_func_defaults) |
|
548 | 548 | get_function_globals = operator.attrgetter(_func_globals) |
|
549 | 549 | |
|
550 | 550 | |
|
551 | 551 | if PY3: |
|
552 | 552 | def iterkeys(d, **kw): |
|
553 | 553 | return iter(d.keys(**kw)) |
|
554 | 554 | |
|
555 | 555 | def itervalues(d, **kw): |
|
556 | 556 | return iter(d.values(**kw)) |
|
557 | 557 | |
|
558 | 558 | def iteritems(d, **kw): |
|
559 | 559 | return iter(d.items(**kw)) |
|
560 | 560 | |
|
561 | 561 | def iterlists(d, **kw): |
|
562 | 562 | return iter(d.lists(**kw)) |
|
563 | 563 | |
|
564 | 564 | viewkeys = operator.methodcaller("keys") |
|
565 | 565 | |
|
566 | 566 | viewvalues = operator.methodcaller("values") |
|
567 | 567 | |
|
568 | 568 | viewitems = operator.methodcaller("items") |
|
569 | 569 | else: |
|
570 | 570 | def iterkeys(d, **kw): |
|
571 | 571 | return iter(d.iterkeys(**kw)) |
|
572 | 572 | |
|
573 | 573 | def itervalues(d, **kw): |
|
574 | 574 | return iter(d.itervalues(**kw)) |
|
575 | 575 | |
|
576 | 576 | def iteritems(d, **kw): |
|
577 | 577 | return iter(d.iteritems(**kw)) |
|
578 | 578 | |
|
579 | 579 | def iterlists(d, **kw): |
|
580 | 580 | return iter(d.iterlists(**kw)) |
|
581 | 581 | |
|
582 | 582 | viewkeys = operator.methodcaller("viewkeys") |
|
583 | 583 | |
|
584 | 584 | viewvalues = operator.methodcaller("viewvalues") |
|
585 | 585 | |
|
586 | 586 | viewitems = operator.methodcaller("viewitems") |
|
587 | 587 | |
|
588 | 588 | _add_doc(iterkeys, "Return an iterator over the keys of a dictionary.") |
|
589 | 589 | _add_doc(itervalues, "Return an iterator over the values of a dictionary.") |
|
590 | 590 | _add_doc(iteritems, |
|
591 | 591 | "Return an iterator over the (key, value) pairs of a dictionary.") |
|
592 | 592 | _add_doc(iterlists, |
|
593 | 593 | "Return an iterator over the (key, [values]) pairs of a dictionary.") |
|
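The iterator helpers keep dictionary traversal lazy on both majors without touching call sites:

```python
d = {'a': 1, 'b': 2}
assert sorted(iterkeys(d)) == ['a', 'b']
assert sum(v for _, v in iteritems(d)) == 3   # iteritems() on py2, items() on py3
```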
594 | 594 | |
|
595 | 595 | |
|
596 | 596 | if PY3: |
|
597 | 597 | def b(s): |
|
598 | 598 | return s.encode("latin-1") |
|
599 | 599 | def u(s): |
|
600 | 600 | return s |
|
601 | 601 | unichr = chr |
|
602 | 602 | if sys.version_info[1] <= 1: |
|
603 | 603 | def int2byte(i): |
|
604 | 604 | return bytes((i,)) |
|
605 | 605 | else: |
|
606 | 606 | # This is about 2x faster than the implementation above on 3.2+ |
|
607 | 607 | int2byte = operator.methodcaller("to_bytes", 1, "big") |
|
608 | 608 | byte2int = operator.itemgetter(0) |
|
609 | 609 | indexbytes = operator.getitem |
|
610 | 610 | iterbytes = iter |
|
611 | 611 | import io |
|
612 | 612 | StringIO = io.StringIO |
|
613 | 613 | BytesIO = io.BytesIO |
|
614 | 614 | _assertCountEqual = "assertCountEqual" |
|
615 | 615 | _assertRaisesRegex = "assertRaisesRegex" |
|
616 | 616 | _assertRegex = "assertRegex" |
|
617 | 617 | else: |
|
618 | 618 | def b(s): |
|
619 | 619 | return s |
|
620 | 620 | # Workaround for standalone backslash |
|
621 | 621 | def u(s): |
|
622 | 622 | return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape") |
|
623 | 623 | unichr = unichr |
|
624 | 624 | int2byte = chr |
|
625 | 625 | def byte2int(bs): |
|
626 | 626 | return ord(bs[0]) |
|
627 | 627 | def indexbytes(buf, i): |
|
628 | 628 | return ord(buf[i]) |
|
629 | 629 | iterbytes = functools.partial(itertools.imap, ord) |
|
630 | 630 | import StringIO |
|
631 | 631 | StringIO = BytesIO = StringIO.StringIO |
|
632 | 632 | _assertCountEqual = "assertItemsEqual" |
|
633 | 633 | _assertRaisesRegex = "assertRaisesRegexp" |
|
634 | 634 | _assertRegex = "assertRegexpMatches" |
|
635 | 635 | _add_doc(b, """Byte literal""") |
|
636 | 636 | _add_doc(u, """Text literal""") |
|
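`b()` and `u()` stand in for the literal prefixes that differ across majors:

```python
payload = b('GET / HTTP/1.1')        # bytes on both Python 2 and 3
label = u('hello')                   # text on both
assert isinstance(payload, binary_type)
assert isinstance(label, text_type)
```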
637 | 637 | |
|
638 | 638 | |
|
639 | 639 | def assertCountEqual(self, *args, **kwargs): |
|
640 | 640 | return getattr(self, _assertCountEqual)(*args, **kwargs) |
|
641 | 641 | |
|
642 | 642 | |
|
643 | 643 | def assertRaisesRegex(self, *args, **kwargs): |
|
644 | 644 | return getattr(self, _assertRaisesRegex)(*args, **kwargs) |
|
645 | 645 | |
|
646 | 646 | |
|
647 | 647 | def assertRegex(self, *args, **kwargs): |
|
648 | 648 | return getattr(self, _assertRegex)(*args, **kwargs) |
|
649 | 649 | |
|
650 | 650 | |
|
651 | 651 | if PY3: |
|
652 | 652 | exec_ = getattr(moves.builtins, "exec") |
|
653 | 653 | |
|
654 | 654 | |
|
655 | 655 | def reraise(tp, value, tb=None): |
|
656 | 656 | if value is None: |
|
657 | 657 | value = tp() |
|
658 | 658 | if value.__traceback__ is not tb: |
|
659 | 659 | raise value.with_traceback(tb) |
|
660 | 660 | raise value |
|
661 | 661 | |
|
662 | 662 | else: |
|
663 | 663 | def exec_(_code_, _globs_=None, _locs_=None): |
|
664 | 664 | """Execute code in a namespace.""" |
|
665 | 665 | if _globs_ is None: |
|
666 | 666 | frame = sys._getframe(1) |
|
667 | 667 | _globs_ = frame.f_globals |
|
668 | 668 | if _locs_ is None: |
|
669 | 669 | _locs_ = frame.f_locals |
|
670 | 670 | del frame |
|
671 | 671 | elif _locs_ is None: |
|
672 | 672 | _locs_ = _globs_ |
|
673 | 673 | exec("""exec _code_ in _globs_, _locs_""") |
|
674 | 674 | |
|
675 | 675 | |
|
676 | 676 | exec_("""def reraise(tp, value, tb=None): |
|
677 | 677 | raise tp, value, tb |
|
678 | 678 | """) |
|
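`reraise` hides the incompatible three-argument `raise` syntax; the Python 2 body has to be built with `exec_` because `raise tp, value, tb` is a `SyntaxError` on Python 3. Typical use (note the sketch deliberately re-raises the caught exception):

```python
import sys

try:
    {}['missing']
except KeyError:
    tp, value, tb = sys.exc_info()
    # hand the exception off, keeping the original traceback on both majors
    reraise(tp, value, tb)
```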
679 | 679 | |
|
680 | 680 | |
|
681 | 681 | if sys.version_info[:2] == (3, 2): |
|
682 | 682 | exec_("""def raise_from(value, from_value): |
|
683 | 683 | if from_value is None: |
|
684 | 684 | raise value |
|
685 | 685 | raise value from from_value |
|
686 | 686 | """) |
|
687 | 687 | elif sys.version_info[:2] > (3, 2): |
|
688 | 688 | exec_("""def raise_from(value, from_value): |
|
689 | 689 | raise value from from_value |
|
690 | 690 | """) |
|
691 | 691 | else: |
|
692 | 692 | def raise_from(value, from_value): |
|
693 | 693 | raise value |
|
694 | 694 | |
|
695 | 695 | |
|
696 | 696 | print_ = getattr(moves.builtins, "print", None) |
|
697 | 697 | if print_ is None: |
|
698 | 698 | def print_(*args, **kwargs): |
|
699 | 699 | """The new-style print function for Python 2.4 and 2.5.""" |
|
700 | 700 | fp = kwargs.pop("file", sys.stdout) |
|
701 | 701 | if fp is None: |
|
702 | 702 | return |
|
703 | 703 | def write(data): |
|
704 |  | if not isinstance(data, basestring): |
|
 | 704 | if not isinstance(data, str): |
|
705 | 705 | data = str(data) |
|
706 | 706 | # If the file has an encoding, encode unicode with it. |
|
707 | 707 | if (isinstance(fp, file) and |
|
708 | 708 | isinstance(data, unicode) and |
|
709 | 709 | fp.encoding is not None): |
|
710 | 710 | errors = getattr(fp, "errors", None) |
|
711 | 711 | if errors is None: |
|
712 | 712 | errors = "strict" |
|
713 | 713 | data = data.encode(fp.encoding, errors) |
|
714 | 714 | fp.write(data) |
|
715 | 715 | want_unicode = False |
|
716 | 716 | sep = kwargs.pop("sep", None) |
|
717 | 717 | if sep is not None: |
|
718 | 718 | if isinstance(sep, unicode): |
|
719 | 719 | want_unicode = True |
|
720 | 720 | elif not isinstance(sep, str): |
|
721 | 721 | raise TypeError("sep must be None or a string") |
|
722 | 722 | end = kwargs.pop("end", None) |
|
723 | 723 | if end is not None: |
|
724 | 724 | if isinstance(end, unicode): |
|
725 | 725 | want_unicode = True |
|
726 | 726 | elif not isinstance(end, str): |
|
727 | 727 | raise TypeError("end must be None or a string") |
|
728 | 728 | if kwargs: |
|
729 | 729 | raise TypeError("invalid keyword arguments to print()") |
|
730 | 730 | if not want_unicode: |
|
731 | 731 | for arg in args: |
|
732 | 732 | if isinstance(arg, unicode): |
|
733 | 733 | want_unicode = True |
|
734 | 734 | break |
|
735 | 735 | if want_unicode: |
|
736 | 736 | newline = unicode("\n") |
|
737 | 737 | space = unicode(" ") |
|
738 | 738 | else: |
|
739 | 739 | newline = "\n" |
|
740 | 740 | space = " " |
|
741 | 741 | if sep is None: |
|
742 | 742 | sep = space |
|
743 | 743 | if end is None: |
|
744 | 744 | end = newline |
|
745 | 745 | for i, arg in enumerate(args): |
|
746 | 746 | if i: |
|
747 | 747 | write(sep) |
|
748 | 748 | write(arg) |
|
749 | 749 | write(end) |
|
750 | 750 | if sys.version_info[:2] < (3, 3): |
|
751 | 751 | _print = print_ |
|
752 | 752 | def print_(*args, **kwargs): |
|
753 | 753 | fp = kwargs.get("file", sys.stdout) |
|
754 | 754 | flush = kwargs.pop("flush", False) |
|
755 | 755 | _print(*args, **kwargs) |
|
756 | 756 | if flush and fp is not None: |
|
757 | 757 | fp.flush() |
|
758 | 758 | |
|
759 | 759 | _add_doc(reraise, """Reraise an exception.""") |
|
760 | 760 | |
|
761 | 761 | if sys.version_info[0:2] < (3, 4): |
|
762 | 762 | def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS, |
|
763 | 763 | updated=functools.WRAPPER_UPDATES): |
|
764 | 764 | def wrapper(f): |
|
765 | 765 | f = functools.wraps(wrapped, assigned, updated)(f) |
|
766 | 766 | f.__wrapped__ = wrapped |
|
767 | 767 | return f |
|
768 | 768 | return wrapper |
|
769 | 769 | else: |
|
770 | 770 | wraps = functools.wraps |
|
771 | 771 | |
|
772 | 772 | def with_metaclass(meta, *bases): |
|
773 | 773 | """Create a base class with a metaclass.""" |
|
774 | 774 | # This requires a bit of explanation: the basic idea is to make a dummy |
|
775 | 775 | # metaclass for one level of class instantiation that replaces itself with |
|
776 | 776 | # the actual metaclass. |
|
777 | 777 | class metaclass(meta): |
|
778 | 778 | def __new__(cls, name, this_bases, d): |
|
779 | 779 | return meta(name, bases, d) |
|
780 | 780 | return type.__new__(metaclass, 'temporary_class', (), {}) |
|
781 | 781 | |
|
782 | 782 | |
|
783 | 783 | def add_metaclass(metaclass): |
|
784 | 784 | """Class decorator for creating a class with a metaclass.""" |
|
785 | 785 | def wrapper(cls): |
|
786 | 786 | orig_vars = cls.__dict__.copy() |
|
787 | 787 | slots = orig_vars.get('__slots__') |
|
788 | 788 | if slots is not None: |
|
789 | 789 | if isinstance(slots, str): |
|
790 | 790 | slots = [slots] |
|
791 | 791 | for slots_var in slots: |
|
792 | 792 | orig_vars.pop(slots_var) |
|
793 | 793 | orig_vars.pop('__dict__', None) |
|
794 | 794 | orig_vars.pop('__weakref__', None) |
|
795 | 795 | return metaclass(cls.__name__, cls.__bases__, orig_vars) |
|
796 | 796 | return wrapper |
|
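`with_metaclass` and `add_metaclass` avoid the incompatible `__metaclass__` (Python 2) vs `class C(metaclass=...)` (Python 3) spellings. A sketch with a toy metaclass:

```python
class AutoTag(type):
    def __new__(mcs, name, bases, namespace):
        namespace.setdefault('tag', name.lower())
        return super(AutoTag, mcs).__new__(mcs, name, bases, namespace)

@add_metaclass(AutoTag)
class Widget(object):
    pass

assert Widget.tag == 'widget'
assert type(Widget) is AutoTag
```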
797 | 797 | |
|
798 | 798 | |
|
799 | 799 | def python_2_unicode_compatible(klass): |
|
800 | 800 | """ |
|
801 | 801 | A decorator that defines __unicode__ and __str__ methods under Python 2. |
|
802 | 802 | Under Python 3 it does nothing. |
|
803 | 803 | |
|
804 | 804 | To support Python 2 and 3 with a single code base, define a __str__ method |
|
805 | 805 | returning text and apply this decorator to the class. |
|
806 | 806 | """ |
|
807 | 807 | if PY2: |
|
808 | 808 | if '__str__' not in klass.__dict__: |
|
809 | 809 | raise ValueError("@python_2_unicode_compatible cannot be applied " |
|
810 | 810 | "to %s because it doesn't define __str__()." % |
|
811 | 811 | klass.__name__) |
|
812 | 812 | klass.__unicode__ = klass.__str__ |
|
813 | 813 | klass.__str__ = lambda self: self.__unicode__().encode('utf-8') |
|
814 | 814 | return klass |
|
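Classes define a single text-returning `__str__`; on Python 2 the decorator aliases it to `__unicode__` and installs a UTF-8-encoding `__str__`, while on Python 3 it is a no-op:

```python
@python_2_unicode_compatible
class Label(object):
    def __str__(self):
        return u'caf\u00e9'   # return text; the decorator handles py2 encoding
```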
815 | 815 | |
|
816 | 816 | |
|
817 | 817 | # Complete the moves implementation. |
|
818 | 818 | # This code is at the end of this module to speed up module loading. |
|
819 | 819 | # Turn this module into a package. |
|
820 | 820 | __path__ = [] # required for PEP 302 and PEP 451 |
|
821 | 821 | __package__ = __name__ # see PEP 366 @ReservedAssignment |
|
822 | 822 | if globals().get("__spec__") is not None: |
|
823 | 823 | __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable |
|
824 | 824 | # Remove other six meta path importers, since they cause problems. This can |
|
825 | 825 | # happen if six is removed from sys.modules and then reloaded. (Setuptools does |
|
826 | 826 | # this for some reason.) |
|
827 | 827 | if sys.meta_path: |
|
828 | 828 | for i, importer in enumerate(sys.meta_path): |
|
829 | 829 | # Here's some real nastiness: Another "instance" of the six module might |
|
830 | 830 | # be floating around. Therefore, we can't use isinstance() to check for |
|
831 | 831 | # the six meta path importer, since the other six instance will have |
|
832 | 832 | # inserted an importer with different class. |
|
833 | 833 | if (type(importer).__name__ == "_SixMetaPathImporter" and |
|
834 | 834 | importer.name == __name__): |
|
835 | 835 | del sys.meta_path[i] |
|
836 | 836 | break |
|
837 | 837 | del i, importer |
|
838 | 838 | # Finally, add the importer to the meta path import hook. |
|
839 | 839 | sys.meta_path.append(_importer) |
@@ -1,370 +1,370 @@ | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2016-2020 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import os |
|
22 | 22 | import hashlib |
|
23 | 23 | import itsdangerous |
|
24 | 24 | import logging |
|
25 | 25 | import requests |
|
26 | 26 | import datetime |
|
27 | 27 | |
|
28 | 28 | from dogpile.util.readwrite_lock import ReadWriteMutex |
|
29 | 29 | from pyramid.threadlocal import get_current_registry |
|
30 | 30 | |
|
31 | 31 | import rhodecode.lib.helpers as h |
|
32 | 32 | from rhodecode.lib.auth import HasRepoPermissionAny |
|
33 | 33 | from rhodecode.lib.ext_json import json |
|
34 | 34 | from rhodecode.model.db import User |
|
35 | 35 | |
|
36 | 36 | log = logging.getLogger(__name__) |
|
37 | 37 | |
|
38 | 38 | LOCK = ReadWriteMutex() |
|
39 | 39 | |
|
40 | 40 | USER_STATE_PUBLIC_KEYS = [ |
|
41 | 41 | 'id', 'username', 'first_name', 'last_name', |
|
42 | 42 | 'icon_link', 'display_name', 'display_link'] |
|
43 | 43 | |
|
44 | 44 | |
|
45 | 45 | class ChannelstreamException(Exception): |
|
46 | 46 | pass |
|
47 | 47 | |
|
48 | 48 | |
|
49 | 49 | class ChannelstreamConnectionException(ChannelstreamException): |
|
50 | 50 | pass |
|
51 | 51 | |
|
52 | 52 | |
|
53 | 53 | class ChannelstreamPermissionException(ChannelstreamException): |
|
54 | 54 | pass |
|
55 | 55 | |
|
56 | 56 | |
|
57 | 57 | def get_channelstream_server_url(config, endpoint): |
|
58 | 58 | return 'http://{}{}'.format(config['server'], endpoint) |
|
59 | 59 | |
|
60 | 60 | |
|
61 | 61 | def channelstream_request(config, payload, endpoint, raise_exc=True): |
|
62 | 62 | signer = itsdangerous.TimestampSigner(config['secret']) |
|
63 | 63 | sig_for_server = signer.sign(endpoint) |
|
64 | 64 | secret_headers = {'x-channelstream-secret': sig_for_server, |
|
65 | 65 | 'x-channelstream-endpoint': endpoint, |
|
66 | 66 | 'Content-Type': 'application/json'} |
|
67 | 67 | req_url = get_channelstream_server_url(config, endpoint) |
|
68 | 68 | |
|
69 | 69 | log.debug('Sending a channelstream request to endpoint: `%s`', req_url) |
|
70 | 70 | response = None |
|
71 | 71 | try: |
|
72 | 72 | response = requests.post(req_url, data=json.dumps(payload), |
|
73 | 73 | headers=secret_headers).json() |
|
74 | 74 | except requests.ConnectionError: |
|
75 | 75 | log.exception('ConnectionError occurred for endpoint %s', req_url) |
|
76 | 76 | if raise_exc: |
|
77 | 77 | raise ChannelstreamConnectionException(req_url) |
|
78 | 78 | except Exception: |
|
79 | 79 | log.exception('Exception related to Channelstream happened') |
|
80 | 80 | if raise_exc: |
|
81 | 81 | raise ChannelstreamConnectionException() |
|
82 | 82 | log.debug('Got channelstream response: %s', response) |
|
83 | 83 | return response |
|
84 | 84 | |
|
85 | 85 | |
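A minimal sketch of driving the request helper above; the server address and secret are illustrative placeholders, and the secret must match the one the channelstream server was started with, since it is used to sign the `x-channelstream-secret` header:

    config = {'server': '127.0.0.1:8000', 'secret': 'illustrative-secret'}
    payload = [{
        'type': 'message',
        'channel': 'broadcast',
        'message': {'message': 'build finished', 'level': 'success'},
    }]
    # raise_exc=False logs connection problems and returns None instead
    # of raising ChannelstreamConnectionException
    result = channelstream_request(config, payload, '/message', raise_exc=False)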
|
86 | 86 | def get_user_data(user_id): |
|
87 | 87 | user = User.get(user_id) |
|
88 | 88 | return { |
|
89 | 89 | 'id': user.user_id, |
|
90 | 90 | 'username': user.username, |
|
91 | 91 | 'first_name': user.first_name, |
|
92 | 92 | 'last_name': user.last_name, |
|
93 | 93 | 'icon_link': h.gravatar_url(user.email, 60), |
|
94 | 94 | 'display_name': h.person(user, 'username_or_name_or_email'), |
|
95 | 95 | 'display_link': h.link_to_user(user), |
|
96 | 96 | 'notifications': user.user_data.get('notification_status', True) |
|
97 | 97 | } |
|
98 | 98 | |
|
99 | 99 | |
|
100 | 100 | def broadcast_validator(channel_name): |
|
101 | 101 | """ checks if user can access the broadcast channel """ |
|
102 | 102 | if channel_name == 'broadcast': |
|
103 | 103 | return True |
|
104 | 104 | |
|
105 | 105 | |
|
106 | 106 | def repo_validator(channel_name): |
|
107 | 107 | """ checks if user can access the broadcast channel """ |
|
108 | 108 | channel_prefix = '/repo$' |
|
109 | 109 | if channel_name.startswith(channel_prefix): |
|
110 | 110 | elements = channel_name[len(channel_prefix):].split('$') |
|
111 | 111 | repo_name = elements[0] |
|
112 | 112 | can_access = HasRepoPermissionAny( |
|
113 | 113 | 'repository.read', |
|
114 | 114 | 'repository.write', |
|
115 | 115 | 'repository.admin')(repo_name) |
|
116 | 116 | log.debug( |
|
117 | 117 | 'permission check for %s channel resulted in %s', |
|
118 | 118 | repo_name, can_access) |
|
119 | 119 | if can_access: |
|
120 | 120 | return True |
|
121 | 121 | return False |
|
122 | 122 | |
|
123 | 123 | |
|
124 | 124 | def check_channel_permissions(channels, plugin_validators, should_raise=True): |
|
125 | 125 | valid_channels = [] |
|
126 | 126 | |
|
127 | 127 | validators = [broadcast_validator, repo_validator] |
|
128 | 128 | if plugin_validators: |
|
129 | 129 | validators.extend(plugin_validators) |
|
130 | 130 | for channel_name in channels: |
|
131 | 131 | is_valid = False |
|
132 | 132 | for validator in validators: |
|
133 | 133 | if validator(channel_name): |
|
134 | 134 | is_valid = True |
|
135 | 135 | break |
|
136 | 136 | if is_valid: |
|
137 | 137 | valid_channels.append(channel_name) |
|
138 | 138 | else: |
|
139 | 139 | if should_raise: |
|
140 | 140 | raise ChannelstreamPermissionException() |
|
141 | 141 | return valid_channels |
|
142 | 142 | |
|
143 | 143 | |
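An illustrative call of the permission check above; the channel names are made up, and a plugin validator is simply any callable that returns a truthy value for channels it accepts:

    def chat_plugin_validator(channel_name):
        # hypothetical validator contributed by a plugin
        return channel_name.startswith('/plugin$chat$')

    requested = ['broadcast', '/repo$acme/backend$', '/plugin$chat$lobby']
    # with should_raise=False, unknown channels are silently dropped
    # instead of raising ChannelstreamPermissionException
    allowed = check_channel_permissions(
        requested, [chat_plugin_validator], should_raise=False)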
|
144 | 144 | def get_channels_info(config, channels): |
|
145 | 145 | payload = {'channels': channels} |
|
146 | 146 | # gather persistence info |
|
147 | 147 | return channelstream_request(config, payload, '/info') |
|
148 | 148 | |
|
149 | 149 | |
|
150 | 150 | def parse_channels_info(info_result, include_channel_info=None): |
|
151 | 151 | """ |
|
152 | 152 | Returns data that contains only information that is safe to be |
|
153 | 153 | presented to clients |
|
154 | 154 | """ |
|
155 | 155 | include_channel_info = include_channel_info or [] |
|
156 | 156 | |
|
157 | 157 | user_state_dict = {} |
|
158 | 158 | for userinfo in info_result['users']: |
|
159 | 159 | user_state_dict[userinfo['user']] = { |
|
160 | 160 | k: v for k, v in userinfo['state'].items() |
|
161 | 161 | if k in USER_STATE_PUBLIC_KEYS |
|
162 | 162 | } |
|
163 | 163 | |
|
164 | 164 | channels_info = {} |
|
165 | 165 | |
|
166 | 166 | for c_name, c_info in info_result['channels'].items(): |
|
167 | 167 | if c_name not in include_channel_info: |
|
168 | 168 | continue |
|
169 | 169 | connected_list = [] |
|
170 | 170 | for username in c_info['users']: |
|
171 | 171 | connected_list.append({ |
|
172 | 172 | 'user': username, |
|
173 | 173 | 'state': user_state_dict[username] |
|
174 | 174 | }) |
|
175 | 175 | channels_info[c_name] = {'users': connected_list, |
|
176 | 176 | 'history': c_info['history']} |
|
177 | 177 | |
|
178 | 178 | return channels_info |
|
179 | 179 | |
|
180 | 180 | |
|
181 | 181 | def log_filepath(history_location, channel_name): |
|
182 | 182 | hasher = hashlib.sha256() |
|
183 | 183 | hasher.update(channel_name.encode('utf8')) |
|
184 | 184 | filename = '{}.log'.format(hasher.hexdigest()) |
|
185 | 185 | filepath = os.path.join(history_location, filename) |
|
186 | 186 | return filepath |
|
187 | 187 | |
|
188 | 188 | |
|
189 | 189 | def read_history(history_location, channel_name): |
|
190 | 190 | filepath = log_filepath(history_location, channel_name) |
|
191 | 191 | if not os.path.exists(filepath): |
|
192 | 192 | return [] |
|
193 | 193 | history_lines_limit = -100 |
|
194 | 194 | history = [] |
|
195 | 195 | with open(filepath, 'rb') as f: |
|
196 | 196 | for line in f.readlines()[history_lines_limit:]: |
|
197 | 197 | try: |
|
198 | 198 | history.append(json.loads(line)) |
|
199 | 199 | except Exception: |
|
200 | 200 | log.exception('Failed to load history') |
|
201 | 201 | return history |
|
202 | 202 | |
|
203 | 203 | |
|
204 | 204 | def update_history_from_logs(config, channels, payload): |
|
205 | 205 | history_location = config.get('history.location') |
|
206 | 206 | for channel in channels: |
|
207 | 207 | history = read_history(history_location, channel) |
|
208 | 208 | payload['channels_info'][channel]['history'] = history |
|
209 | 209 | |
|
210 | 210 | |
|
211 | 211 | def write_history(config, message): |
|
212 | 212 | """ writes a messge to a base64encoded filename """ |
|
213 | 213 | history_location = config.get('history.location') |
|
214 | 214 | if not os.path.exists(history_location): |
|
215 | 215 | return |
|
216 | 216 | try: |
|
217 | 217 | LOCK.acquire_write_lock() |
|
218 | 218 | filepath = log_filepath(history_location, message['channel']) |
|
219 | 219 | with open(filepath, 'a') as f: |
|
220 | 220 | json.dump(message, f) |
|
221 | 221 | f.write('\n') |
|
222 | 222 | finally: |
|
223 | 223 | LOCK.release_write_lock() |
|
224 | 224 | |
|
225 | 225 | |
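Sketch of the history round trip implemented above, assuming `history.location` points at an existing directory (the path below is illustrative): each message is appended as one JSON line to a file named after the sha256 of the channel, and read_history returns up to the last 100 entries:

    config = {'history.location': '/var/opt/channelstream-history'}
    write_history(config, {'channel': 'broadcast',
                           'message': {'message': 'hi'}})
    recent = read_history(config['history.location'], 'broadcast')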
|
226 | 226 | def get_connection_validators(registry): |
|
227 | 227 | validators = [] |
|
228 | 228 | for k, config in registry.rhodecode_plugins.items(): |
|
229 | 229 | validator = config.get('channelstream', {}).get('connect_validator') |
|
230 | 230 | if validator: |
|
231 | 231 | validators.append(validator) |
|
232 | 232 | return validators |
|
233 | 233 | |
|
234 | 234 | |
|
235 | 235 | def get_channelstream_config(registry=None): |
|
236 | 236 | if not registry: |
|
237 | 237 | registry = get_current_registry() |
|
238 | 238 | |
|
239 | 239 | rhodecode_plugins = getattr(registry, 'rhodecode_plugins', {}) |
|
240 | 240 | channelstream_config = rhodecode_plugins.get('channelstream', {}) |
|
241 | 241 | return channelstream_config |
|
242 | 242 | |
|
243 | 243 | |
|
244 | 244 | def post_message(channel, message, username, registry=None): |
|
245 | 245 | channelstream_config = get_channelstream_config(registry) |
|
246 | 246 | if not channelstream_config.get('enabled'): |
|
247 | 247 | return |
|
248 | 248 | |
|
249 | 249 | message_obj = message |
|
250 | 250 | if isinstance(message, str): |
|
251 | 251 | message_obj = { |
|
252 | 252 | 'message': message, |
|
253 | 253 | 'level': 'success', |
|
254 | 254 | 'topic': '/notifications' |
|
255 | 255 | } |
|
256 | 256 | |
|
257 | 257 | log.debug('Channelstream: sending notification to channel %s', channel) |
|
258 | 258 | payload = { |
|
259 | 259 | 'type': 'message', |
|
260 | 260 | 'timestamp': datetime.datetime.utcnow(), |
|
261 | 261 | 'user': 'system', |
|
262 | 262 | 'exclude_users': [username], |
|
263 | 263 | 'channel': channel, |
|
264 | 264 | 'message': message_obj |
|
265 | 265 | } |
|
266 | 266 | |
|
267 | 267 | try: |
|
268 | 268 | return channelstream_request( |
|
269 | 269 | channelstream_config, [payload], '/message', |
|
270 | 270 | raise_exc=False) |
|
271 | 271 | except ChannelstreamException: |
|
272 | 272 | log.exception('Failed to send channelstream data') |
|
273 | 273 | raise |
|
274 | 274 | |
|
275 | 275 | |
|
276 | 276 | def _reload_link(label): |
|
277 | 277 | return ( |
|
278 | 278 | '<a onclick="window.location.reload()">' |
|
279 | 279 | '<strong>{}</strong>' |
|
280 | 280 | '</a>'.format(label) |
|
281 | 281 | ) |
|
282 | 282 | |
|
283 | 283 | |
|
284 | 284 | def pr_channel(pull_request): |
|
285 | 285 | repo_name = pull_request.target_repo.repo_name |
|
286 | 286 | pull_request_id = pull_request.pull_request_id |
|
287 | 287 | channel = '/repo${}$/pr/{}'.format(repo_name, pull_request_id) |
|
288 | 288 | log.debug('Getting pull-request channelstream broadcast channel: %s', channel) |
|
289 | 289 | return channel |
|
290 | 290 | |
|
291 | 291 | |
|
292 | 292 | def comment_channel(repo_name, commit_obj=None, pull_request_obj=None): |
|
293 | 293 | channel = None |
|
294 | 294 | if commit_obj: |
|
295 | 295 | channel = u'/repo${}$/commit/{}'.format( |
|
296 | 296 | repo_name, commit_obj.raw_id |
|
297 | 297 | ) |
|
298 | 298 | elif pull_request_obj: |
|
299 | 299 | channel = u'/repo${}$/pr/{}'.format( |
|
300 | 300 | repo_name, pull_request_obj.pull_request_id |
|
301 | 301 | ) |
|
302 | 302 | log.debug('Getting comment channelstream broadcast channel: %s', channel) |
|
303 | 303 | |
|
304 | 304 | return channel |
|
305 | 305 | |
|
306 | 306 | |
|
307 | 307 | def pr_update_channelstream_push(request, pr_broadcast_channel, user, msg, **kwargs): |
|
308 | 308 | """ |
|
309 | 309 | Channel push on pull request update |
|
310 | 310 | """ |
|
311 | 311 | if not pr_broadcast_channel: |
|
312 | 312 | return |
|
313 | 313 | |
|
314 | 314 | _ = request.translate |
|
315 | 315 | |
|
316 | 316 | message = '{} {}'.format( |
|
317 | 317 | msg, |
|
318 | 318 | _reload_link(_(' Reload page to load changes'))) |
|
319 | 319 | |
|
320 | 320 | message_obj = { |
|
321 | 321 | 'message': message, |
|
322 | 322 | 'level': 'success', |
|
323 | 323 | 'topic': '/notifications' |
|
324 | 324 | } |
|
325 | 325 | |
|
326 | 326 | post_message( |
|
327 | 327 | pr_broadcast_channel, message_obj, user.username, |
|
328 | 328 | registry=request.registry) |
|
329 | 329 | |
|
330 | 330 | |
|
331 | 331 | def comment_channelstream_push(request, comment_broadcast_channel, user, msg, **kwargs): |
|
332 | 332 | """ |
|
333 | 333 | Channelstream push on comment action, on commit, or pull-request |
|
334 | 334 | """ |
|
335 | 335 | if not comment_broadcast_channel: |
|
336 | 336 | return |
|
337 | 337 | |
|
338 | 338 | _ = request.translate |
|
339 | 339 | |
|
340 | 340 | comment_data = kwargs.pop('comment_data', {}) |
|
341 | 341 | user_data = kwargs.pop('user_data', {}) |
|
342 | 342 | comment_id = list(comment_data.keys())[0] if comment_data else '' |
|
343 | 343 | |
|
344 | 344 | message = '<strong>{}</strong> {} #{}'.format( |
|
345 | 345 | user.username, |
|
346 | 346 | msg, |
|
347 | 347 | comment_id, |
|
348 | 348 | ) |
|
349 | 349 | |
|
350 | 350 | message_obj = { |
|
351 | 351 | 'message': message, |
|
352 | 352 | 'level': 'success', |
|
353 | 353 | 'topic': '/notifications' |
|
354 | 354 | } |
|
355 | 355 | |
|
356 | 356 | post_message( |
|
357 | 357 | comment_broadcast_channel, message_obj, user.username, |
|
358 | 358 | registry=request.registry) |
|
359 | 359 | |
|
360 | 360 | message_obj = { |
|
361 | 361 | 'message': None, |
|
362 | 362 | 'user': user.username, |
|
363 | 363 | 'comment_id': comment_id, |
|
364 | 364 | 'comment_data': comment_data, |
|
365 | 365 | 'user_data': user_data, |
|
366 | 366 | 'topic': '/comment' |
|
367 | 367 | } |
|
368 | 368 | post_message( |
|
369 | 369 | comment_broadcast_channel, message_obj, user.username, |
|
370 | 370 | registry=request.registry) |
@@ -1,2155 +1,2155 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | Helper functions |
|
23 | 23 | |
|
24 | 24 | Consists of functions typically used within templates, but also |
|
25 | 25 | available to Controllers. This module is available to both as 'h'. |
|
26 | 26 | """ |
|
27 | 27 | import base64 |
|
28 | 28 | import collections |
|
29 | 29 | |
|
30 | 30 | import os |
|
31 | 31 | import random |
|
32 | 32 | import hashlib |
|
33 | 33 | import io |
|
34 | 34 | import textwrap |
|
35 | 35 | import urllib.request, urllib.parse, urllib.error |
|
36 | 36 | import math |
|
37 | 37 | import logging |
|
38 | 38 | import re |
|
39 | 39 | import time |
|
40 | 40 | import string |
|
41 | 41 | import hashlib |
|
42 | 42 | import regex |
|
43 | 43 | from collections import OrderedDict |
|
44 | 44 | |
|
45 | 45 | import pygments |
|
46 | 46 | import itertools |
|
47 | 47 | import fnmatch |
|
48 | 48 | import bleach |
|
49 | 49 | |
|
50 | 50 | from datetime import datetime |
|
51 | 51 | from functools import partial |
|
52 | 52 | from pygments.formatters.html import HtmlFormatter |
|
53 | 53 | from pygments.lexers import ( |
|
54 | 54 | get_lexer_by_name, get_lexer_for_filename, get_lexer_for_mimetype) |
|
55 | 55 | |
|
56 | 56 | from pyramid.threadlocal import get_current_request |
|
57 | 57 | from tempita import looper |
|
58 | 58 | from webhelpers2.html import literal, HTML, escape |
|
59 | 59 | from webhelpers2.html._autolink import _auto_link_urls |
|
60 | 60 | from webhelpers2.html.tools import ( |
|
61 | 61 | button_to, highlight, js_obfuscate, strip_links, strip_tags) |
|
62 | 62 | |
|
63 | 63 | from webhelpers2.text import ( |
|
64 | 64 | chop_at, collapse, convert_accented_entities, |
|
65 | 65 | convert_misc_entities, lchop, plural, rchop, remove_formatting, |
|
66 | 66 | replace_whitespace, urlify, truncate, wrap_paragraphs) |
|
67 | 67 | from webhelpers2.date import time_ago_in_words |
|
68 | 68 | |
|
69 | 69 | from webhelpers2.html.tags import ( |
|
70 | 70 | _input, NotGiven, _make_safe_id_component as safeid, |
|
71 | 71 | form as insecure_form, |
|
72 | 72 | auto_discovery_link, checkbox, end_form, file, |
|
73 | 73 | hidden, image, javascript_link, link_to, link_to_if, link_to_unless, ol, |
|
74 | 74 | select as raw_select, stylesheet_link, submit, text, password, textarea, |
|
75 | 75 | ul, radio, Options) |
|
76 | 76 | |
|
77 | 77 | from webhelpers2.number import format_byte_size |
|
78 | 78 | |
|
79 | 79 | from rhodecode.lib.action_parser import action_parser |
|
80 | 80 | from rhodecode.lib.pagination import Page, RepoPage, SqlPage |
|
81 | 81 | from rhodecode.lib.ext_json import json |
|
82 | 82 | from rhodecode.lib.utils import repo_name_slug, get_custom_lexer |
|
83 | 83 | from rhodecode.lib.utils2 import ( |
|
84 | 84 | str2bool, safe_unicode, safe_str, |
|
85 | 85 | get_commit_safe, datetime_to_time, time_to_datetime, time_to_utcdatetime, |
|
86 | 86 | AttributeDict, safe_int, md5, md5_safe, get_host_info) |
|
87 | 87 | from rhodecode.lib.markup_renderer import MarkupRenderer, relative_links |
|
88 | 88 | from rhodecode.lib.vcs.exceptions import CommitDoesNotExistError |
|
89 | 89 | from rhodecode.lib.vcs.backends.base import BaseChangeset, EmptyCommit |
|
90 | 90 | from rhodecode.lib.vcs.conf.settings import ARCHIVE_SPECS |
|
91 | 91 | from rhodecode.lib.index.search_utils import get_matching_line_offsets |
|
92 | 92 | from rhodecode.config.conf import DATE_FORMAT, DATETIME_FORMAT |
|
93 | 93 | from rhodecode.model.changeset_status import ChangesetStatusModel |
|
94 | 94 | from rhodecode.model.db import Permission, User, Repository, UserApiKeys, FileStore |
|
95 | 95 | from rhodecode.model.repo_group import RepoGroupModel |
|
96 | 96 | from rhodecode.model.settings import IssueTrackerSettingsModel |
|
97 | 97 | |
|
98 | 98 | |
|
99 | 99 | log = logging.getLogger(__name__) |
|
100 | 100 | |
|
101 | 101 | |
|
102 | 102 | DEFAULT_USER = User.DEFAULT_USER |
|
103 | 103 | DEFAULT_USER_EMAIL = User.DEFAULT_USER_EMAIL |
|
104 | 104 | |
|
105 | 105 | |
|
106 | 106 | def asset(path, ver=None, **kwargs): |
|
107 | 107 | """ |
|
108 | 108 | Helper to generate a static asset file path for rhodecode assets |
|
109 | 109 | |
|
110 | 110 | eg. h.asset('images/image.png', ver='3923') |
|
111 | 111 | |
|
112 | 112 | :param path: path of asset |
|
113 | 113 | :param ver: optional version query param to append as ?ver= |
|
114 | 114 | """ |
|
115 | 115 | request = get_current_request() |
|
116 | 116 | query = {} |
|
117 | 117 | query.update(kwargs) |
|
118 | 118 | if ver: |
|
119 | 119 | query = {'ver': ver} |
|
120 | 120 | return request.static_path( |
|
121 | 121 | 'rhodecode:public/{}'.format(path), _query=query) |
|
122 | 122 | |
|
123 | 123 | |
|
124 | 124 | default_html_escape_table = { |
|
125 | 125 | ord('&'): u'&', |
|
126 | 126 | ord('<'): u'<', |
|
127 | 127 | ord('>'): u'>', |
|
128 | 128 | ord('"'): u'"', |
|
129 | 129 | ord("'"): u''', |
|
130 | 130 | } |
|
131 | 131 | |
|
132 | 132 | |
|
133 | 133 | def html_escape(text, html_escape_table=default_html_escape_table): |
|
134 | 134 | """Produce entities within text.""" |
|
135 | 135 | return text.translate(html_escape_table) |
|
136 | 136 | |
|
137 | 137 | |
|
138 | 138 | def chop_at_smart(s, sub, inclusive=False, suffix_if_chopped=None): |
|
139 | 139 | """ |
|
140 | 140 | Truncate string ``s`` at the first occurrence of ``sub``. |
|
141 | 141 | |
|
142 | 142 | If ``inclusive`` is true, truncate just after ``sub`` rather than at it. |
|
143 | 143 | """ |
|
144 | 144 | suffix_if_chopped = suffix_if_chopped or '' |
|
145 | 145 | pos = s.find(sub) |
|
146 | 146 | if pos == -1: |
|
147 | 147 | return s |
|
148 | 148 | |
|
149 | 149 | if inclusive: |
|
150 | 150 | pos += len(sub) |
|
151 | 151 | |
|
152 | 152 | chopped = s[:pos] |
|
153 | 153 | left = s[pos:].strip() |
|
154 | 154 | |
|
155 | 155 | if left and suffix_if_chopped: |
|
156 | 156 | chopped += suffix_if_chopped |
|
157 | 157 | |
|
158 | 158 | return chopped |
|
159 | 159 | |
|
160 | 160 | |
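For example (illustrative values):

    chop_at_smart('Intro. The rest of the text', '.', suffix_if_chopped='...')
    # -> 'Intro...'   chopped at the first '.'; the suffix is appended
    #                 because non-empty text remained after the cut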
|
161 | 161 | def shorter(text, size=20, prefix=False): |
|
162 | 162 | postfix = '...' |
|
163 | 163 | if len(text) > size: |
|
164 | 164 | if prefix: |
|
165 | 165 | # shorten in front |
|
166 | 166 | return postfix + text[-(size - len(postfix)):] |
|
167 | 167 | else: |
|
168 | 168 | return text[:size - len(postfix)] + postfix |
|
169 | 169 | return text |
|
170 | 170 | |
|
171 | 171 | |
|
172 | 172 | def reset(name, value=None, id=NotGiven, type="reset", **attrs): |
|
173 | 173 | """ |
|
174 | 174 | Reset button |
|
175 | 175 | """ |
|
176 | 176 | return _input(type, name, value, id, attrs) |
|
177 | 177 | |
|
178 | 178 | |
|
179 | 179 | def select(name, selected_values, options, id=NotGiven, **attrs): |
|
180 | 180 | |
|
181 | 181 | if isinstance(options, (list, tuple)): |
|
182 | 182 | options_iter = options |
|
183 | 183 | # Handle old value,label lists ... where value also can be value,label lists |
|
184 | 184 | options = Options() |
|
185 | 185 | for opt in options_iter: |
|
186 | 186 | if isinstance(opt, tuple) and len(opt) == 2: |
|
187 | 187 | value, label = opt |
|
188 | 188 | elif isinstance(opt, str): |
|
189 | 189 | value = label = opt |
|
190 | 190 | else: |
|
191 | 191 | raise ValueError('invalid select option type %r' % type(opt)) |
|
192 | 192 | |
|
193 | 193 | if isinstance(value, (list, tuple)): |
|
194 | 194 | option_group = options.add_optgroup(label) |
|
195 | 195 | for opt2 in value: |
|
196 | 196 | if isinstance(opt2, tuple) and len(opt2) == 2: |
|
197 | 197 | group_value, group_label = opt2 |
|
198 | 198 | elif isinstance(opt2, str): |
|
199 | 199 | group_value = group_label = opt2 |
|
200 | 200 | else: |
|
201 | 201 | raise ValueError('invalid select option type %r' % type(opt2)) |
|
202 | 202 | |
|
203 | 203 | option_group.add_option(group_label, group_value) |
|
204 | 204 | else: |
|
205 | 205 | options.add_option(label, value) |
|
206 | 206 | |
|
207 | 207 | return raw_select(name, selected_values, options, id=id, **attrs) |
|
208 | 208 | |
|
209 | 209 | |
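A small illustrative call of the wrapper above, using the old `(value, label)` list format, including a nested value list that becomes an `<optgroup>`:

    select('landing_rev', 'branch:default', [
        ('tip', 'latest tip'),
        # a (list, label) pair is rendered as an <optgroup> labelled 'Branches'
        ([('branch:default', 'default')], 'Branches'),
    ])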
|
210 | 210 | def branding(name, length=40): |
|
211 | 211 | return truncate(name, length, indicator="") |
|
212 | 212 | |
|
213 | 213 | |
|
214 | 214 | def FID(raw_id, path): |
|
215 | 215 | """ |
|
216 | 216 | Creates a unique ID for a filenode based on a hash of its path and commit; |
|
217 | 217 | it's safe to use in urls |
|
218 | 218 | |
|
219 | 219 | :param raw_id: |
|
220 | 220 | :param path: |
|
221 | 221 | """ |
|
222 | 222 | |
|
223 | 223 | return 'c-%s-%s' % (short_id(raw_id), md5_safe(path)[:12]) |
|
224 | 224 | |
|
225 | 225 | |
|
226 | 226 | class _GetError(object): |
|
227 | 227 | """Get error from form_errors, and represent it as span wrapped error |
|
228 | 228 | message |
|
229 | 229 | |
|
230 | 230 | :param field_name: field to fetch errors for |
|
231 | 231 | :param form_errors: form errors dict |
|
232 | 232 | """ |
|
233 | 233 | |
|
234 | 234 | def __call__(self, field_name, form_errors): |
|
235 | 235 | tmpl = """<span class="error_msg">%s</span>""" |
|
236 | 236 | if form_errors and field_name in form_errors: |
|
237 | 237 | return literal(tmpl % form_errors.get(field_name)) |
|
238 | 238 | |
|
239 | 239 | |
|
240 | 240 | get_error = _GetError() |
|
241 | 241 | |
|
242 | 242 | |
|
243 | 243 | class _ToolTip(object): |
|
244 | 244 | |
|
245 | 245 | def __call__(self, tooltip_title, trim_at=50): |
|
246 | 246 | """ |
|
247 | 247 | Special function to wrap our text into nicely formatted, |
|
248 | 248 | auto-wrapped text |
|
249 | 249 | |
|
250 | 250 | :param tooltip_title: |
|
251 | 251 | """ |
|
252 | 252 | tooltip_title = escape(tooltip_title) |
|
253 | 253 | tooltip_title = tooltip_title.replace('<', '<').replace('>', '>') |
|
254 | 254 | return tooltip_title |
|
255 | 255 | |
|
256 | 256 | |
|
257 | 257 | tooltip = _ToolTip() |
|
258 | 258 | |
|
259 | 259 | files_icon = u'<i class="file-breadcrumb-copy tooltip icon-clipboard clipboard-action" data-clipboard-text="{}" title="Copy file path"></i>' |
|
260 | 260 | |
|
261 | 261 | |
|
262 | 262 | def files_breadcrumbs(repo_name, repo_type, commit_id, file_path, landing_ref_name=None, at_ref=None, |
|
263 | 263 | limit_items=False, linkify_last_item=False, hide_last_item=False, |
|
264 | 264 | copy_path_icon=True): |
|
265 | 265 | if isinstance(file_path, str): |
|
266 | 266 | file_path = safe_unicode(file_path) |
|
267 | 267 | |
|
268 | 268 | if at_ref: |
|
269 | 269 | route_qry = {'at': at_ref} |
|
270 | 270 | default_landing_ref = at_ref or landing_ref_name or commit_id |
|
271 | 271 | else: |
|
272 | 272 | route_qry = None |
|
273 | 273 | default_landing_ref = commit_id |
|
274 | 274 | |
|
275 | 275 | # first segment is a `HOME` link to repo files root location |
|
276 | 276 | root_name = literal(u'<i class="icon-home"></i>') |
|
277 | 277 | |
|
278 | 278 | url_segments = [ |
|
279 | 279 | link_to( |
|
280 | 280 | root_name, |
|
281 | 281 | repo_files_by_ref_url( |
|
282 | 282 | repo_name, |
|
283 | 283 | repo_type, |
|
284 | 284 | f_path=None, # None here is a special case for SVN repos, |
|
285 | 285 | # that won't prefix with a ref |
|
286 | 286 | ref_name=default_landing_ref, |
|
287 | 287 | commit_id=commit_id, |
|
288 | 288 | query=route_qry |
|
289 | 289 | ) |
|
290 | 290 | )] |
|
291 | 291 | |
|
292 | 292 | path_segments = file_path.split('/') |
|
293 | 293 | last_cnt = len(path_segments) - 1 |
|
294 | 294 | for cnt, segment in enumerate(path_segments): |
|
295 | 295 | if not segment: |
|
296 | 296 | continue |
|
297 | 297 | segment_html = escape(segment) |
|
298 | 298 | |
|
299 | 299 | last_item = cnt == last_cnt |
|
300 | 300 | |
|
301 | 301 | if last_item and hide_last_item: |
|
302 | 302 | # iterate over and hide last element |
|
303 | 303 | continue |
|
304 | 304 | |
|
305 | 305 | if last_item and linkify_last_item is False: |
|
306 | 306 | # plain version |
|
307 | 307 | url_segments.append(segment_html) |
|
308 | 308 | else: |
|
309 | 309 | url_segments.append( |
|
310 | 310 | link_to( |
|
311 | 311 | segment_html, |
|
312 | 312 | repo_files_by_ref_url( |
|
313 | 313 | repo_name, |
|
314 | 314 | repo_type, |
|
315 | 315 | f_path='/'.join(path_segments[:cnt + 1]), |
|
316 | 316 | ref_name=default_landing_ref, |
|
317 | 317 | commit_id=commit_id, |
|
318 | 318 | query=route_qry |
|
319 | 319 | ), |
|
320 | 320 | )) |
|
321 | 321 | |
|
322 | 322 | limited_url_segments = url_segments[:1] + ['...'] + url_segments[-5:] |
|
323 | 323 | if limit_items and len(limited_url_segments) < len(url_segments): |
|
324 | 324 | url_segments = limited_url_segments |
|
325 | 325 | |
|
326 | 326 | full_path = file_path |
|
327 | 327 | if copy_path_icon: |
|
328 | 328 | icon = files_icon.format(escape(full_path)) |
|
329 | 329 | else: |
|
330 | 330 | icon = '' |
|
331 | 331 | |
|
332 | 332 | if file_path == '': |
|
333 | 333 | return root_name |
|
334 | 334 | else: |
|
335 | 335 | return literal(' / '.join(url_segments) + icon) |
|
336 | 336 | |
|
337 | 337 | |
|
338 | 338 | def files_url_data(request): |
|
339 | 339 | import urllib.request, urllib.parse, urllib.error |
|
340 | 340 | matchdict = request.matchdict |
|
341 | 341 | |
|
342 | 342 | if 'f_path' not in matchdict: |
|
343 | 343 | matchdict['f_path'] = '' |
|
344 | 344 | else: |
|
345 | 345 | matchdict['f_path'] = urllib.parse.quote(safe_str(matchdict['f_path'])) |
|
346 | 346 | if 'commit_id' not in matchdict: |
|
347 | 347 | matchdict['commit_id'] = 'tip' |
|
348 | 348 | |
|
349 | 349 | return json.dumps(matchdict) |
|
350 | 350 | |
|
351 | 351 | |
|
352 | 352 | def repo_files_by_ref_url(db_repo_name, db_repo_type, f_path, ref_name, commit_id, query=None, ): |
|
353 | 353 | _is_svn = is_svn(db_repo_type) |
|
354 | 354 | final_f_path = f_path |
|
355 | 355 | |
|
356 | 356 | if _is_svn: |
|
357 | 357 | """ |
|
358 | 358 | For SVN the ref_name cannot be used as a commit_id; it needs to be prefixed with |
|
359 | 359 | the actual commit_id followed by the ref_name. This should be done only in case |
|
360 | 360 | this is an initial landing url, without additional paths. |
|
361 | 361 | |
|
362 | 362 | like: /1000/tags/1.0.0/?at=tags/1.0.0 |
|
363 | 363 | """ |
|
364 | 364 | |
|
365 | 365 | if ref_name and ref_name != 'tip': |
|
366 | 366 | # NOTE(marcink): for svn the ref_name is actually the stored path, so we prefix it |
|
367 | 367 | # for SVN we only do this magic prefix if it's root, e.g. landing revision |
|
368 | 368 | # of files link. If we are in the tree we don't need this since we traverse the url |
|
369 | 369 | # that has everything stored |
|
370 | 370 | if f_path in ['', '/']: |
|
371 | 371 | final_f_path = '/'.join([ref_name, f_path]) |
|
372 | 372 | |
|
373 | 373 | # SVN always needs a commit_id explicitly, without a named REF |
|
374 | 374 | default_commit_id = commit_id |
|
375 | 375 | else: |
|
376 | 376 | """ |
|
377 | 377 | For git and mercurial we construct a new URL using the names instead of commit_id |
|
378 | 378 | like: /master/some_path?at=master |
|
379 | 379 | """ |
|
380 | 380 | # We currently do not support branches with slashes |
|
381 | 381 | if '/' in ref_name: |
|
382 | 382 | default_commit_id = commit_id |
|
383 | 383 | else: |
|
384 | 384 | default_commit_id = ref_name |
|
385 | 385 | |
|
386 | 386 | # sometimes we pass f_path as None, to indicate explicit no prefix, |
|
387 | 387 | # we translate it to string to not have None |
|
388 | 388 | final_f_path = final_f_path or '' |
|
389 | 389 | |
|
390 | 390 | files_url = route_path( |
|
391 | 391 | 'repo_files', |
|
392 | 392 | repo_name=db_repo_name, |
|
393 | 393 | commit_id=default_commit_id, |
|
394 | 394 | f_path=final_f_path, |
|
395 | 395 | _query=query |
|
396 | 396 | ) |
|
397 | 397 | return files_url |
|
398 | 398 | |
|
399 | 399 | |
|
400 | 400 | def code_highlight(code, lexer, formatter, use_hl_filter=False): |
|
401 | 401 | """ |
|
402 | 402 | Lex ``code`` with ``lexer`` and format it with the formatter ``formatter``. |
|
403 | 403 | |
|
404 | 404 | If ``outfile`` is given and a valid file object (an object |
|
405 | 405 | with a ``write`` method), the result will be written to it, otherwise |
|
406 | 406 | it is returned as a string. |
|
407 | 407 | """ |
|
408 | 408 | if use_hl_filter: |
|
409 | 409 | # add HL filter |
|
410 | 410 | from rhodecode.lib.index import search_utils |
|
411 | 411 | lexer.add_filter(search_utils.ElasticSearchHLFilter()) |
|
412 | 412 | return pygments.format(pygments.lex(code, lexer), formatter) |
|
413 | 413 | |
|
414 | 414 | |
|
415 | 415 | class CodeHtmlFormatter(HtmlFormatter): |
|
416 | 416 | """ |
|
417 | 417 | My code Html Formatter for source codes |
|
418 | 418 | """ |
|
419 | 419 | |
|
420 | 420 | def wrap(self, source, outfile): |
|
421 | 421 | return self._wrap_div(self._wrap_pre(self._wrap_code(source))) |
|
422 | 422 | |
|
423 | 423 | def _wrap_code(self, source): |
|
424 | 424 | for cnt, it in enumerate(source): |
|
425 | 425 | i, t = it |
|
426 | 426 | t = '<div id="L%s">%s</div>' % (cnt + 1, t) |
|
427 | 427 | yield i, t |
|
428 | 428 | |
|
429 | 429 | def _wrap_tablelinenos(self, inner): |
|
430 | 430 | dummyoutfile = io.StringIO() |
|
431 | 431 | lncount = 0 |
|
432 | 432 | for t, line in inner: |
|
433 | 433 | if t: |
|
434 | 434 | lncount += 1 |
|
435 | 435 | dummyoutfile.write(line) |
|
436 | 436 | |
|
437 | 437 | fl = self.linenostart |
|
438 | 438 | mw = len(str(lncount + fl - 1)) |
|
439 | 439 | sp = self.linenospecial |
|
440 | 440 | st = self.linenostep |
|
441 | 441 | la = self.lineanchors |
|
442 | 442 | aln = self.anchorlinenos |
|
443 | 443 | nocls = self.noclasses |
|
444 | 444 | if sp: |
|
445 | 445 | lines = [] |
|
446 | 446 | |
|
447 | 447 | for i in range(fl, fl + lncount): |
|
448 | 448 | if i % st == 0: |
|
449 | 449 | if i % sp == 0: |
|
450 | 450 | if aln: |
|
451 | 451 | lines.append('<a href="#%s%d" class="special">%*d</a>' % |
|
452 | 452 | (la, i, mw, i)) |
|
453 | 453 | else: |
|
454 | 454 | lines.append('<span class="special">%*d</span>' % (mw, i)) |
|
455 | 455 | else: |
|
456 | 456 | if aln: |
|
457 | 457 | lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i)) |
|
458 | 458 | else: |
|
459 | 459 | lines.append('%*d' % (mw, i)) |
|
460 | 460 | else: |
|
461 | 461 | lines.append('') |
|
462 | 462 | ls = '\n'.join(lines) |
|
463 | 463 | else: |
|
464 | 464 | lines = [] |
|
465 | 465 | for i in range(fl, fl + lncount): |
|
466 | 466 | if i % st == 0: |
|
467 | 467 | if aln: |
|
468 | 468 | lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i)) |
|
469 | 469 | else: |
|
470 | 470 | lines.append('%*d' % (mw, i)) |
|
471 | 471 | else: |
|
472 | 472 | lines.append('') |
|
473 | 473 | ls = '\n'.join(lines) |
|
474 | 474 | |
|
475 | 475 | # in case you wonder about the seemingly redundant <div> here: since the |
|
476 | 476 | # content in the other cell also is wrapped in a div, some browsers in |
|
477 | 477 | # some configurations seem to mess up the formatting... |
|
478 | 478 | if nocls: |
|
479 | 479 | yield 0, ('<table class="%stable">' % self.cssclass + |
|
480 | 480 | '<tr><td><div class="linenodiv" ' |
|
481 | 481 | 'style="background-color: #f0f0f0; padding-right: 10px">' |
|
482 | 482 | '<pre style="line-height: 125%">' + |
|
483 | 483 | ls + '</pre></div></td><td id="hlcode" class="code">') |
|
484 | 484 | else: |
|
485 | 485 | yield 0, ('<table class="%stable">' % self.cssclass + |
|
486 | 486 | '<tr><td class="linenos"><div class="linenodiv"><pre>' + |
|
487 | 487 | ls + '</pre></div></td><td id="hlcode" class="code">') |
|
488 | 488 | yield 0, dummyoutfile.getvalue() |
|
489 | 489 | yield 0, '</td></tr></table>' |
|
490 | 490 | |
|
491 | 491 | |
|
492 | 492 | class SearchContentCodeHtmlFormatter(CodeHtmlFormatter): |
|
493 | 493 | def __init__(self, **kw): |
|
494 | 494 | # only show these line numbers if set |
|
495 | 495 | self.only_lines = kw.pop('only_line_numbers', []) |
|
496 | 496 | self.query_terms = kw.pop('query_terms', []) |
|
497 | 497 | self.max_lines = kw.pop('max_lines', 5) |
|
498 | 498 | self.line_context = kw.pop('line_context', 3) |
|
499 | 499 | self.url = kw.pop('url', None) |
|
500 | 500 | |
|
501 | 501 | super(CodeHtmlFormatter, self).__init__(**kw) |
|
502 | 502 | |
|
503 | 503 | def _wrap_code(self, source): |
|
504 | 504 | for cnt, it in enumerate(source): |
|
505 | 505 | i, t = it |
|
506 | 506 | t = '<pre>%s</pre>' % t |
|
507 | 507 | yield i, t |
|
508 | 508 | |
|
509 | 509 | def _wrap_tablelinenos(self, inner): |
|
510 | 510 | yield 0, '<table class="code-highlight %stable">' % self.cssclass |
|
511 | 511 | |
|
512 | 512 | last_shown_line_number = 0 |
|
513 | 513 | current_line_number = 1 |
|
514 | 514 | |
|
515 | 515 | for t, line in inner: |
|
516 | 516 | if not t: |
|
517 | 517 | yield t, line |
|
518 | 518 | continue |
|
519 | 519 | |
|
520 | 520 | if current_line_number in self.only_lines: |
|
521 | 521 | if last_shown_line_number + 1 != current_line_number: |
|
522 | 522 | yield 0, '<tr>' |
|
523 | 523 | yield 0, '<td class="line">...</td>' |
|
524 | 524 | yield 0, '<td id="hlcode" class="code"></td>' |
|
525 | 525 | yield 0, '</tr>' |
|
526 | 526 | |
|
527 | 527 | yield 0, '<tr>' |
|
528 | 528 | if self.url: |
|
529 | 529 | yield 0, '<td class="line"><a href="%s#L%i">%i</a></td>' % ( |
|
530 | 530 | self.url, current_line_number, current_line_number) |
|
531 | 531 | else: |
|
532 | 532 | yield 0, '<td class="line"><a href="">%i</a></td>' % ( |
|
533 | 533 | current_line_number) |
|
534 | 534 | yield 0, '<td id="hlcode" class="code">' + line + '</td>' |
|
535 | 535 | yield 0, '</tr>' |
|
536 | 536 | |
|
537 | 537 | last_shown_line_number = current_line_number |
|
538 | 538 | |
|
539 | 539 | current_line_number += 1 |
|
540 | 540 | |
|
541 | 541 | yield 0, '</table>' |
|
542 | 542 | |
|
543 | 543 | |
|
544 | 544 | def hsv_to_rgb(h, s, v): |
|
545 | 545 | """ Convert hsv color values to rgb """ |
|
546 | 546 | |
|
547 | 547 | if s == 0.0: |
|
548 | 548 | return v, v, v |
|
549 | 549 | i = int(h * 6.0) # XXX assume int() truncates! |
|
550 | 550 | f = (h * 6.0) - i |
|
551 | 551 | p = v * (1.0 - s) |
|
552 | 552 | q = v * (1.0 - s * f) |
|
553 | 553 | t = v * (1.0 - s * (1.0 - f)) |
|
554 | 554 | i = i % 6 |
|
555 | 555 | if i == 0: |
|
556 | 556 | return v, t, p |
|
557 | 557 | if i == 1: |
|
558 | 558 | return q, v, p |
|
559 | 559 | if i == 2: |
|
560 | 560 | return p, v, t |
|
561 | 561 | if i == 3: |
|
562 | 562 | return p, q, v |
|
563 | 563 | if i == 4: |
|
564 | 564 | return t, p, v |
|
565 | 565 | if i == 5: |
|
566 | 566 | return v, p, q |
|
567 | 567 | |
|
568 | 568 | |
|
569 | 569 | def unique_color_generator(n=10000, saturation=0.10, lightness=0.95): |
|
570 | 570 | """ |
|
571 | 571 | Generator for getting n evenly distributed colors using |
|
572 | 572 | hsv color and the golden ratio. It always returns the same order of colors |
|
573 | 573 | |
|
574 | 574 | :param n: number of colors to generate |
|
575 | 575 | :param saturation: saturation of returned colors |
|
576 | 576 | :param lightness: lightness of returned colors |
|
577 | 577 | :returns: RGB tuple |
|
578 | 578 | """ |
|
579 | 579 | |
|
580 | 580 | golden_ratio = 0.618033988749895 |
|
581 | 581 | h = 0.22717784590367374 |
|
582 | 582 | |
|
583 | 583 | for _ in range(n): |
|
584 | 584 | h += golden_ratio |
|
585 | 585 | h %= 1 |
|
586 | 586 | HSV_tuple = [h, saturation, lightness] |
|
587 | 587 | RGB_tuple = hsv_to_rgb(*HSV_tuple) |
|
588 | 588 | yield [str(int(x * 256)) for x in RGB_tuple] |
|
589 | 589 | |
|
590 | 590 | |
|
591 | 591 | def color_hasher(n=10000, saturation=0.10, lightness=0.95): |
|
592 | 592 | """ |
|
593 | 593 | Returns a function which, when called with an argument, returns a unique |
|
594 | 594 | color for that argument, e.g. |
|
595 | 595 | |
|
596 | 596 | :param n: number of colors to generate |
|
597 | 597 | :param saturation: saturation of returned colors |
|
598 | 598 | :param lightness: lightness of returned colors |
|
599 | 599 | :returns: css RGB string |
|
600 | 600 | |
|
601 | 601 | >>> color_hash = color_hasher() |
|
602 | 602 | >>> color_hash('hello') |
|
603 | 603 | 'rgb(34, 12, 59)' |
|
604 | 604 | >>> color_hash('hello') |
|
605 | 605 | 'rgb(34, 12, 59)' |
|
606 | 606 | >>> color_hash('other') |
|
607 | 607 | 'rgb(90, 224, 159)' |
|
608 | 608 | """ |
|
609 | 609 | |
|
610 | 610 | color_dict = {} |
|
611 | 611 | cgenerator = unique_color_generator( |
|
612 | 612 | saturation=saturation, lightness=lightness) |
|
613 | 613 | |
|
614 | 614 | def get_color_string(thing): |
|
615 | 615 | if thing in color_dict: |
|
616 | 616 | col = color_dict[thing] |
|
617 | 617 | else: |
|
618 | 618 | col = color_dict[thing] = next(cgenerator) |
|
619 | 619 | return "rgb(%s)" % (', '.join(col)) |
|
620 | 620 | |
|
621 | 621 | return get_color_string |
|
622 | 622 | |
|
623 | 623 | |
|
624 | 624 | def get_lexer_safe(mimetype=None, filepath=None): |
|
625 | 625 | """ |
|
626 | 626 | Tries to return a relevant pygments lexer using mimetype/filepath name, |
|
627 | 627 | defaulting to plain text if none could be found |
|
628 | 628 | """ |
|
629 | 629 | lexer = None |
|
630 | 630 | try: |
|
631 | 631 | if mimetype: |
|
632 | 632 | lexer = get_lexer_for_mimetype(mimetype) |
|
633 | 633 | if not lexer: |
|
634 | 634 | lexer = get_lexer_for_filename(filepath) |
|
635 | 635 | except pygments.util.ClassNotFound: |
|
636 | 636 | pass |
|
637 | 637 | |
|
638 | 638 | if not lexer: |
|
639 | 639 | lexer = get_lexer_by_name('text') |
|
640 | 640 | |
|
641 | 641 | return lexer |
|
642 | 642 | |
|
643 | 643 | |
|
644 | 644 | def get_lexer_for_filenode(filenode): |
|
645 | 645 | lexer = get_custom_lexer(filenode.extension) or filenode.lexer |
|
646 | 646 | return lexer |
|
647 | 647 | |
|
648 | 648 | |
|
649 | 649 | def pygmentize(filenode, **kwargs): |
|
650 | 650 | """ |
|
651 | 651 | pygmentize function using pygments |
|
652 | 652 | |
|
653 | 653 | :param filenode: |
|
654 | 654 | """ |
|
655 | 655 | lexer = get_lexer_for_filenode(filenode) |
|
656 | 656 | return literal(code_highlight(filenode.content, lexer, |
|
657 | 657 | CodeHtmlFormatter(**kwargs))) |
|
658 | 658 | |
|
659 | 659 | |
|
660 | 660 | def is_following_repo(repo_name, user_id): |
|
661 | 661 | from rhodecode.model.scm import ScmModel |
|
662 | 662 | return ScmModel().is_following_repo(repo_name, user_id) |
|
663 | 663 | |
|
664 | 664 | |
|
665 | 665 | class _Message(object): |
|
666 | 666 | """A message returned by ``Flash.pop_messages()``. |
|
667 | 667 | |
|
668 | 668 | Converting the message to a string returns the message text. Instances |
|
669 | 669 | also have the following attributes: |
|
670 | 670 | |
|
671 | 671 | * ``message``: the message text. |
|
672 | 672 | * ``category``: the category specified when the message was created. |
|
673 | 673 | """ |
|
674 | 674 | |
|
675 | 675 | def __init__(self, category, message, sub_data=None): |
|
676 | 676 | self.category = category |
|
677 | 677 | self.message = message |
|
678 | 678 | self.sub_data = sub_data or {} |
|
679 | 679 | |
|
680 | 680 | def __str__(self): |
|
681 | 681 | return self.message |
|
682 | 682 | |
|
683 | 683 | __unicode__ = __str__ |
|
684 | 684 | |
|
685 | 685 | def __html__(self): |
|
686 | 686 | return escape(safe_unicode(self.message)) |
|
687 | 687 | |
|
688 | 688 | |
|
689 | 689 | class Flash(object): |
|
690 | 690 | # List of allowed categories. If None, allow any category. |
|
691 | 691 | categories = ["warning", "notice", "error", "success"] |
|
692 | 692 | |
|
693 | 693 | # Default category if none is specified. |
|
694 | 694 | default_category = "notice" |
|
695 | 695 | |
|
696 | 696 | def __init__(self, session_key="flash", categories=None, |
|
697 | 697 | default_category=None): |
|
698 | 698 | """ |
|
699 | 699 | Instantiate a ``Flash`` object. |
|
700 | 700 | |
|
701 | 701 | ``session_key`` is the key to save the messages under in the user's |
|
702 | 702 | session. |
|
703 | 703 | |
|
704 | 704 | ``categories`` is an optional list which overrides the default list |
|
705 | 705 | of categories. |
|
706 | 706 | |
|
707 | 707 | ``default_category`` overrides the default category used for messages |
|
708 | 708 | when none is specified. |
|
709 | 709 | """ |
|
710 | 710 | self.session_key = session_key |
|
711 | 711 | if categories is not None: |
|
712 | 712 | self.categories = categories |
|
713 | 713 | if default_category is not None: |
|
714 | 714 | self.default_category = default_category |
|
715 | 715 | if self.categories and self.default_category not in self.categories: |
|
716 | 716 | raise ValueError( |
|
717 | 717 | "unrecognized default category %r" % (self.default_category,)) |
|
718 | 718 | |
|
719 | 719 | def pop_messages(self, session=None, request=None): |
|
720 | 720 | """ |
|
721 | 721 | Return all accumulated messages and delete them from the session. |
|
722 | 722 | |
|
723 | 723 | The return value is a list of ``Message`` objects. |
|
724 | 724 | """ |
|
725 | 725 | messages = [] |
|
726 | 726 | |
|
727 | 727 | if not session: |
|
728 | 728 | if not request: |
|
729 | 729 | request = get_current_request() |
|
730 | 730 | session = request.session |
|
731 | 731 | |
|
732 | 732 | # Pop the 'old' pylons flash messages. They are tuples of the form |
|
733 | 733 | # (category, message) |
|
734 | 734 | for cat, msg in session.pop(self.session_key, []): |
|
735 | 735 | messages.append(_Message(cat, msg)) |
|
736 | 736 | |
|
737 | 737 | # Pop the 'new' pyramid flash messages for each category as list |
|
738 | 738 | # of strings. |
|
739 | 739 | for cat in self.categories: |
|
740 | 740 | for msg in session.pop_flash(queue=cat): |
|
741 | 741 | sub_data = {} |
|
742 | 742 | if hasattr(msg, 'rsplit'): |
|
743 | 743 | flash_data = msg.rsplit('|DELIM|', 1) |
|
744 | 744 | org_message = flash_data[0] |
|
745 | 745 | if len(flash_data) > 1: |
|
746 | 746 | sub_data = json.loads(flash_data[1]) |
|
747 | 747 | else: |
|
748 | 748 | org_message = msg |
|
749 | 749 | |
|
750 | 750 | messages.append(_Message(cat, org_message, sub_data=sub_data)) |
|
751 | 751 | |
|
752 | 752 | # Map messages from the default queue to the 'notice' category. |
|
753 | 753 | for msg in session.pop_flash(): |
|
754 | 754 | messages.append(_Message('notice', msg)) |
|
755 | 755 | |
|
756 | 756 | session.save() |
|
757 | 757 | return messages |
|
758 | 758 | |
|
759 | 759 | def json_alerts(self, session=None, request=None): |
|
760 | 760 | payloads = [] |
|
761 | 761 | messages = flash.pop_messages(session=session, request=request) or [] |
|
762 | 762 | for message in messages: |
|
763 | 763 | payloads.append({ |
|
764 | 764 | 'message': { |
|
765 | 765 | 'message': u'{}'.format(message.message), |
|
766 | 766 | 'level': message.category, |
|
767 | 767 | 'force': True, |
|
768 | 768 | 'subdata': message.sub_data |
|
769 | 769 | } |
|
770 | 770 | }) |
|
771 | 771 | return json.dumps(payloads) |
|
772 | 772 | |
|
773 | 773 | def __call__(self, message, category=None, ignore_duplicate=True, |
|
774 | 774 | session=None, request=None): |
|
775 | 775 | |
|
776 | 776 | if not session: |
|
777 | 777 | if not request: |
|
778 | 778 | request = get_current_request() |
|
779 | 779 | session = request.session |
|
780 | 780 | |
|
781 | 781 | session.flash( |
|
782 | 782 | message, queue=category, allow_duplicate=not ignore_duplicate) |
|
783 | 783 | |
|
784 | 784 | |
|
785 | 785 | flash = Flash() |
|
786 | 786 | |
|
787 | 787 | #============================================================================== |
|
788 | 788 | # SCM FILTERS available via h. |
|
789 | 789 | #============================================================================== |
|
790 | 790 | from rhodecode.lib.vcs.utils import author_name, author_email |
|
791 | 791 | from rhodecode.lib.utils2 import age, age_from_seconds |
|
792 | 792 | from rhodecode.model.db import User, ChangesetStatus |
|
793 | 793 | |
|
794 | 794 | |
|
795 | 795 | email = author_email |
|
796 | 796 | |
|
797 | 797 | |
|
798 | 798 | def capitalize(raw_text): |
|
799 | 799 | return raw_text.capitalize() |
|
800 | 800 | |
|
801 | 801 | |
|
802 | 802 | def short_id(long_id): |
|
803 | 803 | return long_id[:12] |
|
804 | 804 | |
|
805 | 805 | |
|
806 | 806 | def hide_credentials(url): |
|
807 | 807 | from rhodecode.lib.utils2 import credentials_filter |
|
808 | 808 | return credentials_filter(url) |
|
809 | 809 | |
|
810 | 810 | |
|
811 | 811 | import pytz |
|
812 | 812 | import tzlocal |
|
813 | 813 | local_timezone = tzlocal.get_localzone() |
|
814 | 814 | |
|
815 | 815 | |
|
816 | 816 | def get_timezone(datetime_iso, time_is_local=False): |
|
817 | 817 | tzinfo = '+00:00' |
|
818 | 818 | |
|
819 | 819 | # detect if we have a timezone info, otherwise, add it |
|
820 | 820 | if time_is_local and isinstance(datetime_iso, datetime) and not datetime_iso.tzinfo: |
|
821 | 821 | force_timezone = os.environ.get('RC_TIMEZONE', '') |
|
822 | 822 | if force_timezone: |
|
823 | 823 | force_timezone = pytz.timezone(force_timezone) |
|
824 | 824 | timezone = force_timezone or local_timezone |
|
825 | 825 | offset = timezone.localize(datetime_iso).strftime('%z') |
|
826 | 826 | tzinfo = '{}:{}'.format(offset[:-2], offset[-2:]) |
|
827 | 827 | return tzinfo |
|
828 | 828 | |
|
829 | 829 | |
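Illustrative behavior of the helper above: a naive datetime flagged as local gets the local (or RC_TIMEZONE-forced) UTC offset, while anything else falls back to '+00:00' (the offsets shown are examples, not fixed values):

    naive = datetime(2020, 6, 1, 12, 0, 0)
    get_timezone(naive, time_is_local=True)   # e.g. '+02:00' for Europe/Berlin
    get_timezone('2020-06-01T12:00:00')       # '+00:00' (not a naive datetime)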
|
830 | 830 | def age_component(datetime_iso, value=None, time_is_local=False, tooltip=True): |
|
831 | 831 | title = value or format_date(datetime_iso) |
|
832 | 832 | tzinfo = get_timezone(datetime_iso, time_is_local=time_is_local) |
|
833 | 833 | |
|
834 | 834 | return literal( |
|
835 | 835 | '<time class="timeago {cls}" title="{tt_title}" datetime="{dt}{tzinfo}">{title}</time>'.format( |
|
836 | 836 | cls='tooltip' if tooltip else '', |
|
837 | 837 | tt_title=('{title}{tzinfo}'.format(title=title, tzinfo=tzinfo)) if tooltip else '', |
|
838 | 838 | title=title, dt=datetime_iso, tzinfo=tzinfo |
|
839 | 839 | )) |
|
840 | 840 | |
|
841 | 841 | |
|
842 | 842 | def _shorten_commit_id(commit_id, commit_len=None): |
|
843 | 843 | if commit_len is None: |
|
844 | 844 | request = get_current_request() |
|
845 | 845 | commit_len = request.call_context.visual.show_sha_length |
|
846 | 846 | return commit_id[:commit_len] |
|
847 | 847 | |
|
848 | 848 | |
|
849 | 849 | def show_id(commit, show_idx=None, commit_len=None): |
|
850 | 850 | """ |
|
851 | 851 | Configurable function that shows ID; |
|
852 | 852 | by default it's r123:fffeeefffeee |
|
853 | 853 | |
|
854 | 854 | :param commit: commit instance |
|
855 | 855 | """ |
|
856 | 856 | if show_idx is None: |
|
857 | 857 | request = get_current_request() |
|
858 | 858 | show_idx = request.call_context.visual.show_revision_number |
|
859 | 859 | |
|
860 | 860 | raw_id = _shorten_commit_id(commit.raw_id, commit_len=commit_len) |
|
861 | 861 | if show_idx: |
|
862 | 862 | return 'r%s:%s' % (commit.idx, raw_id) |
|
863 | 863 | else: |
|
864 | 864 | return '%s' % (raw_id, ) |
|
865 | 865 | |
|
866 | 866 | |
|
867 | 867 | def format_date(date): |
|
868 | 868 | """ |
|
869 | 869 | use a standardized formatting for dates used in RhodeCode |
|
870 | 870 | |
|
871 | 871 | :param date: date/datetime object |
|
872 | 872 | :return: formatted date |
|
873 | 873 | """ |
|
874 | 874 | |
|
875 | 875 | if date: |
|
876 | 876 | _fmt = "%a, %d %b %Y %H:%M:%S" |
|
877 | 877 | return safe_unicode(date.strftime(_fmt)) |
|
878 | 878 | |
|
879 | 879 | return u"" |
|
880 | 880 | |
|
881 | 881 | |
|
882 | 882 | class _RepoChecker(object): |
|
883 | 883 | |
|
884 | 884 | def __init__(self, backend_alias): |
|
885 | 885 | self._backend_alias = backend_alias |
|
886 | 886 | |
|
887 | 887 | def __call__(self, repository): |
|
888 | 888 | if hasattr(repository, 'alias'): |
|
889 | 889 | _type = repository.alias |
|
890 | 890 | elif hasattr(repository, 'repo_type'): |
|
891 | 891 | _type = repository.repo_type |
|
892 | 892 | else: |
|
893 | 893 | _type = repository |
|
894 | 894 | return _type == self._backend_alias |
|
895 | 895 | |
|
896 | 896 | |
|
897 | 897 | is_git = _RepoChecker('git') |
|
898 | 898 | is_hg = _RepoChecker('hg') |
|
899 | 899 | is_svn = _RepoChecker('svn') |
|
900 | 900 | |
|
901 | 901 | |
|
902 | 902 | def get_repo_type_by_name(repo_name): |
|
903 | 903 | repo = Repository.get_by_repo_name(repo_name) |
|
904 | 904 | if repo: |
|
905 | 905 | return repo.repo_type |
|
906 | 906 | |
|
907 | 907 | |
|
908 | 908 | def is_svn_without_proxy(repository): |
|
909 | 909 | if is_svn(repository): |
|
910 | 910 | from rhodecode.model.settings import VcsSettingsModel |
|
911 | 911 | conf = VcsSettingsModel().get_ui_settings_as_config_obj() |
|
912 | 912 | return not str2bool(conf.get('vcs_svn_proxy', 'http_requests_enabled')) |
|
913 | 913 | return False |
|
914 | 914 | |
|
915 | 915 | |
|
916 | 916 | def discover_user(author): |
|
917 | 917 | """ |
|
918 | 918 | Tries to discover RhodeCode User based on the author string. Author string |
|
919 | 919 | is typically `FirstName LastName <email@address.com>` |
|
920 | 920 | """ |
|
921 | 921 | |
|
922 | 922 | # if author is already an instance use it for extraction |
|
923 | 923 | if isinstance(author, User): |
|
924 | 924 | return author |
|
925 | 925 | |
|
926 | 926 | # Valid email in the attribute passed, see if they're in the system |
|
927 | 927 | _email = author_email(author) |
|
928 | 928 | if _email != '': |
|
929 | 929 | user = User.get_by_email(_email, case_insensitive=True, cache=True) |
|
930 | 930 | if user is not None: |
|
931 | 931 | return user |
|
932 | 932 | |
|
933 | 933 | # Maybe it's a username? Try to extract it and fetch by username. |
|
934 | 934 | _author = author_name(author) |
|
935 | 935 | user = User.get_by_username(_author, case_insensitive=True, cache=True) |
|
936 | 936 | if user is not None: |
|
937 | 937 | return user |
|
938 | 938 | |
|
939 | 939 | return None |
|
940 | 940 | |
|
941 | 941 | |
|
942 | 942 | def email_or_none(author): |
|
943 | 943 | # extract email from the commit string |
|
944 | 944 | _email = author_email(author) |
|
945 | 945 | |
|
946 | 946 | # If we have an email, use it, otherwise |
|
947 | 947 | # see if it contains a username we can get an email from |
|
948 | 948 | if _email != '': |
|
949 | 949 | return _email |
|
950 | 950 | else: |
|
951 | 951 | user = User.get_by_username( |
|
952 | 952 | author_name(author), case_insensitive=True, cache=True) |
|
953 | 953 | |
|
954 | 954 | if user is not None: |
|
955 | 955 | return user.email |
|
956 | 956 | |
|
957 | 957 | # No valid email, not a valid user in the system, none! |
|
958 | 958 | return None |
|
959 | 959 | |
|
960 | 960 | |
|
961 | 961 | def link_to_user(author, length=0, **kwargs): |
|
962 | 962 | user = discover_user(author) |
|
963 | 963 | # user can be None, but if we have it already it means we can re-use it |
|
964 | 964 | # in the person() function, so we save 1 intensive-query |
|
965 | 965 | if user: |
|
966 | 966 | author = user |
|
967 | 967 | |
|
968 | 968 | display_person = person(author, 'username_or_name_or_email') |
|
969 | 969 | if length: |
|
970 | 970 | display_person = shorter(display_person, length) |
|
971 | 971 | |
|
972 | 972 | if user and user.username != user.DEFAULT_USER: |
|
973 | 973 | return link_to( |
|
974 | 974 | escape(display_person), |
|
975 | 975 | route_path('user_profile', username=user.username), |
|
976 | 976 | **kwargs) |
|
977 | 977 | else: |
|
978 | 978 | return escape(display_person) |
|
979 | 979 | |
|
980 | 980 | |
|
981 | 981 | def link_to_group(users_group_name, **kwargs): |
|
982 | 982 | return link_to( |
|
983 | 983 | escape(users_group_name), |
|
984 | 984 | route_path('user_group_profile', user_group_name=users_group_name), |
|
985 | 985 | **kwargs) |
|
986 | 986 | |
|
987 | 987 | |
|
988 | 988 | def person(author, show_attr="username_and_name"): |
|
989 | 989 | user = discover_user(author) |
|
990 | 990 | if user: |
|
991 | 991 | return getattr(user, show_attr) |
|
992 | 992 | else: |
|
993 | 993 | _author = author_name(author) |
|
994 | 994 | _email = email(author) |
|
995 | 995 | return _author or _email |
|
996 | 996 | |
|
997 | 997 | |
|
998 | 998 | def author_string(email): |
|
999 | 999 | if email: |
|
1000 | 1000 | user = User.get_by_email(email, case_insensitive=True, cache=True) |
|
1001 | 1001 | if user: |
|
1002 | 1002 | if user.first_name or user.last_name: |
|
1003 | 1003 | return '%s %s <%s>' % ( |
|
1004 | 1004 | user.first_name, user.last_name, email) |
|
1005 | 1005 | else: |
|
1006 | 1006 | return email |
|
1007 | 1007 | else: |
|
1008 | 1008 | return email |
|
1009 | 1009 | else: |
|
1010 | 1010 | return None |
|
1011 | 1011 | |
|
1012 | 1012 | |
|
1013 | 1013 | def person_by_id(id_, show_attr="username_and_name"): |
|
1014 | 1014 | # attr to return from fetched user |
|
1015 | 1015 | person_getter = lambda usr: getattr(usr, show_attr) |
|
1016 | 1016 | |
|
1017 | 1017 | #maybe it's an ID ? |
|
1018 | 1018 | if str(id_).isdigit() or isinstance(id_, int): |
|
1019 | 1019 | id_ = int(id_) |
|
1020 | 1020 | user = User.get(id_) |
|
1021 | 1021 | if user is not None: |
|
1022 | 1022 | return person_getter(user) |
|
1023 | 1023 | return id_ |
|
1024 | 1024 | |
|
1025 | 1025 | |
|
1026 | 1026 | def gravatar_with_user(request, author, show_disabled=False, tooltip=False): |
|
1027 | 1027 | _render = request.get_partial_renderer('rhodecode:templates/base/base.mako') |
|
1028 | 1028 | return _render('gravatar_with_user', author, show_disabled=show_disabled, tooltip=tooltip) |
|
1029 | 1029 | |
|
1030 | 1030 | |
|
1031 | 1031 | tags_paterns = OrderedDict(( |
|
1032 | 1032 | ('lang', (re.compile(r'\[(lang|language)\ \=\>\ *([a-zA-Z\-\/\#\+\.]*)\]'), |
|
1033 | 1033 | '<div class="metatag" tag="lang">\\2</div>')), |
|
1034 | 1034 | |
|
1035 | 1035 | ('see', (re.compile(r'\[see\ \=\>\ *([a-zA-Z0-9\/\=\?\&\ \:\/\.\-]*)\]'), |
|
1036 | 1036 | '<div class="metatag" tag="see">see: \\1 </div>')), |
|
1037 | 1037 | |
|
1038 | 1038 | ('url', (re.compile(r'\[url\ \=\>\ \[([a-zA-Z0-9\ \.\-\_]+)\]\((http://|https://|/)(.*?)\)\]'), |
|
1039 | 1039 | '<div class="metatag" tag="url"> <a href="\\2\\3">\\1</a> </div>')), |
|
1040 | 1040 | |
|
1041 | 1041 | ('license', (re.compile(r'\[license\ \=\>\ *([a-zA-Z0-9\/\=\?\&\ \:\/\.\-]*)\]'), |
|
1042 | 1042 | '<div class="metatag" tag="license"><a href="http://www.opensource.org/licenses/\\1">\\1</a></div>')),
|
1043 | 1043 | |
|
1044 | 1044 | ('ref', (re.compile(r'\[(requires|recommends|conflicts|base)\ \=\>\ *([a-zA-Z0-9\-\/]*)\]'), |
|
1045 | 1045 | '<div class="metatag" tag="ref \\1">\\1: <a href="/\\2">\\2</a></div>')), |
|
1046 | 1046 | |
|
1047 | 1047 | ('state', (re.compile(r'\[(stable|featured|stale|dead|dev|deprecated)\]'), |
|
1048 | 1048 | '<div class="metatag" tag="state \\1">\\1</div>')), |
|
1049 | 1049 | |
|
1050 | 1050 | # label in grey |
|
1051 | 1051 | ('label', (re.compile(r'\[([a-z]+)\]'), |
|
1052 | 1052 | '<div class="metatag" tag="label">\\1</div>')), |
|
1053 | 1053 | |
|
1054 | 1054 | # generic catch all in grey |
|
1055 | 1055 | ('generic', (re.compile(r'\[([a-zA-Z0-9\.\-\_]+)\]'), |
|
1056 | 1056 | '<div class="metatag" tag="generic">\\1</div>')), |
|
1057 | 1057 | )) |
|
1058 | 1058 | |
|
1059 | 1059 | |
|
1060 | 1060 | def extract_metatags(value): |
|
1061 | 1061 | """ |
|
1062 | 1062 | Extract supported meta-tags from given text value |
|
1063 | 1063 | """ |
|
1064 | 1064 | tags = [] |
|
1065 | 1065 | if not value: |
|
1066 | 1066 | return tags, '' |
|
1067 | 1067 | |
|
1068 | 1068 | for key, val in tags_paterns.items(): |
|
1069 | 1069 | pat, replace_html = val |
|
1070 | 1070 | tags.extend([(key, x.group()) for x in pat.finditer(value)]) |
|
1071 | 1071 | value = pat.sub('', value) |
|
1072 | 1072 | |
|
1073 | 1073 | return tags, value |
|
1074 | 1074 | |
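# Editor's usage sketch of extract_metatags() (values illustrative, assuming
# the `tags_paterns` table above):
#
#   tags, remaining = extract_metatags('[stable] repo for [lang => python] work')
#   # tags      -> [('lang', '[lang => python]'), ('state', '[stable]')]
#   # remaining -> the input text with the matched meta-tags stripped out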
|
1075 | 1075 | |
|
1076 | 1076 | def style_metatag(tag_type, value): |
|
1077 | 1077 | """ |
|
1078 | 1078 | Converts tags in the given value into their HTML equivalents
|
1079 | 1079 | """ |
|
1080 | 1080 | if not value: |
|
1081 | 1081 | return '' |
|
1082 | 1082 | |
|
1083 | 1083 | html_value = value |
|
1084 | 1084 | tag_data = tags_paterns.get(tag_type) |
|
1085 | 1085 | if tag_data: |
|
1086 | 1086 | pat, replace_html = tag_data |
|
1087 | 1087 | # convert to plain `unicode` instead of a markup tag to be used in |
|
1088 | 1088 | # regex expressions. safe_unicode doesn't work here |
|
1089 | 1089 | html_value = pat.sub(replace_html, unicode(value)) |
|
1090 | 1090 | |
|
1091 | 1091 | return html_value |
|
1092 | 1092 | |
|
1093 | 1093 | |
|
1094 | 1094 | def bool2icon(value, show_at_false=True): |
|
1095 | 1095 | """ |
|
1096 | 1096 | Returns the boolean value of a given value, represented as an html element

1097 | 1097 | with classes that render as icons
|
1098 | 1098 | |
|
1099 | 1099 | :param value: given value to convert to html node |
|
1100 | 1100 | """ |
|
1101 | 1101 | |
|
1102 | 1102 | if value: # does bool conversion |
|
1103 | 1103 | return HTML.tag('i', class_="icon-true", title='True') |
|
1104 | 1104 | else: # not true as bool |
|
1105 | 1105 | if show_at_false: |
|
1106 | 1106 | return HTML.tag('i', class_="icon-false", title='False') |
|
1107 | 1107 | return HTML.tag('i') |
|
1108 | 1108 | |
|
1109 | 1109 | |
|
1110 | 1110 | def b64(inp): |
|
1111 | 1111 | return base64.b64encode(inp) |
|
1112 | 1112 | |
|
1113 | 1113 | #============================================================================== |
|
1114 | 1114 | # PERMS |
|
1115 | 1115 | #============================================================================== |
|
1116 | 1116 | from rhodecode.lib.auth import ( |
|
1117 | 1117 | HasPermissionAny, HasPermissionAll, |
|
1118 | 1118 | HasRepoPermissionAny, HasRepoPermissionAll, HasRepoGroupPermissionAll, |
|
1119 | 1119 | HasRepoGroupPermissionAny, HasRepoPermissionAnyApi, get_csrf_token, |
|
1120 | 1120 | csrf_token_key, AuthUser) |
|
1121 | 1121 | |
|
1122 | 1122 | |
|
1123 | 1123 | #============================================================================== |
|
1124 | 1124 | # GRAVATAR URL |
|
1125 | 1125 | #============================================================================== |
|
1126 | 1126 | class InitialsGravatar(object): |
|
1127 | 1127 | def __init__(self, email_address, first_name, last_name, size=30, |
|
1128 | 1128 | background=None, text_color='#fff'): |
|
1129 | 1129 | self.size = size |
|
1130 | 1130 | self.first_name = first_name |
|
1131 | 1131 | self.last_name = last_name |
|
1132 | 1132 | self.email_address = email_address |
|
1133 | 1133 | self.background = background or self.str2color(email_address) |
|
1134 | 1134 | self.text_color = text_color |
|
1135 | 1135 | |
|
1136 | 1136 | def get_color_bank(self): |
|
1137 | 1137 | """ |
|
1138 | 1138 | returns a predefined list of colors that gravatars can use. |
|
1139 | 1139 | Those are randomized distinct colors that guarantee readability and |
|
1140 | 1140 | uniqueness. |
|
1141 | 1141 | |
|
1142 | 1142 | generated with: http://phrogz.net/css/distinct-colors.html |
|
1143 | 1143 | """ |
|
1144 | 1144 | return [ |
|
1145 | 1145 | '#bf3030', '#a67f53', '#00ff00', '#5989b3', '#392040', '#d90000', |
|
1146 | 1146 | '#402910', '#204020', '#79baf2', '#a700b3', '#bf6060', '#7f5320', |
|
1147 | 1147 | '#008000', '#003059', '#ee00ff', '#ff0000', '#8c4b00', '#007300', |
|
1148 | 1148 | '#005fb3', '#de73e6', '#ff4040', '#ffaa00', '#3df255', '#203140', |
|
1149 | 1149 | '#47004d', '#591616', '#664400', '#59b365', '#0d2133', '#83008c', |
|
1150 | 1150 | '#592d2d', '#bf9f60', '#73e682', '#1d3f73', '#73006b', '#402020', |
|
1151 | 1151 | '#b2862d', '#397341', '#597db3', '#e600d6', '#a60000', '#736039', |
|
1152 | 1152 | '#00b318', '#79aaf2', '#330d30', '#ff8080', '#403010', '#16591f', |
|
1153 | 1153 | '#002459', '#8c4688', '#e50000', '#ffbf40', '#00732e', '#102340', |
|
1154 | 1154 | '#bf60ac', '#8c4646', '#cc8800', '#00a642', '#1d3473', '#b32d98', |
|
1155 | 1155 | '#660e00', '#ffd580', '#80ffb2', '#7391e6', '#733967', '#d97b6c', |
|
1156 | 1156 | '#8c5e00', '#59b389', '#3967e6', '#590047', '#73281d', '#665200', |
|
1157 | 1157 | '#00e67a', '#2d50b3', '#8c2377', '#734139', '#b2982d', '#16593a', |
|
1158 | 1158 | '#001859', '#ff00aa', '#a65e53', '#ffcc00', '#0d3321', '#2d3959', |
|
1159 | 1159 | '#731d56', '#401610', '#4c3d00', '#468c6c', '#002ca6', '#d936a3', |
|
1160 | 1160 | '#d94c36', '#403920', '#36d9a3', '#0d1733', '#592d4a', '#993626', |
|
1161 | 1161 | '#cca300', '#00734d', '#46598c', '#8c005e', '#7f1100', '#8c7000', |
|
1162 | 1162 | '#00a66f', '#7382e6', '#b32d74', '#d9896c', '#ffe680', '#1d7362', |
|
1163 | 1163 | '#364cd9', '#73003d', '#d93a00', '#998a4d', '#59b3a1', '#5965b3', |
|
1164 | 1164 | '#e5007a', '#73341d', '#665f00', '#00b38f', '#0018b3', '#59163a', |
|
1165 | 1165 | '#b2502d', '#bfb960', '#00ffcc', '#23318c', '#a6537f', '#734939', |
|
1166 | 1166 | '#b2a700', '#104036', '#3d3df2', '#402031', '#e56739', '#736f39', |
|
1167 | 1167 | '#79f2ea', '#000059', '#401029', '#4c1400', '#ffee00', '#005953', |
|
1168 | 1168 | '#101040', '#990052', '#402820', '#403d10', '#00ffee', '#0000d9', |
|
1169 | 1169 | '#ff80c4', '#a66953', '#eeff00', '#00ccbe', '#8080ff', '#e673a1', |
|
1170 | 1170 | '#a62c00', '#474d00', '#1a3331', '#46468c', '#733950', '#662900', |
|
1171 | 1171 | '#858c23', '#238c85', '#0f0073', '#b20047', '#d9986c', '#becc00', |
|
1172 | 1172 | '#396f73', '#281d73', '#ff0066', '#ff6600', '#dee673', '#59adb3', |
|
1173 | 1173 | '#6559b3', '#590024', '#b2622d', '#98b32d', '#36ced9', '#332d59', |
|
1174 | 1174 | '#40001a', '#733f1d', '#526600', '#005359', '#242040', '#bf6079', |
|
1175 | 1175 | '#735039', '#cef23d', '#007780', '#5630bf', '#66001b', '#b24700', |
|
1176 | 1176 | '#acbf60', '#1d6273', '#25008c', '#731d34', '#a67453', '#50592d', |
|
1177 | 1177 | '#00ccff', '#6600ff', '#ff0044', '#4c1f00', '#8a994d', '#79daf2', |
|
1178 | 1178 | '#a173e6', '#d93662', '#402310', '#aaff00', '#2d98b3', '#8c40ff', |
|
1179 | 1179 | '#592d39', '#ff8c40', '#354020', '#103640', '#1a0040', '#331a20', |
|
1180 | 1180 | '#331400', '#334d00', '#1d5673', '#583973', '#7f0022', '#4c3626', |
|
1181 | 1181 | '#88cc00', '#36a3d9', '#3d0073', '#d9364c', '#33241a', '#698c23', |
|
1182 | 1182 | '#5995b3', '#300059', '#e57382', '#7f3300', '#366600', '#00aaff', |
|
1183 | 1183 | '#3a1659', '#733941', '#663600', '#74b32d', '#003c59', '#7f53a6', |
|
1184 | 1184 | '#73000f', '#ff8800', '#baf279', '#79caf2', '#291040', '#a6293a', |
|
1185 | 1185 | '#b2742d', '#587339', '#0077b3', '#632699', '#400009', '#d9a66c', |
|
1186 | 1186 | '#294010', '#2d4a59', '#aa00ff', '#4c131b', '#b25f00', '#5ce600', |
|
1187 | 1187 | '#267399', '#a336d9', '#990014', '#664e33', '#86bf60', '#0088ff', |
|
1188 | 1188 | '#7700b3', '#593a16', '#073300', '#1d4b73', '#ac60bf', '#e59539', |
|
1189 | 1189 | '#4f8c46', '#368dd9', '#5c0073' |
|
1190 | 1190 | ] |
|
1191 | 1191 | |
|
1192 | 1192 | def rgb_to_hex_color(self, rgb_tuple): |
|
1193 | 1193 | """ |
|
1194 | 1194 | Converts a passed rgb_tuple to a hex color string.

1195 | 1195 |

1196 | 1196 | :param rgb_tuple: tuple of 3 ints representing an RGB color
|
1197 | 1197 | """ |
|
1198 | 1198 | return '#' + ("".join(map(chr, rgb_tuple)).encode('hex')) |
|
1199 | 1199 | |
|
1200 | 1200 | def email_to_int_list(self, email_str): |
|
1201 | 1201 | """ |
|
1202 | 1202 | Takes every byte of the email's hex digest and turns it into an integer.

1203 | 1203 | Each value is always in the 0-255 range.
|
1204 | 1204 | """ |
|
1205 | 1205 | digest = md5_safe(email_str.lower()) |
|
1206 | 1206 | return [int(digest[i * 2:i * 2 + 2], 16) for i in range(16)] |
|
1207 | 1207 | |
|
1208 | 1208 | def pick_color_bank_index(self, email_str, color_bank): |
|
1209 | 1209 | return self.email_to_int_list(email_str)[0] % len(color_bank) |
|
1210 | 1210 | |
|
1211 | 1211 | def str2color(self, email_str): |
|
1212 | 1212 | """ |
|
1213 | 1213 | Tries to map an email to a color using a stable algorithm
|
1214 | 1214 | |
|
1215 | 1215 | :param email_str: |
|
1216 | 1216 | """ |
|
1217 | 1217 | color_bank = self.get_color_bank() |
|
1218 | 1218 | # pick position (modulo its length) so we always find it in the

1219 | 1219 | # bank even if it's smaller than 256 values
|
1220 | 1220 | pos = self.pick_color_bank_index(email_str, color_bank) |
|
1221 | 1221 | return color_bank[pos] |
|
1222 | 1222 | |
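# Editor's self-contained sketch of the stable email -> color scheme that
# str2color() implements above (hashlib.md5 stands in for md5_safe here):
import hashlib

def _pick_color_sketch(email_str, color_bank):
    # the first byte of the md5 hex digest, modulo the bank size, gives a
    # stable index into the color bank
    digest = hashlib.md5(email_str.lower().encode('utf8')).hexdigest()
    return color_bank[int(digest[:2], 16) % len(color_bank)]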
|
1223 | 1223 | def normalize_email(self, email_address): |
|
1224 | 1224 | import unicodedata |
|
1225 | 1225 | # default host used to fill in the fake/missing email |
|
1226 | 1226 | default_host = u'localhost' |
|
1227 | 1227 | |
|
1228 | 1228 | if not email_address: |
|
1229 | 1229 | email_address = u'%s@%s' % (User.DEFAULT_USER, default_host) |
|
1230 | 1230 | |
|
1231 | 1231 | email_address = safe_unicode(email_address) |
|
1232 | 1232 | |
|
1233 | 1233 | if u'@' not in email_address: |
|
1234 | 1234 | email_address = u'%s@%s' % (email_address, default_host) |
|
1235 | 1235 | |
|
1236 | 1236 | if email_address.endswith(u'@'): |
|
1237 | 1237 | email_address = u'%s%s' % (email_address, default_host) |
|
1238 | 1238 | |
|
1239 | 1239 | email_address = unicodedata.normalize('NFKD', email_address)\ |
|
1240 | 1240 | .encode('ascii', 'ignore') |
|
1241 | 1241 | return email_address |
|
1242 | 1242 | |
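# Illustrative behaviour of normalize_email(), assuming User.DEFAULT_USER
# is `default`:
#   None                 -> 'default@localhost'
#   'john'               -> 'john@localhost'
#   'john@'              -> 'john@localhost'
#   u'jörg@example.com'  -> 'jorg@example.com'  (non-ascii folded to ascii)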
|
1243 | 1243 | def get_initials(self): |
|
1244 | 1244 | """ |
|
1245 | 1245 | Returns 2 letter initials calculated based on the input. |
|
1246 | 1246 | The algorithm first uses the given email address: it takes the first

1247 | 1247 | letter of the part before @, and then the first letter of the server

1248 | 1248 | name. If the part before @ has the form `somestring.somestring2`, the

1249 | 1249 | server letter is replaced with the first letter of somestring2.

1250 | 1250 |

1251 | 1251 | If the function was initialized with both first and last name, these

1252 | 1252 | override the extraction from email, using the first letters of the

1253 | 1253 | first and last name instead. There is special logic for a compound

1254 | 1254 | full name like Guido Von Rossum: we use the last part of the last

1255 | 1255 | name (Von Rossum), picking `R`.

1256 | 1256 |

1257 | 1257 | The function also normalizes non-ascii characters to their ascii

1258 | 1258 | representation, eg Δ => A
|
1259 | 1259 | """ |
|
1260 | 1260 | import unicodedata |
|
1261 | 1261 | # replace non-ascii to ascii |
|
1262 | 1262 | first_name = unicodedata.normalize( |
|
1263 | 1263 | 'NFKD', safe_unicode(self.first_name)).encode('ascii', 'ignore') |
|
1264 | 1264 | last_name = unicodedata.normalize( |
|
1265 | 1265 | 'NFKD', safe_unicode(self.last_name)).encode('ascii', 'ignore') |
|
1266 | 1266 | |
|
1267 | 1267 | # do NFKD encoding, and also make sure email has proper format |
|
1268 | 1268 | email_address = self.normalize_email(self.email_address) |
|
1269 | 1269 | |
|
1270 | 1270 | # first push the email initials |
|
1271 | 1271 | prefix, server = email_address.split('@', 1) |
|
1272 | 1272 | |
|
1273 | 1273 | # check if prefix is maybe a 'first_name.last_name' syntax |
|
1274 | 1274 | _dot_split = prefix.rsplit('.', 1) |
|
1275 | 1275 | if len(_dot_split) == 2 and _dot_split[1]: |
|
1276 | 1276 | initials = [_dot_split[0][0], _dot_split[1][0]] |
|
1277 | 1277 | else: |
|
1278 | 1278 | initials = [prefix[0], server[0]] |
|
1279 | 1279 | |
|
1280 | 1280 | # then try to replace either first_name or last_name |
|
1281 | 1281 | fn_letter = (first_name or " ")[0].strip() |
|
1282 | 1282 | ln_letter = (last_name.split(' ', 1)[-1] or " ")[0].strip() |
|
1283 | 1283 | |
|
1284 | 1284 | if fn_letter: |
|
1285 | 1285 | initials[0] = fn_letter |
|
1286 | 1286 | |
|
1287 | 1287 | if ln_letter: |
|
1288 | 1288 | initials[1] = ln_letter |
|
1289 | 1289 | |
|
1290 | 1290 | return ''.join(initials).upper() |
|
1291 | 1291 | |
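# Examples per the docstring above (assuming no first/last name was given):
#   'john.doe@example.com' -> 'JD'  (prefix split on the last '.')
#   'john@example.com'     -> 'JE'  (first letter of prefix, then of server)
# with first_name='Guido', last_name='Von Rossum' the result is 'GR'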
|
1292 | 1292 | def get_img_data_by_type(self, font_family, img_type): |
|
1293 | 1293 | default_user = """ |
|
1294 | 1294 | <svg xmlns="http://www.w3.org/2000/svg" |
|
1295 | 1295 | version="1.1" x="0px" y="0px" width="{size}" height="{size}" |
|
1296 | 1296 | viewBox="-15 -10 439.165 429.164" |
|
1297 | 1297 | |
|
1298 | 1298 | xml:space="preserve" |
|
1299 | 1299 | style="background:{background};" > |
|
1300 | 1300 | |
|
1301 | 1301 | <path d="M204.583,216.671c50.664,0,91.74-48.075, |
|
1302 | 1302 | 91.74-107.378c0-82.237-41.074-107.377-91.74-107.377 |
|
1303 | 1303 | c-50.668,0-91.74,25.14-91.74,107.377C112.844, |
|
1304 | 1304 | 168.596,153.916,216.671, |
|
1305 | 1305 | 204.583,216.671z" fill="{text_color}"/> |
|
1306 | 1306 | <path d="M407.164,374.717L360.88, |
|
1307 | 1307 | 270.454c-2.117-4.771-5.836-8.728-10.465-11.138l-71.83-37.392 |
|
1308 | 1308 | c-1.584-0.823-3.502-0.663-4.926,0.415c-20.316, |
|
1309 | 1309 | 15.366-44.203,23.488-69.076,23.488c-24.877, |
|
1310 | 1310 | 0-48.762-8.122-69.078-23.488 |
|
1311 | 1311 | c-1.428-1.078-3.346-1.238-4.93-0.415L58.75, |
|
1312 | 1312 | 259.316c-4.631,2.41-8.346,6.365-10.465,11.138L2.001,374.717 |
|
1313 | 1313 | c-3.191,7.188-2.537,15.412,1.75,22.005c4.285, |
|
1314 | 1314 | 6.592,11.537,10.526,19.4,10.526h362.861c7.863,0,15.117-3.936, |
|
1315 | 1315 | 19.402-10.527 C409.699,390.129, |
|
1316 | 1316 | 410.355,381.902,407.164,374.717z" fill="{text_color}"/> |
|
1317 | 1317 | </svg>""".format( |
|
1318 | 1318 | size=self.size, |
|
1319 | 1319 | background='#979797', # @grey4 |
|
1320 | 1320 | text_color=self.text_color, |
|
1321 | 1321 | font_family=font_family) |
|
1322 | 1322 | |
|
1323 | 1323 | return { |
|
1324 | 1324 | "default_user": default_user |
|
1325 | 1325 | }[img_type] |
|
1326 | 1326 | |
|
1327 | 1327 | def get_img_data(self, svg_type=None): |
|
1328 | 1328 | """ |
|
1329 | 1329 | Generates the SVG markup for the avatar image
|
1330 | 1330 | """ |
|
1331 | 1331 | fonts = [ |
|
1332 | 1332 | '-apple-system', |
|
1333 | 1333 | 'BlinkMacSystemFont', |
|
1334 | 1334 | 'Segoe UI', |
|
1335 | 1335 | 'Roboto', |
|
1336 | 1336 | 'Oxygen-Sans', |
|
1337 | 1337 | 'Ubuntu', |
|
1338 | 1338 | 'Cantarell', |
|
1339 | 1339 | 'Helvetica Neue', |
|
1340 | 1340 | 'sans-serif' |
|
1341 | 1341 | ] |
|
1342 | 1342 | font_family = ','.join(fonts) |
|
1343 | 1343 | if svg_type: |
|
1344 | 1344 | return self.get_img_data_by_type(font_family, svg_type) |
|
1345 | 1345 | |
|
1346 | 1346 | initials = self.get_initials() |
|
1347 | 1347 | img_data = """ |
|
1348 | 1348 | <svg xmlns="http://www.w3.org/2000/svg" pointer-events="none" |
|
1349 | 1349 | width="{size}" height="{size}" |
|
1350 | 1350 | style="width: 100%; height: 100%; background-color: {background}" |
|
1351 | 1351 | viewBox="0 0 {size} {size}"> |
|
1352 | 1352 | <text text-anchor="middle" y="50%" x="50%" dy="0.35em" |
|
1353 | 1353 | pointer-events="auto" fill="{text_color}" |
|
1354 | 1354 | font-family="{font_family}" |
|
1355 | 1355 | style="font-weight: 400; font-size: {f_size}px;">{text} |
|
1356 | 1356 | </text> |
|
1357 | 1357 | </svg>""".format( |
|
1358 | 1358 | size=self.size, |
|
1359 | 1359 | f_size=self.size/2.05, # scale the text inside the box nicely |
|
1360 | 1360 | background=self.background, |
|
1361 | 1361 | text_color=self.text_color, |
|
1362 | 1362 | text=initials.upper(), |
|
1363 | 1363 | font_family=font_family) |
|
1364 | 1364 | |
|
1365 | 1365 | return img_data |
|
1366 | 1366 | |
|
1367 | 1367 | def generate_svg(self, svg_type=None): |
|
1368 | 1368 | img_data = self.get_img_data(svg_type) |
|
1369 | 1369 | return "data:image/svg+xml;base64,%s" % base64.b64encode(img_data) |
|
1370 | 1370 | |
|
1371 | 1371 | |
|
1372 | 1372 | def initials_gravatar(request, email_address, first_name, last_name, size=30, store_on_disk=False): |
|
1373 | 1373 | |
|
1374 | 1374 | svg_type = None |
|
1375 | 1375 | if email_address == User.DEFAULT_USER_EMAIL: |
|
1376 | 1376 | svg_type = 'default_user' |
|
1377 | 1377 | |
|
1378 | 1378 | klass = InitialsGravatar(email_address, first_name, last_name, size) |
|
1379 | 1379 | |
|
1380 | 1380 | if store_on_disk: |
|
1381 | 1381 | from rhodecode.apps.file_store import utils as store_utils |
|
1382 | 1382 | from rhodecode.apps.file_store.exceptions import FileNotAllowedException, \ |
|
1383 | 1383 | FileOverSizeException |
|
1384 | 1384 | from rhodecode.model.db import Session |
|
1385 | 1385 | |
|
1386 | 1386 | image_key = md5_safe(email_address.lower() |
|
1387 | 1387 | + first_name.lower() + last_name.lower()) |
|
1388 | 1388 | |
|
1389 | 1389 | storage = store_utils.get_file_storage(request.registry.settings) |
|
1390 | 1390 | filename = '{}.svg'.format(image_key) |
|
1391 | 1391 | subdir = 'gravatars' |
|
1392 | 1392 | # since the final name gets a counter suffix, we apply counter 0 here
|
1393 | 1393 | uid = storage.apply_counter(0, store_utils.uid_filename(filename, randomized=False)) |
|
1394 | 1394 | store_uid = os.path.join(subdir, uid) |
|
1395 | 1395 | |
|
1396 | 1396 | db_entry = FileStore.get_by_store_uid(store_uid) |
|
1397 | 1397 | if db_entry: |
|
1398 | 1398 | return request.route_path('download_file', fid=store_uid) |
|
1399 | 1399 | |
|
1400 | 1400 | img_data = klass.get_img_data(svg_type=svg_type) |
|
1401 | 1401 | img_file = store_utils.bytes_to_file_obj(img_data) |
|
1402 | 1402 | |
|
1403 | 1403 | try: |
|
1404 | 1404 | store_uid, metadata = storage.save_file( |
|
1405 | 1405 | img_file, filename, directory=subdir, |
|
1406 | 1406 | extensions=['.svg'], randomized_name=False) |
|
1407 | 1407 | except (FileNotAllowedException, FileOverSizeException): |
|
1408 | 1408 | raise |
|
1409 | 1409 | |
|
1410 | 1410 | try: |
|
1411 | 1411 | entry = FileStore.create( |
|
1412 | 1412 | file_uid=store_uid, filename=metadata["filename"], |
|
1413 | 1413 | file_hash=metadata["sha256"], file_size=metadata["size"], |
|
1414 | 1414 | file_display_name=filename, |
|
1415 | 1415 | file_description=u'user gravatar `{}`'.format(safe_unicode(filename)), |
|
1416 | 1416 | hidden=True, check_acl=False, user_id=1 |
|
1417 | 1417 | ) |
|
1418 | 1418 | Session().add(entry) |
|
1419 | 1419 | Session().commit() |
|
1420 | 1420 | log.debug('Stored upload in DB as %s', entry) |
|
1421 | 1421 | except Exception: |
|
1422 | 1422 | raise |
|
1423 | 1423 | |
|
1424 | 1424 | return request.route_path('download_file', fid=store_uid) |
|
1425 | 1425 | |
|
1426 | 1426 | else: |
|
1427 | 1427 | return klass.generate_svg(svg_type=svg_type) |
|
1428 | 1428 | |
|
1429 | 1429 | |
|
1430 | 1430 | def gravatar_external(request, gravatar_url_tmpl, email_address, size=30): |
|
1431 | 1431 | return safe_str(gravatar_url_tmpl)\ |
|
1432 | 1432 | .replace('{email}', email_address) \ |
|
1433 | 1433 | .replace('{md5email}', md5_safe(email_address.lower())) \ |
|
1434 | 1434 | .replace('{netloc}', request.host) \ |
|
1435 | 1435 | .replace('{scheme}', request.scheme) \ |
|
1436 | 1436 | .replace('{size}', safe_str(size)) |
|
1437 | 1437 | |
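# Editor's illustrative expansion, assuming a typical Gravatar-style template:
#   tmpl = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'
#   gravatar_external(request, tmpl, 'john@example.com', size=30)
#   # -> the template with {md5email} replaced by the md5 of the lowered
#   #    email and {size} replaced by '30'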
|
1438 | 1438 | |
|
1439 | 1439 | def gravatar_url(email_address, size=30, request=None): |
|
1440 | 1440 | request = request or get_current_request() |
|
1441 | 1441 | _use_gravatar = request.call_context.visual.use_gravatar |
|
1442 | 1442 | |
|
1443 | 1443 | email_address = email_address or User.DEFAULT_USER_EMAIL |
|
1444 | 1444 | if isinstance(email_address, unicode): |
|
1445 | 1445 | # hashlib crashes on unicode items |
|
1446 | 1446 | email_address = safe_str(email_address) |
|
1447 | 1447 | |
|
1448 | 1448 | # empty email or default user |
|
1449 | 1449 | if not email_address or email_address == User.DEFAULT_USER_EMAIL: |
|
1450 | 1450 | return initials_gravatar(request, User.DEFAULT_USER_EMAIL, '', '', size=size) |
|
1451 | 1451 | |
|
1452 | 1452 | if _use_gravatar: |
|
1453 | 1453 | gravatar_url_tmpl = request.call_context.visual.gravatar_url \ |
|
1454 | 1454 | or User.DEFAULT_GRAVATAR_URL |
|
1455 | 1455 | return gravatar_external(request, gravatar_url_tmpl, email_address, size=size) |
|
1456 | 1456 | |
|
1457 | 1457 | else: |
|
1458 | 1458 | return initials_gravatar(request, email_address, '', '', size=size) |
|
1459 | 1459 | |
|
1460 | 1460 | |
|
1461 | 1461 | def breadcrumb_repo_link(repo): |
|
1462 | 1462 | """ |
|
1463 | 1463 | Makes a breadcrumbs path link to a repository
|
1464 | 1464 | |
|
1465 | 1465 | ex:: |
|
1466 | 1466 | group >> subgroup >> repo |
|
1467 | 1467 | |
|
1468 | 1468 | :param repo: a Repository instance |
|
1469 | 1469 | """ |
|
1470 | 1470 | |
|
1471 | 1471 | path = [ |
|
1472 | 1472 | link_to(group.name, route_path('repo_group_home', repo_group_name=group.group_name), |
|
1473 | 1473 | title='last change: {}'.format(format_date(group.last_commit_change)))
|
1474 | 1474 | for group in repo.groups_with_parents |
|
1475 | 1475 | ] + [ |
|
1476 | 1476 | link_to(repo.just_name, route_path('repo_summary', repo_name=repo.repo_name), |
|
1477 | 1477 | title='last change: {}'.format(format_date(repo.last_commit_change)))
|
1478 | 1478 | ] |
|
1479 | 1479 | |
|
1480 | 1480 | return literal(' » '.join(path)) |
|
1481 | 1481 | |
|
1482 | 1482 | |
|
1483 | 1483 | def breadcrumb_repo_group_link(repo_group): |
|
1484 | 1484 | """ |
|
1485 | 1485 | Makes a breadcrumbs path link to a repository group
|
1486 | 1486 | |
|
1487 | 1487 | ex:: |
|
1488 | 1488 | group >> subgroup |
|
1489 | 1489 | |
|
1490 | 1490 | :param repo_group: a Repository Group instance |
|
1491 | 1491 | """ |
|
1492 | 1492 | |
|
1493 | 1493 | path = [ |
|
1494 | 1494 | link_to(group.name, |
|
1495 | 1495 | route_path('repo_group_home', repo_group_name=group.group_name), |
|
1496 | 1496 | title='last change: {}'.format(format_date(group.last_commit_change)))
|
1497 | 1497 | for group in repo_group.parents |
|
1498 | 1498 | ] + [ |
|
1499 | 1499 | link_to(repo_group.name, |
|
1500 | 1500 | route_path('repo_group_home', repo_group_name=repo_group.group_name), |
|
1501 | 1501 | title='last change: {}'.format(format_date(repo_group.last_commit_change)))
|
1502 | 1502 | ] |
|
1503 | 1503 | |
|
1504 | 1504 | return literal(' » '.join(path)) |
|
1505 | 1505 | |
|
1506 | 1506 | |
|
1507 | 1507 | def format_byte_size_binary(file_size): |
|
1508 | 1508 | """ |
|
1509 | 1509 | Formats file/folder sizes using binary (base-2) units.
|
1510 | 1510 | """ |
|
1511 | 1511 | if file_size is None: |
|
1512 | 1512 | file_size = 0 |
|
1513 | 1513 | |
|
1514 | 1514 | formatted_size = format_byte_size(file_size, binary=True) |
|
1515 | 1515 | return formatted_size |
|
1516 | 1516 | |
|
1517 | 1517 | |
|
1518 | 1518 | def urlify_text(text_, safe=True, **href_attrs): |
|
1519 | 1519 | """ |
|
1520 | 1520 | Extract urls from text and make html links out of them |
|
1521 | 1521 | """ |
|
1522 | 1522 | |
|
1523 | 1523 | url_pat = re.compile(r'''(http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@#.&+]''' |
|
1524 | 1524 | '''|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+)''') |
|
1525 | 1525 | |
|
1526 | 1526 | def url_func(match_obj): |
|
1527 | 1527 | url_full = match_obj.groups()[0] |
|
1528 | 1528 | a_options = dict(href_attrs) |
|
1529 | 1529 | a_options['href'] = url_full |
|
1530 | 1530 | a_text = url_full |
|
1531 | 1531 | return HTML.tag("a", a_text, **a_options) |
|
1532 | 1532 | |
|
1533 | 1533 | _new_text = url_pat.sub(url_func, text_) |
|
1534 | 1534 | |
|
1535 | 1535 | if safe: |
|
1536 | 1536 | return literal(_new_text) |
|
1537 | 1537 | return _new_text |
|
1538 | 1538 | |
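# Usage sketch:
#   urlify_text('docs at https://example.com/docs')
#   # -> literal('docs at <a href="https://example.com/docs">https://example.com/docs</a>')
# pass safe=False to get a plain string instead of a `literal`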
|
1539 | 1539 | |
|
1540 | 1540 | def urlify_commits(text_, repo_name): |
|
1541 | 1541 | """ |
|
1542 | 1542 | Extract commit ids from text and make links from them
|
1543 | 1543 | |
|
1544 | 1544 | :param text_: |
|
1545 | 1545 | :param repo_name: repo name to build the URL with |
|
1546 | 1546 | """ |
|
1547 | 1547 | |
|
1548 | 1548 | url_pat = re.compile(r'(^|\s)([0-9a-fA-F]{12,40})($|\s)') |
|
1549 | 1549 | |
|
1550 | 1550 | def url_func(match_obj): |
|
1551 | 1551 | commit_id = match_obj.groups()[1] |
|
1552 | 1552 | pref = match_obj.groups()[0] |
|
1553 | 1553 | suf = match_obj.groups()[2] |
|
1554 | 1554 | |
|
1555 | 1555 | tmpl = ( |
|
1556 | 1556 | '%(pref)s<a class="tooltip-hovercard %(cls)s" href="%(url)s" data-hovercard-alt="%(hovercard_alt)s" data-hovercard-url="%(hovercard_url)s">' |
|
1557 | 1557 | '%(commit_id)s</a>%(suf)s' |
|
1558 | 1558 | ) |
|
1559 | 1559 | return tmpl % { |
|
1560 | 1560 | 'pref': pref, |
|
1561 | 1561 | 'cls': 'revision-link', |
|
1562 | 1562 | 'url': route_url( |
|
1563 | 1563 | 'repo_commit', repo_name=repo_name, commit_id=commit_id), |
|
1564 | 1564 | 'commit_id': commit_id, |
|
1565 | 1565 | 'suf': suf, |
|
1566 | 1566 | 'hovercard_alt': 'Commit: {}'.format(commit_id), |
|
1567 | 1567 | 'hovercard_url': route_url( |
|
1568 | 1568 | 'hovercard_repo_commit', repo_name=repo_name, commit_id=commit_id) |
|
1569 | 1569 | } |
|
1570 | 1570 | |
|
1571 | 1571 | new_text = url_pat.sub(url_func, text_) |
|
1572 | 1572 | |
|
1573 | 1573 | return new_text |
|
1574 | 1574 | |
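# Editor's sketch: a whitespace-delimited 12-40 char hex id becomes a
# hovercard link to the `repo_commit` route (url shape illustrative):
#   urlify_commits('fixed in deadbeefcafe1', 'myrepo')
#   # -> 'fixed in <a class="tooltip-hovercard revision-link" href="...">deadbeefcafe1</a>'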
|
1575 | 1575 | |
|
1576 | 1576 | def _process_url_func(match_obj, repo_name, uid, entry, |
|
1577 | 1577 | return_raw_data=False, link_format='html'): |
|
1578 | 1578 | pref = '' |
|
1579 | 1579 | if match_obj.group().startswith(' '): |
|
1580 | 1580 | pref = ' ' |
|
1581 | 1581 | |
|
1582 | 1582 | issue_id = ''.join(match_obj.groups()) |
|
1583 | 1583 | |
|
1584 | 1584 | if link_format == 'html': |
|
1585 | 1585 | tmpl = ( |
|
1586 | 1586 | '%(pref)s<a class="tooltip %(cls)s" href="%(url)s" title="%(title)s">' |
|
1587 | 1587 | '%(issue-prefix)s%(id-repr)s' |
|
1588 | 1588 | '</a>') |
|
1589 | 1589 | elif link_format == 'html+hovercard': |
|
1590 | 1590 | tmpl = ( |
|
1591 | 1591 | '%(pref)s<a class="tooltip-hovercard %(cls)s" href="%(url)s" data-hovercard-url="%(hovercard_url)s">' |
|
1592 | 1592 | '%(issue-prefix)s%(id-repr)s' |
|
1593 | 1593 | '</a>') |
|
1594 | 1594 | elif link_format in ['rst', 'rst+hovercard']: |
|
1595 | 1595 | tmpl = '`%(issue-prefix)s%(id-repr)s <%(url)s>`_' |
|
1596 | 1596 | elif link_format in ['markdown', 'markdown+hovercard']: |
|
1597 | 1597 | tmpl = '[%(pref)s%(issue-prefix)s%(id-repr)s](%(url)s)' |
|
1598 | 1598 | else: |
|
1599 | 1599 | raise ValueError('Bad link_format:{}'.format(link_format)) |
|
1600 | 1600 | |
|
1601 | 1601 | (repo_name_cleaned, |
|
1602 | 1602 | parent_group_name) = RepoGroupModel()._get_group_name_and_parent(repo_name) |
|
1603 | 1603 | |
|
1604 | 1604 | # variables replacement |
|
1605 | 1605 | named_vars = { |
|
1606 | 1606 | 'id': issue_id, |
|
1607 | 1607 | 'repo': repo_name, |
|
1608 | 1608 | 'repo_name': repo_name_cleaned, |
|
1609 | 1609 | 'group_name': parent_group_name, |
|
1610 | 1610 | # set dummy keys so we always have them |
|
1611 | 1611 | 'hostname': '', |
|
1612 | 1612 | 'netloc': '', |
|
1613 | 1613 | 'scheme': '' |
|
1614 | 1614 | } |
|
1615 | 1615 | |
|
1616 | 1616 | request = get_current_request() |
|
1617 | 1617 | if request: |
|
1618 | 1618 | # exposes hostname, netloc, scheme
|
1619 | 1619 | host_data = get_host_info(request) |
|
1620 | 1620 | named_vars.update(host_data) |
|
1621 | 1621 | |
|
1622 | 1622 | # named regex variables |
|
1623 | 1623 | named_vars.update(match_obj.groupdict()) |
|
1624 | 1624 | _url = string.Template(entry['url']).safe_substitute(**named_vars) |
|
1625 | 1625 | desc = string.Template(escape(entry['desc'])).safe_substitute(**named_vars) |
|
1626 | 1626 | hovercard_url = string.Template(entry.get('hovercard_url', '')).safe_substitute(**named_vars) |
|
1627 | 1627 | |
|
1628 | 1628 | def quote_cleaner(input_str): |
|
1629 | 1629 | """Remove double quotes, since the value is embedded in HTML attributes"""
|
1630 | 1630 | return input_str.replace('"', '') |
|
1631 | 1631 | |
|
1632 | 1632 | data = { |
|
1633 | 1633 | 'pref': pref, |
|
1634 | 1634 | 'cls': quote_cleaner('issue-tracker-link'), |
|
1635 | 1635 | 'url': quote_cleaner(_url), |
|
1636 | 1636 | 'id-repr': issue_id, |
|
1637 | 1637 | 'issue-prefix': entry['pref'], |
|
1638 | 1638 | 'serv': entry['url'], |
|
1639 | 1639 | 'title': bleach.clean(desc, strip=True), |
|
1640 | 1640 | 'hovercard_url': hovercard_url |
|
1641 | 1641 | } |
|
1642 | 1642 | |
|
1643 | 1643 | if return_raw_data: |
|
1644 | 1644 | return { |
|
1645 | 1645 | 'id': issue_id, |
|
1646 | 1646 | 'url': _url |
|
1647 | 1647 | } |
|
1648 | 1648 | return tmpl % data |
|
1649 | 1649 | |
|
1650 | 1650 | |
|
1651 | 1651 | def get_active_pattern_entries(repo_name): |
|
1652 | 1652 | repo = None |
|
1653 | 1653 | if repo_name: |
|
1654 | 1654 | # Retrieve the repo object so an invalid repo_name doesn't explode in

1655 | 1655 | # IssueTrackerSettingsModel; the (possibly invalid) name is still passed further down
|
1656 | 1656 | repo = Repository.get_by_repo_name(repo_name, cache=True) |
|
1657 | 1657 | |
|
1658 | 1658 | settings_model = IssueTrackerSettingsModel(repo=repo) |
|
1659 | 1659 | active_entries = settings_model.get_settings(cache=True) |
|
1660 | 1660 | return active_entries |
|
1661 | 1661 | |
|
1662 | 1662 | |
|
1663 | 1663 | pr_pattern_re = regex.compile(r'(?:(?:^!)|(?: !))(\d+)') |
|
1664 | 1664 | |
|
1665 | 1665 | allowed_link_formats = [ |
|
1666 | 1666 | 'html', 'rst', 'markdown', 'html+hovercard', 'rst+hovercard', 'markdown+hovercard'] |
|
1667 | 1667 | |
|
1668 | 1668 | compile_cache = {}

1669 | 1669 |

1670 | 1670 |
|
1671 | 1671 | |
|
1672 | 1672 | |
|
1673 | 1673 | def process_patterns(text_string, repo_name, link_format='html', active_entries=None): |
|
1674 | 1674 | |
|
1675 | 1675 | if link_format not in allowed_link_formats: |
|
1676 | 1676 | raise ValueError('Link format can only be one of: {}, got {}'.format(
|
1677 | 1677 | allowed_link_formats, link_format)) |
|
1678 | 1678 | issues_data = [] |
|
1679 | 1679 | errors = [] |
|
1680 | 1680 | new_text = text_string |
|
1681 | 1681 | |
|
1682 | 1682 | if active_entries is None: |
|
1683 | 1683 | log.debug('Fetch active issue tracker patterns for repo: %s', repo_name) |
|
1684 | 1684 | active_entries = get_active_pattern_entries(repo_name) |
|
1685 | 1685 | |
|
1686 | 1686 | log.debug('Got %s pattern entries to process', len(active_entries)) |
|
1687 | 1687 | |
|
1688 | 1688 | for uid, entry in active_entries.items(): |
|
1689 | 1689 | |
|
1690 | 1690 | if not (entry['pat'] and entry['url']): |
|
1691 | 1691 | log.debug('skipping due to missing data') |
|
1692 | 1692 | continue |
|
1693 | 1693 | |
|
1694 | 1694 | log.debug('issue tracker entry: uid: `%s` PAT:%s URL:%s PREFIX:%s', |
|
1695 | 1695 | uid, entry['pat'], entry['url'], entry['pref']) |
|
1696 | 1696 | |
|
1697 | 1697 | if entry.get('pat_compiled'): |
|
1698 | 1698 | pattern = entry['pat_compiled'] |
|
1699 | 1699 | elif entry['pat'] in compile_cache: |
|
1700 | 1700 | pattern = compile_cache[entry['pat']] |
|
1701 | 1701 | else: |
|
1702 | 1702 | try: |
|
1703 | 1703 | pattern = regex.compile(r'%s' % entry['pat']) |
|
1704 | 1704 | except regex.error as e: |
|
1705 | 1705 | regex_err = ValueError('{}:{}'.format(entry['pat'], e)) |
|
1706 | 1706 | log.exception('issue tracker pattern: `%s` failed to compile', regex_err) |
|
1707 | 1707 | errors.append(regex_err) |
|
1708 | 1708 | continue |
|
1709 | 1709 | compile_cache[entry['pat']] = pattern |
|
1710 | 1710 | |
|
1711 | 1711 | data_func = partial( |
|
1712 | 1712 | _process_url_func, repo_name=repo_name, entry=entry, uid=uid, |
|
1713 | 1713 | return_raw_data=True) |
|
1714 | 1714 | |
|
1715 | 1715 | for match_obj in pattern.finditer(text_string): |
|
1716 | 1716 | issues_data.append(data_func(match_obj)) |
|
1717 | 1717 | |
|
1718 | 1718 | url_func = partial( |
|
1719 | 1719 | _process_url_func, repo_name=repo_name, entry=entry, uid=uid, |
|
1720 | 1720 | link_format=link_format) |
|
1721 | 1721 | |
|
1722 | 1722 | new_text = pattern.sub(url_func, new_text) |
|
1723 | 1723 | log.debug('processed prefix:uid `%s`', uid) |
|
1724 | 1724 | |
|
1725 | 1725 | # finally apply the global replace, e.g. !123 -> pr-link; these will not

1726 | 1726 | # trigger if a similar pattern was already matched above
|
1727 | 1727 | server_url = '${scheme}://${netloc}' |
|
1728 | 1728 | pr_entry = { |
|
1729 | 1729 | 'pref': '!', |
|
1730 | 1730 | 'url': server_url + '/_admin/pull-requests/${id}', |
|
1731 | 1731 | 'desc': 'Pull Request !${id}', |
|
1732 | 1732 | 'hovercard_url': server_url + '/_hovercard/pull_request/${id}' |
|
1733 | 1733 | } |
|
1734 | 1734 | pr_url_func = partial( |
|
1735 | 1735 | _process_url_func, repo_name=repo_name, entry=pr_entry, uid=None, |
|
1736 | 1736 | link_format=link_format+'+hovercard') |
|
1737 | 1737 | new_text = pr_pattern_re.sub(pr_url_func, new_text) |
|
1738 | 1738 | log.debug('processed !pr pattern') |
|
1739 | 1739 | |
|
1740 | 1740 | return new_text, issues_data, errors |
|
1741 | 1741 | |
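# Editor's sketch of a hypothetical `active_entries` mapping, showing the
# entry shape the loop above consumes (name, pattern and urls are made up):
#
#   example_entries = {
#       'tracker-1': {
#           'pat': r'(?:^|\s)#(\d+)',
#           'url': 'https://issues.example.com/browse/PROJ-${id}',
#           'pref': '#',
#           'desc': 'Issue ${id}',
#       },
#   }
#   html, issues, errors = process_patterns(
#       'fixes #123', 'myrepo', active_entries=example_entries)
#   # ${id}, ${repo}, ${scheme} etc. are substituted via string.Template
#   # inside _process_url_func above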
|
1742 | 1742 | |
|
1743 | 1743 | def urlify_commit_message(commit_text, repository=None, active_pattern_entries=None, |
|
1744 | 1744 | issues_container_callback=None, error_container=None): |
|
1745 | 1745 | """ |
|
1746 | 1746 | Parses the given text message and makes proper links.

1747 | 1747 | Issues are linked to the configured issue tracker; the rest become commit links.
|
1748 | 1748 | """ |
|
1749 | 1749 | |
|
1750 | 1750 | def escaper(_text): |
|
1751 | 1751 | return _text.replace('<', '&lt;').replace('>', '&gt;')
|
1752 | 1752 | |
|
1753 | 1753 | new_text = escaper(commit_text) |
|
1754 | 1754 | |
|
1755 | 1755 | # extract http/https links and make them real urls |
|
1756 | 1756 | new_text = urlify_text(new_text, safe=False) |
|
1757 | 1757 | |
|
1758 | 1758 | # urlify commits - extract commit ids and make link out of them, if we have |
|
1759 | 1759 | # the scope of repository present. |
|
1760 | 1760 | if repository: |
|
1761 | 1761 | new_text = urlify_commits(new_text, repository) |
|
1762 | 1762 | |
|
1763 | 1763 | # process issue tracker patterns |
|
1764 | 1764 | new_text, issues, errors = process_patterns( |
|
1765 | 1765 | new_text, repository or '', active_entries=active_pattern_entries) |
|
1766 | 1766 | |
|
1767 | 1767 | if issues_container_callback is not None: |
|
1768 | 1768 | for issue in issues: |
|
1769 | 1769 | issues_container_callback(issue) |
|
1770 | 1770 | |
|
1771 | 1771 | if error_container is not None: |
|
1772 | 1772 | error_container.extend(errors) |
|
1773 | 1773 | |
|
1774 | 1774 | return literal(new_text) |
|
1775 | 1775 | |
|
1776 | 1776 | |
|
1777 | 1777 | def render_binary(repo_name, file_obj): |
|
1778 | 1778 | """ |
|
1779 | 1779 | Choose how to render a binary file |
|
1780 | 1780 | """ |
|
1781 | 1781 | |
|
1782 | 1782 | # unicode |
|
1783 | 1783 | filename = file_obj.name |
|
1784 | 1784 | |
|
1785 | 1785 | # images |
|
1786 | 1786 | for ext in ['*.png', '*.jpeg', '*.jpg', '*.ico', '*.gif']: |
|
1787 | 1787 | if fnmatch.fnmatch(filename, pat=ext): |
|
1788 | 1788 | src = route_path( |
|
1789 | 1789 | 'repo_file_raw', repo_name=repo_name, |
|
1790 | 1790 | commit_id=file_obj.commit.raw_id, |
|
1791 | 1791 | f_path=file_obj.path) |
|
1792 | 1792 | |
|
1793 | 1793 | return literal( |
|
1794 | 1794 | '<img class="rendered-binary" alt="rendered-image" src="{}">'.format(src)) |
|
1795 | 1795 | |
|
1796 | 1796 | |
|
1797 | 1797 | def renderer_from_filename(filename, exclude=None): |
|
1798 | 1798 | """ |
|
1799 | 1799 | Choose a renderer based on filename; this works only for text-based files
|
1800 | 1800 | """ |
|
1801 | 1801 | |
|
1802 | 1802 | # ipython |
|
1803 | 1803 | for ext in ['*.ipynb']: |
|
1804 | 1804 | if fnmatch.fnmatch(filename, pat=ext): |
|
1805 | 1805 | return 'jupyter' |
|
1806 | 1806 | |
|
1807 | 1807 | is_markup = MarkupRenderer.renderer_from_filename(filename, exclude=exclude) |
|
1808 | 1808 | if is_markup: |
|
1809 | 1809 | return is_markup |
|
1810 | 1810 | return None |
|
1811 | 1811 | |
|
1812 | 1812 | |
|
1813 | 1813 | def render(source, renderer='rst', mentions=False, relative_urls=None, |
|
1814 | 1814 | repo_name=None, active_pattern_entries=None, issues_container_callback=None): |
|
1815 | 1815 | |
|
1816 | 1816 | def maybe_convert_relative_links(html_source): |
|
1817 | 1817 | if relative_urls: |
|
1818 | 1818 | return relative_links(html_source, relative_urls) |
|
1819 | 1819 | return html_source |
|
1820 | 1820 | |
|
1821 | 1821 | if renderer == 'plain': |
|
1822 | 1822 | return literal( |
|
1823 | 1823 | MarkupRenderer.plain(source, leading_newline=False)) |
|
1824 | 1824 | |
|
1825 | 1825 | elif renderer == 'rst': |
|
1826 | 1826 | if repo_name: |
|
1827 | 1827 | # process patterns on comments if we pass in repo name |
|
1828 | 1828 | source, issues, errors = process_patterns( |
|
1829 | 1829 | source, repo_name, link_format='rst', |
|
1830 | 1830 | active_entries=active_pattern_entries) |
|
1831 | 1831 | if issues_container_callback is not None: |
|
1832 | 1832 | for issue in issues: |
|
1833 | 1833 | issues_container_callback(issue) |
|
1834 | 1834 | |
|
1835 | 1835 | return literal( |
|
1836 | 1836 | '<div class="rst-block">%s</div>' % |
|
1837 | 1837 | maybe_convert_relative_links( |
|
1838 | 1838 | MarkupRenderer.rst(source, mentions=mentions))) |
|
1839 | 1839 | |
|
1840 | 1840 | elif renderer == 'markdown': |
|
1841 | 1841 | if repo_name: |
|
1842 | 1842 | # process patterns on comments if we pass in repo name |
|
1843 | 1843 | source, issues, errors = process_patterns( |
|
1844 | 1844 | source, repo_name, link_format='markdown', |
|
1845 | 1845 | active_entries=active_pattern_entries) |
|
1846 | 1846 | if issues_container_callback is not None: |
|
1847 | 1847 | for issue in issues: |
|
1848 | 1848 | issues_container_callback(issue) |
|
1849 | 1849 | |
|
1850 | 1850 | |
|
1851 | 1851 | return literal( |
|
1852 | 1852 | '<div class="markdown-block">%s</div>' % |
|
1853 | 1853 | maybe_convert_relative_links( |
|
1854 | 1854 | MarkupRenderer.markdown(source, flavored=True, |
|
1855 | 1855 | mentions=mentions))) |
|
1856 | 1856 | |
|
1857 | 1857 | elif renderer == 'jupyter': |
|
1858 | 1858 | return literal( |
|
1859 | 1859 | '<div class="ipynb">%s</div>' % |
|
1860 | 1860 | maybe_convert_relative_links( |
|
1861 | 1861 | MarkupRenderer.jupyter(source))) |
|
1862 | 1862 | |
|
1863 | 1863 | # None means just show the file-source |
|
1864 | 1864 | return None |
|
1865 | 1865 | |
|
1866 | 1866 | |
|
1867 | 1867 | def commit_status(repo, commit_id): |
|
1868 | 1868 | return ChangesetStatusModel().get_status(repo, commit_id) |
|
1869 | 1869 | |
|
1870 | 1870 | |
|
1871 | 1871 | def commit_status_lbl(commit_status): |
|
1872 | 1872 | return dict(ChangesetStatus.STATUSES).get(commit_status) |
|
1873 | 1873 | |
|
1874 | 1874 | |
|
1875 | 1875 | def commit_time(repo_name, commit_id): |
|
1876 | 1876 | repo = Repository.get_by_repo_name(repo_name) |
|
1877 | 1877 | commit = repo.get_commit(commit_id=commit_id) |
|
1878 | 1878 | return commit.date |
|
1879 | 1879 | |
|
1880 | 1880 | |
|
1881 | 1881 | def get_permission_name(key): |
|
1882 | 1882 | return dict(Permission.PERMS).get(key) |
|
1883 | 1883 | |
|
1884 | 1884 | |
|
1885 | 1885 | def journal_filter_help(request): |
|
1886 | 1886 | _ = request.translate |
|
1887 | 1887 | from rhodecode.lib.audit_logger import ACTIONS |
|
1888 | 1888 | actions = '\n'.join(textwrap.wrap(', '.join(sorted(ACTIONS.keys())), 80)) |
|
1889 | 1889 | |
|
1890 | 1890 | return _( |
|
1891 | 1891 | 'Example filter terms:\n' + |
|
1892 | 1892 | ' repository:vcs\n' + |
|
1893 | 1893 | ' username:marcin\n' + |
|
1894 | 1894 | ' username:(NOT marcin)\n' + |
|
1895 | 1895 | ' action:*push*\n' + |
|
1896 | 1896 | ' ip:127.0.0.1\n' + |
|
1897 | 1897 | ' date:20120101\n' + |
|
1898 | 1898 | ' date:[20120101100000 TO 20120102]\n' + |
|
1899 | 1899 | '\n' + |
|
1900 | 1900 | 'Actions: {actions}\n' + |
|
1901 | 1901 | '\n' + |
|
1902 | 1902 | 'Generate wildcards using \'*\' character:\n' + |
|
1903 | 1903 | ' "repository:vcs*" - search everything starting with \'vcs\'\n' + |
|
1904 | 1904 | ' "repository:*vcs*" - search for repository containing \'vcs\'\n' + |
|
1905 | 1905 | '\n' + |
|
1906 | 1906 | 'Optional AND / OR operators in queries\n' + |
|
1907 | 1907 | ' "repository:vcs OR repository:test"\n' + |
|
1908 | 1908 | ' "username:test AND repository:test*"\n' |
|
1909 | 1909 | ).format(actions=actions) |
|
1910 | 1910 | |
|
1911 | 1911 | |
|
1912 | 1912 | def not_mapped_error(repo_name): |
|
1913 | 1913 | from rhodecode.translation import _ |
|
1914 | 1914 | flash(_('%s repository is not mapped to db; perhaps'

1915 | 1915 | ' it was created or renamed from the filesystem.'

1916 | 1916 | ' Please run the application again'

1917 | 1917 | ' in order to rescan repositories') % repo_name, category='error')
|
1918 | 1918 | |
|
1919 | 1919 | |
|
1920 | 1920 | def ip_range(ip_addr): |
|
1921 | 1921 | from rhodecode.model.db import UserIpMap |
|
1922 | 1922 | s, e = UserIpMap._get_ip_range(ip_addr) |
|
1923 | 1923 | return '%s - %s' % (s, e) |
|
1924 | 1924 | |
|
1925 | 1925 | |
|
1926 | 1926 | def form(url, method='post', needs_csrf_token=True, **attrs): |
|
1927 | 1927 | """Wrapper around webhelpers.tags.form to prevent CSRF attacks.""" |
|
1928 | 1928 | if method.lower() != 'get' and needs_csrf_token: |
|
1929 | 1929 | raise Exception( |
|
1930 | 1930 | 'Forms to POST/PUT/DELETE endpoints should have (in general) a ' + |
|
1931 | 1931 | 'CSRF token. If the endpoint does not require such token you can ' + |
|
1932 | 1932 | 'explicitly set the parameter needs_csrf_token to false.') |
|
1933 | 1933 | |
|
1934 | 1934 | return insecure_form(url, method=method, **attrs) |
|
1935 | 1935 | |
|
1936 | 1936 | |
|
1937 | 1937 | def secure_form(form_url, method="POST", multipart=False, **attrs): |
|
1938 | 1938 | """Start a form tag that points the action to a URL. This
|
1939 | 1939 | form tag will also include the hidden field containing |
|
1940 | 1940 | the auth token. |
|
1941 | 1941 | |
|
1942 | 1942 | The url options should be given either as a string, or as a |
|
1943 | 1943 | ``url()`` function. The method for the form defaults to POST. |
|
1944 | 1944 | |
|
1945 | 1945 | Options: |
|
1946 | 1946 | |
|
1947 | 1947 | ``multipart`` |
|
1948 | 1948 | If set to True, the enctype is set to "multipart/form-data". |
|
1949 | 1949 | ``method`` |
|
1950 | 1950 | The method to use when submitting the form, usually either |
|
1951 | 1951 | "GET" or "POST". If "PUT", "DELETE", or another verb is used, a |
|
1952 | 1952 | hidden input with name _method is added to simulate the verb |
|
1953 | 1953 | over POST. |
|
1954 | 1954 | |
|
1955 | 1955 | """ |
|
1956 | 1956 | |
|
1957 | 1957 | if 'request' in attrs: |
|
1958 | 1958 | session = attrs['request'].session |
|
1959 | 1959 | del attrs['request'] |
|
1960 | 1960 | else: |
|
1961 | 1961 | raise ValueError( |
|
1962 | 1962 | 'Calling this form requires request= to be passed as argument') |
|
1963 | 1963 | |
|
1964 | 1964 | _form = insecure_form(form_url, method, multipart, **attrs) |
|
1965 | 1965 | token = literal( |
|
1966 | 1966 | '<input type="hidden" name="{}" value="{}">'.format( |
|
1967 | 1967 | csrf_token_key, get_csrf_token(session))) |
|
1968 | 1968 | |
|
1969 | 1969 | return literal("%s\n%s" % (_form, token)) |
|
1970 | 1970 | |
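# Usage sketch (the route name is illustrative):
#   h.secure_form(h.route_path('user_update', user_id=user.user_id),
#                 request=request)
# renders the opening <form> tag followed by a hidden input named by
# `csrf_token_key` that carries the session's CSRF token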
|
1971 | 1971 | |
|
1972 | 1972 | def dropdownmenu(name, selected, options, enable_filter=False, **attrs): |
|
1973 | 1973 | select_html = select(name, selected, options, **attrs) |
|
1974 | 1974 | |
|
1975 | 1975 | select2 = """ |
|
1976 | 1976 | <script> |
|
1977 | 1977 | $(document).ready(function() { |
|
1978 | 1978 | $('#%s').select2({ |
|
1979 | 1979 | containerCssClass: 'drop-menu %s', |
|
1980 | 1980 | dropdownCssClass: 'drop-menu-dropdown', |
|
1981 | 1981 | dropdownAutoWidth: true%s |
|
1982 | 1982 | }); |
|
1983 | 1983 | }); |
|
1984 | 1984 | </script> |
|
1985 | 1985 | """ |
|
1986 | 1986 | |
|
1987 | 1987 | filter_option = """, |
|
1988 | 1988 | minimumResultsForSearch: -1 |
|
1989 | 1989 | """ |
|
1990 | 1990 | input_id = attrs.get('id') or name |
|
1991 | 1991 | extra_classes = ' '.join(attrs.pop('extra_classes', [])) |
|
1992 | 1992 | filter_enabled = "" if enable_filter else filter_option |
|
1993 | 1993 | select_script = literal(select2 % (input_id, extra_classes, filter_enabled)) |
|
1994 | 1994 | |
|
1995 | 1995 | return literal(select_html+select_script) |
|
1996 | 1996 | |
|
1997 | 1997 | |
|
1998 | 1998 | def get_visual_attr(tmpl_context_var, attr_name): |
|
1999 | 1999 | """ |
|
2000 | 2000 | A safe way to get an attribute from the `visual` variable of the template context
|
2001 | 2001 | |
|
2002 | 2002 | :param tmpl_context_var: instance of tmpl_context, usually present as `c` |
|
2003 | 2003 | :param attr_name: name of the attribute we fetch from the c.visual |
|
2004 | 2004 | """ |
|
2005 | 2005 | visual = getattr(tmpl_context_var, 'visual', None) |
|
2006 | 2006 | if not visual: |
|
2007 | 2007 | return |
|
2008 | 2008 | else: |
|
2009 | 2009 | return getattr(visual, attr_name, None) |
|
2010 | 2010 | |
|
2011 | 2011 | |
|
2012 | 2012 | def get_last_path_part(file_node): |
|
2013 | 2013 | if not file_node.path: |
|
2014 | 2014 | return u'/' |
|
2015 | 2015 | |
|
2016 | 2016 | path = safe_unicode(file_node.path.split('/')[-1]) |
|
2017 | 2017 | return u'../' + path |
|
2018 | 2018 | |
|
2019 | 2019 | |
|
2020 | 2020 | def route_url(*args, **kwargs): |
|
2021 | 2021 | """ |
|
2022 | 2022 | Wrapper around Pyramid's `route_url` (fully qualified URL) function.
|
2023 | 2023 | """ |
|
2024 | 2024 | req = get_current_request() |
|
2025 | 2025 | return req.route_url(*args, **kwargs) |
|
2026 | 2026 | |
|
2027 | 2027 | |
|
2028 | 2028 | def route_path(*args, **kwargs): |
|
2029 | 2029 | """ |
|
2030 | 2030 | Wrapper around Pyramid's `route_path` function.
|
2031 | 2031 | """ |
|
2032 | 2032 | req = get_current_request() |
|
2033 | 2033 | return req.route_path(*args, **kwargs) |
|
2034 | 2034 | |
|
2035 | 2035 | |
|
2036 | 2036 | def route_path_or_none(*args, **kwargs): |
|
2037 | 2037 | try: |
|
2038 | 2038 | return route_path(*args, **kwargs) |
|
2039 | 2039 | except KeyError: |
|
2040 | 2040 | return None |
|
2041 | 2041 | |
|
2042 | 2042 | |
|
2043 | 2043 | def current_route_path(request, **kw): |
|
2044 | 2044 | new_args = request.GET.mixed() |
|
2045 | 2045 | new_args.update(kw) |
|
2046 | 2046 | return request.current_route_path(_query=new_args) |
|
2047 | 2047 | |
|
2048 | 2048 | |
|
2049 | 2049 | def curl_api_example(method, args): |
|
2050 | 2050 | args_json = json.dumps(OrderedDict([ |
|
2051 | 2051 | ('id', 1), |
|
2052 | 2052 | ('auth_token', 'SECRET'), |
|
2053 | 2053 | ('method', method), |
|
2054 | 2054 | ('args', args) |
|
2055 | 2055 | ])) |
|
2056 | 2056 | |
|
2057 | 2057 | return "curl {api_url} -X POST -H 'content-type:text/plain' --data-binary '{args_json}'".format( |
|
2058 | 2058 | api_url=route_url('apiv2'), |
|
2059 | 2059 | args_json=args_json |
|
2060 | 2060 | ) |
|
2061 | 2061 | |
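# e.g. curl_api_example('get_repo', {'repoid': 'myrepo'}) renders roughly
# (method and args illustrative):
#   curl <apiv2-url> -X POST -H 'content-type:text/plain' \
#     --data-binary '{"id": 1, "auth_token": "SECRET", "method": "get_repo", "args": {"repoid": "myrepo"}}'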
|
2062 | 2062 | |
|
2063 | 2063 | def api_call_example(method, args): |
|
2064 | 2064 | """ |
|
2065 | 2065 | Generates an API call example via CURL |
|
2066 | 2066 | """ |
|
2067 | 2067 | curl_call = curl_api_example(method, args) |
|
2068 | 2068 | |
|
2069 | 2069 | return literal( |
|
2070 | 2070 | curl_call + |
|
2071 | 2071 | "<br/><br/>SECRET can be found in <a href=\"{token_url}\">auth-tokens</a> page, " |
|
2072 | 2072 | "and needs to have the `api calls` role."
|
2073 | 2073 | .format(token_url=route_url('my_account_auth_tokens'))) |
|
2074 | 2074 | |
|
2075 | 2075 | |
|
2076 | 2076 | def notification_description(notification, request): |
|
2077 | 2077 | """ |
|
2078 | 2078 | Generate a human-readable notification description based on the notification type
|
2079 | 2079 | """ |
|
2080 | 2080 | from rhodecode.model.notification import NotificationModel |
|
2081 | 2081 | return NotificationModel().make_description( |
|
2082 | 2082 | notification, translate=request.translate) |
|
2083 | 2083 | |
|
2084 | 2084 | |
|
2085 | 2085 | def go_import_header(request, db_repo=None): |
|
2086 | 2086 | """ |
|
2087 | 2087 | Creates a header for the go-import functionality in Go
|
2088 | 2088 | """ |
|
2089 | 2089 | |
|
2090 | 2090 | if not db_repo: |
|
2091 | 2091 | return |
|
2092 | 2092 | if 'go-get' not in request.GET: |
|
2093 | 2093 | return |
|
2094 | 2094 | |
|
2095 | 2095 | clone_url = db_repo.clone_url() |
|
2096 | 2096 | prefix = re.split(r'^https?:\/\/', clone_url)[-1] |
|
2097 | 2097 | # we have a repo and the go-get flag; emit the go-import meta header
|
2098 | 2098 | return literal('<meta name="go-import" content="{} {} {}">'.format( |
|
2099 | 2099 | prefix, db_repo.repo_type, clone_url)) |
|
2100 | 2100 | |
|
2101 | 2101 | |
|
2102 | 2102 | def reviewer_as_json(*args, **kwargs): |
|
2103 | 2103 | from rhodecode.apps.repository.utils import reviewer_as_json as _reviewer_as_json |
|
2104 | 2104 | return _reviewer_as_json(*args, **kwargs) |
|
2105 | 2105 | |
|
2106 | 2106 | |
|
2107 | 2107 | def get_repo_view_type(request): |
|
2108 | 2108 | route_name = request.matched_route.name |
|
2109 | 2109 | route_to_view_type = { |
|
2110 | 2110 | 'repo_changelog': 'commits', |
|
2111 | 2111 | 'repo_commits': 'commits', |
|
2112 | 2112 | 'repo_files': 'files', |
|
2113 | 2113 | 'repo_summary': 'summary', |
|
2114 | 2114 | 'repo_commit': 'commit' |
|
2115 | 2115 | } |
|
2116 | 2116 | |
|
2117 | 2117 | return route_to_view_type.get(route_name) |
|
2118 | 2118 | |
|
2119 | 2119 | |
|
2120 | 2120 | def is_active(menu_entry, selected): |
|
2121 | 2121 | """ |
|
2122 | 2122 | Returns active class for selecting menus in templates |
|
2123 | 2123 | <li class=${h.is_active('settings', current_active)}></li> |
|
2124 | 2124 | """ |
|
2125 | 2125 | if not isinstance(menu_entry, list): |
|
2126 | 2126 | menu_entry = [menu_entry] |
|
2127 | 2127 | |
|
2128 | 2128 | if selected in menu_entry: |
|
2129 | 2129 | return "active" |
|
2130 | 2130 | |
|
2131 | 2131 | |
|
2132 | 2132 | class IssuesRegistry(object): |
|
2133 | 2133 | """ |
|
2134 | 2134 | issues_registry = IssuesRegistry()

2135 | 2135 | some_func(issues_callback=issues_registry(...))
|
2136 | 2136 | """ |
|
2137 | 2137 | |
|
2138 | 2138 | def __init__(self): |
|
2139 | 2139 | self.issues = [] |
|
2140 | 2140 | self.unique_issues = collections.defaultdict(list)
|
2141 | 2141 | |
|
2142 | 2142 | def __call__(self, commit_dict=None): |
|
2143 | 2143 | def callback(issue): |
|
2144 | 2144 | if commit_dict and issue: |
|
2145 | 2145 | issue['commit'] = commit_dict |
|
2146 | 2146 | self.issues.append(issue) |
|
2147 | 2147 | self.unique_issues[issue['id']].append(issue) |
|
2148 | 2148 | return callback |
|
2149 | 2149 | |
|
2150 | 2150 | def get_issues(self): |
|
2151 | 2151 | return self.issues |
|
2152 | 2152 | |
|
2153 | 2153 | @property |
|
2154 | 2154 | def issues_unique_count(self): |
|
2155 | 2155 | return len(set(i['id'] for i in self.issues)) |
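# Usage sketch, expanding the class docstring:
#   issues_registry = IssuesRegistry()
#   urlify_commit_message(commit_text, repo_name,
#                         issues_container_callback=issues_registry(commit_dict))
#   issues_registry.issues_unique_count  # distinct issue ids collected
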
@@ -1,1028 +1,1028 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | Scm model for RhodeCode |
|
23 | 23 | """ |
|
24 | 24 | |
|
25 | 25 | import os.path |
|
26 | 26 | import traceback |
|
27 | 27 | import logging |
|
28 | 28 | import cStringIO |
|
29 | 29 | |
|
30 | 30 | from sqlalchemy import func |
|
31 | 31 | from zope.cachedescriptors.property import Lazy as LazyProperty |
|
32 | 32 | |
|
33 | 33 | import rhodecode |
|
34 | 34 | from rhodecode.lib.vcs import get_backend |
|
35 | 35 | from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError |
|
36 | 36 | from rhodecode.lib.vcs.nodes import FileNode |
|
37 | 37 | from rhodecode.lib.vcs.backends.base import EmptyCommit |
|
38 | 38 | from rhodecode.lib import helpers as h, rc_cache |
|
39 | 39 | from rhodecode.lib.auth import ( |
|
40 | 40 | HasRepoPermissionAny, HasRepoGroupPermissionAny, |
|
41 | 41 | HasUserGroupPermissionAny) |
|
42 | 42 | from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError |
|
43 | 43 | from rhodecode.lib import hooks_utils |
|
44 | 44 | from rhodecode.lib.utils import ( |
|
45 | 45 | get_filesystem_repos, make_db_config) |
|
46 | 46 | from rhodecode.lib.utils2 import (safe_str, safe_unicode) |
|
47 | 47 | from rhodecode.lib.system_info import get_system_info |
|
48 | 48 | from rhodecode.model import BaseModel |
|
49 | 49 | from rhodecode.model.db import ( |
|
50 | 50 | or_, false, |
|
51 | 51 | Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup, |
|
52 | 52 | PullRequest, FileStore) |
|
53 | 53 | from rhodecode.model.settings import VcsSettingsModel |
|
54 | 54 | from rhodecode.model.validation_schema.validators import url_validator, InvalidCloneUrl |
|
55 | 55 | |
|
56 | 56 | log = logging.getLogger(__name__) |
|
57 | 57 | |
|
58 | 58 | |
|
59 | 59 | class UserTemp(object): |
|
60 | 60 | def __init__(self, user_id): |
|
61 | 61 | self.user_id = user_id |
|
62 | 62 | |
|
63 | 63 | def __repr__(self): |
|
64 | 64 | return "<%s('id:%s')>" % (self.__class__.__name__, self.user_id) |
|
65 | 65 | |
|
66 | 66 | |
|
67 | 67 | class RepoTemp(object): |
|
68 | 68 | def __init__(self, repo_id): |
|
69 | 69 | self.repo_id = repo_id |
|
70 | 70 | |
|
71 | 71 | def __repr__(self): |
|
72 | 72 | return "<%s('id:%s')>" % (self.__class__.__name__, self.repo_id) |
|
73 | 73 | |
|
74 | 74 | |
|
75 | 75 | class SimpleCachedRepoList(object): |
|
76 | 76 | """ |
|
77 | 77 | Lighter version of iteration over repos, without the scm initialisation,
|
78 | 78 | and with cache usage |
|
79 | 79 | """ |
|
80 | 80 | def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None): |
|
81 | 81 | self.db_repo_list = db_repo_list |
|
82 | 82 | self.repos_path = repos_path |
|
83 | 83 | self.order_by = order_by |
|
84 | 84 | self.reversed = (order_by or '').startswith('-') |
|
85 | 85 | if not perm_set: |
|
86 | 86 | perm_set = ['repository.read', 'repository.write', |
|
87 | 87 | 'repository.admin'] |
|
88 | 88 | self.perm_set = perm_set |
|
89 | 89 | |
|
90 | 90 | def __len__(self): |
|
91 | 91 | return len(self.db_repo_list) |
|
92 | 92 | |
|
93 | 93 | def __repr__(self): |
|
94 | 94 | return '<%s (%s)>' % (self.__class__.__name__, self.__len__()) |
|
95 | 95 | |
|
96 | 96 | def __iter__(self): |
|
97 | 97 | for dbr in self.db_repo_list: |
|
98 | 98 | # check permission at this level |
|
99 | 99 | has_perm = HasRepoPermissionAny(*self.perm_set)( |
|
100 | 100 | dbr.repo_name, 'SimpleCachedRepoList check') |
|
101 | 101 | if not has_perm: |
|
102 | 102 | continue |
|
103 | 103 | |
|
104 | 104 | tmp_d = { |
|
105 | 105 | 'name': dbr.repo_name, |
|
106 | 106 | 'dbrepo': dbr.get_dict(), |
|
107 | 107 | 'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {} |
|
108 | 108 | } |
|
109 | 109 | yield tmp_d |
|
110 | 110 | |
|
111 | 111 | |
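# Editor's sketch (hypothetical caller, not part of this module): iterating a
# SimpleCachedRepoList yields permission-filtered dicts; assumes an active
# request context with an authenticated user:
#
#   repo_list = SimpleCachedRepoList(db_repos, repos_path='/srv/repos')
#   for entry in repo_list:
#       print(entry['name'], bool(entry['dbrepo_fork']))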
|
112 | 112 | class _PermCheckIterator(object): |
|
113 | 113 | |
|
114 | 114 | def __init__( |
|
115 | 115 | self, obj_list, obj_attr, perm_set, perm_checker, |
|
116 | 116 | extra_kwargs=None): |
|
117 | 117 | """ |
|
118 | 118 | Creates an iterator from the given list of objects, additionally

119 | 119 | checking permissions for each against the perm_set
|
120 | 120 | |
|
121 | 121 | :param obj_list: list of db objects |
|
122 | 122 | :param obj_attr: attribute of object to pass into perm_checker |
|
123 | 123 | :param perm_set: list of permissions to check |
|
124 | 124 | :param perm_checker: callable to check permissions against |
|
125 | 125 | """ |
|
126 | 126 | self.obj_list = obj_list |
|
127 | 127 | self.obj_attr = obj_attr |
|
128 | 128 | self.perm_set = perm_set |
|
129 | 129 | self.perm_checker = perm_checker(*self.perm_set) |
|
130 | 130 | self.extra_kwargs = extra_kwargs or {} |
|
131 | 131 | |
|
132 | 132 | def __len__(self): |
|
133 | 133 | return len(self.obj_list) |
|
134 | 134 | |
|
135 | 135 | def __repr__(self): |
|
136 | 136 | return '<%s (%s)>' % (self.__class__.__name__, self.__len__()) |
|
137 | 137 | |
|
138 | 138 | def __iter__(self): |
|
139 | 139 | for db_obj in self.obj_list: |
|
140 | 140 | # check permission at this level |
|
141 | 141 | # NOTE(marcink): the __dict__.get() is ~4x faster than getattr()
|
142 | 142 | name = db_obj.__dict__.get(self.obj_attr, None) |
|
143 | 143 | if not self.perm_checker(name, self.__class__.__name__, **self.extra_kwargs): |
|
144 | 144 | continue |
|
145 | 145 | |
|
146 | 146 | yield db_obj |
|
147 | 147 | |
|
148 | 148 | |
|
149 | 149 | class RepoList(_PermCheckIterator): |
|
150 | 150 | |
|
151 | 151 | def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None): |
|
152 | 152 | if not perm_set: |
|
153 | 153 | perm_set = ['repository.read', 'repository.write', 'repository.admin'] |
|
154 | 154 | |
|
155 | 155 | super(RepoList, self).__init__( |
|
156 | 156 | obj_list=db_repo_list, |
|
157 | 157 | obj_attr='_repo_name', perm_set=perm_set, |
|
158 | 158 | perm_checker=HasRepoPermissionAny, |
|
159 | 159 | extra_kwargs=extra_kwargs) |
|
160 | 160 | |
|
161 | 161 | |
|
162 | 162 | class RepoGroupList(_PermCheckIterator): |
|
163 | 163 | |
|
164 | 164 | def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None): |
|
165 | 165 | if not perm_set: |
|
166 | 166 | perm_set = ['group.read', 'group.write', 'group.admin'] |
|
167 | 167 | |
|
168 | 168 | super(RepoGroupList, self).__init__( |
|
169 | 169 | obj_list=db_repo_group_list, |
|
170 | 170 | obj_attr='_group_name', perm_set=perm_set, |
|
171 | 171 | perm_checker=HasRepoGroupPermissionAny, |
|
172 | 172 | extra_kwargs=extra_kwargs) |
|
173 | 173 | |
|
174 | 174 | |
|
175 | 175 | class UserGroupList(_PermCheckIterator): |
|
176 | 176 | |
|
177 | 177 | def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None): |
|
178 | 178 | if not perm_set: |
|
179 | 179 | perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin'] |
|
180 | 180 | |
|
181 | 181 | super(UserGroupList, self).__init__( |
|
182 | 182 | obj_list=db_user_group_list, |
|
183 | 183 | obj_attr='users_group_name', perm_set=perm_set, |
|
184 | 184 | perm_checker=HasUserGroupPermissionAny, |
|
185 | 185 | extra_kwargs=extra_kwargs) |
|
186 | 186 | |
|
187 | 187 | |
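# Editor's sketch (hypothetical usage, not part of this module): the three
# wrappers above lazily filter db objects by the calling user's permissions.
# `Repository.query()` is assumed to be available on the db model:
#
#   readable = RepoList(Repository.query().all(),
#                       perm_set=['repository.read'])
#   names = [r.repo_name for r in readable]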
|
188 | 188 | class ScmModel(BaseModel): |
|
189 | 189 | """ |
|
190 | 190 | Generic Scm Model |
|
191 | 191 | """ |
|
192 | 192 | |
|
193 | 193 | @LazyProperty |
|
194 | 194 | def repos_path(self): |
|
195 | 195 | """ |
|
196 | 196 | Gets the repositories root path from database |
|
197 | 197 | """ |
|
198 | 198 | |
|
199 | 199 | settings_model = VcsSettingsModel(sa=self.sa) |
|
200 | 200 | return settings_model.get_repos_location() |
|
201 | 201 | |
|
202 | 202 | def repo_scan(self, repos_path=None): |
|
203 | 203 | """ |
|
204 | 204 | Lists repositories in the given path. This path should not itself be a

205 | 205 | repository. Returns a dictionary of repository objects
|
206 | 206 | |
|
207 | 207 | :param repos_path: path to directory containing repositories |
|
208 | 208 | """ |
|
209 | 209 | |
|
210 | 210 | if repos_path is None: |
|
211 | 211 | repos_path = self.repos_path |
|
212 | 212 | |
|
213 | 213 | log.info('scanning for repositories in %s', repos_path) |
|
214 | 214 | |
|
215 | 215 | config = make_db_config() |
|
216 | 216 | config.set('extensions', 'largefiles', '') |
|
217 | 217 | repos = {} |
|
218 | 218 | |
|
219 | 219 | for name, path in get_filesystem_repos(repos_path, recursive=True): |
|
220 | 220 | # the name needs to be decomposed and put back together using '/'

221 | 221 | # since that is the internal storage separator for rhodecode
|
222 | 222 | name = Repository.normalize_repo_name(name) |
|
223 | 223 | |
|
224 | 224 | try: |
|
225 | 225 | if name in repos: |
|
226 | 226 | raise RepositoryError('Duplicate repository name %s ' |
|
227 | 227 | 'found in %s' % (name, path)) |
|
228 | 228 | elif path[0] in rhodecode.BACKENDS: |
|
229 | 229 | backend = get_backend(path[0]) |
|
230 | 230 | repos[name] = backend(path[1], config=config, |
|
231 | 231 | with_wire={"cache": False}) |
|
232 | 232 | except OSError: |
|
233 | 233 | continue |
|
234 | 234 | except RepositoryError: |
|
235 | 235 | log.exception('Failed to create a repo') |
|
236 | 236 | continue |
|
237 | 237 | |
|
238 | 238 | log.debug('found %s paths with repositories', len(repos)) |
|
239 | 239 | return repos |
|
240 | 240 | |
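    # Editor's sketch (hypothetical caller): repo_scan() maps repo names found
    # on disk to initialized backend instances; the path below is illustrative:
    #
    #   scm = ScmModel()
    #   for name, backend in sorted(scm.repo_scan('/srv/repositories').items()):
    #       print(name, backend.alias)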
|
241 | 241 | def get_repos(self, all_repos=None, sort_key=None): |
|
242 | 242 | """ |
|
243 | 243 | Get all repositories from the db; for each repo create its

244 | 244 | backend instance and fill that backend with information from the database

245 | 245 |

246 | 246 | :param all_repos: optional list of Repository objects; pass a

247 | 247 | specific list to restrict the result, good for filtering
|
248 | 248 | |
|
249 | 249 | :param sort_key: initial sorting of repositories |
|
250 | 250 | """ |
|
251 | 251 | if all_repos is None: |
|
252 | 252 | all_repos = self.sa.query(Repository)\ |
|
253 | 253 | .filter(Repository.group_id == None)\ |
|
254 | 254 | .order_by(func.lower(Repository.repo_name)).all() |
|
255 | 255 | repo_iter = SimpleCachedRepoList( |
|
256 | 256 | all_repos, repos_path=self.repos_path, order_by=sort_key) |
|
257 | 257 | return repo_iter |
|
258 | 258 | |
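    # Editor's sketch (hypothetical caller): without arguments this returns a
    # SimpleCachedRepoList over all top-level repositories; the sort key below
    # is an illustrative assumption:
    #
    #   repo_iter = ScmModel().get_repos(sort_key='name')
    #   print(len(repo_iter))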
|
259 | 259 | def get_repo_groups(self, all_groups=None): |
|
260 | 260 | if all_groups is None: |
|
261 | 261 | all_groups = RepoGroup.query()\ |
|
262 | 262 | .filter(RepoGroup.group_parent_id == None).all() |
|
263 | 263 | return [x for x in RepoGroupList(all_groups)] |
|
264 | 264 | |
|
265 | 265 | def mark_for_invalidation(self, repo_name, delete=False): |
|
266 | 266 | """ |
|
267 | 267 | Mark caches of this repo invalid in the database. `delete` flag |
|
268 | 268 | removes the cache entries |
|
269 | 269 | |
|
270 | 270 | :param repo_name: the repo_name for which caches should be marked |
|
271 | 271 | invalid, or deleted |
|
272 | 272 | :param delete: delete the entry keys instead of setting a bool

273 | 273 | flag on them, and also purge caches used by dogpile
|
274 | 274 | """ |
|
275 | 275 | repo = Repository.get_by_repo_name(repo_name) |
|
276 | 276 | |
|
277 | 277 | if repo: |
|
278 | 278 | invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format( |
|
279 | 279 | repo_id=repo.repo_id) |
|
280 | 280 | CacheKey.set_invalidate(invalidation_namespace, delete=delete) |
|
281 | 281 | |
|
282 | 282 | repo_id = repo.repo_id |
|
283 | 283 | config = repo._config |
|
284 | 284 | config.set('extensions', 'largefiles', '') |
|
285 | 285 | repo.update_commit_cache(config=config, cs_cache=None) |
|
286 | 286 | if delete: |
|
287 | 287 | cache_namespace_uid = 'cache_repo.{}'.format(repo_id) |
|
288 | 288 | rc_cache.clear_cache_namespace( |
|
289 | 289 | 'cache_repo', cache_namespace_uid, invalidate=True) |
|
290 | 290 | |
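    # Editor's sketch (hypothetical caller): invalidate cached data after an
    # out-of-band change to a repository; delete=True also purges the dogpile
    # cache namespace for that repo:
    #
    #   ScmModel().mark_for_invalidation('my-group/my-repo', delete=True)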
|
291 | 291 | def toggle_following_repo(self, follow_repo_id, user_id): |
|
292 | 292 | |
|
293 | 293 | f = self.sa.query(UserFollowing)\ |
|
294 | 294 | .filter(UserFollowing.follows_repo_id == follow_repo_id)\ |
|
295 | 295 | .filter(UserFollowing.user_id == user_id).scalar() |
|
296 | 296 | |
|
297 | 297 | if f is not None: |
|
298 | 298 | try: |
|
299 | 299 | self.sa.delete(f) |
|
300 | 300 | return |
|
301 | 301 | except Exception: |
|
302 | 302 | log.error(traceback.format_exc()) |
|
303 | 303 | raise |
|
304 | 304 | |
|
305 | 305 | try: |
|
306 | 306 | f = UserFollowing() |
|
307 | 307 | f.user_id = user_id |
|
308 | 308 | f.follows_repo_id = follow_repo_id |
|
309 | 309 | self.sa.add(f) |
|
310 | 310 | except Exception: |
|
311 | 311 | log.error(traceback.format_exc()) |
|
312 | 312 | raise |
|
313 | 313 | |
|
314 | 314 | def toggle_following_user(self, follow_user_id, user_id): |
|
315 | 315 | f = self.sa.query(UserFollowing)\ |
|
316 | 316 | .filter(UserFollowing.follows_user_id == follow_user_id)\ |
|
317 | 317 | .filter(UserFollowing.user_id == user_id).scalar() |
|
318 | 318 | |
|
319 | 319 | if f is not None: |
|
320 | 320 | try: |
|
321 | 321 | self.sa.delete(f) |
|
322 | 322 | return |
|
323 | 323 | except Exception: |
|
324 | 324 | log.error(traceback.format_exc()) |
|
325 | 325 | raise |
|
326 | 326 | |
|
327 | 327 | try: |
|
328 | 328 | f = UserFollowing() |
|
329 | 329 | f.user_id = user_id |
|
330 | 330 | f.follows_user_id = follow_user_id |
|
331 | 331 | self.sa.add(f) |
|
332 | 332 | except Exception: |
|
333 | 333 | log.error(traceback.format_exc()) |
|
334 | 334 | raise |
|
335 | 335 | |
|
336 | 336 | def is_following_repo(self, repo_name, user_id, cache=False): |
|
337 | 337 | r = self.sa.query(Repository)\ |
|
338 | 338 | .filter(Repository.repo_name == repo_name).scalar() |
|
339 | 339 | |
|
340 | 340 | f = self.sa.query(UserFollowing)\ |
|
341 | 341 | .filter(UserFollowing.follows_repository == r)\ |
|
342 | 342 | .filter(UserFollowing.user_id == user_id).scalar() |
|
343 | 343 | |
|
344 | 344 | return f is not None |
|
345 | 345 | |
|
346 | 346 | def is_following_user(self, username, user_id, cache=False): |
|
347 | 347 | u = User.get_by_username(username) |
|
348 | 348 | |
|
349 | 349 | f = self.sa.query(UserFollowing)\ |
|
350 | 350 | .filter(UserFollowing.follows_user == u)\ |
|
351 | 351 | .filter(UserFollowing.user_id == user_id).scalar() |
|
352 | 352 | |
|
353 | 353 | return f is not None |
|
354 | 354 | |
|
355 | 355 | def get_followers(self, repo): |
|
356 | 356 | repo = self._get_repo(repo) |
|
357 | 357 | |
|
358 | 358 | return self.sa.query(UserFollowing)\ |
|
359 | 359 | .filter(UserFollowing.follows_repository == repo).count() |
|
360 | 360 | |
|
361 | 361 | def get_forks(self, repo): |
|
362 | 362 | repo = self._get_repo(repo) |
|
363 | 363 | return self.sa.query(Repository)\ |
|
364 | 364 | .filter(Repository.fork == repo).count() |
|
365 | 365 | |
|
366 | 366 | def get_pull_requests(self, repo): |
|
367 | 367 | repo = self._get_repo(repo) |
|
368 | 368 | return self.sa.query(PullRequest)\ |
|
369 | 369 | .filter(PullRequest.target_repo == repo)\ |
|
370 | 370 | .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count() |
|
371 | 371 | |
|
372 | 372 | def get_artifacts(self, repo): |
|
373 | 373 | repo = self._get_repo(repo) |
|
374 | 374 | return self.sa.query(FileStore)\ |
|
375 | 375 | .filter(FileStore.repo == repo)\ |
|
376 | 376 | .filter(or_(FileStore.hidden == None, FileStore.hidden == false())).count() |
|
377 | 377 | |
|
378 | 378 | def mark_as_fork(self, repo, fork, user): |
|
379 | 379 | repo = self._get_repo(repo) |
|
380 | 380 | fork = self._get_repo(fork) |
|
381 | 381 | if fork and repo.repo_id == fork.repo_id: |
|
382 | 382 | raise Exception("Cannot set repository as fork of itself") |
|
383 | 383 | |
|
384 | 384 | if fork and repo.repo_type != fork.repo_type: |
|
385 | 385 | raise RepositoryError( |
|
386 | 386 | "Cannot set repository as fork of repository with other type") |
|
387 | 387 | |
|
388 | 388 | repo.fork = fork |
|
389 | 389 | self.sa.add(repo) |
|
390 | 390 | return repo |
|
391 | 391 | |
|
392 | 392 | def pull_changes(self, repo, username, remote_uri=None, validate_uri=True): |
|
393 | 393 | dbrepo = self._get_repo(repo) |
|
394 | 394 | remote_uri = remote_uri or dbrepo.clone_uri |
|
395 | 395 | if not remote_uri: |
|
396 | 396 | raise Exception("This repository doesn't have a clone uri") |
|
397 | 397 | |
|
398 | 398 | repo = dbrepo.scm_instance(cache=False) |
|
399 | 399 | repo.config.clear_section('hooks') |
|
400 | 400 | |
|
401 | 401 | try: |
|
402 | 402 | # NOTE(marcink): add extra validation so we skip invalid urls |
|
403 | 403 | # this is because these tasks can be executed via the scheduler without
|
404 | 404 | # proper validation of remote_uri |
|
405 | 405 | if validate_uri: |
|
406 | 406 | config = make_db_config(clear_session=False) |
|
407 | 407 | url_validator(remote_uri, dbrepo.repo_type, config) |
|
408 | 408 | except InvalidCloneUrl: |
|
409 | 409 | raise |
|
410 | 410 | |
|
411 | 411 | repo_name = dbrepo.repo_name |
|
412 | 412 | try: |
|
413 | 413 | # TODO: we need to make sure those operations call proper hooks ! |
|
414 | 414 | repo.fetch(remote_uri) |
|
415 | 415 | |
|
416 | 416 | self.mark_for_invalidation(repo_name) |
|
417 | 417 | except Exception: |
|
418 | 418 | log.error(traceback.format_exc()) |
|
419 | 419 | raise |
|
420 | 420 | |
|
421 | 421 | def push_changes(self, repo, username, remote_uri=None, validate_uri=True): |
|
422 | 422 | dbrepo = self._get_repo(repo) |
|
423 | 423 | remote_uri = remote_uri or dbrepo.push_uri |
|
424 | 424 | if not remote_uri: |
|
425 | 425 | raise Exception("This repository doesn't have a push uri")
|
426 | 426 | |
|
427 | 427 | repo = dbrepo.scm_instance(cache=False) |
|
428 | 428 | repo.config.clear_section('hooks') |
|
429 | 429 | |
|
430 | 430 | try: |
|
431 | 431 | # NOTE(marcink): add extra validation so we skip invalid urls |
|
432 | 432 | # this is because these tasks can be executed via the scheduler without
|
433 | 433 | # proper validation of remote_uri |
|
434 | 434 | if validate_uri: |
|
435 | 435 | config = make_db_config(clear_session=False) |
|
436 | 436 | url_validator(remote_uri, dbrepo.repo_type, config) |
|
437 | 437 | except InvalidCloneUrl: |
|
438 | 438 | raise |
|
439 | 439 | |
|
440 | 440 | try: |
|
441 | 441 | repo.push(remote_uri) |
|
442 | 442 | except Exception: |
|
443 | 443 | log.error(traceback.format_exc()) |
|
444 | 444 | raise |
|
445 | 445 | |
|
446 | 446 | def commit_change(self, repo, repo_name, commit, user, author, message, |
|
447 | 447 | content, f_path): |
|
448 | 448 | """ |
|
449 | 449 | Commits changes |
|
450 | 450 | |
|
451 | 451 | :param repo: SCM instance |
|
452 | 452 | |
|
453 | 453 | """ |
|
454 | 454 | user = self._get_user(user) |
|
455 | 455 | |
|
456 | 456 | # decoding here ensures that we have properly encoded values

457 | 457 | # in any other case this will raise exceptions and deny the commit
|
458 | 458 | content = safe_str(content) |
|
459 | 459 | path = safe_str(f_path) |
|
460 | 460 | # message and author need to be unicode

461 | 461 | # the proper backend should then translate that into the required type
|
462 | 462 | message = safe_unicode(message) |
|
463 | 463 | author = safe_unicode(author) |
|
464 | 464 | imc = repo.in_memory_commit |
|
465 | 465 | imc.change(FileNode(path, content, mode=commit.get_file_mode(f_path))) |
|
466 | 466 | try: |
|
467 | 467 | # TODO: handle pre-push action ! |
|
468 | 468 | tip = imc.commit( |
|
469 | 469 | message=message, author=author, parents=[commit], |
|
470 | 470 | branch=commit.branch) |
|
471 | 471 | except Exception as e: |
|
472 | 472 | log.error(traceback.format_exc()) |
|
473 | 473 | raise IMCCommitError(str(e)) |
|
474 | 474 | finally: |
|
475 | 475 | # always clear caches; if the commit fails we still want a fresh object
|
476 | 476 | self.mark_for_invalidation(repo_name) |
|
477 | 477 | |
|
478 | 478 | # We trigger the post-push action |
|
479 | 479 | hooks_utils.trigger_post_push_hook( |
|
480 | 480 | username=user.username, action='push_local', hook_type='post_push', |
|
481 | 481 | repo_name=repo_name, repo_type=repo.alias, commit_ids=[tip.raw_id]) |
|
482 | 482 | return tip |
|
483 | 483 | |
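    # Editor's sketch (hypothetical caller, illustrative values only): change a
    # single file on top of an existing commit; `repo` is a vcs backend
    # instance and `commit` one of its commit objects:
    #
    #   tip = ScmModel().commit_change(
    #       repo=repo, repo_name='my-repo', commit=commit, user=user_id,
    #       author=u'Jane Doe <jane@example.com>', message=u'Update README',
    #       content='new content\n', f_path='README.rst')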
|
484 | 484 | def _sanitize_path(self, f_path): |
|
485 | 485 | if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path: |
|
486 | 486 | raise NonRelativePathError('%s is not a relative path' % f_path)
|
487 | 487 | if f_path: |
|
488 | 488 | f_path = os.path.normpath(f_path) |
|
489 | 489 | return f_path |
|
490 | 490 | |
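    # Editor's note: _sanitize_path() accepts only relative paths and rejects
    # traversal attempts; a quick sketch of the contract:
    #
    #   self._sanitize_path('docs/index.rst')   # -> 'docs/index.rst'
    #   self._sanitize_path('/etc/passwd')      # raises NonRelativePathError
    #   self._sanitize_path('a/../../b')        # raises NonRelativePathError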
|
491 | 491 | def get_dirnode_metadata(self, request, commit, dir_node): |
|
492 | 492 | if not dir_node.is_dir(): |
|
493 | 493 | return [] |
|
494 | 494 | |
|
495 | 495 | data = [] |
|
496 | 496 | for node in dir_node: |
|
497 | 497 | if not node.is_file(): |
|
498 | 498 | # skip everything that is not a file node
|
499 | 499 | continue |
|
500 | 500 | |
|
501 | 501 | last_commit = node.last_commit |
|
502 | 502 | last_commit_date = last_commit.date |
|
503 | 503 | data.append({ |
|
504 | 504 | 'name': node.name, |
|
505 | 505 | 'size': h.format_byte_size_binary(node.size), |
|
506 | 506 | 'modified_at': h.format_date(last_commit_date), |
|
507 | 507 | 'modified_ts': last_commit_date.isoformat(), |
|
508 | 508 | 'revision': last_commit.revision, |
|
509 | 509 | 'short_id': last_commit.short_id, |
|
510 | 510 | 'message': h.escape(last_commit.message), |
|
511 | 511 | 'author': h.escape(last_commit.author), |
|
512 | 512 | 'user_profile': h.gravatar_with_user( |
|
513 | 513 | request, last_commit.author), |
|
514 | 514 | }) |
|
515 | 515 | |
|
516 | 516 | return data |
|
517 | 517 | |
|
518 | 518 | def get_nodes(self, repo_name, commit_id, root_path='/', flat=True, |
|
519 | 519 | extended_info=False, content=False, max_file_bytes=None): |
|
520 | 520 | """ |
|
521 | 521 | recursively walk the root dir and return a set of all paths in that dir,

522 | 522 | based on the repository walk function
|
523 | 523 | |
|
524 | 524 | :param repo_name: name of repository |
|
525 | 525 | :param commit_id: commit id for which to list nodes |
|
526 | 526 | :param root_path: root path to list |
|
527 | 527 | :param flat: return paths as a flat list; if False, return dicts with details
|
528 | 528 | :param extended_info: show additional info such as md5, binary, size etc |
|
529 | 529 | :param content: add nodes content to the return data |
|
530 | 530 | :param max_file_bytes: will not return file contents over this limit |
|
531 | 531 | |
|
532 | 532 | """ |
|
533 | 533 | _files = list() |
|
534 | 534 | _dirs = list() |
|
535 | 535 | try: |
|
536 | 536 | _repo = self._get_repo(repo_name) |
|
537 | 537 | commit = _repo.scm_instance().get_commit(commit_id=commit_id) |
|
538 | 538 | root_path = root_path.lstrip('/') |
|
539 | 539 | for __, dirs, files in commit.walk(root_path): |
|
540 | 540 | |
|
541 | 541 | for f in files: |
|
542 | 542 | _content = None |
|
543 | 543 | _data = f_name = f.unicode_path |
|
544 | 544 | |
|
545 | 545 | if not flat: |
|
546 | 546 | _data = { |
|
547 | 547 | "name": h.escape(f_name), |
|
548 | 548 | "type": "file", |
|
549 | 549 | } |
|
550 | 550 | if extended_info: |
|
551 | 551 | _data.update({ |
|
552 | 552 | "md5": f.md5, |
|
553 | 553 | "binary": f.is_binary, |
|
554 | 554 | "size": f.size, |
|
555 | 555 | "extension": f.extension, |
|
556 | 556 | "mimetype": f.mimetype, |
|
557 | 557 | "lines": f.lines()[0] |
|
558 | 558 | }) |
|
559 | 559 | |
|
560 | 560 | if content: |
|
561 | 561 | over_size_limit = (max_file_bytes is not None |
|
562 | 562 | and f.size > max_file_bytes) |
|
563 | 563 | full_content = None |
|
564 | 564 | if not f.is_binary and not over_size_limit: |
|
565 | 565 | full_content = safe_str(f.content) |
|
566 | 566 | |
|
567 | 567 | _data.update({ |
|
568 | 568 | "content": full_content, |
|
569 | 569 | }) |
|
570 | 570 | _files.append(_data) |
|
571 | 571 | |
|
572 | 572 | for d in dirs: |
|
573 | 573 | _data = d_name = d.unicode_path |
|
574 | 574 | if not flat: |
|
575 | 575 | _data = { |
|
576 | 576 | "name": h.escape(d_name), |
|
577 | 577 | "type": "dir", |
|
578 | 578 | } |
|
579 | 579 | if extended_info: |
|
580 | 580 | _data.update({ |
|
581 | 581 | "md5": None, |
|
582 | 582 | "binary": None, |
|
583 | 583 | "size": None, |
|
584 | 584 | "extension": None, |
|
585 | 585 | }) |
|
586 | 586 | if content: |
|
587 | 587 | _data.update({ |
|
588 | 588 | "content": None |
|
589 | 589 | }) |
|
590 | 590 | _dirs.append(_data) |
|
591 | 591 | except RepositoryError: |
|
592 | 592 | log.exception("Exception in get_nodes") |
|
593 | 593 | raise |
|
594 | 594 | |
|
595 | 595 | return _dirs, _files |
|
596 | 596 | |
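    # Editor's sketch (hypothetical caller, illustrative arguments): list all
    # paths of a commit as dicts with metadata, skipping contents of files
    # over 1 MB:
    #
    #   dirs, files = ScmModel().get_nodes(
    #       'my-repo', commit_id='tip', flat=False,
    #       extended_info=True, content=True, max_file_bytes=1024 * 1024)
    #   print(files[0]['name'], files[0]['size'])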
|
597 | 597 | def get_quick_filter_nodes(self, repo_name, commit_id, root_path='/'): |
|
598 | 598 | """ |
|
599 | 599 | Generate files for quick filter in files view |
|
600 | 600 | """ |
|
601 | 601 | |
|
602 | 602 | _files = list() |
|
603 | 603 | _dirs = list() |
|
604 | 604 | try: |
|
605 | 605 | _repo = self._get_repo(repo_name) |
|
606 | 606 | commit = _repo.scm_instance().get_commit(commit_id=commit_id) |
|
607 | 607 | root_path = root_path.lstrip('/') |
|
608 | 608 | for __, dirs, files in commit.walk(root_path): |
|
609 | 609 | |
|
610 | 610 | for f in files: |
|
611 | 611 | |
|
612 | 612 | _data = { |
|
613 | 613 | "name": h.escape(f.unicode_path), |
|
614 | 614 | "type": "file", |
|
615 | 615 | } |
|
616 | 616 | |
|
617 | 617 | _files.append(_data) |
|
618 | 618 | |
|
619 | 619 | for d in dirs: |
|
620 | 620 | |
|
621 | 621 | _data = { |
|
622 | 622 | "name": h.escape(d.unicode_path), |
|
623 | 623 | "type": "dir", |
|
624 | 624 | } |
|
625 | 625 | |
|
626 | 626 | _dirs.append(_data) |
|
627 | 627 | except RepositoryError: |
|
628 | 628 | log.exception("Exception in get_quick_filter_nodes") |
|
629 | 629 | raise |
|
630 | 630 | |
|
631 | 631 | return _dirs, _files |
|
632 | 632 | |
|
633 | 633 | def get_node(self, repo_name, commit_id, file_path, |
|
634 | 634 | extended_info=False, content=False, max_file_bytes=None, cache=True): |
|
635 | 635 | """ |
|
636 | 636 | retrieve single node from commit |
|
637 | 637 | """ |
|
638 | 638 | try: |
|
639 | 639 | |
|
640 | 640 | _repo = self._get_repo(repo_name) |
|
641 | 641 | commit = _repo.scm_instance().get_commit(commit_id=commit_id) |
|
642 | 642 | |
|
643 | 643 | file_node = commit.get_node(file_path) |
|
644 | 644 | if file_node.is_dir(): |
|
645 | 645 | raise RepositoryError('The given path is a directory') |
|
646 | 646 | |
|
647 | 647 | _content = None |
|
648 | 648 | f_name = file_node.unicode_path |
|
649 | 649 | |
|
650 | 650 | file_data = { |
|
651 | 651 | "name": h.escape(f_name), |
|
652 | 652 | "type": "file", |
|
653 | 653 | } |
|
654 | 654 | |
|
655 | 655 | if extended_info: |
|
656 | 656 | file_data.update({ |
|
657 | 657 | "extension": file_node.extension, |
|
658 | 658 | "mimetype": file_node.mimetype, |
|
659 | 659 | }) |
|
660 | 660 | |
|
661 | 661 | if cache: |
|
662 | 662 | md5 = file_node.md5 |
|
663 | 663 | is_binary = file_node.is_binary |
|
664 | 664 | size = file_node.size |
|
665 | 665 | else: |
|
666 | 666 | is_binary, md5, size, _content = file_node.metadata_uncached() |
|
667 | 667 | |
|
668 | 668 | file_data.update({ |
|
669 | 669 | "md5": md5, |
|
670 | 670 | "binary": is_binary, |
|
671 | 671 | "size": size, |
|
672 | 672 | }) |
|
673 | 673 | |
|
674 | 674 | if content and cache: |
|
675 | 675 | # get content + cache |
|
676 | 676 | size = file_node.size |
|
677 | 677 | over_size_limit = (max_file_bytes is not None and size > max_file_bytes) |
|
678 | 678 | full_content = None |
|
679 | 679 | all_lines = 0 |
|
680 | 680 | if not file_node.is_binary and not over_size_limit: |
|
681 | 681 | full_content = safe_unicode(file_node.content) |
|
682 | 682 | all_lines, empty_lines = file_node.count_lines(full_content) |
|
683 | 683 | |
|
684 | 684 | file_data.update({ |
|
685 | 685 | "content": full_content, |
|
686 | 686 | "lines": all_lines |
|
687 | 687 | }) |
|
688 | 688 | elif content: |
|
689 | 689 | # get content *without* cache |
|
690 | 690 | if _content is None: |
|
691 | 691 | is_binary, md5, size, _content = file_node.metadata_uncached() |
|
692 | 692 | |
|
693 | 693 | over_size_limit = (max_file_bytes is not None and size > max_file_bytes) |
|
694 | 694 | full_content = None |
|
695 | 695 | all_lines = 0 |
|
696 | 696 | if not is_binary and not over_size_limit: |
|
697 | 697 | full_content = safe_unicode(_content) |
|
698 | 698 | all_lines, empty_lines = file_node.count_lines(full_content) |
|
699 | 699 | |
|
700 | 700 | file_data.update({ |
|
701 | 701 | "content": full_content, |
|
702 | 702 | "lines": all_lines |
|
703 | 703 | }) |
|
704 | 704 | |
|
705 | 705 | except RepositoryError: |
|
706 | 706 | log.exception("Exception in get_node") |
|
707 | 707 | raise |
|
708 | 708 | |
|
709 | 709 | return file_data |
|
710 | 710 | |
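    # Editor's sketch (hypothetical caller): fetch one file with content,
    # bypassing the cache so metadata is read fresh from the backend:
    #
    #   file_data = ScmModel().get_node(
    #       'my-repo', commit_id='tip', file_path='README.rst',
    #       extended_info=True, content=True, cache=False)
    #   print(file_data['lines'], file_data['md5'])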
|
711 | 711 | def get_fts_data(self, repo_name, commit_id, root_path='/'): |
|
712 | 712 | """ |
|
713 | 713 | Fetch node tree for usage in full text search |
|
714 | 714 | """ |
|
715 | 715 | |
|
716 | 716 | tree_info = list() |
|
717 | 717 | |
|
718 | 718 | try: |
|
719 | 719 | _repo = self._get_repo(repo_name) |
|
720 | 720 | commit = _repo.scm_instance().get_commit(commit_id=commit_id) |
|
721 | 721 | root_path = root_path.lstrip('/') |
|
722 | 722 | for __, dirs, files in commit.walk(root_path): |
|
723 | 723 | |
|
724 | 724 | for f in files: |
|
725 | 725 | is_binary, md5, size, _content = f.metadata_uncached() |
|
726 | 726 | _data = { |
|
727 | 727 | "name": f.unicode_path, |
|
728 | 728 | "md5": md5, |
|
729 | 729 | "extension": f.extension, |
|
730 | 730 | "binary": is_binary, |
|
731 | 731 | "size": size |
|
732 | 732 | } |
|
733 | 733 | |
|
734 | 734 | tree_info.append(_data) |
|
735 | 735 | |
|
736 | 736 | except RepositoryError: |
|
737 | 737 | log.exception("Exception in get_fts_data")
|
738 | 738 | raise |
|
739 | 739 | |
|
740 | 740 | return tree_info |
|
741 | 741 | |
|
742 | 742 | def create_nodes(self, user, repo, message, nodes, parent_commit=None, |
|
743 | 743 | author=None, trigger_push_hook=True): |
|
744 | 744 | """ |
|
745 | 745 | Commits the given nodes into `repo`
|
746 | 746 | |
|
747 | 747 | :param user: RhodeCode User object or user_id, the committer
|
748 | 748 | :param repo: RhodeCode Repository object |
|
749 | 749 | :param message: commit message |
|
750 | 750 | :param nodes: mapping {filename:{'content':content},...} |
|
751 | 751 | :param parent_commit: parent commit; if empty, this is the

752 | 752 | initial commit

753 | 753 | :param author: author of the commit; can differ from the committer,

754 | 754 | but only for git
|
755 | 755 | :param trigger_push_hook: trigger push hooks |
|
756 | 756 | |
|
757 | 757 | :returns: new committed commit |
|
758 | 758 | """ |
|
759 | 759 | |
|
760 | 760 | user = self._get_user(user) |
|
761 | 761 | scm_instance = repo.scm_instance(cache=False) |
|
762 | 762 | |
|
763 | 763 | processed_nodes = [] |
|
764 | 764 | for f_path in nodes: |
|
765 | 765 | f_path = self._sanitize_path(f_path) |
|
766 | 766 | content = nodes[f_path]['content'] |
|
767 | 767 | f_path = safe_str(f_path) |
|
768 | 768 | # decoding here ensures that we have properly encoded values

769 | 769 | # in any other case this will raise exceptions and deny the commit
|
770 | 770 | if isinstance(content, (str,)):
|
771 | 771 | content = safe_str(content) |
|
772 | 772 | elif isinstance(content, io.IOBase):
|
773 | 773 | content = content.read() |
|
774 | 774 | else: |
|
775 | 775 | raise Exception('Content is of unrecognized type %s' % ( |
|
776 | 776 | type(content) |
|
777 | 777 | )) |
|
778 | 778 | processed_nodes.append((f_path, content)) |
|
779 | 779 | |
|
780 | 780 | message = safe_unicode(message) |
|
781 | 781 | committer = user.full_contact

782 | 782 | author = safe_unicode(author) if author else committer
|
783 | 783 | |
|
784 | 784 | imc = scm_instance.in_memory_commit |
|
785 | 785 | |
|
786 | 786 | if not parent_commit: |
|
787 | 787 | parent_commit = EmptyCommit(alias=scm_instance.alias) |
|
788 | 788 | |
|
789 | 789 | if isinstance(parent_commit, EmptyCommit): |
|
790 | 790 | # EmptyCommit means we're editing an empty repository
|
791 | 791 | parents = None |
|
792 | 792 | else: |
|
793 | 793 | parents = [parent_commit] |
|
794 | 794 | # add multiple nodes |
|
795 | 795 | for path, content in processed_nodes: |
|
796 | 796 | imc.add(FileNode(path, content=content)) |
|
797 | 797 | # TODO: handle pre push scenario |
|
798 | 798 | tip = imc.commit(message=message, |
|
799 | 799 | author=author, |
|
800 | 800 | parents=parents, |
|
801 | 801 | branch=parent_commit.branch) |
|
802 | 802 | |
|
803 | 803 | self.mark_for_invalidation(repo.repo_name) |
|
804 | 804 | if trigger_push_hook: |
|
805 | 805 | hooks_utils.trigger_post_push_hook( |
|
806 | 806 | username=user.username, action='push_local', |
|
807 | 807 | repo_name=repo.repo_name, repo_type=scm_instance.alias, |
|
808 | 808 | hook_type='post_push', |
|
809 | 809 | commit_ids=[tip.raw_id]) |
|
810 | 810 | return tip |
|
811 | 811 | |
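    # Editor's sketch (hypothetical caller, illustrative values): add two new
    # files in one commit; `db_repo` is a Repository db object:
    #
    #   tip = ScmModel().create_nodes(
    #       user=user_id, repo=db_repo, message=u'Add docs',
    #       nodes={'docs/a.rst': {'content': 'aaa\n'},
    #              'docs/b.rst': {'content': 'bbb\n'}})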
|
812 | 812 | def update_nodes(self, user, repo, message, nodes, parent_commit=None, |
|
813 | 813 | author=None, trigger_push_hook=True): |
|
814 | 814 | user = self._get_user(user) |
|
815 | 815 | scm_instance = repo.scm_instance(cache=False) |
|
816 | 816 | |
|
817 | 817 | message = safe_unicode(message) |
|
818 | 818 | committer = user.full_contact

819 | 819 | author = safe_unicode(author) if author else committer
|
820 | 820 | |
|
821 | 821 | imc = scm_instance.in_memory_commit |
|
822 | 822 | |
|
823 | 823 | if not parent_commit: |
|
824 | 824 | parent_commit = EmptyCommit(alias=scm_instance.alias) |
|
825 | 825 | |
|
826 | 826 | if isinstance(parent_commit, EmptyCommit): |
|
827 | 827 | # EmptyCommit means we're editing an empty repository
|
828 | 828 | parents = None |
|
829 | 829 | else: |
|
830 | 830 | parents = [parent_commit] |
|
831 | 831 | |
|
832 | 832 | # add multiple nodes |
|
833 | 833 | for _filename, data in nodes.items(): |
|
834 | 834 | # new filename, can be renamed from the old one; also sanitize

835 | 835 | # the path against any hacks using relative paths like ../../ etc.
|
836 | 836 | filename = self._sanitize_path(data['filename']) |
|
837 | 837 | old_filename = self._sanitize_path(_filename) |
|
838 | 838 | content = data['content'] |
|
839 | 839 | file_mode = data.get('mode') |
|
840 | 840 | filenode = FileNode(old_filename, content=content, mode=file_mode) |
|
841 | 841 | op = data['op'] |
|
842 | 842 | if op == 'add': |
|
843 | 843 | imc.add(filenode) |
|
844 | 844 | elif op == 'del': |
|
845 | 845 | imc.remove(filenode) |
|
846 | 846 | elif op == 'mod': |
|
847 | 847 | if filename != old_filename: |
|
848 | 848 | # TODO: handle renames more efficiently, needs vcs lib changes
|
849 | 849 | imc.remove(filenode) |
|
850 | 850 | imc.add(FileNode(filename, content=content, mode=file_mode)) |
|
851 | 851 | else: |
|
852 | 852 | imc.change(filenode) |
|
853 | 853 | |
|
854 | 854 | try: |
|
855 | 855 | # TODO: handle pre push scenario commit changes |
|
856 | 856 | tip = imc.commit(message=message, |
|
857 | 857 | author=author, |
|
858 | 858 | parents=parents, |
|
859 | 859 | branch=parent_commit.branch) |
|
860 | 860 | except NodeNotChangedError: |
|
861 | 861 | raise |
|
862 | 862 | except Exception as e: |
|
863 | 863 | log.exception("Unexpected exception during call to imc.commit") |
|
864 | 864 | raise IMCCommitError(str(e)) |
|
865 | 865 | finally: |
|
866 | 866 | # always clear caches; if the commit fails we still want a fresh object
|
867 | 867 | self.mark_for_invalidation(repo.repo_name) |
|
868 | 868 | |
|
869 | 869 | if trigger_push_hook: |
|
870 | 870 | hooks_utils.trigger_post_push_hook( |
|
871 | 871 | username=user.username, action='push_local', hook_type='post_push', |
|
872 | 872 | repo_name=repo.repo_name, repo_type=scm_instance.alias, |
|
873 | 873 | commit_ids=[tip.raw_id]) |
|
874 | 874 | |
|
875 | 875 | return tip |
|
876 | 876 | |
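    # Editor's sketch (hypothetical caller): the `nodes` mapping is keyed by
    # the old filename; 'op' selects add/del/mod, and a changed 'filename'
    # under 'mod' performs a rename:
    #
    #   ScmModel().update_nodes(
    #       user=user_id, repo=db_repo, message=u'Rename and edit',
    #       nodes={'old.rst': {'filename': 'new.rst',
    #                          'content': 'updated\n', 'op': 'mod'}},
    #       parent_commit=parent_commit)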
|
877 | 877 | def delete_nodes(self, user, repo, message, nodes, parent_commit=None, |
|
878 | 878 | author=None, trigger_push_hook=True): |
|
879 | 879 | """ |
|
880 | 880 | Deletes the given nodes from `repo`
|
881 | 881 | |
|
882 | 882 | :param user: RhodeCode User object or user_id, the committer |
|
883 | 883 | :param repo: RhodeCode Repository object |
|
884 | 884 | :param message: commit message |
|
885 | 885 | :param nodes: mapping {filename:{'content':content},...} |
|
886 | 886 | :param parent_commit: parent commit; if empty, this is the initial

887 | 887 | commit

888 | 888 | :param author: author of the commit; can differ from the committer, but only

889 | 889 | for git
|
890 | 890 | :param trigger_push_hook: trigger push hooks |
|
891 | 891 | |
|
892 | 892 | :returns: new commit after deletion |
|
893 | 893 | """ |
|
894 | 894 | |
|
895 | 895 | user = self._get_user(user) |
|
896 | 896 | scm_instance = repo.scm_instance(cache=False) |
|
897 | 897 | |
|
898 | 898 | processed_nodes = [] |
|
899 | 899 | for f_path in nodes: |
|
900 | 900 | f_path = self._sanitize_path(f_path) |
|
901 | 901 | # content can be empty, but for compatibility it allows the same dict

902 | 902 | # structure as add_nodes
|
903 | 903 | content = nodes[f_path].get('content') |
|
904 | 904 | processed_nodes.append((f_path, content)) |
|
905 | 905 | |
|
906 | 906 | message = safe_unicode(message) |
|
907 | 907 | committer = user.full_contact

908 | 908 | author = safe_unicode(author) if author else committer
|
909 | 909 | |
|
910 | 910 | imc = scm_instance.in_memory_commit |
|
911 | 911 | |
|
912 | 912 | if not parent_commit: |
|
913 | 913 | parent_commit = EmptyCommit(alias=scm_instance.alias) |
|
914 | 914 | |
|
915 | 915 | if isinstance(parent_commit, EmptyCommit): |
|
916 | 916 | # EmptyCommit means we're editing an empty repository
|
917 | 917 | parents = None |
|
918 | 918 | else: |
|
919 | 919 | parents = [parent_commit] |
|
920 | 920 | # add multiple nodes |
|
921 | 921 | for path, content in processed_nodes: |
|
922 | 922 | imc.remove(FileNode(path, content=content)) |
|
923 | 923 | |
|
924 | 924 | # TODO: handle pre push scenario |
|
925 | 925 | tip = imc.commit(message=message, |
|
926 | 926 | author=author, |
|
927 | 927 | parents=parents, |
|
928 | 928 | branch=parent_commit.branch) |
|
929 | 929 | |
|
930 | 930 | self.mark_for_invalidation(repo.repo_name) |
|
931 | 931 | if trigger_push_hook: |
|
932 | 932 | hooks_utils.trigger_post_push_hook( |
|
933 | 933 | username=user.username, action='push_local', hook_type='post_push', |
|
934 | 934 | repo_name=repo.repo_name, repo_type=scm_instance.alias, |
|
935 | 935 | commit_ids=[tip.raw_id]) |
|
936 | 936 | return tip |
|
937 | 937 | |
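    # Editor's sketch (hypothetical caller): delete files in a single commit;
    # the inner dicts may be empty, mirroring the create_nodes structure:
    #
    #   ScmModel().delete_nodes(
    #       user=user_id, repo=db_repo, message=u'Remove obsolete docs',
    #       nodes={'docs/old.rst': {}})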
|
938 | 938 | def strip(self, repo, commit_id, branch): |
|
939 | 939 | scm_instance = repo.scm_instance(cache=False) |
|
940 | 940 | scm_instance.config.clear_section('hooks') |
|
941 | 941 | scm_instance.strip(commit_id, branch) |
|
942 | 942 | self.mark_for_invalidation(repo.repo_name) |
|
943 | 943 | |
|
944 | 944 | def get_unread_journal(self): |
|
945 | 945 | return self.sa.query(UserLog).count() |
|
946 | 946 | |
|
947 | 947 | @classmethod |
|
948 | 948 | def backend_landing_ref(cls, repo_type): |
|
949 | 949 | """ |
|
950 | 950 | Return a default landing ref based on a repository type. |
|
951 | 951 | """ |
|
952 | 952 | |
|
953 | 953 | landing_ref = { |
|
954 | 954 | 'hg': ('branch:default', 'default'), |
|
955 | 955 | 'git': ('branch:master', 'master'), |
|
956 | 956 | 'svn': ('rev:tip', 'latest tip'), |
|
957 | 957 | 'default': ('rev:tip', 'latest tip'), |
|
958 | 958 | } |
|
959 | 959 | |
|
960 | 960 | return landing_ref.get(repo_type) or landing_ref['default'] |
|
961 | 961 | |
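    # Editor's note: a quick sketch of the mapping above; unknown repo types
    # fall back to the 'default' entry:
    #
    #   ScmModel.backend_landing_ref('git')  # -> ('branch:master', 'master')
    #   ScmModel.backend_landing_ref('foo')  # -> ('rev:tip', 'latest tip')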
|
962 | 962 | def get_repo_landing_revs(self, translator, repo=None): |
|
963 | 963 | """ |
|
964 | 964 | Generates select options with tags, branches and bookmarks (hg only),

965 | 965 | grouped by type
|
966 | 966 | |
|
967 | 967 | :param repo: repository name or Repository db object
|
968 | 968 | """ |
|
969 | 969 | from rhodecode.lib.vcs.backends.git import GitRepository |
|
970 | 970 | |
|
971 | 971 | _ = translator |
|
972 | 972 | repo = self._get_repo(repo) |
|
973 | 973 | |
|
974 | 974 | if repo: |
|
975 | 975 | repo_type = repo.repo_type |
|
976 | 976 | else: |
|
977 | 977 | repo_type = 'default' |
|
978 | 978 | |
|
979 | 979 | default_landing_ref, landing_ref_lbl = self.backend_landing_ref(repo_type) |
|
980 | 980 | |
|
981 | 981 | default_ref_options = [ |
|
982 | 982 | [default_landing_ref, landing_ref_lbl] |
|
983 | 983 | ] |
|
984 | 984 | default_choices = [ |
|
985 | 985 | default_landing_ref |
|
986 | 986 | ] |
|
987 | 987 | |
|
988 | 988 | if not repo: |
|
989 | 989 | # presented at NEW repo creation |
|
990 | 990 | return default_choices, default_ref_options |
|
991 | 991 | |
|
992 | 992 | repo = repo.scm_instance() |
|
993 | 993 | |
|
994 | 994 | ref_options = [(default_landing_ref, landing_ref_lbl)] |
|
995 | 995 | choices = [default_landing_ref] |
|
996 | 996 | |
|
997 | 997 | # branches |
|
998 | 998 | branch_group = [(u'branch:%s' % safe_unicode(b), safe_unicode(b)) for b in repo.branches] |
|
999 | 999 | if not branch_group: |
|
1000 | 1000 | # new repo, or a repo without any branches yet
|
1001 | 1001 | branch_group = default_ref_options |
|
1002 | 1002 | |
|
1003 | 1003 | branches_group = (branch_group, _("Branches")) |
|
1004 | 1004 | ref_options.append(branches_group) |
|
1005 | 1005 | choices.extend([x[0] for x in branches_group[0]]) |
|
1006 | 1006 | |
|
1007 | 1007 | # bookmarks for HG |
|
1008 | 1008 | if repo.alias == 'hg': |
|
1009 | 1009 | bookmarks_group = ( |
|
1010 | 1010 | [(u'book:%s' % safe_unicode(b), safe_unicode(b)) |
|
1011 | 1011 | for b in repo.bookmarks], |
|
1012 | 1012 | _("Bookmarks")) |
|
1013 | 1013 | ref_options.append(bookmarks_group) |
|
1014 | 1014 | choices.extend([x[0] for x in bookmarks_group[0]]) |
|
1015 | 1015 | |
|
1016 | 1016 | # tags |
|
1017 | 1017 | tags_group = ( |
|
1018 | 1018 | [(u'tag:%s' % safe_unicode(t), safe_unicode(t)) |
|
1019 | 1019 | for t in repo.tags], |
|
1020 | 1020 | _("Tags")) |
|
1021 | 1021 | ref_options.append(tags_group) |
|
1022 | 1022 | choices.extend([x[0] for x in tags_group[0]]) |
|
1023 | 1023 | |
|
1024 | 1024 | return choices, ref_options |
|
1025 | 1025 | |
|
1026 | 1026 | def get_server_info(self, environ=None): |
|
1027 | 1027 | server_info = get_system_info(environ) |
|
1028 | 1028 | return server_info |