@@ -1,226 +1,226 @@
# -*- coding: utf-8 -*-

# Copyright (C) 2015-2016 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/


import beaker
import logging

from beaker.cache import _cache_decorate, cache_regions, region_invalidate

from rhodecode.lib.utils import safe_str, md5
from rhodecode.model.db import Session, CacheKey, IntegrityError

log = logging.getLogger(__name__)

FILE_TREE = 'cache_file_tree'
FILE_TREE_META = 'cache_file_tree_metadata'
FILE_SEARCH_TREE_META = 'cache_file_search_metadata'
SUMMARY_STATS = 'cache_summary_stats'

# This list of caches gets purged when invalidation happens
USED_REPO_CACHES = (FILE_TREE, FILE_TREE_META, FILE_TREE_META)

DEFAULT_CACHE_MANAGER_CONFIG = {
    'type': 'memorylru_base',
    'max_items': 10240,
    'key_length': 256,
    'enabled': True
}


def configure_cache_region(
        region_name, region_kw, default_cache_kw, default_expire=60):
    default_type = default_cache_kw.get('type', 'memory')
    default_lock_dir = default_cache_kw.get('lock_dir')
    default_data_dir = default_cache_kw.get('data_dir')

    region_kw['lock_dir'] = region_kw.get('lock_dir', default_lock_dir)
    region_kw['data_dir'] = region_kw.get('data_dir', default_data_dir)
    region_kw['type'] = region_kw.get('type', default_type)
    region_kw['expire'] = int(region_kw.get('expire', default_expire))

    beaker.cache.cache_regions[region_name] = region_kw


def get_cache_manager(region_name, cache_name, custom_ttl=None):
    """
    Creates a Beaker cache manager. Such an instance can be used like this::

        _namespace = caches.get_repo_namespace_key(caches.XXX, repo_name)
        cache_manager = caches.get_cache_manager('repo_cache_long', _namespace)
        _cache_key = caches.compute_key_from_params(repo_name, commit.raw_id)
        def heavy_compute():
            ...
        result = cache_manager.get(_cache_key, createfunc=heavy_compute)

    :param region_name: region from ini file
    :param cache_name: custom cache name, usually prefix+repo_name, e.g.
        file_switcher_repo1
    :param custom_ttl: override .ini file timeout on this cache
    :return: instance of cache manager
    """

    cache_config = cache_regions.get(region_name, DEFAULT_CACHE_MANAGER_CONFIG)
    if custom_ttl:
        log.debug('Updating region %s with custom ttl: %s',
                  region_name, custom_ttl)
        cache_config.update({'expire': custom_ttl})

    return beaker.cache.Cache._get_cache(cache_name, cache_config)


def clear_cache_manager(cache_manager):
    """
    namespace = 'foobar'
    cache_manager = get_cache_manager('repo_cache_long', namespace)
    clear_cache_manager(cache_manager)
    """

    log.debug('Clearing all values for cache manager %s', cache_manager)
    cache_manager.clear()


def clear_repo_caches(repo_name):
    # invalidate cache manager for this repo
    for prefix in USED_REPO_CACHES:
        namespace = get_repo_namespace_key(prefix, repo_name)
        cache_manager = get_cache_manager('repo_cache_long', namespace)
        clear_cache_manager(cache_manager)


def compute_key_from_params(*args):
    """
    Helper to compute key from given params to be used in cache manager
    """
    return md5("_".join(map(safe_str, args)))


def get_repo_namespace_key(prefix, repo_name):
    return '{0}_{1}'.format(prefix, compute_key_from_params(repo_name))


def conditional_cache(region, prefix, condition, func):
    """
    Conditional caching function, use like::
        def _c(arg):
            # heavy computation function
            return data

        # depending on the condition the compute is wrapped in cache or not
        compute = conditional_cache('short_term', 'cache_desc',
                                    condition=True, func=func)
        return compute(arg)

    :param region: name of cache region
    :param prefix: cache region prefix
    :param condition: condition for cache to be triggered, and
        return data cached
    :param func: wrapped heavy function to compute

    """
    wrapped = func
    if condition:
        log.debug('conditional_cache: True, wrapping call of '
                  'func: %s into %s region cache', func, region)
        cached_region = _cache_decorate((prefix,), None, None, region)
        wrapped = cached_region(func)
    return wrapped


class ActiveRegionCache(object):
    def __init__(self, context):
        self.context = context

    def invalidate(self, *args, **kwargs):
        return False

    def compute(self):
        log.debug('Context cache: getting obj %s from cache', self.context)
        return self.context.compute_func(self.context.cache_key)


class FreshRegionCache(ActiveRegionCache):
    def invalidate(self):
        log.debug('Context cache: invalidating cache for %s', self.context)
        region_invalidate(
            self.context.compute_func, None, self.context.cache_key)
        return True


class InvalidationContext(object):
    def __repr__(self):
        return '<InvalidationContext:{}[{}]>'.format(
-            self.repo_name, self.cache_type)
+            safe_str(self.repo_name), safe_str(self.cache_type))

    def __init__(self, compute_func, repo_name, cache_type,
                 raise_exception=False):
        self.compute_func = compute_func
        self.repo_name = repo_name
        self.cache_type = cache_type
        self.cache_key = compute_key_from_params(
            repo_name, cache_type)
        self.raise_exception = raise_exception

    def get_cache_obj(self):
        cache_key = CacheKey.get_cache_key(
            self.repo_name, self.cache_type)
        cache_obj = CacheKey.get_active_cache(cache_key)
        if not cache_obj:
            cache_obj = CacheKey(cache_key, self.repo_name)
        return cache_obj

    def __enter__(self):
        """
        Test if current object is valid, and return CacheRegion function
        that does invalidation and calculation
        """

        self.cache_obj = self.get_cache_obj()
        if self.cache_obj.cache_active:
            # means our cache obj exists and is marked as active, i.e. its
            # cache is not outdated; we return ActiveRegionCache
            self.skip_cache_active_change = True
            return ActiveRegionCache(self)

        # the key is either not existing or set to False, we return
        # the real invalidator which re-computes value. We additionally set
        # the flag to actually update the Database objects
        self.skip_cache_active_change = False
        return FreshRegionCache(self)

    def __exit__(self, exc_type, exc_val, exc_tb):

        if self.skip_cache_active_change:
            return

        try:
            self.cache_obj.cache_active = True
            Session().add(self.cache_obj)
            Session().commit()
        except IntegrityError:
            # if we catch an integrity error, it means this object was already
            # inserted; the assumption is that this is really an edge
            # race-condition case and it's safe to skip it
            Session().rollback()
        except Exception:
            log.exception('Failed to commit on cache key update')
            Session().rollback()
            if self.raise_exception:
                raise
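The only functional change in this file is in __repr__ above: wrapping repo_name and cache_type in safe_str keeps the repr from raising on unicode repository names when the context object is interpolated into log messages. For orientation, here is a minimal usage sketch of the helpers in this module, based only on the docstrings and class definitions shown above; the 'long_term' region, the repository name and _load_file_tree are illustrative placeholders rather than part of this change, and compute_func is assumed to be a function already cached with Beaker's cache_region decorator, since FreshRegionCache.invalidate goes through region_invalidate:

    import beaker.cache
    from rhodecode.lib import caches  # import path assumed for the module shown in this diff

    repo_name = u'some/repo'

    @beaker.cache.cache_region('long_term')
    def _load_file_tree(cache_key):
        # hypothetical expensive computation; the key argument is the
        # md5-based cache key computed by the InvalidationContext
        return {'files': []}

    with caches.InvalidationContext(
            compute_func=_load_file_tree, repo_name=repo_name,
            cache_type=caches.FILE_TREE) as region:
        # FreshRegionCache.invalidate() drops the stale Beaker entry when the
        # CacheKey row is inactive; ActiveRegionCache.invalidate() is a no-op.
        region.invalidate()
        result = region.compute()

On exit, the context marks the CacheKey row active again and swallows the IntegrityError that a concurrent writer could trigger, as shown in __exit__ above.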
@@ -1,510 +1,518 @@
# -*- coding: utf-8 -*-

# Copyright (C) 2010-2016 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/


"""
Package for testing various lib/helper functions in rhodecode
"""

import datetime
import string
import mock
import pytest
from rhodecode.tests.utils import run_test_concurrently
from rhodecode.lib.helpers import InitialsGravatar

from rhodecode.lib.utils2 import AttributeDict
from rhodecode.model.db import Repository


def _urls_for_proto(proto):
    return [
        ('%s://127.0.0.1' % proto, ['%s://' % proto, '127.0.0.1'],
         '%s://127.0.0.1' % proto),
        ('%s://marcink@127.0.0.1' % proto, ['%s://' % proto, '127.0.0.1'],
         '%s://127.0.0.1' % proto),
        ('%s://marcink:pass@127.0.0.1' % proto, ['%s://' % proto, '127.0.0.1'],
         '%s://127.0.0.1' % proto),
        ('%s://127.0.0.1:8080' % proto, ['%s://' % proto, '127.0.0.1', '8080'],
         '%s://127.0.0.1:8080' % proto),
        ('%s://domain.org' % proto, ['%s://' % proto, 'domain.org'],
         '%s://domain.org' % proto),
        ('%s://user:pass@domain.org:8080' % proto,
         ['%s://' % proto, 'domain.org', '8080'],
         '%s://domain.org:8080' % proto),
    ]

TEST_URLS = _urls_for_proto('http') + _urls_for_proto('https')


@pytest.mark.parametrize("test_url, expected, expected_creds", TEST_URLS)
def test_uri_filter(test_url, expected, expected_creds):
    from rhodecode.lib.utils2 import uri_filter
    assert uri_filter(test_url) == expected


@pytest.mark.parametrize("test_url, expected, expected_creds", TEST_URLS)
def test_credentials_filter(test_url, expected, expected_creds):
    from rhodecode.lib.utils2 import credentials_filter
    assert credentials_filter(test_url) == expected_creds


@pytest.mark.parametrize("str_bool, expected", [
    ('t', True),
    ('true', True),
    ('y', True),
    ('yes', True),
    ('on', True),
    ('1', True),
    ('Y', True),
    ('yeS', True),
    ('Y', True),
    ('TRUE', True),
    ('T', True),
    ('False', False),
    ('F', False),
    ('FALSE', False),
    ('0', False),
    ('-1', False),
    ('', False)
])
def test_str2bool(str_bool, expected):
    from rhodecode.lib.utils2 import str2bool
    assert str2bool(str_bool) == expected


@pytest.mark.parametrize("text, expected", reduce(lambda a1,a2:a1+a2, [
[
(pref+"", []),
(pref+"Hi there @marcink", ['marcink']),
(pref+"Hi there @marcink and @bob", ['bob', 'marcink']),
(pref+"Hi there @marcink\n", ['marcink']),
(pref+"Hi there @marcink and @bob\n", ['bob', 'marcink']),
(pref+"Hi there marcin@rhodecode.com", []),
(pref+"Hi there @john.malcovic and @bob\n", ['bob', 'john.malcovic']),
(pref+"This needs to be reviewed: (@marcink,@john)", ["john", "marcink"]),
(pref+"This needs to be reviewed: (@marcink, @john)", ["john", "marcink"]),
(pref+"This needs to be reviewed: [@marcink,@john]", ["john", "marcink"]),
(pref+"This needs to be reviewed: (@marcink @john)", ["john", "marcink"]),
(pref+"@john @mary, please review", ["john", "mary"]),
(pref+"@john,@mary, please review", ["john", "mary"]),
(pref+"Hej @123, @22john,@mary, please review", ['123', '22john', 'mary']),
(pref+"@first hi there @marcink here's my email marcin@email.com "
"@lukaszb check @one_more22 it pls @ ttwelve @D[] @one@two@three ", ['first', 'lukaszb', 'marcink', 'one', 'one_more22']),
(pref+"@MARCIN @maRCiN @2one_more22 @john please see this http://org.pl", ['2one_more22', 'john', 'MARCIN', 'maRCiN']),
(pref+"@marian.user just do it @marco-polo and next extract @marco_polo", ['marco-polo', 'marco_polo', 'marian.user']),
(pref+"user.dot hej ! not-needed maril@domain.org", []),
(pref+"\n@marcin", ['marcin']),
]
for pref in ['', '\n', 'hi !', '\t', '\n\n']]))
def test_mention_extractor(text, expected):
    from rhodecode.lib.utils2 import extract_mentioned_users
    got = extract_mentioned_users(text)
    assert sorted(got, key=lambda x: x.lower()) == got
    assert set(expected) == set(got)

@pytest.mark.parametrize("age_args, expected, kw", [
    ({}, u'just now', {}),
    ({'seconds': -1}, u'1 second ago', {}),
    ({'seconds': -60 * 2}, u'2 minutes ago', {}),
    ({'hours': -1}, u'1 hour ago', {}),
    ({'hours': -24}, u'1 day ago', {}),
    ({'hours': -24 * 5}, u'5 days ago', {}),
    ({'months': -1}, u'1 month ago', {}),
    ({'months': -1, 'days': -2}, u'1 month and 2 days ago', {}),
    ({'years': -1, 'months': -1}, u'1 year and 1 month ago', {}),
    ({}, u'just now', {'short_format': True}),
    ({'seconds': -1}, u'1sec ago', {'short_format': True}),
    ({'seconds': -60 * 2}, u'2min ago', {'short_format': True}),
    ({'hours': -1}, u'1h ago', {'short_format': True}),
    ({'hours': -24}, u'1d ago', {'short_format': True}),
    ({'hours': -24 * 5}, u'5d ago', {'short_format': True}),
    ({'months': -1}, u'1m ago', {'short_format': True}),
    ({'months': -1, 'days': -2}, u'1m, 2d ago', {'short_format': True}),
    ({'years': -1, 'months': -1}, u'1y, 1m ago', {'short_format': True}),
])
def test_age(age_args, expected, kw, pylonsapp):
    from rhodecode.lib.utils2 import age
    from dateutil import relativedelta
    n = datetime.datetime(year=2012, month=5, day=17)
    delt = lambda *args, **kwargs: relativedelta.relativedelta(*args, **kwargs)
    assert age(n + delt(**age_args), now=n, **kw) == expected

@pytest.mark.parametrize("age_args, expected, kw", [
    ({}, u'just now', {}),
    ({'seconds': 1}, u'in 1 second', {}),
    ({'seconds': 60 * 2}, u'in 2 minutes', {}),
    ({'hours': 1}, u'in 1 hour', {}),
    ({'hours': 24}, u'in 1 day', {}),
    ({'hours': 24 * 5}, u'in 5 days', {}),
    ({'months': 1}, u'in 1 month', {}),
    ({'months': 1, 'days': 1}, u'in 1 month and 1 day', {}),
    ({'years': 1, 'months': 1}, u'in 1 year and 1 month', {}),
    ({}, u'just now', {'short_format': True}),
    ({'seconds': 1}, u'in 1sec', {'short_format': True}),
    ({'seconds': 60 * 2}, u'in 2min', {'short_format': True}),
    ({'hours': 1}, u'in 1h', {'short_format': True}),
    ({'hours': 24}, u'in 1d', {'short_format': True}),
    ({'hours': 24 * 5}, u'in 5d', {'short_format': True}),
    ({'months': 1}, u'in 1m', {'short_format': True}),
    ({'months': 1, 'days': 1}, u'in 1m, 1d', {'short_format': True}),
    ({'years': 1, 'months': 1}, u'in 1y, 1m', {'short_format': True}),
])
def test_age_in_future(age_args, expected, kw, pylonsapp):
    from rhodecode.lib.utils2 import age
    from dateutil import relativedelta
    n = datetime.datetime(year=2012, month=5, day=17)
    delt = lambda *args, **kwargs: relativedelta.relativedelta(*args, **kwargs)
    assert age(n + delt(**age_args), now=n, **kw) == expected


def test_tag_exctrator():
    sample = (
        "hello pta[tag] gog [[]] [[] sda ero[or]d [me =>>< sa]"
        "[requires] [stale] [see<>=>] [see => http://url.com]"
        "[requires => url] [lang => python] [just a tag] <html_tag first='abc' attr=\"my.url?attr=&another=\"></html_tag>"
        "[,d] [ => ULR ] [obsolete] [desc]]"
    )
    from rhodecode.lib.helpers import desc_stylize, escaped_stylize
    res = desc_stylize(sample)
    assert '<div class="metatag" tag="tag">tag</div>' in res
    assert '<div class="metatag" tag="obsolete">obsolete</div>' in res
    assert '<div class="metatag" tag="stale">stale</div>' in res
    assert '<div class="metatag" tag="lang">python</div>' in res
    assert '<div class="metatag" tag="requires">requires => <a href="/url">url</a></div>' in res
    assert '<div class="metatag" tag="tag">tag</div>' in res
    assert '<html_tag first=\'abc\' attr=\"my.url?attr=&another=\"></html_tag>' in res

    res_encoded = escaped_stylize(sample)
    assert '<div class="metatag" tag="tag">tag</div>' in res_encoded
    assert '<div class="metatag" tag="obsolete">obsolete</div>' in res_encoded
    assert '<div class="metatag" tag="stale">stale</div>' in res_encoded
    assert '<div class="metatag" tag="lang">python</div>' in res_encoded
    assert '<div class="metatag" tag="requires">requires => <a href="/url">url</a></div>' in res_encoded
    assert '<div class="metatag" tag="tag">tag</div>' in res_encoded
    assert '&lt;html_tag first=&#39;abc&#39; attr=&quot;my.url?attr=&amp;another=&quot;&gt;&lt;/html_tag&gt;' in res_encoded


@pytest.mark.parametrize("tmpl_url, email, expected", [
    ('http://test.com/{email}', 'test@foo.com', 'http://test.com/test@foo.com'),

    ('http://test.com/{md5email}', 'test@foo.com', 'http://test.com/3cb7232fcc48743000cb86d0d5022bd9'),
    ('http://test.com/{md5email}', 'testąć@foo.com', 'http://test.com/978debb907a3c55cd741872ab293ef30'),

    ('http://testX.com/{md5email}?s={size}', 'test@foo.com', 'http://testX.com/3cb7232fcc48743000cb86d0d5022bd9?s=24'),
    ('http://testX.com/{md5email}?s={size}', 'testąć@foo.com', 'http://testX.com/978debb907a3c55cd741872ab293ef30?s=24'),

    ('{scheme}://{netloc}/{md5email}/{size}', 'test@foo.com', 'https://server.com/3cb7232fcc48743000cb86d0d5022bd9/24'),
    ('{scheme}://{netloc}/{md5email}/{size}', 'testąć@foo.com', 'https://server.com/978debb907a3c55cd741872ab293ef30/24'),

    ('http://test.com/{email}', 'testąć@foo.com', 'http://test.com/testąć@foo.com'),
    ('http://test.com/{email}?size={size}', 'test@foo.com', 'http://test.com/test@foo.com?size=24'),
    ('http://test.com/{email}?size={size}', 'testąć@foo.com', 'http://test.com/testąć@foo.com?size=24'),
])
def test_gravatar_url_builder(tmpl_url, email, expected, request_stub):
    from rhodecode.lib.helpers import gravatar_url

    # mock pyramid.threadlocals
    def fake_get_current_request():
        request_stub.scheme = 'https'
        request_stub.host = 'server.com'
        return request_stub

    # mock pylons.tmpl_context
    def fake_tmpl_context(_url):
        _c = AttributeDict()
        _c.visual = AttributeDict()
        _c.visual.use_gravatar = True
        _c.visual.gravatar_url = _url

        return _c

    with mock.patch('rhodecode.lib.helpers.get_current_request',
                    fake_get_current_request):
        fake = fake_tmpl_context(_url=tmpl_url)
        with mock.patch('pylons.tmpl_context', fake):
            grav = gravatar_url(email_address=email, size=24)
            assert grav == expected


@pytest.mark.parametrize(
    "email, first_name, last_name, expected_initials, expected_color", [

        ('test@rhodecode.com', '', '', 'TR', '#8a994d'),
        ('marcin.kuzminski@rhodecode.com', '', '', 'MK', '#6559b3'),
        # special cases of email
        ('john.van.dam@rhodecode.com', '', '', 'JD', '#526600'),
        ('Guido.van.Rossum@rhodecode.com', '', '', 'GR', '#990052'),
        ('Guido.van.Rossum@rhodecode.com', 'Guido', 'Van Rossum', 'GR', '#990052'),

        ('rhodecode+Guido.van.Rossum@rhodecode.com', '', '', 'RR', '#46598c'),
        ('pclouds@rhodecode.com', 'Nguyễn Thái', 'Tgọc Duy', 'ND', '#665200'),

        ('john-brown@foo.com', '', '', 'JF', '#73006b'),
        ('admin@rhodecode.com', 'Marcin', 'Kuzminski', 'MK', '#104036'),
        # partials
        ('admin@rhodecode.com', 'Marcin', '', 'MR', '#104036'),  # fn+email
        ('admin@rhodecode.com', '', 'Kuzminski', 'AK', '#104036'),  # em+ln
        # non-ascii
        ('admin@rhodecode.com', 'Marcin', 'Śuzminski', 'MS', '#104036'),
        ('marcin.Śuzminski@rhodecode.com', '', '', 'MS', '#73000f'),

        # special cases, LDAP can provide those...
        ('admin@', 'Marcin', 'Śuzminski', 'MS', '#aa00ff'),
        ('marcin.Śuzminski', '', '', 'MS', '#402020'),
        ('null', '', '', 'NL', '#8c4646'),
    ])
274 | def test_initials_gravatar_pick_of_initials_and_color_algo( |
|
274 | def test_initials_gravatar_pick_of_initials_and_color_algo( | |
275 | email, first_name, last_name, expected_initials, expected_color): |
|
275 | email, first_name, last_name, expected_initials, expected_color): | |
276 | instance = InitialsGravatar(email, first_name, last_name) |
|
276 | instance = InitialsGravatar(email, first_name, last_name) | |
277 | assert instance.get_initials() == expected_initials |
|
277 | assert instance.get_initials() == expected_initials | |
278 | assert instance.str2color(email) == expected_color |
|
278 | assert instance.str2color(email) == expected_color | |
279 |
|
279 | |||
280 |
|
280 | |||
281 | def test_initials_gravatar_mapping_algo(): |
|
281 | def test_initials_gravatar_mapping_algo(): | |
282 | pos = set() |
|
282 | pos = set() | |
283 | instance = InitialsGravatar('', '', '') |
|
283 | instance = InitialsGravatar('', '', '') | |
284 | iterations = 0 |
|
284 | iterations = 0 | |
285 |
|
285 | |||
286 | variations = [] |
|
286 | variations = [] | |
287 | for letter1 in string.ascii_letters: |
|
287 | for letter1 in string.ascii_letters: | |
288 | for letter2 in string.ascii_letters[::-1][:10]: |
|
288 | for letter2 in string.ascii_letters[::-1][:10]: | |
289 | for letter3 in string.ascii_letters[:10]: |
|
289 | for letter3 in string.ascii_letters[:10]: | |
290 | variations.append( |
|
290 | variations.append( | |
291 | '%s@rhodecode.com' % (letter1+letter2+letter3)) |
|
291 | '%s@rhodecode.com' % (letter1+letter2+letter3)) | |
292 |
|
292 | |||
293 | max_variations = 4096 |
|
293 | max_variations = 4096 | |
294 | for email in variations[:max_variations]: |
|
294 | for email in variations[:max_variations]: | |
295 | iterations += 1 |
|
295 | iterations += 1 | |
296 | pos.add( |
|
296 | pos.add( | |
297 | instance.pick_color_bank_index(email, |
|
297 | instance.pick_color_bank_index(email, | |
298 | instance.get_color_bank())) |
|
298 | instance.get_color_bank())) | |
299 |
|
299 | |||
300 | # we assume that we have match all 256 possible positions, |
|
300 | # we assume that we have match all 256 possible positions, | |
301 | # in reasonable amount of different email addresses |
|
301 | # in reasonable amount of different email addresses | |
302 | assert len(pos) == 256 |
|
302 | assert len(pos) == 256 | |
303 | assert iterations == max_variations |
|
303 | assert iterations == max_variations | |
304 |
|
304 | |||
305 |
|
305 | |||
306 | @pytest.mark.parametrize("tmpl, repo_name, overrides, prefix, expected", [ |
|
306 | @pytest.mark.parametrize("tmpl, repo_name, overrides, prefix, expected", [ | |
307 | (Repository.DEFAULT_CLONE_URI, 'group/repo1', {}, '', 'http://vps1:8000/group/repo1'), |
|
307 | (Repository.DEFAULT_CLONE_URI, 'group/repo1', {}, '', 'http://vps1:8000/group/repo1'), | |
308 | (Repository.DEFAULT_CLONE_URI, 'group/repo1', {'user': 'marcink'}, '', 'http://marcink@vps1:8000/group/repo1'), |
|
308 | (Repository.DEFAULT_CLONE_URI, 'group/repo1', {'user': 'marcink'}, '', 'http://marcink@vps1:8000/group/repo1'), | |
309 | (Repository.DEFAULT_CLONE_URI, 'group/repo1', {}, '/rc', 'http://vps1:8000/rc/group/repo1'), |
|
309 | (Repository.DEFAULT_CLONE_URI, 'group/repo1', {}, '/rc', 'http://vps1:8000/rc/group/repo1'), | |
310 | (Repository.DEFAULT_CLONE_URI, 'group/repo1', {'user': 'user'}, '/rc', 'http://user@vps1:8000/rc/group/repo1'), |
|
310 | (Repository.DEFAULT_CLONE_URI, 'group/repo1', {'user': 'user'}, '/rc', 'http://user@vps1:8000/rc/group/repo1'), | |
311 | (Repository.DEFAULT_CLONE_URI, 'group/repo1', {'user': 'marcink'}, '/rc', 'http://marcink@vps1:8000/rc/group/repo1'), |
|
311 | (Repository.DEFAULT_CLONE_URI, 'group/repo1', {'user': 'marcink'}, '/rc', 'http://marcink@vps1:8000/rc/group/repo1'), | |
312 | (Repository.DEFAULT_CLONE_URI, 'group/repo1', {'user': 'user'}, '/rc/', 'http://user@vps1:8000/rc/group/repo1'), |
|
312 | (Repository.DEFAULT_CLONE_URI, 'group/repo1', {'user': 'user'}, '/rc/', 'http://user@vps1:8000/rc/group/repo1'), | |
313 | (Repository.DEFAULT_CLONE_URI, 'group/repo1', {'user': 'marcink'}, '/rc/', 'http://marcink@vps1:8000/rc/group/repo1'), |
|
313 | (Repository.DEFAULT_CLONE_URI, 'group/repo1', {'user': 'marcink'}, '/rc/', 'http://marcink@vps1:8000/rc/group/repo1'), | |
314 | ('{scheme}://{user}@{netloc}/_{repoid}', 'group/repo1', {}, '', 'http://vps1:8000/_23'), |
|
314 | ('{scheme}://{user}@{netloc}/_{repoid}', 'group/repo1', {}, '', 'http://vps1:8000/_23'), | |
315 | ('{scheme}://{user}@{netloc}/_{repoid}', 'group/repo1', {'user': 'marcink'}, '', 'http://marcink@vps1:8000/_23'), |
|
315 | ('{scheme}://{user}@{netloc}/_{repoid}', 'group/repo1', {'user': 'marcink'}, '', 'http://marcink@vps1:8000/_23'), | |
316 | ('http://{user}@{netloc}/_{repoid}', 'group/repo1', {'user': 'marcink'}, '', 'http://marcink@vps1:8000/_23'), |
|
316 | ('http://{user}@{netloc}/_{repoid}', 'group/repo1', {'user': 'marcink'}, '', 'http://marcink@vps1:8000/_23'), | |
317 | ('http://{netloc}/_{repoid}', 'group/repo1', {'user': 'marcink'}, '', 'http://vps1:8000/_23'), |
|
317 | ('http://{netloc}/_{repoid}', 'group/repo1', {'user': 'marcink'}, '', 'http://vps1:8000/_23'), | |
318 | ('https://{user}@proxy1.server.com/{repo}', 'group/repo1', {'user': 'marcink'}, '', 'https://marcink@proxy1.server.com/group/repo1'), |
|
318 | ('https://{user}@proxy1.server.com/{repo}', 'group/repo1', {'user': 'marcink'}, '', 'https://marcink@proxy1.server.com/group/repo1'), | |
319 | ('https://{user}@proxy1.server.com/{repo}', 'group/repo1', {}, '', 'https://proxy1.server.com/group/repo1'), |
|
319 | ('https://{user}@proxy1.server.com/{repo}', 'group/repo1', {}, '', 'https://proxy1.server.com/group/repo1'), | |
320 | ('https://proxy1.server.com/{user}/{repo}', 'group/repo1', {'user': 'marcink'}, '', 'https://proxy1.server.com/marcink/group/repo1'), |
|
320 | ('https://proxy1.server.com/{user}/{repo}', 'group/repo1', {'user': 'marcink'}, '', 'https://proxy1.server.com/marcink/group/repo1'), | |
321 | ]) |
|
321 | ]) | |
322 | def test_clone_url_generator(tmpl, repo_name, overrides, prefix, expected): |
|
322 | def test_clone_url_generator(tmpl, repo_name, overrides, prefix, expected): | |
323 | from rhodecode.lib.utils2 import get_clone_url |
|
323 | from rhodecode.lib.utils2 import get_clone_url | |
324 | clone_url = get_clone_url(uri_tmpl=tmpl, qualifed_home_url='http://vps1:8000'+prefix, |
|
324 | clone_url = get_clone_url(uri_tmpl=tmpl, qualifed_home_url='http://vps1:8000'+prefix, | |
325 | repo_name=repo_name, repo_id=23, **overrides) |
|
325 | repo_name=repo_name, repo_id=23, **overrides) | |
326 | assert clone_url == expected |
|
326 | assert clone_url == expected | |
327 |
|
327 | |||
328 |
|
328 | |||
329 | def _quick_url(text, tmpl="""<a class="revision-link" href="%s">%s</a>""", url_=None): |
|
329 | def _quick_url(text, tmpl="""<a class="revision-link" href="%s">%s</a>""", url_=None): | |
330 | """ |
|
330 | """ | |
331 | Changes `some text url[foo]` => `some text <a href="/">foo</a> |
|
331 | Changes `some text url[foo]` => `some text <a href="/">foo</a> | |
332 |
|
332 | |||
333 | :param text: |
|
333 | :param text: | |
334 | """ |
|
334 | """ | |
335 | import re |
|
335 | import re | |
336 | # quickly change expected url[] into a link |
|
336 | # quickly change expected url[] into a link | |
337 | URL_PAT = re.compile(r'(?:url\[)(.+?)(?:\])') |
|
337 | URL_PAT = re.compile(r'(?:url\[)(.+?)(?:\])') | |
338 |
|
338 | |||
339 | def url_func(match_obj): |
|
339 | def url_func(match_obj): | |
340 | _url = match_obj.groups()[0] |
|
340 | _url = match_obj.groups()[0] | |
341 | return tmpl % (url_ or '/some-url', _url) |
|
341 | return tmpl % (url_ or '/some-url', _url) | |
342 | return URL_PAT.sub(url_func, text) |
|
342 | return URL_PAT.sub(url_func, text) | |
343 |
|
343 | |||
344 |
|
344 | |||
345 | @pytest.mark.parametrize("sample, expected", [ |
|
345 | @pytest.mark.parametrize("sample, expected", [ | |
346 | ("", |
|
346 | ("", | |
347 | ""), |
|
347 | ""), | |
348 | ("git-svn-id: https://svn.apache.org/repos/asf/libcloud/trunk@1441655 13f79535-47bb-0310-9956-ffa450edef68", |
|
348 | ("git-svn-id: https://svn.apache.org/repos/asf/libcloud/trunk@1441655 13f79535-47bb-0310-9956-ffa450edef68", | |
349 | "git-svn-id: https://svn.apache.org/repos/asf/libcloud/trunk@1441655 13f79535-47bb-0310-9956-ffa450edef68"), |
|
349 | "git-svn-id: https://svn.apache.org/repos/asf/libcloud/trunk@1441655 13f79535-47bb-0310-9956-ffa450edef68"), | |
350 | ("from rev 000000000000", |
|
350 | ("from rev 000000000000", | |
351 | "from rev url[000000000000]"), |
|
351 | "from rev url[000000000000]"), | |
352 | ("from rev 000000000000123123 also rev 000000000000", |
|
352 | ("from rev 000000000000123123 also rev 000000000000", | |
353 | "from rev url[000000000000123123] also rev url[000000000000]"), |
|
353 | "from rev url[000000000000123123] also rev url[000000000000]"), | |
354 | ("this should-000 00", |
|
354 | ("this should-000 00", | |
355 | "this should-000 00"), |
|
355 | "this should-000 00"), | |
356 | ("longtextffffffffff rev 123123123123", |
|
356 | ("longtextffffffffff rev 123123123123", | |
357 | "longtextffffffffff rev url[123123123123]"), |
|
357 | "longtextffffffffff rev url[123123123123]"), | |
358 | ("rev ffffffffffffffffffffffffffffffffffffffffffffffffff", |
|
358 | ("rev ffffffffffffffffffffffffffffffffffffffffffffffffff", | |
359 | "rev ffffffffffffffffffffffffffffffffffffffffffffffffff"), |
|
359 | "rev ffffffffffffffffffffffffffffffffffffffffffffffffff"), | |
360 | ("ffffffffffff some text traalaa", |
|
360 | ("ffffffffffff some text traalaa", | |
361 | "url[ffffffffffff] some text traalaa"), |
|
361 | "url[ffffffffffff] some text traalaa"), | |
362 | ("""Multi line |
|
362 | ("""Multi line | |
363 | 123123123123 |
|
363 | 123123123123 | |
364 | some text 123123123123 |
|
364 | some text 123123123123 | |
365 | sometimes ! |
|
365 | sometimes ! | |
366 | """, |
|
366 | """, | |
367 | """Multi line |
|
367 | """Multi line | |
368 | url[123123123123] |
|
368 | url[123123123123] | |
369 | some text url[123123123123] |
|
369 | some text url[123123123123] | |
370 | sometimes ! |
|
370 | sometimes ! | |
371 | """) |
|
371 | """) | |
372 | ]) |
|
372 | ]) | |
373 | def test_urlify_commits(sample, expected): |
|
373 | def test_urlify_commits(sample, expected): | |
374 | def fake_url(self, *args, **kwargs): |
|
374 | def fake_url(self, *args, **kwargs): | |
375 | return '/some-url' |
|
375 | return '/some-url' | |
376 |
|
376 | |||
377 | expected = _quick_url(expected) |
|
377 | expected = _quick_url(expected) | |
378 |
|
378 | |||
379 | with mock.patch('pylons.url', fake_url): |
|
379 | with mock.patch('pylons.url', fake_url): | |
380 | from rhodecode.lib.helpers import urlify_commits |
|
380 | from rhodecode.lib.helpers import urlify_commits | |
381 | assert urlify_commits(sample, 'repo_name') == expected |
|
381 | assert urlify_commits(sample, 'repo_name') == expected | |
382 |
|
382 | |||
383 |
|
383 | |||
384 384 | @pytest.mark.parametrize("sample, expected, url_", [
385 385 |     ("",
386 386 |      "",
387 387 |      ""),
388 388 |     ("https://svn.apache.org/repos",
389 389 |      "url[https://svn.apache.org/repos]",
390 390 |      "https://svn.apache.org/repos"),
391 391 |     ("http://svn.apache.org/repos",
392 392 |      "url[http://svn.apache.org/repos]",
393 393 |      "http://svn.apache.org/repos"),
394 394 |     ("from rev a also rev http://google.com",
395 395 |      "from rev a also rev url[http://google.com]",
396 396 |      "http://google.com"),
397 397 |     ("""Multi line
398 398 | https://foo.bar.com
399 399 | some text lalala""",
400 400 |      """Multi line
401 401 | url[https://foo.bar.com]
402 402 | some text lalala""",
403 403 |      "https://foo.bar.com")
404 404 | ])
405 405 | def test_urlify_test(sample, expected, url_):
406 406 |     from rhodecode.lib.helpers import urlify_text
407 407 |     expected = _quick_url(expected, tmpl="""<a href="%s">%s</a>""", url_=url_)
408 408 |     assert urlify_text(sample) == expected
409 409 |
410 410 |
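These cases assert that plain http(s) URLs in free text get wrapped in anchors pointing at themselves. The real urlify_text in rhodecode.lib.helpers uses a more elaborate pattern; the snippet below is only a rough standalone stand-in for the behaviour the cases pin down, with an assumed pattern and a hypothetical name:

    import re

    # Simplified URL matcher: anything starting with http:// or https://
    # up to the next whitespace. Illustrative only.
    URLIFY_PAT = re.compile(r'(https?://[^\s]+)')

    def urlify_text_sketch(text):
        # Wrap each matched URL in an anchor that links to itself.
        return URLIFY_PAT.sub(r'<a href="\1">\1</a>', text)

    assert (urlify_text_sketch("from rev a also rev http://google.com")
            == 'from rev a also rev '
               '<a href="http://google.com">http://google.com</a>')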
411 411 | @pytest.mark.parametrize("test, expected", [
412 412 |     ("", None),
413 413 |     ("/_2", '2'),
414 414 |     ("_2", '2'),
415 415 |     ("/_2/", '2'),
416 416 |     ("_2/", '2'),
417 417 |
418 418 |     ("/_21", '21'),
419 419 |     ("_21", '21'),
420 420 |     ("/_21/", '21'),
421 421 |     ("_21/", '21'),
422 422 |
423 423 |     ("/_21/foobar", '21'),
424 424 |     ("_21/121", '21'),
425 425 |     ("/_21/_12", '21'),
426 426 |     ("_21/rc/foo", '21'),
427 427 |
428 428 | ])
429 429 | def test_get_repo_by_id(test, expected):
430 430 |     from rhodecode.model.repo import RepoModel
431 431 |     _test = RepoModel()._extract_id_from_repo_name(test)
432 432 |     assert _test == expected
433 433 |
434 434 |
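These cases pin down the contract of RepoModel._extract_id_from_repo_name: a leading "_<digits>" segment, with or without surrounding slashes, yields the id as a string, and anything else yields None. A standalone approximation of that contract, as an illustration rather than the actual model code:

    import re

    def extract_id_sketch(repo_name):
        # Match an optional leading slash, then "_" followed by digits.
        match = re.match(r'^/?_(\d+)', repo_name or '')
        return match.group(1) if match else None

    assert extract_id_sketch('/_21/foobar') == '21'
    assert extract_id_sketch('_2/') == '2'
    assert extract_id_sketch('') is None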
435     | -def test_invalidation_context(pylonsapp):
    435 | +@pytest.mark.parametrize("test_repo_name, repo_type", [
    436 | +    ("test_repo_1", None),
    437 | +    ("repo_group/foobar", None),
    438 | +    ("test_non_asci_Δ ΔΔ", None),
    439 | +    (u"test_non_asci_unicode_Δ ΔΔ", None),
    440 | +])
    441 | +def test_invalidation_context(pylonsapp, test_repo_name, repo_type):
436 442 |     from beaker.cache import cache_region
437 443 |     from rhodecode.lib import caches
438 444 |     from rhodecode.model.db import CacheKey
439 445 |
440 446 |     @cache_region('long_term')
441 447 |     def _dummy_func(cache_key):
442 448 |         return 'result'
443 449 |
444 450 |     invalidator_context = CacheKey.repo_context_cache(
445     | -        _dummy_func, 'test_repo_1', 'repo')
    451 | +        _dummy_func, test_repo_name, 'repo')
446 452 |
447 453 |     with invalidator_context as context:
448 454 |         invalidated = context.invalidate()
449 455 |         result = context.compute()
450 456 |
451 457 |     assert invalidated == True
452 458 |     assert 'result' == result
453 459 |     assert isinstance(context, caches.FreshRegionCache)
454 460 |
    461 | +    assert 'InvalidationContext' in repr(invalidator_context)
    462 | +
455 463 |     with invalidator_context as context:
456 464 |         context.invalidate()
457 465 |         result = context.compute()
458 466 |
459 467 |     assert 'result' == result
460 468 |     assert isinstance(context, caches.ActiveRegionCache)
461 469 |
462 470 |
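The parametrized test above doubles as documentation for how callers use CacheKey.repo_context_cache together with a Beaker region. A usage sketch of that pattern, assuming a configured 'long_term' region and a database session such as the pylonsapp fixture provides, and using a placeholder repository name:

    from beaker.cache import cache_region
    from rhodecode.model.db import CacheKey

    @cache_region('long_term')
    def compute_file_tree(cache_key):
        # Stand-in for an expensive, cacheable computation.
        return 'file tree data'

    invalidator_context = CacheKey.repo_context_cache(
        compute_file_tree, 'some_repo', 'repo')

    with invalidator_context as context:
        # After an invalidation the context is a FreshRegionCache:
        # invalidate() reports True and compute() re-runs the decorated
        # function; later runs get an ActiveRegionCache that serves the
        # memoized value instead.
        context.invalidate()
        tree = context.compute()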
463 471 | def test_invalidation_context_exception_in_compute(pylonsapp):
464 472 |     from rhodecode.model.db import CacheKey
465 473 |     from beaker.cache import cache_region
466 474 |
467 475 |     @cache_region('long_term')
468 476 |     def _dummy_func(cache_key):
469 477 |         # this causes error since it doesn't get any params
470 478 |         raise Exception('ups')
471 479 |
472 480 |     invalidator_context = CacheKey.repo_context_cache(
473 481 |         _dummy_func, 'test_repo_2', 'repo')
474 482 |
475 483 |     with pytest.raises(Exception):
476 484 |         with invalidator_context as context:
477 485 |             context.invalidate()
478 486 |             context.compute()
479 487 |
480 488 |
481 489 | @pytest.mark.parametrize('execution_number', range(5))
482 490 | def test_cache_invalidation_race_condition(execution_number, pylonsapp):
483 491 |     import time
484 492 |     from beaker.cache import cache_region
485 493 |     from rhodecode.model.db import CacheKey
486 494 |
487 495 |     if CacheKey.metadata.bind.url.get_backend_name() == "mysql":
488 496 |         reason = (
489 497 |             'Fails on MariaDB due to some locking issues. Investigation'
490 498 |             ' needed')
491 499 |         pytest.xfail(reason=reason)
492 500 |
493 501 |     @run_test_concurrently(25)
494 502 |     def test_create_and_delete_cache_keys():
495 503 |         time.sleep(0.2)
496 504 |
497 505 |         @cache_region('long_term')
498 506 |         def _dummy_func(cache_key):
499 507 |             return 'result'
500 508 |
501 509 |         invalidator_context = CacheKey.repo_context_cache(
502 510 |             _dummy_func, 'test_repo_1', 'repo')
503 511 |
504 512 |         with invalidator_context as context:
505 513 |             context.invalidate()
506 514 |             context.compute()
507 515 |
508 516 |         CacheKey.set_invalidate('test_repo_1', delete=True)
509 517 |
510 518 |     test_create_and_delete_cache_keys()
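The race-condition test relies on a run_test_concurrently(25) helper whose implementation is not part of this change. A plausible thread-based sketch of such a decorator, with a hypothetical name and behaviour rather than the project's actual test utility:

    import threading

    def run_test_concurrently_sketch(times):
        def decorator(func):
            def wrapper(*args, **kwargs):
                errors = []

                def worker():
                    # Run the wrapped test body and collect any failure.
                    try:
                        func(*args, **kwargs)
                    except Exception as exc:
                        errors.append(exc)

                threads = [threading.Thread(target=worker)
                           for _ in range(times)]
                for t in threads:
                    t.start()
                for t in threads:
                    t.join()
                if errors:
                    # Re-raise the first failure so the test still fails.
                    raise errors[0]
            return wrapper
        return decorator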