##// END OF EJS Templates
vcs-server: expose remote tracebacks from http backend using the Pyro4AwareFormatter.
marcink -
r1257:edb7f6bf default
parent child Browse files
Show More
@@ -1,116 +1,136 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 import sys
21 22 import logging
22 23
23 24
24 25 BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = xrange(30, 38)
25 26
26 27 # Sequences
27 28 RESET_SEQ = "\033[0m"
28 29 COLOR_SEQ = "\033[0;%dm"
29 30 BOLD_SEQ = "\033[1m"
30 31
31 32 COLORS = {
32 33 'CRITICAL': MAGENTA,
33 34 'ERROR': RED,
34 35 'WARNING': CYAN,
35 36 'INFO': GREEN,
36 37 'DEBUG': BLUE,
37 38 'SQL': YELLOW
38 39 }
39 40
40 41
41 42 def one_space_trim(s):
42 43 if s.find(" ") == -1:
43 44 return s
44 45 else:
45 46 s = s.replace(' ', ' ')
46 47 return one_space_trim(s)
47 48
48 49
49 50 def format_sql(sql):
50 51 sql = sql.replace('\n', '')
51 52 sql = one_space_trim(sql)
52 53 sql = sql\
53 54 .replace(',', ',\n\t')\
54 55 .replace('SELECT', '\n\tSELECT \n\t')\
55 56 .replace('UPDATE', '\n\tUPDATE \n\t')\
56 57 .replace('DELETE', '\n\tDELETE \n\t')\
57 58 .replace('FROM', '\n\tFROM')\
58 59 .replace('ORDER BY', '\n\tORDER BY')\
59 60 .replace('LIMIT', '\n\tLIMIT')\
60 61 .replace('WHERE', '\n\tWHERE')\
61 62 .replace('AND', '\n\tAND')\
62 63 .replace('LEFT', '\n\tLEFT')\
63 64 .replace('INNER', '\n\tINNER')\
64 65 .replace('INSERT', '\n\tINSERT')\
65 66 .replace('DELETE', '\n\tDELETE')
66 67 return sql
67 68
68 69
69 70 class Pyro4AwareFormatter(logging.Formatter):
70 71 """
71 72 Extended logging formatter which prints out Pyro4 remote tracebacks.
72 73 """
73 74
74 75 def formatException(self, ei):
75 76 ex_type, ex_value, ex_tb = ei
76 if hasattr(ex_value, '_pyroTraceback'):
77 # johbo: Avoiding to import pyro4 until we get an exception
78 # which actually has a remote traceback. This avoids issues
79 # when gunicorn is used with gevent, since the logging would
80 # trigger an import of Pyro4 before the patches of gevent
81 # are applied.
82 import Pyro4.util
83 return ''.join(
84 Pyro4.util.getPyroTraceback(ex_type, ex_value, ex_tb))
85 return logging.Formatter.formatException(self, ei)
77
78 local_tb = logging.Formatter.formatException(self, ei)
79 if hasattr(ex_value, '_vcs_server_traceback'):
80
81 def formatRemoteTraceback(remote_tb_lines):
82 result = ["\n +--- This exception occured remotely on VCSServer - Remote traceback:\n\n"]
83 result.append(remote_tb_lines)
84 result.append("\n +--- End of remote traceback\n")
85 return result
86
87 try:
88 if ex_type is not None and ex_value is None and ex_tb is None:
89 # possible old (3.x) call syntax where caller is only providing exception object
90 if type(ex_type) is not type:
91 raise TypeError(
92 "invalid argument: ex_type should be an exception type, or just supply no arguments at all")
93 if ex_type is None and ex_tb is None:
94 ex_type, ex_value, ex_tb = sys.exc_info()
95
96 remote_tb = getattr(ex_value, "_vcs_server_traceback", None)
97
98 if remote_tb:
99 remote_tb = formatRemoteTraceback(remote_tb)
100 return local_tb + ''.join(remote_tb)
101 finally:
102 # clean up cycle to traceback, to allow proper GC
103 del ex_type, ex_value, ex_tb
104
105 return local_tb
86 106
87 107
88 108 class ColorFormatter(Pyro4AwareFormatter):
89 109
90 110 def format(self, record):
91 111 """
92 112 Changes record's levelname to use with COLORS enum
93 113 """
94 114
95 115 levelname = record.levelname
96 116 start = COLOR_SEQ % (COLORS[levelname])
97 117 def_record = logging.Formatter.format(self, record)
98 118 end = RESET_SEQ
99 119
100 120 colored_record = ''.join([start, def_record, end])
101 121 return colored_record
102 122
103 123
104 124 class ColorFormatterSql(logging.Formatter):
105 125
106 126 def format(self, record):
107 127 """
108 128 Changes record's levelname to use with COLORS enum
109 129 """
110 130
111 131 start = COLOR_SEQ % (COLORS['SQL'])
112 132 def_record = format_sql(logging.Formatter.format(self, record))
113 133 end = RESET_SEQ
114 134
115 135 colored_record = ''.join([start, def_record, end])
116 136 return colored_record
@@ -1,91 +1,91 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 middleware to handle appenlight publishing of errors
23 23 """
24 24
25 25 from appenlight_client import make_appenlight_middleware
26 26 from appenlight_client.exceptions import get_current_traceback
27 27 from appenlight_client.wsgi import AppenlightWSGIWrapper
28 28
29 29
30 30 def track_exception(environ):
31 31 if 'appenlight.client' not in environ:
32 32 return
33 33
34 34 # pass the traceback object to middleware
35 35 environ['appenlight.__traceback'] = get_current_traceback(
36 36 skip=1,
37 37 show_hidden_frames=True,
38 38 ignore_system_exceptions=True)
39 39
40 40
41 41 def track_extra_information(environ, section, value):
42 42 """
43 43 Utility function to attach extra information in case of an error condition.
44 44
45 45 It will take care of attaching this information to the right place inside
 46 46     of `environ`, so that the appenlight client can pick it up.
47 47 """
48 48 environ.setdefault('appenlight.extra', {})
49 49 environ['appenlight.extra'][section] = value
50 50
51 51
52 52 def wrap_in_appenlight_if_enabled(app, settings, appenlight_client=None):
53 53 """
54 54 Wraps the given `app` for appenlight support.
55 55
56 56 .. important::
57 57
58 58 Appenlight expects that the wrapper is executed only once, that's why
59 59 the parameter `appenlight_client` can be used to pass in an already
60 60 existing client instance to avoid that decorators are applied more than
61 61 once.
62 62
63 63 This is in use to support our setup of the vcs related middlewares.
64 64
65 65 """
66 66 if settings['appenlight']:
67 67 app = RemoteTracebackTracker(app)
68 68 if not appenlight_client:
69 69 app = make_appenlight_middleware(app, settings)
70 70 appenlight_client = app.appenlight_client
71 71 else:
72 72 app = AppenlightWSGIWrapper(app, appenlight_client)
73 73 return app, appenlight_client
74 74
75 75
76 76 class RemoteTracebackTracker(object):
77 77 """
78 Utility middleware which forwards Pyro4 remote traceback information.
78 Utility middleware which forwards VCSServer remote traceback information.
79 79 """
80 80
81 81 def __init__(self, app):
82 82 self.application = app
83 83
84 84 def __call__(self, environ, start_response):
85 85 try:
86 86 return self.application(environ, start_response)
87 87 except Exception as e:
88 if hasattr(e, '_pyroTraceback'):
88 if hasattr(e, '_vcs_server_traceback'):
89 89 track_extra_information(
90 environ, 'remote_traceback', ''.join(e._pyroTraceback))
90 environ, 'remote_traceback', e._vcs_server_traceback)
91 91 raise
@@ -1,285 +1,291 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2016-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Client for the VCSServer implemented based on HTTP.
23 23
24 24
25 25 Status
26 26 ------
27 27
28 28 This client implementation shall eventually replace the Pyro4 based
29 29 implementation.
30 30 """
31 31
32 32 import copy
33 33 import logging
34 34 import threading
35 35 import urllib2
36 36 import urlparse
37 37 import uuid
38 38
39 39 import pycurl
40 40 import msgpack
41 41 import requests
42 42
43 43 from . import exceptions, CurlSession
44 44
45 45
46 46 log = logging.getLogger(__name__)
47 47
48 48
49 49 # TODO: mikhail: Keep it in sync with vcsserver's
50 50 # HTTPApplication.ALLOWED_EXCEPTIONS
51 51 EXCEPTIONS_MAP = {
52 52 'KeyError': KeyError,
53 53 'URLError': urllib2.URLError,
54 54 }
55 55
56 56
57 57 class RepoMaker(object):
58 58
59 59 def __init__(self, server_and_port, backend_endpoint, backend_type, session_factory):
60 60 self.url = urlparse.urljoin(
61 61 'http://%s' % server_and_port, backend_endpoint)
62 62 self._session_factory = session_factory
63 63 self.backend_type = backend_type
64 64
65 65 def __call__(self, path, config, with_wire=None):
66 66 log.debug('RepoMaker call on %s', path)
67 67 return RemoteRepo(
68 68 path, config, self.url, self._session_factory(),
69 69 with_wire=with_wire)
70 70
71 71 def __getattr__(self, name):
72 72 def f(*args, **kwargs):
73 73 return self._call(name, *args, **kwargs)
74 74 return f
75 75
76 76 @exceptions.map_vcs_exceptions
77 77 def _call(self, name, *args, **kwargs):
78 78 payload = {
79 79 'id': str(uuid.uuid4()),
80 80 'method': name,
81 81 'backend': self.backend_type,
82 82 'params': {'args': args, 'kwargs': kwargs}
83 83 }
84 84 return _remote_call(
85 85 self.url, payload, EXCEPTIONS_MAP, self._session_factory())
86 86
87 87
88 88 class ServiceConnection(object):
89 89 def __init__(self, server_and_port, backend_endpoint, session_factory):
90 90 self.url = urlparse.urljoin(
91 91 'http://%s' % server_and_port, backend_endpoint)
92 92 self._session_factory = session_factory
93 93
94 94 def __getattr__(self, name):
95 95 def f(*args, **kwargs):
96 96 return self._call(name, *args, **kwargs)
97 97
98 98 return f
99 99
100 100 @exceptions.map_vcs_exceptions
101 101 def _call(self, name, *args, **kwargs):
102 102 payload = {
103 103 'id': str(uuid.uuid4()),
104 104 'method': name,
105 105 'params': {'args': args, 'kwargs': kwargs}
106 106 }
107 107 return _remote_call(
108 108 self.url, payload, EXCEPTIONS_MAP, self._session_factory())
109 109
110 110
111 111 class RemoteRepo(object):
112 112
113 113 def __init__(self, path, config, url, session, with_wire=None):
114 114 self.url = url
115 115 self._session = session
116 116 self._wire = {
117 117 "path": path,
118 118 "config": config,
119 119 "context": self._create_vcs_cache_context(),
120 120 }
121 121 if with_wire:
122 122 self._wire.update(with_wire)
123 123
124 124 # johbo: Trading complexity for performance. Avoiding the call to
 125 125         # log.debug brings a few percent gain even if it is not active.
126 126 if log.isEnabledFor(logging.DEBUG):
127 127 self._call = self._call_with_logging
128 128
129 129 def __getattr__(self, name):
130 130 def f(*args, **kwargs):
131 131 return self._call(name, *args, **kwargs)
132 132 return f
133 133
134 134 @exceptions.map_vcs_exceptions
135 135 def _call(self, name, *args, **kwargs):
136 136 # TODO: oliver: This is currently necessary pre-call since the
137 137 # config object is being changed for hooking scenarios
138 138 wire = copy.deepcopy(self._wire)
139 139 wire["config"] = wire["config"].serialize()
140 140 payload = {
141 141 'id': str(uuid.uuid4()),
142 142 'method': name,
143 143 'params': {'wire': wire, 'args': args, 'kwargs': kwargs}
144 144 }
145 145 return _remote_call(self.url, payload, EXCEPTIONS_MAP, self._session)
146 146
147 147 def _call_with_logging(self, name, *args, **kwargs):
148 148 log.debug('Calling %s@%s', self.url, name)
149 149 return RemoteRepo._call(self, name, *args, **kwargs)
150 150
151 151 def __getitem__(self, key):
152 152 return self.revision(key)
153 153
154 154 def _create_vcs_cache_context(self):
155 155 """
156 156 Creates a unique string which is passed to the VCSServer on every
157 157 remote call. It is used as cache key in the VCSServer.
158 158 """
159 159 return str(uuid.uuid4())
160 160
161 161 def invalidate_vcs_cache(self):
162 162 """
163 163 This invalidates the context which is sent to the VCSServer on every
164 164 call to a remote method. It forces the VCSServer to create a fresh
165 165 repository instance on the next call to a remote method.
166 166 """
167 167 self._wire['context'] = self._create_vcs_cache_context()
168 168
169 169
170 170 class RemoteObject(object):
171 171
172 172 def __init__(self, url, session):
173 173 self._url = url
174 174 self._session = session
175 175
176 176 # johbo: Trading complexity for performance. Avoiding the call to
 177 177         # log.debug brings a few percent gain even if it is not active.
178 178 if log.isEnabledFor(logging.DEBUG):
179 179 self._call = self._call_with_logging
180 180
181 181 def __getattr__(self, name):
182 182 def f(*args, **kwargs):
183 183 return self._call(name, *args, **kwargs)
184 184 return f
185 185
186 186 @exceptions.map_vcs_exceptions
187 187 def _call(self, name, *args, **kwargs):
188 188 payload = {
189 189 'id': str(uuid.uuid4()),
190 190 'method': name,
191 191 'params': {'args': args, 'kwargs': kwargs}
192 192 }
193 193 return _remote_call(self._url, payload, EXCEPTIONS_MAP, self._session)
194 194
195 195 def _call_with_logging(self, name, *args, **kwargs):
196 196 log.debug('Calling %s@%s', self._url, name)
197 197 return RemoteObject._call(self, name, *args, **kwargs)
198 198
199 199
200 200 def _remote_call(url, payload, exceptions_map, session):
201 201 try:
202 202 response = session.post(url, data=msgpack.packb(payload))
203 203 except pycurl.error as e:
204 204 raise exceptions.HttpVCSCommunicationError(e)
205 205
206 206 try:
207 207 response = msgpack.unpackb(response.content)
208 208 except Exception:
209 209 log.exception('Failed to decode repsponse %r', response.content)
210 210 raise
211 211
212 212 error = response.get('error')
213 213 if error:
214 214 type_ = error.get('type', 'Exception')
215 215 exc = exceptions_map.get(type_, Exception)
216 216 exc = exc(error.get('message'))
217 217 try:
218 218 exc._vcs_kind = error['_vcs_kind']
219 219 except KeyError:
220 220 pass
221
222 try:
223 exc._vcs_server_traceback = error['traceback']
224 except KeyError:
225 pass
226
221 227 raise exc
222 228 return response.get('result')
223 229
224 230
225 231 class VcsHttpProxy(object):
226 232
227 233 CHUNK_SIZE = 16384
228 234
229 235 def __init__(self, server_and_port, backend_endpoint):
230 236 adapter = requests.adapters.HTTPAdapter(max_retries=5)
231 237 self.base_url = urlparse.urljoin(
232 238 'http://%s' % server_and_port, backend_endpoint)
233 239 self.session = requests.Session()
234 240 self.session.mount('http://', adapter)
235 241
236 242 def handle(self, environment, input_data, *args, **kwargs):
237 243 data = {
238 244 'environment': environment,
239 245 'input_data': input_data,
240 246 'args': args,
241 247 'kwargs': kwargs
242 248 }
243 249 result = self.session.post(
244 250 self.base_url, msgpack.packb(data), stream=True)
245 251 return self._get_result(result)
246 252
247 253 def _deserialize_and_raise(self, error):
248 254 exception = Exception(error['message'])
249 255 try:
250 256 exception._vcs_kind = error['_vcs_kind']
251 257 except KeyError:
252 258 pass
253 259 raise exception
254 260
255 261 def _iterate(self, result):
256 262 unpacker = msgpack.Unpacker()
257 263 for line in result.iter_content(chunk_size=self.CHUNK_SIZE):
258 264 unpacker.feed(line)
259 265 for chunk in unpacker:
260 266 yield chunk
261 267
262 268 def _get_result(self, result):
263 269 iterator = self._iterate(result)
264 270 error = iterator.next()
265 271 if error:
266 272 self._deserialize_and_raise(error)
267 273
268 274 status = iterator.next()
269 275 headers = iterator.next()
270 276
271 277 return iterator, status, headers
272 278
273 279
274 280 class ThreadlocalSessionFactory(object):
275 281 """
276 282 Creates one CurlSession per thread on demand.
277 283 """
278 284
279 285 def __init__(self):
280 286 self._thread_local = threading.local()
281 287
282 288 def __call__(self):
283 289 if not hasattr(self._thread_local, 'curl_session'):
284 290 self._thread_local.curl_session = CurlSession()
285 291 return self._thread_local.curl_session
@@ -1,205 +1,205 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Custom vcs exceptions module.
23 23 """
24 24
25 25 import functools
26 26 import urllib2
27 27
28 28
29 29 class VCSCommunicationError(Exception):
30 30 pass
31 31
32 32
33 33 class PyroVCSCommunicationError(VCSCommunicationError):
34 34 pass
35 35
36 36
37 37 class HttpVCSCommunicationError(VCSCommunicationError):
38 38 pass
39 39
40 40
41 41 class VCSError(Exception):
42 42 pass
43 43
44 44
45 45 class RepositoryError(VCSError):
46 46 pass
47 47
48 48
49 49 class RepositoryRequirementError(RepositoryError):
50 50 pass
51 51
52 52
53 53 class VCSBackendNotSupportedError(VCSError):
54 54 """
55 55 Exception raised when VCSServer does not support requested backend
56 56 """
57 57
58 58
59 59 class EmptyRepositoryError(RepositoryError):
60 60 pass
61 61
62 62
63 63 class TagAlreadyExistError(RepositoryError):
64 64 pass
65 65
66 66
67 67 class TagDoesNotExistError(RepositoryError):
68 68 pass
69 69
70 70
71 71 class BranchAlreadyExistError(RepositoryError):
72 72 pass
73 73
74 74
75 75 class BranchDoesNotExistError(RepositoryError):
76 76 pass
77 77
78 78
79 79 class CommitError(RepositoryError):
80 80 """
81 81 Exceptions related to an existing commit
82 82 """
83 83
84 84
85 85 class CommitDoesNotExistError(CommitError):
86 86 pass
87 87
88 88
89 89 class CommittingError(RepositoryError):
90 90 """
91 91 Exceptions happening while creating a new commit
92 92 """
93 93
94 94
95 95 class NothingChangedError(CommittingError):
96 96 pass
97 97
98 98
99 99 class NodeError(VCSError):
100 100 pass
101 101
102 102
103 103 class RemovedFileNodeError(NodeError):
104 104 pass
105 105
106 106
107 107 class NodeAlreadyExistsError(CommittingError):
108 108 pass
109 109
110 110
111 111 class NodeAlreadyChangedError(CommittingError):
112 112 pass
113 113
114 114
115 115 class NodeDoesNotExistError(CommittingError):
116 116 pass
117 117
118 118
119 119 class NodeNotChangedError(CommittingError):
120 120 pass
121 121
122 122
123 123 class NodeAlreadyAddedError(CommittingError):
124 124 pass
125 125
126 126
127 127 class NodeAlreadyRemovedError(CommittingError):
128 128 pass
129 129
130 130
131 131 class SubrepoMergeError(RepositoryError):
132 132 """
133 133 This happens if we try to merge a repository which contains subrepos and
134 134 the subrepos cannot be merged. The subrepos are not merged itself but
135 135 their references in the root repo are merged.
136 136 """
137 137
138 138
139 139 class ImproperArchiveTypeError(VCSError):
140 140 pass
141 141
142 142
143 143 class CommandError(VCSError):
144 144 pass
145 145
146 146
147 147 class UnhandledException(VCSError):
148 148 """
149 149 Signals that something unexpected went wrong.
150 150
151 151 This usually means we have a programming error on the side of the VCSServer
152 152 and should inspect the logfile of the VCSServer to find more details.
153 153 """
154 154
155 155
156 156 _EXCEPTION_MAP = {
157 157 'abort': RepositoryError,
158 158 'archive': ImproperArchiveTypeError,
159 159 'error': RepositoryError,
160 160 'lookup': CommitDoesNotExistError,
161 161 'repo_locked': RepositoryError,
162 162 'requirement': RepositoryRequirementError,
163 163 'unhandled': UnhandledException,
164 164 # TODO: johbo: Define our own exception for this and stop abusing
165 165 # urllib's exception class.
166 166 'url_error': urllib2.URLError,
167 167 'subrepo_merge_error': SubrepoMergeError,
168 168 }
169 169
170 170
171 171 def map_vcs_exceptions(func):
172 172 """
173 173 Utility to decorate functions so that plain exceptions are translated.
174 174
175 175 The translation is based on `exc_map` which maps a `str` indicating
176 176 the error type into an exception class representing this error inside
177 177 of the vcs layer.
178 178 """
179 179
180 180 @functools.wraps(func)
181 181 def wrapper(*args, **kwargs):
182 182 try:
183 183 return func(*args, **kwargs)
184 184 except Exception as e:
185 185 # The error middleware adds information if it finds
186 186 # __traceback_info__ in a frame object. This way the remote
187 187 # traceback information is made available in error reports.
188 remote_tb = getattr(e, '_pyroTraceback', None)
188 remote_tb = getattr(e, '_vcs_server_traceback', None)
189 189 if remote_tb:
190 190 __traceback_info__ = (
191 'Found Pyro4 remote traceback information:\n\n' +
191 'Found VCSServer remote traceback information:\n\n' +
192 192 '\n'.join(remote_tb))
193 193
194 194 # Avoid that remote_tb also appears in the frame
195 195 del remote_tb
196 196
197 197 # Special vcs errors had an attribute "_vcs_kind" which is used
198 198 # to translate them to the proper exception class in the vcs
199 199 # client layer.
200 200 kind = getattr(e, '_vcs_kind', None)
201 201 if kind:
202 202 raise _EXCEPTION_MAP[kind](*e.args)
203 203 else:
204 204 raise
205 205 return wrapper
@@ -1,54 +1,55 b''
1 1
2 2 /******************************************************************************
3 3 * *
4 4 * DO NOT CHANGE THIS FILE MANUALLY *
5 5 * *
6 6 * *
7 7 * This file is automatically generated when the app starts up with *
8 8 * generate_js_files = true *
9 9 * *
10 10 * To add a route here pass jsroute=True to the route definition in the app *
11 11 * *
12 12 ******************************************************************************/
13 13 function registerRCRoutes() {
14 14 // routes registration
15 15 pyroutes.register('home', '/', []);
16 16 pyroutes.register('user_autocomplete_data', '/_users', []);
17 17 pyroutes.register('user_group_autocomplete_data', '/_user_groups', []);
18 18 pyroutes.register('new_repo', '/_admin/create_repository', []);
19 19 pyroutes.register('edit_user', '/_admin/users/%(user_id)s/edit', ['user_id']);
20 20 pyroutes.register('edit_user_group_members', '/_admin/user_groups/%(user_group_id)s/edit/members', ['user_group_id']);
21 21 pyroutes.register('gists', '/_admin/gists', []);
22 22 pyroutes.register('new_gist', '/_admin/gists/new', []);
23 23 pyroutes.register('toggle_following', '/_admin/toggle_following', []);
24 24 pyroutes.register('repo_stats', '/%(repo_name)s/repo_stats/%(commit_id)s', ['repo_name', 'commit_id']);
25 25 pyroutes.register('repo_refs_data', '/%(repo_name)s/refs-data', ['repo_name']);
26 26 pyroutes.register('repo_refs_changelog_data', '/%(repo_name)s/refs-data-changelog', ['repo_name']);
27 27 pyroutes.register('repo_default_reviewers_data', '/%(repo_name)s/default-reviewers', ['repo_name']);
28 28 pyroutes.register('changeset_home', '/%(repo_name)s/changeset/%(revision)s', ['repo_name', 'revision']);
29 29 pyroutes.register('edit_repo', '/%(repo_name)s/settings', ['repo_name']);
30 30 pyroutes.register('edit_repo_perms', '/%(repo_name)s/settings/permissions', ['repo_name']);
31 31 pyroutes.register('changeset_comment', '/%(repo_name)s/changeset/%(revision)s/comment', ['repo_name', 'revision']);
32 32 pyroutes.register('changeset_comment_preview', '/%(repo_name)s/changeset/comment/preview', ['repo_name']);
33 33 pyroutes.register('changeset_comment_delete', '/%(repo_name)s/changeset/comment/%(comment_id)s/delete', ['repo_name', 'comment_id']);
34 34 pyroutes.register('changeset_info', '/%(repo_name)s/changeset_info/%(revision)s', ['repo_name', 'revision']);
35 35 pyroutes.register('compare_url', '/%(repo_name)s/compare/%(source_ref_type)s@%(source_ref)s...%(target_ref_type)s@%(target_ref)s', ['repo_name', 'source_ref_type', 'source_ref', 'target_ref_type', 'target_ref']);
36 36 pyroutes.register('pullrequest_home', '/%(repo_name)s/pull-request/new', ['repo_name']);
37 37 pyroutes.register('pullrequest', '/%(repo_name)s/pull-request/new', ['repo_name']);
38 38 pyroutes.register('pullrequest_repo_refs', '/%(repo_name)s/pull-request/refs/%(target_repo_name)s', ['repo_name', 'target_repo_name']);
39 39 pyroutes.register('pullrequest_repo_destinations', '/%(repo_name)s/pull-request/repo-destinations', ['repo_name']);
40 40 pyroutes.register('pullrequest_update', '/%(repo_name)s/pull-request/%(pull_request_id)s', ['repo_name', 'pull_request_id']);
41 41 pyroutes.register('pullrequest_show_all', '/%(repo_name)s/pull-request', ['repo_name']);
42 42 pyroutes.register('pullrequest_comment', '/%(repo_name)s/pull-request-comment/%(pull_request_id)s', ['repo_name', 'pull_request_id']);
43 43 pyroutes.register('pullrequest_comment_delete', '/%(repo_name)s/pull-request-comment/%(comment_id)s/delete', ['repo_name', 'comment_id']);
44 44 pyroutes.register('changelog_home', '/%(repo_name)s/changelog', ['repo_name']);
45 45 pyroutes.register('changelog_file_home', '/%(repo_name)s/changelog/%(revision)s/%(f_path)s', ['repo_name', 'revision', 'f_path']);
46 46 pyroutes.register('files_home', '/%(repo_name)s/files/%(revision)s/%(f_path)s', ['repo_name', 'revision', 'f_path']);
47 47 pyroutes.register('files_history_home', '/%(repo_name)s/history/%(revision)s/%(f_path)s', ['repo_name', 'revision', 'f_path']);
48 48 pyroutes.register('files_authors_home', '/%(repo_name)s/authors/%(revision)s/%(f_path)s', ['repo_name', 'revision', 'f_path']);
49 pyroutes.register('files_annotate_home', '/%(repo_name)s/annotate/%(revision)s/%(f_path)s', ['repo_name', 'revision', 'f_path']);
49 50 pyroutes.register('files_archive_home', '/%(repo_name)s/archive/%(fname)s', ['repo_name', 'fname']);
50 51 pyroutes.register('files_nodelist_home', '/%(repo_name)s/nodelist/%(revision)s/%(f_path)s', ['repo_name', 'revision', 'f_path']);
51 52 pyroutes.register('files_nodetree_full', '/%(repo_name)s/nodetree_full/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
52 53 pyroutes.register('summary_home_slash', '/%(repo_name)s/', ['repo_name']);
53 54 pyroutes.register('summary_home', '/%(repo_name)s', ['repo_name']);
54 55 }
@@ -1,1799 +1,1798 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import collections
22 22 import datetime
23 23 import hashlib
24 24 import os
25 25 import re
26 26 import pprint
27 27 import shutil
28 28 import socket
29 29 import subprocess32
30 30 import time
31 31 import uuid
32 32
33 33 import mock
34 34 import pyramid.testing
35 35 import pytest
36 36 import colander
37 37 import requests
38 38
39 39 import rhodecode
40 40 from rhodecode.lib.utils2 import AttributeDict
41 41 from rhodecode.model.changeset_status import ChangesetStatusModel
42 42 from rhodecode.model.comment import ChangesetCommentsModel
43 43 from rhodecode.model.db import (
44 44 PullRequest, Repository, RhodeCodeSetting, ChangesetStatus, RepoGroup,
45 45 UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi)
46 46 from rhodecode.model.meta import Session
47 47 from rhodecode.model.pull_request import PullRequestModel
48 48 from rhodecode.model.repo import RepoModel
49 49 from rhodecode.model.repo_group import RepoGroupModel
50 50 from rhodecode.model.user import UserModel
51 51 from rhodecode.model.settings import VcsSettingsModel
52 52 from rhodecode.model.user_group import UserGroupModel
53 53 from rhodecode.model.integration import IntegrationModel
54 54 from rhodecode.integrations import integration_type_registry
55 55 from rhodecode.integrations.types.base import IntegrationTypeBase
56 56 from rhodecode.lib.utils import repo2db_mapper
57 57 from rhodecode.lib.vcs import create_vcsserver_proxy
58 58 from rhodecode.lib.vcs.backends import get_backend
59 59 from rhodecode.lib.vcs.nodes import FileNode
60 60 from rhodecode.tests import (
61 61 login_user_session, get_new_dir, utils, TESTS_TMP_PATH,
62 62 TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN,
63 63 TEST_USER_REGULAR_PASS)
64 64 from rhodecode.tests.utils import CustomTestApp
65 65 from rhodecode.tests.fixture import Fixture
66 66
67 67
68 68 def _split_comma(value):
69 69 return value.split(',')
70 70
71 71
def pytest_addoption(parser):
    """Register the RhodeCode specific command line options with pytest."""
    # (flags, keyword arguments) pairs, registered in order below.
    option_table = (
        (('--keep-tmp-path',), dict(
            action='store_true',
            help="Keep the test temporary directories")),
        (('--backends',), dict(
            action='store', type=_split_comma,
            default=['git', 'hg', 'svn'],
            help="Select which backends to test for backend specific tests.")),
        (('--dbs',), dict(
            action='store', type=_split_comma,
            default=['sqlite'],
            help="Select which database to test for database specific tests. "
                 "Possible options are sqlite,postgres,mysql")),
        (('--appenlight', '--ae'), dict(
            action='store_true',
            help="Track statistics in appenlight.")),
        (('--appenlight-api-key', '--ae-key'), dict(
            help="API key for Appenlight.")),
        (('--appenlight-url', '--ae-url'), dict(
            default="https://ae.rhodecode.com",
            help="Appenlight service URL, defaults to https://ae.rhodecode.com")),
        (('--sqlite-connection-string',), dict(
            action='store', default='',
            help="Connection string for the dbs tests with SQLite")),
        (('--postgres-connection-string',), dict(
            action='store', default='',
            help="Connection string for the dbs tests with Postgres")),
        (('--mysql-connection-string',), dict(
            action='store', default='',
            help="Connection string for the dbs tests with MySQL")),
        (('--repeat',), dict(
            type=int, default=100,
            help="Number of repetitions in performance tests.")),
    )
    for flags, settings in option_table:
        parser.addoption(*flags, **settings)
107 107
108 108
def pytest_configure(config):
    """Apply early monkey patches before test collection starts."""
    # The kombu patch must be in place for test discovery on Python 2.7.11.
    from rhodecode.config import patches
    patches.kombu_1_5_1_python_2_7_11()
113 113
114 114
def pytest_collection_modifyitems(session, config, items):
    """Drop collected items whose object opted out via ``__test__``.

    This mirrors nose's ``nottest`` marker and eases the transition
    from nose to pytest.
    """
    items[:] = [
        item for item in items if getattr(item.obj, '__test__', True)]
120 120
121 121
def pytest_generate_tests(metafunc):
    """Parametrize tests over the backends selected via ``--backends``."""
    if 'backend_alias' in metafunc.fixturenames:
        # Fixture based tests: parametrize over all enabled backends.
        enabled_backends = get_backends_from_metafunc(metafunc)
        if not enabled_backends:
            pytest.skip("Not enabled for any of selected backends")
        metafunc.parametrize('backend_alias', enabled_backends, scope=None)
    elif hasattr(metafunc.function, 'backends'):
        # Marker based tests: just ensure at least one backend remains.
        if not get_backends_from_metafunc(metafunc):
            pytest.skip("Not enabled for any of selected backends")
134 134
135 135
def get_backends_from_metafunc(metafunc):
    """Return the set of backends enabled for the test behind *metafunc*.

    The result is the intersection of the backends requested on the
    command line and the backends supported by the test itself.
    """
    requested = set(metafunc.config.getoption('--backends'))
    if hasattr(metafunc.function, 'backends'):
        # Explicit list given via the pytest.mark.backends decorator.
        supported = metafunc.function.backends.args
    elif hasattr(metafunc.cls, 'backend_alias'):
        # Legacy class attribute used by tests not yet on the marker,
        # mainly kept for the transition away from nose.
        supported = [metafunc.cls.backend_alias]
    else:
        supported = metafunc.config.getoption('--backends')
    return requested.intersection(supported)
149 149
150 150
@pytest.fixture(scope='session', autouse=True)
def activate_example_rcextensions(request):
    """
    Install the example rcextensions module for the whole test session.

    The example module records and verifies the kwargs which are passed
    into the extension entry points.
    """
    from rhodecode.tests.other import example_rcextensions

    previous_extensions = rhodecode.EXTENSIONS
    rhodecode.EXTENSIONS = example_rcextensions

    @request.addfinalizer
    def cleanup():
        rhodecode.EXTENSIONS = previous_extensions
164 164
165 165
@pytest.fixture
def capture_rcextensions():
    """
    Return the live record of calls into the rcextensions entry points.
    """
    recorded_calls = rhodecode.EXTENSIONS.calls
    recorded_calls.clear()
    # The dict is empty at this point; the extension hooks fill it during
    # the test run. Since we hand out a reference, that is all we need.
    return recorded_calls
177 177
178 178
@pytest.fixture(scope='session')
def http_environ_session():
    """
    Provide the "http_environ" data in session scope.
    """
    host_stub = http_host_stub()
    return http_environ(http_host_stub=host_stub)
186 186
187 187
@pytest.fixture
def http_host_stub():
    """
    Value of HTTP_HOST in the test run.

    Format is ``host:port``; `http_environ` splits it on the colon.
    """
    return 'test.example.com:80'
194 194
195 195
@pytest.fixture
def http_environ(http_host_stub):
    """
    Extra WSGI environ keys pointing at the stub test host.

    Used by the test application and for setting up the pylons
    environment. In the case of the fixture "app" it should be possible
    to override this for a specific test case.
    """
    host_parts = http_host_stub.split(':')
    return {
        'SERVER_NAME': host_parts[0],
        'SERVER_PORT': host_parts[1],
        'HTTP_HOST': http_host_stub,
    }
210 210
211 211
@pytest.fixture(scope='function')
def app(request, pylonsapp, http_environ):
    """
    A ``CustomTestApp`` wrapping the application under test.

    The instance is also attached to the requesting test class (if any)
    as ``self.app``.
    """
    test_app = CustomTestApp(pylonsapp, extra_environ=http_environ)
    if request.cls:
        request.cls.app = test_app
    return test_app
222 222
223 223
@pytest.fixture(scope='session')
def app_settings(pylonsapp, pylons_config):
    """
    Settings dictionary used to create the app.

    Parses the ini file and passes the result through the sanitize and apply
    defaults mechanism in `rhodecode.config.middleware`.
    """
    from paste.deploy.loadwsgi import loadcontext, APP
    from rhodecode.config.middleware import (
        sanitize_settings_and_apply_defaults)
    app_context = loadcontext(APP, 'config:' + pylons_config)
    return sanitize_settings_and_apply_defaults(app_context.config())
238 238
239 239
@pytest.fixture(scope='session')
def db(app_settings):
    """
    Initialize the database connection for the test session.

    Uses the same settings which back the ``pylonsapp`` and ``app``
    fixtures.
    """
    from rhodecode.config.utils import initialize_database
    initialize_database(app_settings)
250 250
251 251
252 252 LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user'))
253 253
254 254
def _autologin_user(app, *args):
    """Log in via *app* and return the resulting :class:`LoginData`."""
    session = login_user_session(app, *args)
    token = rhodecode.lib.auth.get_csrf_token(session)
    return LoginData(token, session['rhodecode_user'])
259 259
260 260
@pytest.fixture
def autologin_user(app):
    """
    Make sure the admin user is logged in and expose its login data.
    """
    return _autologin_user(app)
267 267
268 268
@pytest.fixture
def autologin_regular_user(app):
    """
    Make sure the regular (non admin) user is logged in and expose its
    login data.
    """
    credentials = (TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
    return _autologin_user(app, *credentials)
276 276
277 277
@pytest.fixture(scope='function')
def csrf_token(request, autologin_user):
    """CSRF token of the logged in admin user's session."""
    return autologin_user.csrf_token
281 281
282 282
@pytest.fixture(scope='function')
def xhr_header(request):
    """Extra header which marks a request as an XMLHttpRequest."""
    return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'}
286 286
287 287
@pytest.fixture
def real_crypto_backend(monkeypatch):
    """
    Switch the production crypto backend on for this test.

    During a test run the crypto backend is normally replaced with a
    faster implementation based on the MD5 algorithm; this fixture
    restores the real one.
    """
    monkeypatch.setattr(rhodecode, 'is_test', False)
297 297
298 298
@pytest.fixture(scope='class')
def index_location(request, pylonsapp):
    """Search index location, also attached to the test class if present."""
    location = pylonsapp.config['app_conf']['search.location']
    if request.cls:
        request.cls.index_location = location
    return location
305 305
306 306
@pytest.fixture(scope='session', autouse=True)
def tests_tmp_path(request):
    """
    Ensure the session wide temporary directory exists.

    Unless ``--keep-tmp-path`` was given, the directory is removed again
    when the session finishes.
    """
    if not os.path.exists(TESTS_TMP_PATH):
        os.makedirs(TESTS_TMP_PATH)

    keep_tmp = request.config.getoption('--keep-tmp-path')
    if not keep_tmp:
        request.addfinalizer(lambda: shutil.rmtree(TESTS_TMP_PATH))

    return TESTS_TMP_PATH
321 321
322 322
@pytest.fixture(scope='session', autouse=True)
def patch_pyro_request_scope_proxy_factory(request):
    """
    Patch the pyro proxy factory to always use the same dummy request object
    when under test. This will return the same pyro proxy on every call.
    """
    dummy_request = pyramid.testing.DummyRequest()

    def proxy_with_dummy_request(self, request=None):
        # Ignore the incoming request and always bind to the dummy one.
        return self.getProxy(request=dummy_request)

    patcher = mock.patch(
        'rhodecode.lib.vcs.client.RequestScopeProxyFactory.__call__',
        new=proxy_with_dummy_request)
    patcher.start()
    request.addfinalizer(patcher.stop)
342 342
343 343
@pytest.fixture
def test_repo_group(request):
    """
    Create a temporary repository group which is destroyed again once the
    test is done.
    """
    fixture = Fixture()
    group_id = 'test_repo_group_%s' % int(time.time())
    repo_group = fixture.create_repo_group(group_id)
    request.addfinalizer(lambda: fixture.destroy_repo_group(group_id))
    return repo_group
359 359
360 360
@pytest.fixture
def test_user_group(request):
    """
    Create a temporary user group which is destroyed again once the
    test is done.
    """
    fixture = Fixture()
    group_id = 'test_user_group_%s' % int(time.time())
    user_group = fixture.create_user_group(group_id)
    request.addfinalizer(lambda: fixture.destroy_user_group(user_group))
    return user_group
376 376
377 377
@pytest.fixture(scope='session')
def test_repo(request):
    """Session wide container handing out read only test repositories."""
    repos = TestRepoContainer()
    request.addfinalizer(repos._cleanup)
    return repos
383 383
384 384
class TestRepoContainer(object):
    """
    Container for test repositories which are used read only.

    Repositories will be created on demand and re-used during the lifetime
    of this object.

    Usage to get the svn test repository "minimal"::

        test_repo = TestContainer()
        repo = test_repo('minimal', 'svn')

    """

    # Maps a backend alias to the utility which restores a repository of
    # that type from a dump file.
    dump_extractors = {
        'git': utils.extract_git_repo_from_dump,
        'hg': utils.extract_hg_repo_from_dump,
        'svn': utils.extract_svn_repo_from_dump,
    }

    def __init__(self):
        self._cleanup_repos = []
        self._fixture = Fixture()
        self._repos = {}

    def __call__(self, dump_name, backend_alias):
        """Return the database repository for *dump_name*, creating it once."""
        cache_key = (dump_name, backend_alias)
        try:
            repo_id = self._repos[cache_key]
        except KeyError:
            repo_id = self._create_repo(dump_name, backend_alias).repo_id
            self._repos[cache_key] = repo_id
        return Repository.get(repo_id)

    def _create_repo(self, dump_name, backend_alias):
        # Restore the dump into a fresh directory and register the result
        # with the database, remembering the name for later cleanup.
        repo_name = '%s-%s' % (backend_alias, dump_name)
        extract = self.dump_extractors[backend_alias]
        repo_path = extract(dump_name, repo_name)
        vcs_repo = get_backend(backend_alias)(repo_path)
        repo2db_mapper({repo_name: vcs_repo})
        db_repo = RepoModel().get_by_repo_name(repo_name)
        self._cleanup_repos.append(repo_name)
        return db_repo

    def _cleanup(self):
        # Destroy in reverse creation order.
        for repo_name in reversed(self._cleanup_repos):
            self._fixture.destroy_repo(repo_name)
431 431
432 432
@pytest.fixture
def backend(request, backend_alias, pylonsapp, test_repo):
    """
    Parametrized fixture which represents a single backend implementation.

    It respects the option `--backends` to focus the test run on specific
    backend implementations.

    It also supports `pytest.mark.xfail_backends` to mark tests as failing
    for specific backends. This is intended as a utility for incremental
    development of a new backend implementation.
    """
    selected_backends = request.config.getoption('--backends')
    if backend_alias not in selected_backends:
        pytest.skip("Backend %s not selected." % (backend_alias, ))

    utils.check_xfail_backends(request.node, backend_alias)
    utils.check_skip_backends(request.node, backend_alias)

    backend_fixture = Backend(
        alias=backend_alias,
        repo_name='vcs_test_%s' % (backend_alias, ),
        test_name=request.node.name,
        test_repo_container=test_repo)
    request.addfinalizer(backend_fixture.cleanup)
    return backend_fixture
459 459
460 460
@pytest.fixture
def backend_git(request, pylonsapp, test_repo):
    """Shortcut for the `backend` fixture fixed to Git."""
    return backend(request, 'git', pylonsapp, test_repo)
464 464
465 465
@pytest.fixture
def backend_hg(request, pylonsapp, test_repo):
    """Shortcut for the `backend` fixture fixed to Mercurial."""
    return backend(request, 'hg', pylonsapp, test_repo)
469 469
470 470
@pytest.fixture
def backend_svn(request, pylonsapp, test_repo):
    """Shortcut for the `backend` fixture fixed to Subversion."""
    return backend(request, 'svn', pylonsapp, test_repo)
474 474
475 475
@pytest.fixture
def backend_random(backend_git):
    """
    Use this to express that your tests need "a" backend.

    A few of our tests need a backend, so that we can run the code. This
    fixture is intended to be used for such cases. It will pick one of the
    backends and run the tests.

    The fixture `backend` would run the test multiple times for each
    available backend which is a pure waste of time if the test is
    independent of the backend type.
    """
    # TODO: johbo: Change this to pick a random backend
    return backend_git
491 491
492 492
@pytest.fixture
def backend_stub(backend_git):
    """
    Use this to express that your tests need a backend stub.

    TODO: mikhail: Implement a real stub logic instead of returning
    a git backend
    """
    return backend_git
502 502
503 503
@pytest.fixture
def repo_stub(backend_stub):
    """
    Use this to express that your tests need a repository stub.
    """
    return backend_stub.create_repo()
510 510
511 511
class Backend(object):
    """
    Represents the test configuration for one supported backend

    Provides easy access to different test repositories based on
    `__getitem__`. Such repositories will only be created once per test
    session.
    """

    # Matches every character which is not allowed in a repository name.
    invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
    # Template repository set by `create_master_repo`, used by `pull_heads`.
    _master_repo = None
    # Maps commit message -> raw_id for the last created repository.
    # NOTE(review): class level mutable default; `_add_commits_to_repo`
    # rebinds it per instance, so this is only read when no commits were
    # created yet.
    _commit_ids = {}

    def __init__(self, alias, repo_name, test_name, test_repo_container):
        self.alias = alias
        self.repo_name = repo_name
        self._cleanup_repos = []
        self._test_name = test_name
        self._test_repo_container = test_repo_container
        # TODO: johbo: Used as a delegate interim. Not yet sure if Backend or
        # Fixture will survive in the end.
        self._fixture = Fixture()

    def __getitem__(self, key):
        # Hands out the shared read only dump based repositories, see
        # `TestRepoContainer`.
        return self._test_repo_container(key, self.alias)

    @property
    def repo(self):
        """
        Returns the "current" repository. This is the vcs_test repo or the
        last repo which has been created with `create_repo`.
        """
        from rhodecode.model.db import Repository
        return Repository.get_by_repo_name(self.repo_name)

    @property
    def default_branch_name(self):
        """Default branch name of the vcs backend class behind `alias`."""
        VcsRepository = get_backend(self.alias)
        return VcsRepository.DEFAULT_BRANCH_NAME

    @property
    def default_head_id(self):
        """
        Returns the default head id of the underlying backend.

        This will be the default branch name in case the backend does have a
        default branch. In the other cases it will point to a valid head
        which can serve as the base to create a new commit on top of it.
        """
        vcsrepo = self.repo.scm_instance()
        head_id = (
            vcsrepo.DEFAULT_BRANCH_NAME or
            vcsrepo.commit_ids[-1])
        return head_id

    @property
    def commit_ids(self):
        """
        Returns the list of commits for the last created repository
        """
        return self._commit_ids

    def create_master_repo(self, commits):
        """
        Create a repository and remember it as a template.

        This allows to easily create derived repositories to construct
        more complex scenarios for diff, compare and pull requests.

        Returns a commit map which maps from commit message to raw_id.
        """
        self._master_repo = self.create_repo(commits=commits)
        return self._commit_ids

    def create_repo(
            self, commits=None, number_of_commits=0, heads=None,
            name_suffix=u'', **kwargs):
        """
        Create a repository and record it for later cleanup.

        :param commits: Optional. A sequence of dict instances.
            Will add a commit per entry to the new repository.
        :param number_of_commits: Optional. If set to a number, this number of
            commits will be added to the new repository.
        :param heads: Optional. Can be set to a sequence of of commit
            names which shall be pulled in from the master repository.

        """
        self.repo_name = self._next_repo_name() + name_suffix
        repo = self._fixture.create_repo(
            self.repo_name, repo_type=self.alias, **kwargs)
        self._cleanup_repos.append(repo.repo_name)

        # Fall back to `number_of_commits` auto generated commits when no
        # explicit commit dicts were given.
        commits = commits or [
            {'message': 'Commit %s of %s' % (x, self.repo_name)}
            for x in xrange(number_of_commits)]
        self._add_commits_to_repo(repo.scm_instance(), commits)
        if heads:
            self.pull_heads(repo, heads)

        return repo

    def pull_heads(self, repo, heads):
        """
        Make sure that repo contains all commits mentioned in `heads`
        """
        vcsmaster = self._master_repo.scm_instance()
        vcsrepo = repo.scm_instance()
        # Hooks would fire during the pull; not wanted inside a test fixture.
        vcsrepo.config.clear_section('hooks')
        commit_ids = [self._commit_ids[h] for h in heads]
        vcsrepo.pull(vcsmaster.path, commit_ids=commit_ids)

    def create_fork(self):
        """Fork the current repository and make the fork the current one."""
        repo_to_fork = self.repo_name
        self.repo_name = self._next_repo_name()
        repo = self._fixture.create_fork(repo_to_fork, self.repo_name)
        self._cleanup_repos.append(self.repo_name)
        return repo

    def new_repo_name(self, suffix=u''):
        """Reserve (and schedule cleanup for) a fresh unique repo name."""
        self.repo_name = self._next_repo_name() + suffix
        self._cleanup_repos.append(self.repo_name)
        return self.repo_name

    def _next_repo_name(self):
        # Unique per test: sanitized test name plus a running counter.
        return u"%s_%s" % (
            self.invalid_repo_name.sub(u'_', self._test_name),
            len(self._cleanup_repos))

    def ensure_file(self, filename, content='Test content\n'):
        """Commit `filename` with `content` into the current repository."""
        assert self._cleanup_repos, "Avoid writing into vcs_test repos"
        commits = [
            {'added': [
                FileNode(filename, content=content),
            ]},
        ]
        self._add_commits_to_repo(self.repo.scm_instance(), commits)

    def enable_downloads(self):
        """Persistently enable the downloads flag on the current repo."""
        repo = self.repo
        repo.enable_downloads = True
        Session().add(repo)
        Session().commit()

    def cleanup(self):
        """Destroy all repositories created through this instance."""
        # Destroy in reverse creation order (forks before their origin).
        for repo_name in reversed(self._cleanup_repos):
            self._fixture.destroy_repo(repo_name)

    def _add_commits_to_repo(self, repo, commits):
        # Delegates to the module level helper and remembers the resulting
        # message -> raw_id map on this instance.
        commit_ids = _add_commits_to_repo(repo, commits)
        if not commit_ids:
            return
        self._commit_ids = commit_ids

        # Creating refs for Git to allow fetching them from remote repository
        if self.alias == 'git':
            refs = {}
            for message in self._commit_ids:
                # TODO: mikhail: do more special chars replacements
                ref_name = 'refs/test-refs/{}'.format(
                    message.replace(' ', ''))
                refs[ref_name] = self._commit_ids[message]
            self._create_refs(repo, refs)

    def _create_refs(self, repo, refs):
        # Writes each ref into the vcs repository.
        for ref_name in refs:
            repo.set_refs(ref_name, refs[ref_name])
679 679
680 680
@pytest.fixture
def vcsbackend(request, backend_alias, tests_tmp_path, pylonsapp, test_repo):
    """
    Parametrized fixture which represents a single vcs backend implementation.

    See the fixture `backend` for more details. This one implements the same
    concept, but on vcs level. So it does not provide model instances etc.

    Parameters are generated dynamically, see :func:`pytest_generate_tests`
    for how this works.
    """
    selected_backends = request.config.getoption('--backends')
    if backend_alias not in selected_backends:
        pytest.skip("Backend %s not selected." % (backend_alias, ))

    utils.check_xfail_backends(request.node, backend_alias)
    utils.check_skip_backends(request.node, backend_alias)

    repo_name = 'vcs_test_%s' % (backend_alias, )
    vcs_fixture = VcsBackend(
        alias=backend_alias,
        repo_path=os.path.join(tests_tmp_path, repo_name),
        test_name=request.node.name,
        test_repo_container=test_repo)
    request.addfinalizer(vcs_fixture.cleanup)
    return vcs_fixture
707 707
708 708
@pytest.fixture
def vcsbackend_git(request, tests_tmp_path, pylonsapp, test_repo):
    """Shortcut for the `vcsbackend` fixture fixed to Git."""
    return vcsbackend(request, 'git', tests_tmp_path, pylonsapp, test_repo)
712 712
713 713
@pytest.fixture
def vcsbackend_hg(request, tests_tmp_path, pylonsapp, test_repo):
    """Shortcut for the `vcsbackend` fixture fixed to Mercurial."""
    return vcsbackend(request, 'hg', tests_tmp_path, pylonsapp, test_repo)
717 717
718 718
@pytest.fixture
def vcsbackend_svn(request, tests_tmp_path, pylonsapp, test_repo):
    """Shortcut for the `vcsbackend` fixture fixed to Subversion."""
    return vcsbackend(request, 'svn', tests_tmp_path, pylonsapp, test_repo)
722 722
723 723
@pytest.fixture
def vcsbackend_random(vcsbackend_git):
    """
    Use this to express that your tests need "a vcsbackend".

    The fixture `vcsbackend` would run the test multiple times for each
    available vcs backend which is a pure waste of time if the test is
    independent of the vcs backend type.
    """
    # TODO: johbo: Change this to pick a random backend
    return vcsbackend_git
735 735
736 736
@pytest.fixture
def vcsbackend_stub(vcsbackend_git):
    """
    Use this to express that your test just needs a stub of a vcsbackend.

    Plan is to eventually implement an in-memory stub to speed tests up.
    """
    return vcsbackend_git
745 745
746 746
class VcsBackend(object):
    """
    Represents the test configuration for one supported vcs backend.

    Counterpart of :class:`Backend` on the pure vcs level; hands out
    low level repository objects instead of database model instances.
    """

    # Matches every character which is not allowed in a repository name.
    invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')

    def __init__(self, alias, repo_path, test_name, test_repo_container):
        self.alias = alias
        self._repo_path = repo_path
        self._cleanup_repos = []
        self._test_name = test_name
        self._test_repo_container = test_repo_container

    def __getitem__(self, key):
        # Read only dump based repository, see `TestRepoContainer`.
        return self._test_repo_container(key, self.alias).scm_instance()

    @property
    def repo(self):
        """
        Returns the "current" repository. This is the vcs_test repo of the last
        repo which has been created.
        """
        Repository = get_backend(self.alias)
        return Repository(self._repo_path)

    @property
    def backend(self):
        """
        Returns the backend implementation class.
        """
        return get_backend(self.alias)

    def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None):
        """Create a fresh repository (optionally cloned) with test commits."""
        repo_name = self._next_repo_name()
        self._repo_path = get_new_dir(repo_name)
        repo_class = get_backend(self.alias)
        src_url = None
        if _clone_repo:
            src_url = _clone_repo.path
        repo = repo_class(self._repo_path, create=True, src_url=src_url)
        self._cleanup_repos.append(repo)

        # Fall back to `number_of_commits` auto generated commits when no
        # explicit commit dicts were given.
        commits = commits or [
            {'message': 'Commit %s of %s' % (x, repo_name)}
            for x in xrange(number_of_commits)]
        _add_commits_to_repo(repo, commits)
        return repo

    def clone_repo(self, repo):
        """Create a new repository as a clone of `repo`."""
        return self.create_repo(_clone_repo=repo)

    def cleanup(self):
        """Remove the working directories of all created repositories."""
        for repo in self._cleanup_repos:
            shutil.rmtree(repo.path)

    def new_repo_path(self):
        """Reserve and return a fresh directory for a repository."""
        repo_name = self._next_repo_name()
        self._repo_path = get_new_dir(repo_name)
        return self._repo_path

    def _next_repo_name(self):
        # Unique per test: sanitized test name plus a running counter.
        return "%s_%s" % (
            self.invalid_repo_name.sub('_', self._test_name),
            len(self._cleanup_repos))

    def add_file(self, repo, filename, content='Test content\n'):
        """Commit `filename` with `content` into `repo`."""
        imc = repo.in_memory_commit
        imc.add(FileNode(filename, content=content))
        imc.commit(
            message=u'Automatic commit from vcsbackend fixture',
            author=u'Automatic')

    def ensure_file(self, filename, content='Test content\n'):
        """Commit `filename` into the current (writable) repository."""
        assert self._cleanup_repos, "Avoid writing into vcs_test repos"
        self.add_file(self.repo, filename, content)
824 824
def _add_commits_to_repo(vcs_repo, commits):
    """
    Add the given `commits` (sequence of dicts) to `vcs_repo`.

    Each dict may carry the keys ``message``, ``author``, ``date``,
    ``branch``, ``parents`` and the node lists ``added``, ``changed``
    and ``removed``. Returns a dict mapping commit message -> raw_id,
    empty if no commits were given.
    """
    commit_ids = {}
    if not commits:
        return commit_ids

    imc = vcs_repo.in_memory_commit
    commit = None

    for idx, commit in enumerate(commits):
        message = unicode(commit.get('message', 'Commit %s' % idx))

        # Stage the requested node operations on the in-memory commit.
        for node in commit.get('added', []):
            imc.add(FileNode(node.path, content=node.content))
        for node in commit.get('changed', []):
            imc.change(FileNode(node.path, content=node.content))
        for node in commit.get('removed', []):
            imc.remove(FileNode(node.path))

        # Parents are referenced by the message of an earlier commit.
        parents = [
            vcs_repo.get_commit(commit_id=commit_ids[p])
            for p in commit.get('parents', [])]

        # An empty commit is not allowed; fabricate a file if no node
        # operation was requested.
        operations = ('added', 'changed', 'removed')
        if not any((commit.get(o) for o in operations)):
            imc.add(FileNode('file_%s' % idx, content=message))

        commit = imc.commit(
            message=message,
            author=unicode(commit.get('author', 'Automatic')),
            date=commit.get('date'),
            branch=commit.get('branch'),
            parents=parents)

        commit_ids[commit.message] = commit.raw_id

    return commit_ids
861 861
862 862
@pytest.fixture
def reposerver(request):
    """
    Allows to serve a backend repository
    """
    server = RepoServer()
    request.addfinalizer(server.cleanup)
    return server
872 872
873 873
class RepoServer(object):
    """
    Utility to serve a local repository for the duration of a test case.

    Supports only Subversion so far.
    """

    # URL under which the last served repository is reachable.
    url = None

    def __init__(self):
        self._cleanup_servers = []

    def serve(self, vcsrepo):
        """
        Start an ``svnserve`` daemon exposing `vcsrepo` on localhost.

        :raises TypeError: if `vcsrepo` is not a Subversion repository.
        """
        if vcsrepo.alias != 'svn':
            raise TypeError("Backend %s not supported" % vcsrepo.alias)

        proc = subprocess32.Popen(
            ['svnserve', '-d', '--foreground', '--listen-host', 'localhost',
             '--root', vcsrepo.path])
        self._cleanup_servers.append(proc)
        self.url = 'svn://localhost'

    def cleanup(self):
        """Terminate all started servers and reap the child processes."""
        for proc in self._cleanup_servers:
            proc.terminate()
            # Wait after terminate so the child is reaped and no zombie
            # process is left behind for the rest of the test session.
            proc.wait()
899 899
900 900
@pytest.fixture
def pr_util(backend, request):
    """
    Utility for tests of models and for functional tests around pull requests.

    It gives an instance of :class:`PRTestUtility` which provides various
    utility methods around one pull request.

    This fixture uses `backend` and inherits its parameterization.
    """
    utility = PRTestUtility(backend)
    request.addfinalizer(utility.cleanup)
    return utility
919 919
920 920
class PRTestUtility(object):
    """
    Creates a pull request between two generated repositories and offers
    helpers to mutate, comment on, approve and finally clean it up.
    """

    # State filled in lazily by create_pull_request().
    pull_request = None
    pull_request_id = None
    # mock patchers, started on demand and stopped in cleanup()
    mergeable_patcher = None
    mergeable_mock = None
    notification_patcher = None

    def __init__(self, backend):
        self.backend = backend

    def create_pull_request(
            self, commits=None, target_head=None, source_head=None,
            revisions=None, approved=False, author=None, mergeable=False,
            enable_notifications=True, name_suffix=u'', reviewers=None,
            title=u"Test", description=u"Description"):
        """
        Create (once) and return the pull request for this utility.

        Repeated calls return the already created pull request; only the
        mergeable flag is re-applied on each call.
        """
        self.set_mergeable(mergeable)
        if not enable_notifications:
            # mock notification side effect
            self.notification_patcher = mock.patch(
                'rhodecode.model.notification.NotificationModel.create')
            self.notification_patcher.start()

        if not self.pull_request:
            if not commits:
                # Default topology: target at c1, source at c2, one revision.
                commits = [
                    {'message': 'c1'},
                    {'message': 'c2'},
                    {'message': 'c3'},
                ]
                target_head = 'c1'
                source_head = 'c2'
                revisions = ['c2']

            self.commit_ids = self.backend.create_master_repo(commits)
            self.target_repository = self.backend.create_repo(
                heads=[target_head], name_suffix=name_suffix)
            self.source_repository = self.backend.create_repo(
                heads=[source_head], name_suffix=name_suffix)
            self.author = author or UserModel().get_by_username(
                TEST_USER_ADMIN_LOGIN)

            model = PullRequestModel()
            self.create_parameters = {
                'created_by': self.author,
                'source_repo': self.source_repository.repo_name,
                'source_ref': self._default_branch_reference(source_head),
                'target_repo': self.target_repository.repo_name,
                'target_ref': self._default_branch_reference(target_head),
                'revisions': [self.commit_ids[r] for r in revisions],
                'reviewers': reviewers or self._get_reviewers(),
                'title': title,
                'description': description,
            }
            self.pull_request = model.create(**self.create_parameters)
            # A freshly created pull request must not have versions yet.
            assert model.get_versions(self.pull_request) == []

        self.pull_request_id = self.pull_request.pull_request_id

        if approved:
            self.approve()

        Session().add(self.pull_request)
        Session().commit()

        return self.pull_request

    def approve(self):
        """Record an approved status vote for every reviewer."""
        self.create_status_votes(
            ChangesetStatus.STATUS_APPROVED,
            *self.pull_request.reviewers)

    def close(self):
        PullRequestModel().close_pull_request(self.pull_request, self.author)

    def _default_branch_reference(self, commit_message):
        # Build a "branch:<name>:<commit_id>" reference string; commits are
        # addressed by their message via self.commit_ids.
        reference = '%s:%s:%s' % (
            'branch',
            self.backend.default_branch_name,
            self.commit_ids[commit_message])
        return reference

    def _get_reviewers(self):
        # Default reviewers: the two regular test users.
        model = UserModel()
        return [
            model.get_by_username(TEST_USER_REGULAR_LOGIN),
            model.get_by_username(TEST_USER_REGULAR2_LOGIN),
        ]

    def update_source_repository(self, head=None):
        """Pull `head` (default 'c3') into the source repository."""
        heads = [head or 'c3']
        self.backend.pull_heads(self.source_repository, heads=heads)

    def add_one_commit(self, head=None):
        """Add one commit to the source repo and return its commit id."""
        self.update_source_repository(head=head)
        old_commit_ids = set(self.pull_request.revisions)
        PullRequestModel().update_commits(self.pull_request)
        commit_ids = set(self.pull_request.revisions)
        new_commit_ids = commit_ids - old_commit_ids
        assert len(new_commit_ids) == 1
        return new_commit_ids.pop()

    def remove_one_commit(self):
        """Strip the tip commit from the source repo and return its id."""
        assert len(self.pull_request.revisions) == 2
        source_vcs = self.source_repository.scm_instance()
        removed_commit_id = source_vcs.commit_ids[-1]

        # TODO: johbo: Git and Mercurial have an inconsistent vcs api here,
        # remove the if once that's sorted out.
        if self.backend.alias == "git":
            kwargs = {'branch_name': self.backend.default_branch_name}
        else:
            kwargs = {}
        source_vcs.strip(removed_commit_id, **kwargs)

        PullRequestModel().update_commits(self.pull_request)
        assert len(self.pull_request.revisions) == 1
        return removed_commit_id

    def create_comment(self, linked_to=None):
        """Create a general comment; optionally link it to a PR version."""
        comment = ChangesetCommentsModel().create(
            text=u"Test comment",
            repo=self.target_repository.repo_name,
            user=self.author,
            pull_request=self.pull_request)
        assert comment.pull_request_version_id is None

        if linked_to:
            PullRequestModel()._link_comments_to_version(linked_to)

        return comment

    def create_inline_comment(
            self, linked_to=None, line_no=u'n1', file_path='file_1'):
        """Create an inline comment on `file_path` at `line_no`."""
        comment = ChangesetCommentsModel().create(
            text=u"Test comment",
            repo=self.target_repository.repo_name,
            user=self.author,
            line_no=line_no,
            f_path=file_path,
            pull_request=self.pull_request)
        assert comment.pull_request_version_id is None

        if linked_to:
            PullRequestModel()._link_comments_to_version(linked_to)

        return comment

    def create_version_of_pull_request(self):
        """Snapshot the pull request into a new version and return it."""
        pull_request = self.create_pull_request()
        version = PullRequestModel()._create_version_from_snapshot(
            pull_request)
        return version

    def create_status_votes(self, status, *reviewers):
        for reviewer in reviewers:
            ChangesetStatusModel().set_status(
                repo=self.pull_request.target_repo,
                status=status,
                user=reviewer.user_id,
                pull_request=self.pull_request)

    def set_mergeable(self, value):
        # Patch the vcs settings lookup so the PR merge feature appears
        # enabled/disabled as requested; the patcher is started only once.
        if not self.mergeable_patcher:
            self.mergeable_patcher = mock.patch.object(
                VcsSettingsModel, 'get_general_settings')
            self.mergeable_mock = self.mergeable_patcher.start()
        self.mergeable_mock.return_value = {
            'rhodecode_pr_merge_enabled': value}

    def cleanup(self):
        """Delete the pull request and stop any started patchers."""
        # In case the source repository is already cleaned up, the pull
        # request will already be deleted.
        pull_request = PullRequest().get(self.pull_request_id)
        if pull_request:
            PullRequestModel().delete(pull_request)
            Session().commit()

        if self.notification_patcher:
            self.notification_patcher.stop()

        if self.mergeable_patcher:
            self.mergeable_patcher.stop()
1104 1104
1105 1105
@pytest.fixture
def user_admin(pylonsapp):
    """
    Provides the default admin test user as an instance of `db.User`.
    """
    return UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1113 1113
1114 1114
@pytest.fixture
def user_regular(pylonsapp):
    """
    Provides the default regular test user as an instance of `db.User`.
    """
    return UserModel().get_by_username(TEST_USER_REGULAR_LOGIN)
1122 1122
1123 1123
@pytest.fixture
def user_util(request, pylonsapp):
    """
    Provides a wired instance of `UserUtility` with integrated cleanup.
    """
    user_utility = UserUtility(test_name=request.node.name)
    request.addfinalizer(user_utility.cleanup)
    return user_utility
1132 1132
1133 1133
1134 1134 # TODO: johbo: Split this up into utilities per domain or something similar
class UserUtility(object):
    """
    Creates users, user groups, repo groups and permission grants for a test
    and destroys everything it created again in :meth:`cleanup`.
    """

    def __init__(self, test_name="test"):
        self._test_name = self._sanitize_name(test_name)
        self.fixture = Fixture()
        # Bookkeeping of created objects and grants; consumed by cleanup().
        self.repo_group_ids = []
        self.user_ids = []
        self.user_group_ids = []
        self.user_repo_permission_ids = []
        self.user_group_repo_permission_ids = []
        self.user_repo_group_permission_ids = []
        self.user_group_repo_group_permission_ids = []
        self.user_user_group_permission_ids = []
        self.user_group_user_group_permission_ids = []
        self.user_permissions = []

    def _sanitize_name(self, name):
        # Parametrized test node names contain brackets which are not
        # wanted inside generated object names.
        for char in ['[', ']']:
            name = name.replace(char, '_')
        return name

    def create_repo_group(
            self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True):
        """Create a uniquely named repo group owned by `owner`."""
        group_name = "{prefix}_repogroup_{count}".format(
            prefix=self._test_name,
            count=len(self.repo_group_ids))
        repo_group = self.fixture.create_repo_group(
            group_name, cur_user=owner)
        if auto_cleanup:
            self.repo_group_ids.append(repo_group.group_id)
        return repo_group

    def create_user(self, auto_cleanup=True, **kwargs):
        """Create a uniquely named user; kwargs go to the fixture."""
        user_name = "{prefix}_user_{count}".format(
            prefix=self._test_name,
            count=len(self.user_ids))
        user = self.fixture.create_user(user_name, **kwargs)
        if auto_cleanup:
            self.user_ids.append(user.user_id)
        return user

    def create_user_with_group(self):
        """Create a user plus a user group containing that user."""
        user = self.create_user()
        user_group = self.create_user_group(members=[user])
        return user, user_group

    def create_user_group(self, members=None, auto_cleanup=True, **kwargs):
        """Create a uniquely named user group, optionally with members."""
        group_name = "{prefix}_usergroup_{count}".format(
            prefix=self._test_name,
            count=len(self.user_group_ids))
        user_group = self.fixture.create_user_group(group_name, **kwargs)
        if auto_cleanup:
            self.user_group_ids.append(user_group.users_group_id)
        if members:
            for user in members:
                UserGroupModel().add_user_to_group(user_group, user)
        return user_group

    def grant_user_permission(self, user_name, permission_name):
        # NOTE(review): this disables permission inheritance and records the
        # grant for later revocation, but no explicit grant_perm call is
        # visible here - presumably done elsewhere; verify against callers.
        self._inherit_default_user_permissions(user_name, False)
        self.user_permissions.append((user_name, permission_name))

    def grant_user_permission_to_repo_group(
            self, repo_group, user, permission_name):
        permission = RepoGroupModel().grant_user_permission(
            repo_group, user, permission_name)
        self.user_repo_group_permission_ids.append(
            (repo_group.group_id, user.user_id))
        return permission

    def grant_user_group_permission_to_repo_group(
            self, repo_group, user_group, permission_name):
        permission = RepoGroupModel().grant_user_group_permission(
            repo_group, user_group, permission_name)
        self.user_group_repo_group_permission_ids.append(
            (repo_group.group_id, user_group.users_group_id))
        return permission

    def grant_user_permission_to_repo(
            self, repo, user, permission_name):
        permission = RepoModel().grant_user_permission(
            repo, user, permission_name)
        self.user_repo_permission_ids.append(
            (repo.repo_id, user.user_id))
        return permission

    def grant_user_group_permission_to_repo(
            self, repo, user_group, permission_name):
        permission = RepoModel().grant_user_group_permission(
            repo, user_group, permission_name)
        self.user_group_repo_permission_ids.append(
            (repo.repo_id, user_group.users_group_id))
        return permission

    def grant_user_permission_to_user_group(
            self, target_user_group, user, permission_name):
        permission = UserGroupModel().grant_user_permission(
            target_user_group, user, permission_name)
        self.user_user_group_permission_ids.append(
            (target_user_group.users_group_id, user.user_id))
        return permission

    def grant_user_group_permission_to_user_group(
            self, target_user_group, user_group, permission_name):
        permission = UserGroupModel().grant_user_group_permission(
            target_user_group, user_group, permission_name)
        self.user_group_user_group_permission_ids.append(
            (target_user_group.users_group_id, user_group.users_group_id))
        return permission

    def revoke_user_permission(self, user_name, permission_name):
        # Re-enable inheritance before revoking the explicit permission.
        self._inherit_default_user_permissions(user_name, True)
        UserModel().revoke_perm(user_name, permission_name)

    def _inherit_default_user_permissions(self, user_name, value):
        user = UserModel().get_by_username(user_name)
        user.inherit_default_permissions = value
        Session().add(user)
        Session().commit()

    def cleanup(self):
        # Order matters: permissions first, then groups, then the users
        # that may own them.
        self._cleanup_permissions()
        self._cleanup_repo_groups()
        self._cleanup_user_groups()
        self._cleanup_users()

    def _cleanup_permissions(self):
        if self.user_permissions:
            for user_name, permission_name in self.user_permissions:
                self.revoke_user_permission(user_name, permission_name)

        for permission in self.user_repo_permission_ids:
            RepoModel().revoke_user_permission(*permission)

        for permission in self.user_group_repo_permission_ids:
            RepoModel().revoke_user_group_permission(*permission)

        for permission in self.user_repo_group_permission_ids:
            RepoGroupModel().revoke_user_permission(*permission)

        for permission in self.user_group_repo_group_permission_ids:
            RepoGroupModel().revoke_user_group_permission(*permission)

        for permission in self.user_user_group_permission_ids:
            UserGroupModel().revoke_user_permission(*permission)

        for permission in self.user_group_user_group_permission_ids:
            UserGroupModel().revoke_user_group_permission(*permission)

    def _cleanup_repo_groups(self):
        def _repo_group_compare(first_group_id, second_group_id):
            """
            Gives higher priority to the groups with the most complex paths
            """
            first_group = RepoGroup.get(first_group_id)
            second_group = RepoGroup.get(second_group_id)
            first_group_parts = (
                len(first_group.group_name.split('/')) if first_group else 0)
            second_group_parts = (
                len(second_group.group_name.split('/')) if second_group else 0)
            return cmp(second_group_parts, first_group_parts)

        # `cmp=` sorting is Python 2 only; deepest nested groups are
        # destroyed first so parents can be removed afterwards.
        sorted_repo_group_ids = sorted(
            self.repo_group_ids, cmp=_repo_group_compare)
        for repo_group_id in sorted_repo_group_ids:
            self.fixture.destroy_repo_group(repo_group_id)

    def _cleanup_user_groups(self):
        def _user_group_compare(first_group_id, second_group_id):
            """
            Gives higher priority to the groups with the most complex paths
            """
            first_group = UserGroup.get(first_group_id)
            second_group = UserGroup.get(second_group_id)
            first_group_parts = (
                len(first_group.users_group_name.split('/'))
                if first_group else 0)
            second_group_parts = (
                len(second_group.users_group_name.split('/'))
                if second_group else 0)
            return cmp(second_group_parts, first_group_parts)

        sorted_user_group_ids = sorted(
            self.user_group_ids, cmp=_user_group_compare)
        for user_group_id in sorted_user_group_ids:
            self.fixture.destroy_user_group(user_group_id)

    def _cleanup_users(self):
        for user_id in self.user_ids:
            self.fixture.destroy_user(user_id)
1325 1325
1326 1326
1327 1327 # TODO: Think about moving this into a pytest-pyro package and make it a
1328 1328 # pytest plugin
@pytest.hookimpl(tryfirst=True, hookwrapper=True)
def pytest_runtest_makereport(item, call):
    """
    Adding the remote traceback if the exception has this information.

    VCSServer attaches this information as the attribute
    `_vcs_server_traceback` to the exception instance.
    """
    # Resolved diff residue: only the post-change (vcsserver) variant of the
    # docstring and the helper call is kept.
    outcome = yield
    report = outcome.get_result()
    if call.excinfo:
        _add_vcsserver_remote_traceback(report, call.excinfo.value)
1341 1341
1342 1342
1343 def _add_pyro_remote_traceback(report, exc):
1344 pyro_traceback = getattr(exc, '_pyroTraceback', None)
1343 def _add_vcsserver_remote_traceback(report, exc):
1344 vcsserver_traceback = getattr(exc, '_vcs_server_traceback', None)
1345 1345
1346 if pyro_traceback:
1347 traceback = ''.join(pyro_traceback)
1348 section = 'Pyro4 remote traceback ' + report.when
1349 report.sections.append((section, traceback))
1346 if vcsserver_traceback:
1347 section = 'VCSServer remote traceback ' + report.when
1348 report.sections.append((section, vcsserver_traceback))
1350 1349
1351 1350
@pytest.fixture(scope='session')
def testrun():
    """Identification data (uuid, start time, timestamp) of this test run."""
    started = datetime.datetime.utcnow()
    return {
        'uuid': uuid.uuid4(),
        'start': started.isoformat(),
        'timestamp': int(time.time()),
    }
1359 1358
1360 1359
@pytest.fixture(autouse=True)
def collect_appenlight_stats(request, testrun):
    """
    This fixture reports memory consumption of single tests.

    It gathers data based on `psutil` and sends them to Appenlight. The option
    ``--appenlight`` has to be used to enable this fixture and the API key for
    your application has to be provided in ``--appenlight-api-key``.
    """
    try:
        # cygwin cannot have yet psutil support.
        import psutil
    except ImportError:
        return

    if not request.config.getoption('--appenlight'):
        return
    else:
        # Only request the pylonsapp fixture if appenlight tracking is
        # enabled. This will speed up a test run of unit tests by 2 to 3
        # seconds if appenlight is not enabled.
        pylonsapp = request.getfuncargvalue("pylonsapp")
    url = '{}/api/logs'.format(request.config.getoption('--appenlight-url'))
    client = AppenlightClient(
        url=url,
        api_key=request.config.getoption('--appenlight-api-key'),
        namespace=request.node.nodeid,
        request=str(testrun['uuid']),
        testrun=testrun)

    client.collect({
        'message': "Starting",
    })

    # Snapshot vcsserver and test process memory before the test runs;
    # a GC run on the server makes the numbers more comparable.
    server_and_port = pylonsapp.config['vcs.server']
    server = create_vcsserver_proxy(server_and_port)
    with server:
        vcs_pid = server.get_pid()
        server.run_gc()
        vcs_process = psutil.Process(vcs_pid)
        mem = vcs_process.memory_info()
        client.tag_before('vcsserver.rss', mem.rss)
        client.tag_before('vcsserver.vms', mem.vms)

        test_process = psutil.Process()
        mem = test_process.memory_info()
        client.tag_before('test.rss', mem.rss)
        client.tag_before('test.vms', mem.vms)

        client.tag_before('time', time.time())

    @request.addfinalizer
    def send_stats():
        # Take the "after" snapshots once the test finished and push
        # everything to Appenlight.
        client.tag_after('time', time.time())
        with server:
            gc_stats = server.run_gc()
            for tag, value in gc_stats.items():
                client.tag_after(tag, value)
            mem = vcs_process.memory_info()
            client.tag_after('vcsserver.rss', mem.rss)
            client.tag_after('vcsserver.vms', mem.vms)

        mem = test_process.memory_info()
        client.tag_after('test.rss', mem.rss)
        client.tag_after('test.vms', mem.vms)

        client.collect({
            'message': "Finished",
        })
        client.send_stats()

    return client
1433 1432
1434 1433
class AppenlightClient(object):
    """
    Minimal client collecting test statistics and pushing them to an
    Appenlight API endpoint.
    """

    url_template = '{url}?protocol_version=0.5'

    def __init__(
            self, url, api_key, add_server=True, add_timestamp=True,
            namespace=None, request=None, testrun=None):
        self.url = self.url_template.format(url=url)
        self.api_key = api_key
        self.add_server = add_server
        self.add_timestamp = add_timestamp
        self.namespace = namespace
        self.request = request
        self.server = socket.getfqdn(socket.gethostname())
        self.tags_before = {}
        self.tags_after = {}
        self.stats = []
        self.testrun = testrun or {}

    def tag_before(self, tag, value):
        # Snapshot of a value taken before the test ran.
        self.tags_before[tag] = value

    def tag_after(self, tag, value):
        # Snapshot of a value taken after the test ran.
        self.tags_after[tag] = value

    def collect(self, data):
        """Queue one log record, filling in common default fields."""
        if self.add_server:
            data.setdefault('server', self.server)
        if self.add_timestamp:
            data.setdefault('date', datetime.datetime.utcnow().isoformat())
        if self.namespace:
            data.setdefault('namespace', self.namespace)
        if self.request:
            data.setdefault('request', self.request)
        self.stats.append(data)

    def send_stats(self):
        """Send all queued records including before/after/delta tags."""
        tags = [
            ('testrun', self.request),
            ('testrun.start', self.testrun['start']),
            ('testrun.timestamp', self.testrun['timestamp']),
            ('test', self.namespace),
        ]
        for key, value in self.tags_before.items():
            tags.append((key + '.before', value))
            try:
                delta = self.tags_after[key] - value
                tags.append((key + '.delta', delta))
            except Exception:
                # Missing "after" value or non-numeric tag: skip the delta.
                pass
        for key, value in self.tags_after.items():
            tags.append((key + '.after', value))
        self.collect({
            'message': "Collected tags",
            'tags': tags,
        })

        response = requests.post(
            self.url,
            headers={
                'X-appenlight-api-key': self.api_key},
            json=self.stats,
        )

        if response.status_code != 200:
            pprint.pprint(self.stats)
            print(response.headers)
            print(response.text)
            raise Exception('Sending to appenlight failed')
1504 1503
1505 1504
@pytest.fixture
def gist_util(request, pylonsapp):
    """
    Provides a wired instance of `GistUtility` with integrated cleanup.
    """
    gist_utility = GistUtility()
    request.addfinalizer(gist_utility.cleanup)
    return gist_utility
1514 1513
1515 1514
class GistUtility(object):
    """Creates gists for a test and destroys them again afterwards."""

    def __init__(self):
        self.fixture = Fixture()
        self.gist_ids = []

    def create_gist(self, **kwargs):
        """Create a gist and remember its id for cleanup."""
        created = self.fixture.create_gist(**kwargs)
        self.gist_ids.append(created.gist_id)
        return created

    def cleanup(self):
        """Destroy every gist created through this utility."""
        for gist_id in self.gist_ids:
            self.fixture.destroy_gists(str(gist_id))
1529 1528
1530 1529
@pytest.fixture
def enabled_backends(request):
    """Copy of the backend names enabled for this test run."""
    configured_backends = request.config.option.backends
    return list(configured_backends)
1535 1534
1536 1535
@pytest.fixture
def settings_util(request):
    """
    Provides a wired instance of `SettingsUtility` with integrated cleanup.
    """
    settings_utility = SettingsUtility()
    request.addfinalizer(settings_utility.cleanup)
    return settings_utility
1545 1544
1546 1545
class SettingsUtility(object):
    """
    Creates RhodeCode settings and ui (vcs config) rows for a test and
    deletes everything it created again in :meth:`cleanup`.
    """

    def __init__(self):
        # ids of created rows per table, consumed by cleanup()
        self.rhodecode_ui_ids = []
        self.rhodecode_setting_ids = []
        self.repo_rhodecode_ui_ids = []
        self.repo_rhodecode_setting_ids = []

    def create_repo_rhodecode_ui(
            self, repo, section, value, key=None, active=True, cleanup=True):
        """Create a per-repository ui row; `key` defaults to a unique hash."""
        key = key or hashlib.sha1(
            '{}{}{}'.format(section, value, repo.repo_id)).hexdigest()

        setting = RepoRhodeCodeUi()
        setting.repository_id = repo.repo_id
        setting.ui_section = section
        setting.ui_value = value
        setting.ui_key = key
        setting.ui_active = active
        Session().add(setting)
        Session().commit()

        if cleanup:
            self.repo_rhodecode_ui_ids.append(setting.ui_id)
        return setting

    def create_rhodecode_ui(
            self, section, value, key=None, active=True, cleanup=True):
        """Create a global ui row; `key` defaults to a unique hash."""
        key = key or hashlib.sha1('{}{}'.format(section, value)).hexdigest()

        setting = RhodeCodeUi()
        setting.ui_section = section
        setting.ui_value = value
        setting.ui_key = key
        setting.ui_active = active
        Session().add(setting)
        Session().commit()

        if cleanup:
            self.rhodecode_ui_ids.append(setting.ui_id)
        return setting

    def create_repo_rhodecode_setting(
            self, repo, name, value, type_, cleanup=True):
        """Create a per-repository application setting."""
        setting = RepoRhodeCodeSetting(
            repo.repo_id, key=name, val=value, type=type_)
        Session().add(setting)
        Session().commit()

        if cleanup:
            self.repo_rhodecode_setting_ids.append(setting.app_settings_id)
        return setting

    def create_rhodecode_setting(self, name, value, type_, cleanup=True):
        """Create a global application setting."""
        setting = RhodeCodeSetting(key=name, val=value, type=type_)
        Session().add(setting)
        Session().commit()

        if cleanup:
            self.rhodecode_setting_ids.append(setting.app_settings_id)

        return setting

    def cleanup(self):
        """Delete all rows registered for cleanup and commit once."""
        # One generic loop replaces four identical copies of the delete
        # loop; same model order as before.
        tracked = [
            (RhodeCodeUi, self.rhodecode_ui_ids),
            (RhodeCodeSetting, self.rhodecode_setting_ids),
            (RepoRhodeCodeUi, self.repo_rhodecode_ui_ids),
            (RepoRhodeCodeSetting, self.repo_rhodecode_setting_ids),
        ]
        for model_class, setting_ids in tracked:
            for id_ in setting_ids:
                Session().delete(model_class.get(id_))

        Session().commit()
1627 1626
1628 1627
@pytest.fixture
def no_notifications(request):
    """Disable notification creation for the duration of the test."""
    patcher = mock.patch(
        'rhodecode.model.notification.NotificationModel.create')
    patcher.start()
    request.addfinalizer(patcher.stop)
1635 1634
1636 1635
@pytest.fixture
def silence_action_logger(request):
    """Mute `action_logger` so tests do not write user action log entries."""
    # Local variable renamed: it patches the action logger, not
    # notifications.
    action_logger_patcher = mock.patch(
        'rhodecode.lib.utils.action_logger')
    action_logger_patcher.start()
    request.addfinalizer(action_logger_patcher.stop)
1643 1642
1644 1643
@pytest.fixture(scope='session')
def repeat(request):
    """
    The number of repetitions is based on this fixture.

    Slower calls may divide it by 10 or 100. It is chosen in a way so that the
    tests are not too slow in our default test suite.
    """
    repetitions = request.config.getoption('--repeat')
    return repetitions
1654 1653
1655 1654
@pytest.fixture
def rhodecode_fixtures():
    """Shared `Fixture` helper for creating test data."""
    return Fixture()
1659 1658
1660 1659
@pytest.fixture
def request_stub():
    """
    Stub request object.
    """
    stub_request = pyramid.testing.DummyRequest()
    stub_request.scheme = 'https'
    return stub_request
1669 1668
1670 1669
@pytest.fixture
def config_stub(request, request_stub):
    """
    Set up pyramid.testing and return the Configurator.
    """
    configurator = pyramid.testing.setUp(request=request_stub)
    # Tear the test registry down again once the test is finished.
    request.addfinalizer(pyramid.testing.tearDown)
    return configurator
1683 1682
1684 1683
@pytest.fixture
def StubIntegrationType():
    """
    Provides a stub integration type class registered in the global
    integration type registry.
    """
    class _StubIntegrationType(IntegrationTypeBase):
        """ Test integration type class """

        key = 'test'
        display_name = 'Test integration type'
        description = 'A test integration type for testing'
        icon = 'test_icon_html_image'

        def __init__(self, settings):
            super(_StubIntegrationType, self).__init__(settings)
            self.sent_events = []  # for testing

        def send_event(self, event):
            # Events are only recorded so tests can assert on them.
            self.sent_events.append(event)

        def settings_schema(self):
            # Schema field names must match `stub_integration_settings`.
            class SettingsSchema(colander.Schema):
                test_string_field = colander.SchemaNode(
                    colander.String(),
                    missing=colander.required,
                    title='test string field',
                )
                test_int_field = colander.SchemaNode(
                    colander.Int(),
                    title='some integer setting',
                )
            return SettingsSchema()


    integration_type_registry.register_integration_type(_StubIntegrationType)
    return _StubIntegrationType
1718 1717
@pytest.fixture
def stub_integration_settings():
    """Settings payload matching the stub integration type schema."""
    return dict(
        test_string_field='some data',
        test_int_field=100,
    )
1725 1724
1726 1725
@pytest.fixture
def repo_integration_stub(request, repo_stub, StubIntegrationType,
                          stub_integration_settings):
    """Repository-scoped stub integration, deleted again after the test."""
    stub = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings,
        enabled=True, name='test repo integration',
        repo=repo_stub, repo_group=None, child_repos_only=None)
    request.addfinalizer(lambda: IntegrationModel().delete(stub))
    return stub
1740 1739
1741 1740
@pytest.fixture
def repogroup_integration_stub(request, test_repo_group, StubIntegrationType,
                               stub_integration_settings):
    """Repo-group-scoped stub integration (child repos only)."""
    stub = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings,
        enabled=True, name='test repogroup integration',
        repo=None, repo_group=test_repo_group, child_repos_only=True)
    request.addfinalizer(lambda: IntegrationModel().delete(stub))
    return stub
1755 1754
1756 1755
@pytest.fixture
def repogroup_recursive_integration_stub(request, test_repo_group,
        StubIntegrationType, stub_integration_settings):
    """Repo-group-scoped stub integration applying to nested repos too."""
    stub = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings,
        enabled=True, name='test recursive repogroup integration',
        repo=None, repo_group=test_repo_group, child_repos_only=False)
    request.addfinalizer(lambda: IntegrationModel().delete(stub))
    return stub
1770 1769
1771 1770
@pytest.fixture
def global_integration_stub(request, StubIntegrationType,
                            stub_integration_settings):
    """Globally scoped stub integration, deleted again after the test."""
    stub = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings,
        enabled=True, name='test global integration',
        repo=None, repo_group=None, child_repos_only=None)
    request.addfinalizer(lambda: IntegrationModel().delete(stub))
    return stub
1785 1784
1786 1785
@pytest.fixture
def root_repos_integration_stub(request, StubIntegrationType,
                                stub_integration_settings):
    """Global stub integration restricted to root-level repositories."""
    # NOTE(review): shares the name 'test global integration' with
    # `global_integration_stub` - looks like copy/paste; confirm intended.
    stub = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings,
        enabled=True, name='test global integration',
        repo=None, repo_group=None, child_repos_only=True)
    request.addfinalizer(lambda: IntegrationModel().delete(stub))
    return stub
General Comments 0
You need to be logged in to leave comments. Login now