##// END OF EJS Templates
python3: remove usage of subprocess32
super-admin -
r4926:cf2cc324 default
parent child Browse files
Show More
@@ -1,398 +1,398 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20 import io
21 21 import shlex
22 22
23 23 import math
24 24 import re
25 25 import os
26 26 import datetime
27 27 import logging
28 28 import Queue
29 import subprocess32
29 import subprocess
30 30
31 31
32 32 from dateutil.parser import parse
33 33 from pyramid.threadlocal import get_current_request
34 34 from pyramid.interfaces import IRoutesMapper
35 35 from pyramid.settings import asbool
36 36 from pyramid.path import AssetResolver
37 37 from threading import Thread
38 38
39 39 from rhodecode.config.jsroutes import generate_jsroutes_content
40 40 from rhodecode.lib.base import get_auth_user
41 41
42 42 import rhodecode
43 43
44 44
45 45 log = logging.getLogger(__name__)
46 46
47 47
def add_renderer_globals(event):
    """Expose translation helpers and the `h` helpers module to templates."""
    from rhodecode.lib import helpers

    # TODO: When executed in pyramid view context the request is not available
    # in the event. Find a better solution to get the request.
    request = event['request'] or get_current_request()

    # Pyramid translation machinery is published as '_' / '_ungettext'.
    event['h'] = helpers
    event['_'] = request.translate
    event['_ungettext'] = request.plularize
59 59
60 60
def set_user_lang(event):
    """Switch the request locale to the logged-in user's preferred language."""
    request = event.request
    current_user = getattr(request, 'user', None)
    if not current_user:
        return

    preferred_lang = current_user.get_instance().user_data.get('language')
    if preferred_lang:
        log.debug('lang: setting current user:%s language to: %s',
                  current_user, preferred_lang)
        event.request._LOCALE_ = preferred_lang
70 70
71 71
def update_celery_conf(event):
    """Refresh the celery configuration from the incoming request."""
    from rhodecode.lib.celerylib.loader import set_celery_conf

    log.debug('Setting celery config from new request')
    request = event.request
    set_celery_conf(request=request, registry=request.registry)
76 76
77 77
def add_request_user_context(event):
    """
    Adds auth user into request context
    """
    request = event.request
    # access req_id as soon as possible
    req_id = request.req_id

    # vcs and api calls carry their own authentication; skip them here
    if hasattr(request, 'vcs_call') or hasattr(request, 'rpc_method'):
        return

    auth_user, auth_token = get_auth_user(request)
    request.user = auth_user
    request.user_auth_token = auth_token

    environ = request.environ
    environ['rc_auth_user'] = auth_user
    environ['rc_auth_user_id'] = auth_user.user_id
    environ['rc_req_id'] = req_id
100 100
101 101
def reset_log_bucket(event):
    """
    reset the log bucket on new request
    """
    event.request.req_id_records_init()
108 108
109 109
def scan_repositories_if_enabled(event):
    """
    This is subscribed to the `pyramid.events.ApplicationCreated` event. It
    does a repository scan if enabled in the settings.
    """
    settings = event.app.registry.settings
    if not (settings['vcs.server.enable'] and settings['startup.import_repos']):
        return

    from rhodecode.model.scm import ScmModel
    from rhodecode.lib.utils import repo2db_mapper, get_rhodecode_base_path
    found_repos = ScmModel().repo_scan(get_rhodecode_base_path())
    repo2db_mapper(found_repos, remove_obsolete=False)
123 123
124 124
def write_metadata_if_needed(event):
    """
    Writes upgrade metadata to `.rcmetadata.json` next to the ini file.

    The file is refreshed at most once every 24 hours so that frequent
    worker restarts do not repeatedly collect (potentially slow) system
    information. Any failure is swallowed: metadata is best-effort and
    must never break application startup.
    """
    import rhodecode
    from rhodecode.lib import system_info
    from rhodecode.lib import ext_json

    fname = '.rcmetadata.json'
    ini_loc = os.path.dirname(rhodecode.CONFIG.get('__file__'))
    metadata_destination = os.path.join(ini_loc, fname)

    def get_update_age():
        # age of the existing metadata file in minutes; 0 when unknown
        now = datetime.datetime.utcnow()

        with open(metadata_destination, 'rb') as f:
            data = ext_json.json.loads(f.read())
            if 'created_on' in data:
                update_date = parse(data['created_on'])
                diff = now - update_date
                return diff.total_seconds() / 60.0

        return 0

    def write():
        configuration = system_info.SysInfo(
            system_info.rhodecode_config)()['value']
        license_token = configuration['config']['license_token']

        setup = dict(
            workers=configuration['config']['server:main'].get(
                'workers', '?'),
            worker_type=configuration['config']['server:main'].get(
                'worker_class', 'sync'),
        )
        dbinfo = system_info.SysInfo(system_info.database_info)()['value']
        # the db url contains credentials; never persist it
        del dbinfo['url']

        metadata = dict(
            desc='upgrade metadata info',
            license_token=license_token,
            created_on=datetime.datetime.utcnow().isoformat(),
            usage=system_info.SysInfo(system_info.usage_info)()['value'],
            platform=system_info.SysInfo(system_info.platform_type)()['value'],
            database=dbinfo,
            cpu=system_info.SysInfo(system_info.cpu)()['value'],
            memory=system_info.SysInfo(system_info.memory)()['value'],
            setup=setup
        )

        with open(metadata_destination, 'wb') as f:
            f.write(ext_json.json.dumps(metadata))

    settings = event.app.registry.settings
    if settings.get('metadata.skip'):
        return

    # only write this every 24h, workers restart caused unwanted delays
    try:
        age_in_min = get_update_age()
    except Exception:
        # missing/corrupt file simply means "write a fresh one"
        age_in_min = 0

    # BUG FIX: this previously read `if age_in_min > 60 * 60 * 24: return`,
    # which (a) compared minutes against a seconds-per-day constant and
    # (b) was inverted — it skipped the write when the file was *old* and
    # rewrote it on every fresh restart. Skip only when the existing
    # metadata is younger than 24 hours, mirroring write_usage_data().
    if age_in_min and age_in_min < 60 * 24:
        return

    try:
        write()
    except Exception:
        pass
195 195
196 196
def write_usage_data(event):
    """
    Periodically dump usage statistics to a json file.

    A new file is written at most every 6 hours; the filename encodes the
    UTC date plus the quarter of the day, so repeated worker restarts in
    the same window hit the same destination file and are skipped.
    Failures are swallowed — usage reporting must never break startup.
    """
    import rhodecode
    from rhodecode.lib import system_info
    from rhodecode.lib import ext_json

    settings = event.app.registry.settings
    instance_tag = settings.get('metadata.write_usage_tag')
    if not settings.get('metadata.write_usage'):
        return

    def get_update_age(dest_file):
        # age of an existing usage file in whole minutes; 0 when unknown
        now = datetime.datetime.utcnow()

        with open(dest_file, 'rb') as f:
            data = ext_json.json.loads(f.read())
            if 'created_on' in data:
                update_date = parse(data['created_on'])
                diff = now - update_date
                return math.ceil(diff.total_seconds() / 60.0)

        return 0

    utc_date = datetime.datetime.utcnow()
    # quarter of the day (1..4) used to bucket files into 6h windows
    hour_quarter = int(math.ceil((utc_date.hour + utc_date.minute/60.0) / 6.))
    fname = '.rc_usage_{date.year}{date.month:02d}{date.day:02d}_{hour}.json'.format(
        date=utc_date, hour=hour_quarter)
    ini_loc = os.path.dirname(rhodecode.CONFIG.get('__file__'))

    usage_dir = os.path.join(ini_loc, '.rcusage')
    if not os.path.isdir(usage_dir):
        os.makedirs(usage_dir)
    usage_metadata_destination = os.path.join(usage_dir, fname)

    try:
        age_in_min = get_update_age(usage_metadata_destination)
    except Exception:
        # missing/corrupt file simply means "write a fresh one"
        age_in_min = 0

    # write every 6th hour
    if age_in_min and age_in_min < 60 * 6:
        log.debug('Usage file created %s minutes ago, skipping (threshold: %s minutes)...',
                  age_in_min, 60 * 6)
        return

    def write(dest_file):
        # collect license + usage info and persist as pretty-printed json
        configuration = system_info.SysInfo(system_info.rhodecode_config)()['value']
        license_token = configuration['config']['license_token']

        metadata = dict(
            desc='Usage data',
            instance_tag=instance_tag,
            license_token=license_token,
            created_on=datetime.datetime.utcnow().isoformat(),
            usage=system_info.SysInfo(system_info.usage_info)()['value'],
        )

        with open(dest_file, 'wb') as f:
            f.write(ext_json.json.dumps(metadata, indent=2, sort_keys=True))

    try:
        log.debug('Writing usage file at: %s', usage_metadata_destination)
        write(usage_metadata_destination)
    except Exception:
        # best-effort; never propagate from a startup subscriber
        pass
261 261
262 262
def write_js_routes_if_enabled(event):
    """
    Generate `routes.js` from the pyramid route mapper on application start.

    Runs only when the `generate_js_files` setting is true. Failures are
    logged but never abort startup.
    """
    registry = event.app.registry

    mapper = registry.queryUtility(IRoutesMapper)
    # FIX: raw string literal — '\{' is an invalid escape sequence in a
    # normal string (SyntaxWarning/DeprecationWarning on Python 3).
    # The compiled pattern is unchanged.
    _argument_prog = re.compile(r'\{(.*?)\}|:\((.*)\)')

    def _extract_route_information(route):
        """
        Convert a route into tuple(name, path, args), eg:
        ('show_user', '/profile/%(username)s', ['username'])
        """

        routepath = route.pattern
        pattern = route.pattern

        def replace(matchobj):
            # turn `{name}` / `:(name)` placeholders into %(name)s
            if matchobj.group(1):
                return "%%(%s)s" % matchobj.group(1).split(':')[0]
            else:
                return "%%(%s)s" % matchobj.group(2)

        routepath = _argument_prog.sub(replace, routepath)

        if not routepath.startswith('/'):
            routepath = '/' + routepath

        return (
            route.name,
            routepath,
            [(arg[0].split(':')[0] if arg[0] != '' else arg[1])
             for arg in _argument_prog.findall(pattern)]
        )

    def get_routes():
        # pyramid routes, skipping internal '__*' routes
        for route in mapper.get_routes():
            if not route.name.startswith('__'):
                yield _extract_route_information(route)

    if asbool(registry.settings.get('generate_js_files', 'false')):
        static_path = AssetResolver().resolve('rhodecode:public').abspath()
        jsroutes = get_routes()
        jsroutes_file_content = generate_jsroutes_content(jsroutes)
        jsroutes_file_path = os.path.join(
            static_path, 'js', 'rhodecode', 'routes.js')

        try:
            with io.open(jsroutes_file_path, 'w', encoding='utf-8') as f:
                f.write(jsroutes_file_content)
        except Exception:
            log.exception('Failed to write routes.js into %s', jsroutes_file_path)
314 314
315 315
class Subscriber(object):
    """
    Base class for subscribers to the pyramid event system.

    Instances are callable, so they can be registered directly as event
    handlers; subclasses implement :meth:`run`.
    """
    def __call__(self, event):
        self.run(event)

    def run(self, event):
        # subclass hook; receives the pyramid event object
        raise NotImplementedError('Subclass has to implement this.')
325 325
326 326
class AsyncSubscriber(Subscriber):
    """
    Subscriber that handles the execution of events in a separate task to not
    block the execution of the code which triggers the event. It puts the
    received events into a queue from which the worker process takes them in
    order.
    """
    def __init__(self):
        self._stop = False
        self._eventq = Queue.Queue()
        self._worker = self.create_worker()
        self._worker.start()

    def __call__(self, event):
        # queue the event; the worker thread picks it up asynchronously
        self._eventq.put(event)

    def create_worker(self):
        # daemon thread, so a forgotten subscriber never blocks process exit
        worker = Thread(target=self.do_work)
        worker.daemon = True
        return worker

    def stop_worker(self):
        # BUG FIX: this previously set `self._stop = False`, which left the
        # worker loop running forever and made `join()` block indefinitely.
        self._stop = True
        # the None sentinel wakes the worker so it re-checks the stop flag
        self._eventq.put(None)
        self._worker.join()

    def do_work(self):
        while not self._stop:
            event = self._eventq.get()
            if event is not None:
                self.run(event)
358 358
359 359
class AsyncSubprocessSubscriber(AsyncSubscriber):
    """
    Subscriber that uses the subprocess module to execute a command if an
    event is received. Events are handled asynchronously::

        subscriber = AsyncSubprocessSubscriber('ls -la', timeout=10)
        subscriber(dummyEvent) # running __call__(event)

    """

    def __init__(self, cmd, timeout=None):
        # accept either a ready argv list/tuple or a shell-like string
        if not isinstance(cmd, (list, tuple)):
            cmd = shlex.split(cmd)
        super(AsyncSubprocessSubscriber, self).__init__()
        self._cmd = cmd
        self._timeout = timeout

    def run(self, event):
        # execute the configured command; all failures are logged, never raised
        cmd, timeout = self._cmd, self._timeout
        log.debug('Executing command %s.', cmd)

        try:
            output = subprocess.check_output(
                cmd, timeout=timeout, stderr=subprocess.STDOUT)
            log.debug('Command finished %s', cmd)
            if output:
                log.debug('Command output: %s', output)
        except subprocess.TimeoutExpired as e:
            log.exception('Timeout while executing command.')
            if e.output:
                log.error('Command output: %s', e.output)
        except subprocess.CalledProcessError as e:
            log.exception('Error while executing command.')
            if e.output:
                log.error('Command output: %s', e.output)
        except Exception:
            log.exception(
                'Exception while executing command %s.', cmd)
@@ -1,293 +1,293 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 from subprocess32 import Popen, PIPE
21 from subprocess import Popen, PIPE
22 22 import os
23 23 import shutil
24 24 import sys
25 25 import tempfile
26 26
27 27 import pytest
28 28 from sqlalchemy.engine import url
29 29
30 30 from rhodecode.tests.fixture import TestINI
31 31
32 32
33 33 def _get_dbs_from_metafunc(metafunc):
34 34 dbs_mark = metafunc.definition.get_closest_marker('dbs')
35 35
36 36 if dbs_mark:
37 37 # Supported backends by this test function, created from pytest.mark.dbs
38 38 backends = dbs_mark.args
39 39 else:
40 40 backends = metafunc.config.getoption('--dbs')
41 41 return backends
42 42
43 43
def pytest_generate_tests(metafunc):
    # Support test generation based on --dbs parameter
    if 'db_backend' not in metafunc.fixturenames:
        return

    requested_backends = set(metafunc.config.getoption('--dbs'))
    backends = requested_backends.intersection(_get_dbs_from_metafunc(metafunc))
    # TODO: johbo: Disabling a backend did not work out with
    # parametrization, find better way to achieve this.
    if not backends:
        metafunc.function._skip = True
    metafunc.parametrize('db_backend_name', backends)
55 55
56 56
def pytest_collection_modifyitems(session, config, items):
    # Drop tests flagged by pytest_generate_tests when no backend matched.
    items[:] = [item for item in items
                if not getattr(item.obj, '_skip', False)]
61 61
62 62
@pytest.fixture()
def db_backend(
        request, db_backend_name, ini_config, tmpdir_factory):
    """Build the DBBackend wrapper selected by ``db_backend_name``."""
    klass = _get_backend(db_backend_name)

    # per-backend CLI override, e.g. --sqlite-connection-string
    option_name = '--{}-connection-string'.format(db_backend_name)
    connection_string = request.config.getoption(option_name) or None

    return klass(
        config_file=ini_config,
        basetemp=tmpdir_factory.getbasetemp().strpath,
        connection_string=connection_string)
75 75
76 76
def _get_backend(backend_type):
    """Map a backend type string to its class (KeyError when unknown)."""
    backends = {
        'sqlite': SQLiteDBBackend,
        'postgres': PostgresDBBackend,
        'mysql': MySQLDBBackend,
        '': EmptyDBBackend,
    }
    return backends[backend_type]
84 84
85 85
class DBBackend(object):
    """
    Base helper for exercising the rc-setup-app / rc-upgrade-db console
    scripts against a concrete database backend.

    Subclasses set `_type` and implement the shell commands that create,
    load and drop the test database; `execute` runs those commands and
    captures stdout/stderr for later assertions.
    """
    # directory holding the per-backend dump fixtures (next to this file)
    _store = os.path.dirname(os.path.abspath(__file__))
    # backend identifier ('sqlite', 'mysql', ...); also fixture subdir name
    _type = None
    # baseline ini overrides applied to every generated test ini file
    _base_ini_config = [{'app:main': {'vcs.start_server': 'false',
                                      'startup.import_repos': 'false',
                                      'is_test': 'False'}}]
    # placeholder for the sqlalchemy url; filled in by subclass setup_db()
    _db_url = [{'app:main': {'sqlalchemy.db1.url': ''}}]
    _base_db_name = 'rhodecode_test_db_backend'

    def __init__(
            self, config_file, db_name=None, basetemp=None,
            connection_string=None):
        # NOTE(review): local import — these pull in the vcs backends, which
        # presumably must not happen at conftest import time.
        from rhodecode.lib.vcs.backends.hg import largefiles_store
        from rhodecode.lib.vcs.backends.git import lfs_store

        self.fixture_store = os.path.join(self._store, self._type)
        self.db_name = db_name or self._base_db_name
        self._base_ini_file = config_file
        self.stderr = ''
        self.stdout = ''
        # all repos/stores live under basetemp (or the system tmp dir)
        self._basetemp = basetemp or tempfile.gettempdir()
        self._repos_location = os.path.join(self._basetemp, 'rc_test_repos')
        self._repos_hg_largefiles_store = largefiles_store(self._basetemp)
        self._repos_git_lfs_store = lfs_store(self._basetemp)
        self.connection_string = connection_string

    @property
    def connection_string(self):
        # the fully-resolved sqlalchemy connection string
        return self._connection_string

    @connection_string.setter
    def connection_string(self, new_connection_string):
        # fall back to the backend default; otherwise substitute the db name
        if not new_connection_string:
            new_connection_string = self.get_default_connection_string()
        else:
            new_connection_string = new_connection_string.format(
                db_name=self.db_name)
        url_parts = url.make_url(new_connection_string)
        self._connection_string = new_connection_string
        # cache credentials/host for the shell commands in the subclasses
        self.user = url_parts.username
        self.password = url_parts.password
        self.host = url_parts.host

    def get_default_connection_string(self):
        # subclass hook: default sqlalchemy url for this backend
        raise NotImplementedError('default connection_string is required.')

    def execute(self, cmd, env=None, *args):
        """
        Runs command on the system with given ``args``.

        Stores the Popen object on ``self.p`` and the captured output on
        ``self.stdout`` / ``self.stderr``; returns ``(stdout, stderr)``.
        """

        command = cmd + ' ' + ' '.join(args)
        sys.stdout.write(command)

        # Tell Python to use UTF-8 encoding out stdout
        _env = os.environ.copy()
        _env['PYTHONIOENCODING'] = 'UTF-8'
        if env:
            _env.update(env)
        # shell=True: the subclass commands rely on shell quoting/redirection
        self.p = Popen(command, shell=True, stdout=PIPE, stderr=PIPE, env=_env)
        self.stdout, self.stderr = self.p.communicate()
        sys.stdout.write('COMMAND:'+command+'\n')
        sys.stdout.write(self.stdout)
        return self.stdout, self.stderr

    def assert_returncode_success(self):
        # raise with the captured stderr if the last command failed
        if not self.p.returncode == 0:
            print(self.stderr)
            raise AssertionError('non 0 retcode:{}'.format(self.p.returncode))

    def assert_correct_output(self, stdout, version):
        # every upgrade step announces its completion on stdout
        assert 'UPGRADE FOR STEP {} COMPLETED'.format(version) in stdout

    def setup_rhodecode_db(self, ini_params=None, env=None):
        """Run rc-setup-app against a freshly generated test ini file."""
        if not ini_params:
            ini_params = self._base_ini_config

        ini_params.extend(self._db_url)
        with TestINI(self._base_ini_file, ini_params,
                     self._type, destroy=True) as _ini_file:

            # the setup script expects the repo/store directories to exist
            if not os.path.isdir(self._repos_location):
                os.makedirs(self._repos_location)
            if not os.path.isdir(self._repos_hg_largefiles_store):
                os.makedirs(self._repos_hg_largefiles_store)
            if not os.path.isdir(self._repos_git_lfs_store):
                os.makedirs(self._repos_git_lfs_store)

            return self.execute(
                "rc-setup-app {0} --user=marcink "
                "--email=marcin@rhodeocode.com --password={1} "
                "--repos={2} --force-yes".format(
                    _ini_file, 'qweqwe', self._repos_location), env=env)

    def upgrade_database(self, ini_params=None):
        """Run rc-upgrade-db against a freshly generated test ini file."""
        if not ini_params:
            ini_params = self._base_ini_config
        ini_params.extend(self._db_url)

        test_ini = TestINI(
            self._base_ini_file, ini_params, self._type, destroy=True)
        with test_ini as ini_file:
            if not os.path.isdir(self._repos_location):
                os.makedirs(self._repos_location)

            return self.execute(
                "rc-upgrade-db {0} --force-yes".format(ini_file))

    def setup_db(self):
        # subclass hook: create the database / set the sqlalchemy url
        raise NotImplementedError

    def teardown_db(self):
        # subclass hook: drop the database
        raise NotImplementedError

    def import_dump(self, dumpname):
        # subclass hook: load a fixture dump from `fixture_store`
        raise NotImplementedError
203 203
204 204
class EmptyDBBackend(DBBackend):
    """No-op backend used when the tests run without a database selection."""
    _type = ''

    def setup_db(self):
        pass

    def teardown_db(self):
        pass

    def import_dump(self, dumpname):
        pass

    def assert_returncode_success(self):
        # nothing was executed, so there is no return code to inspect
        assert True
219 219
220 220
class SQLiteDBBackend(DBBackend):
    """SQLite backend: databases are plain files below the base temp dir."""
    _type = 'sqlite'

    def get_default_connection_string(self):
        return 'sqlite:///{}/{}.sqlite'.format(self._basetemp, self.db_name)

    def setup_db(self):
        # dump schema for tests
        # cp -v $TEST_DB_NAME
        self._db_url = [{'app:main': {
            'sqlalchemy.db1.url': self.connection_string}}]

    def import_dump(self, dumpname):
        # copy the fixture db file into place as the test database
        source = os.path.join(self.fixture_store, dumpname)
        destination = os.path.join(
            self._basetemp, '{0.db_name}.sqlite'.format(self))
        return self.execute('cp -v {} {}'.format(source, destination))

    def teardown_db(self):
        db_file = os.path.join(self._basetemp, self.db_name)
        return self.execute("rm -rf {}.sqlite".format(db_file))
241 241
242 242
class MySQLDBBackend(DBBackend):
    """MySQL backend driven through the `mysql` command line client."""
    _type = 'mysql'

    def get_default_connection_string(self):
        return 'mysql://root:qweqwe@127.0.0.1/{}'.format(self.db_name)

    def setup_db(self):
        # dump schema for tests
        # mysqldump -uroot -pqweqwe $TEST_DB_NAME
        self._db_url = [{'app:main': {
            'sqlalchemy.db1.url': self.connection_string}}]
        return self.execute("mysql -v -u{} -p{} -e 'create database '{}';'".format(
            self.user, self.password, self.db_name))

    def import_dump(self, dumpname):
        # pipe a SQL dump fixture into the freshly created database
        dump = os.path.join(self.fixture_store, dumpname)
        return self.execute("mysql -u{} -p{} {} < {}".format(
            self.user, self.password, self.db_name, dump))

    def teardown_db(self):
        return self.execute("mysql -v -u{} -p{} -e 'drop database '{}';'".format(
            self.user, self.password, self.db_name))
265 265
266 266
class PostgresDBBackend(DBBackend):
    """PostgreSQL backend driven through the `psql` command line client."""
    _type = 'postgres'

    def get_default_connection_string(self):
        return 'postgresql://postgres:qweqwe@localhost/{}'.format(self.db_name)

    def setup_db(self):
        # dump schema for tests
        # pg_dump -U postgres -h localhost $TEST_DB_NAME
        self._db_url = [{'app:main': {
            'sqlalchemy.db1.url':
                self.connection_string}}]
        # PGPASSWORD avoids an interactive password prompt
        return self.execute("PGPASSWORD={} psql -U {} -h localhost "
                            "-c 'create database '{}';'".format(
                                self.password, self.user, self.db_name))

    def teardown_db(self):
        return self.execute("PGPASSWORD={} psql -U {} -h localhost "
                            "-c 'drop database if exists '{}';'".format(
                                self.password, self.user, self.db_name))

    def import_dump(self, dumpname):
        # load a SQL dump fixture in a single transaction (-1)
        dump = os.path.join(self.fixture_store, dumpname)
        return self.execute(
            "PGPASSWORD={} psql -U {} -h localhost -d {} -1 "
            "-f {}".format(
                self.password, self.user, self.db_name, dump))
@@ -1,136 +1,136 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2016-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Checking the chunked data transfer via HTTP
23 23 """
24 24
25 25 import os
26 26 import time
27 import subprocess32
27 import subprocess
28 28
29 29 import pytest
30 30 import requests
31 31
32 32 from rhodecode.lib.middleware.utils import scm_app_http
33 33 from rhodecode.tests.utils import wait_for_url
34 34
35 35
def test_does_chunked_end_to_end_transfer(scm_app):
    """The proxied response must arrive chunk by chunk, not buffered whole."""
    response = requests.post(scm_app, data='', stream=True)
    assert response.headers['Transfer-Encoding'] == 'chunked'
    arrival_times = [time.time() for chunk in response.raw.read_chunked()]
    assert arrival_times[1] - arrival_times[0] > 0.1, "Chunks arrived at the same time"
41 41
42 42
@pytest.fixture()
def echo_app_chunking(request, available_port_factory):
    """
    Run the EchoApp via Waitress in a subprocess.

    Return the URL endpoint to reach the app.
    """
    port = available_port_factory()
    command = (
        'waitress-serve --send-bytes 1 --port {port} --call '
        'rhodecode.tests.lib.middleware.utils.test_scm_app_http_chunking'
        ':create_echo_app').format(port=port)
    server = subprocess.Popen(command.split(' '), bufsize=0)

    @request.addfinalizer
    def stop_echo_app():
        server.kill()

    return 'http://localhost:' + str(port)
64 64
65 65
@pytest.fixture()
def scm_app(request, available_port_factory, echo_app_chunking):
    """
    Run the scm_app in Waitress.

    Returns the URL endpoint where this app can be reached.
    """
    port = available_port_factory()
    command = (
        'waitress-serve --send-bytes 1 --port {port} --call '
        'rhodecode.tests.lib.middleware.utils.test_scm_app_http_chunking'
        ':create_scm_app').format(port=port)
    # the proxy target is handed over through the environment
    env = dict(os.environ, RC_ECHO_URL=echo_app_chunking)
    server = subprocess.Popen(command.split(' '), bufsize=0, env=env)
    scm_app_url = 'http://localhost:' + str(port)
    wait_for_url(scm_app_url)

    @request.addfinalizer
    def stop_echo_app():
        server.kill()

    return scm_app_url
90 90
91 91
class EchoApp(object):
    """
    Stub WSGI application which returns a chunked response to every request.
    """

    def __init__(self, repo_path, repo_name, config):
        self._repo_path = repo_path

    def __call__(self, environ, start_response):
        # drain the request body before answering
        environ['wsgi.input'].read()
        start_response('200 OK', [])
        return result_generator()
106 106
107 107
def result_generator():
    """
    Simulate chunked results.

    The intended usage is to simulate a chunked response as we would get it
    out of a vcs operation during a call to "hg clone".
    """
    chunks = [
        # Wait long enough so that the first chunk can go out
        ('waiting 2 seconds', 2),
        # Another small wait, otherwise they go together
        ('final chunk', 0.1),
    ]
    for payload, delay in chunks:
        yield payload
        time.sleep(delay)
121 121
122 122
def create_echo_app():
    """
    Create EchoApp filled with stub data.
    """
    stub_config = {}
    return EchoApp('stub_path', 'repo_name', stub_config)
128 128
129 129
def create_scm_app():
    """
    Create a scm_app hooked up to speak to EchoApp.
    """
    echo_app_url = os.environ["RC_ECHO_URL"]
    return scm_app_http.VcsHttpProxy(
        echo_app_url, 'stub_path', 'stub_name', None)
@@ -1,463 +1,463 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Module to test the performance of pull, push and clone operations.
23 23
24 24 It works by replaying a group of commits to the repo.
25 25 """
26 26
27 27 import argparse
28 28 import collections
29 29 import ConfigParser
30 30 import functools
31 31 import itertools
32 32 import os
33 33 import pprint
34 34 import shutil
35 import subprocess32
35 import subprocess
36 36 import sys
37 37 import time
38 38
39 39 import api
40 40
41 41
42 42 def mean(container):
43 43 """Return the mean of the container."""
44 44 if not container:
45 45 return -1.0
46 46 return sum(container) / len(container)
47 47
48 48
49 49 def keep_cwd(f):
50 50 """Decorator that keeps track of the starting working directory."""
51 51 @functools.wraps(f)
52 52 def wrapped_f(*args, **kwargs):
53 53 cur_dir = os.getcwd()
54 54 try:
55 55 return f(*args, **kwargs)
56 56 finally:
57 57 os.chdir(cur_dir)
58 58
59 59 return wrapped_f
60 60
61 61
62 62 def timed(f):
63 63 """Decorator that returns the time it took to execute the function."""
64 64 @functools.wraps(f)
65 65 def wrapped_f(*args, **kwargs):
66 66 start_time = time.time()
67 67 try:
68 68 f(*args, **kwargs)
69 69 finally:
70 70 return time.time() - start_time
71 71
72 72 return wrapped_f
73 73
74 74
75 75 def execute(*popenargs, **kwargs):
76 76 """Extension of subprocess.check_output to support writing to stdin."""
77 77 input = kwargs.pop('stdin', None)
78 78 stdin = None
79 79 if input:
80 stdin = subprocess32.PIPE
80 stdin = subprocess.PIPE
81 81 #if 'stderr' not in kwargs:
82 # kwargs['stderr'] = subprocess32.PIPE
82 # kwargs['stderr'] = subprocess.PIPE
83 83 if 'stdout' in kwargs:
84 84 raise ValueError('stdout argument not allowed, it will be overridden.')
85 process = subprocess32.Popen(stdin=stdin, stdout=subprocess32.PIPE,
85 process = subprocess.Popen(stdin=stdin, stdout=subprocess.PIPE,
86 86 *popenargs, **kwargs)
87 87 output, error = process.communicate(input=input)
88 88 retcode = process.poll()
89 89 if retcode:
90 90 cmd = kwargs.get("args")
91 91 if cmd is None:
92 92 cmd = popenargs[0]
93 93 print('{} {} {} '.format(cmd, output, error))
94 raise subprocess32.CalledProcessError(retcode, cmd, output=output)
94 raise subprocess.CalledProcessError(retcode, cmd, output=output)
95 95 return output
96 96
97 97
98 98 def get_repo_name(repo_url):
99 99 """Extract the repo name from its url."""
100 100 repo_url = repo_url.rstrip('/')
101 101 return repo_url.split('/')[-1].split('.')[0]
102 102
103 103
104 104 class TestPerformanceBase(object):
105 105 def __init__(self, base_dir, repo_url, n_commits, max_commits,
106 106 skip_commits):
107 107 self.repo_url = repo_url
108 108 self.repo_name = get_repo_name(self.repo_url)
109 109 self.upstream_repo_name = '%s_upstream' % self.repo_name
110 110 self.base_dir = os.path.abspath(base_dir)
111 111 self.n_commits = n_commits
112 112 self.max_commits = max_commits
113 113 self.skip_commits = skip_commits
114 114 self.push_times = []
115 115 self.pull_times = []
116 116 self.empty_pull_times = []
117 117 self.clone_time = -1.0
118 118 self.last_commit = None
119 119
120 120 self.cloned_repo = ''
121 121 self.pull_repo = ''
122 122 self.orig_repo = ''
123 123
124 124 def run(self):
125 125 try:
126 126 self.test()
127 127 except Exception as error:
128 128 print(error)
129 129 finally:
130 130 self.cleanup()
131 131
132 132 print('Clone time :{}'.format(self.clone_time))
133 133 print('Push time :{}'.format(mean(self.push_times)))
134 134 print('Pull time :{}'.format(mean(self.pull_times)))
135 135 print('Empty pull time:{}'.format(mean(self.empty_pull_times)))
136 136
137 137 return {
138 138 'clone': self.clone_time,
139 139 'push': mean(self.push_times),
140 140 'pull': mean(self.pull_times),
141 141 'empty_pull': mean(self.empty_pull_times),
142 142 }
143 143
144 144 @keep_cwd
145 145 def test(self):
146 146 os.chdir(self.base_dir)
147 147
148 148 self.orig_repo = os.path.join(self.base_dir, self.repo_name)
149 149 if not os.path.exists(self.orig_repo):
150 150 self.clone_repo(self.repo_url, default_only=True)
151 151
152 152 upstream_url = self.create_repo(self.upstream_repo_name, self.repo_type)
153 153
154 154 self.add_remote(self.orig_repo, upstream_url)
155 155
156 156 self.pull_repo = os.path.join(self.base_dir, '%s_pull' % self.repo_name)
157 157 self.clone_repo(upstream_url, self.pull_repo)
158 158
159 159 commits = self.get_commits(self.orig_repo)
160 160 self.last_commit = commits[-1]
161 161 if self.skip_commits:
162 162 self.push(
163 163 self.orig_repo, commits[self.skip_commits - 1], 'upstream')
164 164 commits = commits[self.skip_commits:self.max_commits]
165 165
166 166 print('Working with %d commits' % len(commits))
167 167 for i in range(self.n_commits - 1, len(commits), self.n_commits):
168 168 commit = commits[i]
169 169 print('Processing commit %s (%d)' % (commit, i + 1))
170 170 self.push_times.append(
171 171 self.push(self.orig_repo, commit, 'upstream'))
172 172 self.check_remote_last_commit_is(commit, upstream_url)
173 173
174 174 self.pull_times.append(self.pull(self.pull_repo))
175 175 self.check_local_last_commit_is(commit, self.pull_repo)
176 176
177 177 self.empty_pull_times.append(self.pull(self.pull_repo))
178 178
179 179 self.cloned_repo = os.path.join(self.base_dir,
180 180 '%s_clone' % self.repo_name)
181 181 self.clone_time = self.clone_repo(upstream_url, self.cloned_repo)
182 182
183 183 def cleanup(self):
184 184 try:
185 185 self.delete_repo(self.upstream_repo_name)
186 186 except api.ApiError:
187 187 # Continue in case we could not delete the repo. Maybe we did not
188 188 # create it in the first place.
189 189 pass
190 190
191 191 shutil.rmtree(self.pull_repo, ignore_errors=True)
192 192 shutil.rmtree(self.cloned_repo, ignore_errors=True)
193 193
194 194 if os.path.exists(self.orig_repo):
195 195 self.remove_remote(self.orig_repo)
196 196
197 197
198 198 class RhodeCodeMixin(object):
199 199 """Mixin providing the methods to create and delete repos in RhodeCode."""
200 200 def __init__(self, api_key):
201 201 self.api = api.RCApi(api_key=api_key)
202 202
203 203 def create_repo(self, repo_name, repo_type):
204 204 return self.api.create_repo(repo_name, repo_type,
205 205 'Repo for perfomance testing')
206 206
207 207 def delete_repo(self, repo_name):
208 208 return self.api.delete_repo(repo_name)
209 209
210 210
211 211 class GitMixin(object):
212 212 """Mixin providing the git operations."""
213 213 @timed
214 214 def clone_repo(self, repo_url, destination=None, default_only=False):
215 215 args = ['git', 'clone']
216 216 if default_only:
217 217 args.extend(['--branch', 'master', '--single-branch'])
218 218 args.append(repo_url)
219 219 if destination:
220 220 args.append(destination)
221 221 execute(args)
222 222
223 223 @keep_cwd
224 224 def add_remote(self, repo, remote_url, remote_name='upstream'):
225 225 self.remove_remote(repo, remote_name)
226 226 os.chdir(repo)
227 227 execute(['git', 'remote', 'add', remote_name, remote_url])
228 228
229 229 @keep_cwd
230 230 def remove_remote(self, repo, remote_name='upstream'):
231 231 os.chdir(repo)
232 232 remotes = execute(['git', 'remote']).split('\n')
233 233 if remote_name in remotes:
234 234 execute(['git', 'remote', 'remove', remote_name])
235 235
236 236 @keep_cwd
237 237 def get_commits(self, repo, branch='master'):
238 238 os.chdir(repo)
239 239 commits_list = execute(
240 240 ['git', 'log', '--first-parent', branch, '--pretty=%H'])
241 241 return commits_list.strip().split('\n')[::-1]
242 242
243 243 @timed
244 244 def push(self, repo, commit, remote_name=None):
245 245 os.chdir(repo)
246 246 try:
247 247 execute(['git', 'reset', '--soft', commit])
248 248 args = ['git', 'push']
249 249 if remote_name:
250 250 args.append(remote_name)
251 251 execute(args)
252 252 finally:
253 253 execute(['git', 'reset', '--soft', 'HEAD@{1}'])
254 254
255 255 @timed
256 256 def pull(self, repo):
257 257 os.chdir(repo)
258 258 execute(['git', 'pull'])
259 259
260 260 def _remote_last_commit(self, repo_url):
261 261 output = execute(['git', 'ls-remote', repo_url, 'HEAD'])
262 262 return output.split()[0]
263 263
264 264 def check_remote_last_commit_is(self, commit, repo_url):
265 265 last_remote_commit = self._remote_last_commit(repo_url)
266 266 if last_remote_commit != commit:
267 267 raise Exception('Push did not work, expected commit %s but got %s' %
268 268 (commit, last_remote_commit))
269 269
270 270 @keep_cwd
271 271 def _local_last_commit(self, repo):
272 272 os.chdir(repo)
273 273 return execute(['git', 'rev-parse', 'HEAD']).strip()
274 274
275 275 def check_local_last_commit_is(self, commit, repo):
276 276 last_local_commit = self._local_last_commit(repo)
277 277 if last_local_commit != commit:
278 278 raise Exception('Pull did not work, expected commit %s but got %s' %
279 279 (commit, last_local_commit))
280 280
281 281
282 282 class HgMixin(object):
283 283 """Mixin providing the mercurial operations."""
284 284 @timed
285 285 def clone_repo(self, repo_url, destination=None, default_only=False):
286 286 args = ['hg', 'clone']
287 287 if default_only:
288 288 args.extend(['--branch', 'default'])
289 289 args.append(repo_url)
290 290 if destination:
291 291 args.append(destination)
292 292 execute(args)
293 293
294 294 @keep_cwd
295 295 def add_remote(self, repo, remote_url, remote_name='upstream'):
296 296 self.remove_remote(repo, remote_name)
297 297 os.chdir(repo)
298 298 hgrc = ConfigParser.RawConfigParser()
299 299 hgrc.read('.hg/hgrc')
300 300 hgrc.set('paths', remote_name, remote_url)
301 301 with open('.hg/hgrc', 'w') as f:
302 302 hgrc.write(f)
303 303
304 304 @keep_cwd
305 305 def remove_remote(self, repo, remote_name='upstream'):
306 306 os.chdir(repo)
307 307 hgrc = ConfigParser.RawConfigParser()
308 308 hgrc.read('.hg/hgrc')
309 309 hgrc.remove_option('paths', remote_name)
310 310 with open('.hg/hgrc', 'w') as f:
311 311 hgrc.write(f)
312 312
313 313 @keep_cwd
314 314 def get_commits(self, repo, branch='default'):
315 315 os.chdir(repo)
316 316 # See http://stackoverflow.com/questions/15376649/is-there-a-mercurial-equivalent-to-git-log-first-parent
317 317 commits_list = execute(['hg', 'log', '--branch', branch, '--template',
318 318 '{node}\n', '--follow-first'])
319 319 return commits_list.strip().split('\n')[::-1]
320 320
321 321 @timed
322 322 def push(self, repo, commit, remote_name=None):
323 323 os.chdir(repo)
324 324 args = ['hg', 'push', '--rev', commit, '--new-branch']
325 325 if remote_name:
326 326 args.append(remote_name)
327 327 execute(args)
328 328
329 329 @timed
330 330 def pull(self, repo):
331 331 os.chdir(repo)
332 332 execute(['hg', '--config', 'alias.pull=pull', 'pull', '-u'])
333 333
334 334 def _remote_last_commit(self, repo_url):
335 335 return execute(['hg', 'identify', repo_url])[:12]
336 336
337 337 def check_remote_last_commit_is(self, commit, repo_url):
338 338 last_remote_commit = self._remote_last_commit(repo_url)
339 339 if not commit.startswith(last_remote_commit):
340 340 raise Exception('Push did not work, expected commit %s but got %s' %
341 341 (commit, last_remote_commit))
342 342
343 343 @keep_cwd
344 344 def _local_last_commit(self, repo):
345 345 os.chdir(repo)
346 346 return execute(['hg', 'identify'])[:12]
347 347
348 348 def check_local_last_commit_is(self, commit, repo):
349 349 last_local_commit = self._local_last_commit(repo)
350 350 if not commit.startswith(last_local_commit):
351 351 raise Exception('Pull did not work, expected commit %s but got %s' %
352 352 (commit, last_local_commit))
353 353
354 354
355 355 class GitTestPerformance(GitMixin, RhodeCodeMixin, TestPerformanceBase):
356 356 def __init__(self, base_dir, repo_url, n_commits, max_commits, skip_commits,
357 357 api_key):
358 358 TestPerformanceBase.__init__(self, base_dir, repo_url, n_commits,
359 359 max_commits, skip_commits)
360 360 RhodeCodeMixin.__init__(self, api_key)
361 361 self.repo_type = 'git'
362 362
363 363
364 364 class HgTestPerformance(HgMixin, RhodeCodeMixin, TestPerformanceBase):
365 365 def __init__(self, base_dir, repo_url, n_commits, max_commits, skip_commits,
366 366 api_key):
367 367 TestPerformanceBase.__init__(self, base_dir, repo_url, n_commits,
368 368 max_commits, skip_commits)
369 369 RhodeCodeMixin.__init__(self, api_key)
370 370 self.repo_type = 'hg'
371 371
372 372
373 373 def get_test(base_dir, repo_url, repo_type, step, max_commits, skip_commits,
374 374 api_key):
375 375 max_commits = min(10 * step,
376 376 int((max_commits - skip_commits) / step) * step)
377 377 max_commits += skip_commits
378 378 if repo_type == 'git':
379 379 return GitTestPerformance(
380 380 base_dir, repo_url, step, max_commits, skip_commits, api_key)
381 381 elif repo_type == 'hg':
382 382 return HgTestPerformance(
383 383 base_dir, repo_url, step, max_commits, skip_commits, api_key)
384 384
385 385
386 386 def main(argv):
387 387 parser = argparse.ArgumentParser(
388 388 description='Performance tests for push/pull/clone for git and ' +
389 389 'mercurial repos.')
390 390 parser.add_argument(
391 391 '--tests', dest='tests', action='store', required=False, default='all',
392 392 help='The tests to run. Default: all. But could be any comma ' +
393 393 'separated list with python, hg, kernel or git')
394 394 parser.add_argument(
395 395 '--sizes', dest='sizes', action='store', required=False,
396 396 default='1,10,100,1000,2500',
397 397 help='The sizes to use. Default: 1,10,100,1000,2500')
398 398 parser.add_argument(
399 399 '--dir', dest='dir', action='store', required=True,
400 400 help='The dir where to store the repos')
401 401 parser.add_argument(
402 402 '--api-key', dest='api_key', action='store', required=True,
403 403 help='The api key of RhodeCode')
404 404 options = parser.parse_args(argv[1:])
405 405 print(options)
406 406
407 407 test_config = {
408 408 'python': {
409 409 'url': 'https://hg.python.org/cpython/',
410 410 'limit': 23322,
411 411 'type': 'hg',
412 412 # Do not time the first commit, as it is HUGE!
413 413 'skip': 1,
414 414 },
415 415 'hg': {
416 416 'url': 'http://selenic.com/hg',
417 417 'limit': 14396,
418 418 'type': 'hg',
419 419 },
420 420 'kernel': {
421 421 'url': 'https://github.com/torvalds/linux.git',
422 422 'limit': 46271,
423 423 'type': 'git',
424 424 },
425 425 'git': {
426 426 'url': 'https://github.com/git/git.git',
427 427 'limit': 13525,
428 428 'type': 'git',
429 429 }
430 430
431 431 }
432 432
433 433 test_names = options.tests.split(',')
434 434 if test_names == ['all']:
435 435 test_names = test_config.keys()
436 436 if not set(test_names) <= set(test_config.keys()):
437 437 print('Invalid tests: only %s are valid but specified %s' %
438 438 (test_config.keys(), test_names))
439 439 return 1
440 440
441 441 sizes = options.sizes.split(',')
442 442 sizes = map(int, sizes)
443 443
444 444 base_dir = options.dir
445 445 api_key = options.api_key
446 446 results = collections.defaultdict(dict)
447 447 for test_name, size in itertools.product(test_names, sizes):
448 448 test = get_test(base_dir,
449 449 test_config[test_name]['url'],
450 450 test_config[test_name]['type'],
451 451 size,
452 452 test_config[test_name]['limit'],
453 453 test_config[test_name].get('skip', 0),
454 454 api_key)
455 455 print('*' * 80)
456 456 print('Running performance test: %s with size %d' % (test_name, size))
457 457 print('*' * 80)
458 458 results[test_name][size] = test.run()
459 459 pprint.pprint(dict(results))
460 460
461 461
462 462 if __name__ == '__main__':
463 463 sys.exit(main(sys.argv))
@@ -1,155 +1,155 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 This is a standalone script which will start VCS and RC.
23 23
24 24 Performance numbers will be written on each interval to:
25 25 vcs_profileX.csv
26 26 rc_profileX.csv
27 27
28 28 To stop the script by press Ctrl-C
29 29 """
30 30
31 31 import datetime
32 32 import os
33 33 import psutil
34 import subprocess32
34 import subprocess
35 35 import sys
36 36 import time
37 37 import traceback
38 38 import urllib.request, urllib.parse, urllib.error
39 39
40 40 PROFILING_INTERVAL = 5
41 41 RC_WEBSITE = "http://localhost:5001/"
42 42
43 43
44 44 def get_file(prefix):
45 45 out_file = None
46 46 for i in range(100):
47 47 file_path = "%s_profile%.3d.csv" % (prefix, i)
48 48 if os.path.exists(file_path):
49 49 continue
50 50 out_file = open(file_path, "w")
51 51 out_file.write("Time; CPU %; Memory (MB); Total FDs; Dulwich FDs; Threads\n")
52 52 break
53 53 return out_file
54 54
55 55
56 56 def dump_system():
57 57 print("System Overview...")
58 58 print("\nCPU Count: %d (%d real)" %
59 59 (psutil.cpu_count(), psutil.cpu_count(logical=False)))
60 60 print("\nDisk:")
61 61 print(psutil.disk_usage(os.sep))
62 62 print("\nMemory:")
63 63 print(psutil.virtual_memory())
64 64 print("\nMemory (swap):")
65 65 print(psutil.swap_memory())
66 66
67 67
68 68 def count_dulwich_fds(proc):
69 p = subprocess32.Popen(["lsof", "-p", proc.pid], stdout=subprocess32.PIPE)
69 p = subprocess.Popen(["lsof", "-p", proc.pid], stdout=subprocess.PIPE)
70 70 out, err = p.communicate()
71 71
72 72 count = 0
73 73 for line in out.splitlines():
74 74 content = line.split()
75 75 # http://git-scm.com/book/en/Git-Internals-Packfiles
76 76 if content[-1].endswith(".idx"):
77 77 count += 1
78 78
79 79 return count
80 80
81 81 def dump_process(pid, out_file):
82 82 now = datetime.datetime.now()
83 83 cpu = pid.cpu_percent()
84 84 mem = pid.memory_info()
85 85 fds = pid.num_fds()
86 86 dulwich_fds = count_dulwich_fds(pid)
87 87 threads = pid.num_threads()
88 88
89 89 content = [now.strftime('%m/%d/%y %H:%M:%S'),
90 90 cpu,
91 91 "%.2f" % (mem[0]/1024.0/1024.0),
92 92 fds, dulwich_fds, threads]
93 93 out_file.write("; ".join([str(item) for item in content]))
94 94 out_file.write("\n")
95 95
96 96
97 97 # Open output files
98 98 vcs_out = get_file("vcs")
99 99 if vcs_out is None:
100 100 print("Unable to enumerate output file for VCS")
101 101 sys.exit(1)
102 102 rc_out = get_file("rc")
103 103 if rc_out is None:
104 104 print("Unable to enumerate output file for RC")
105 105 sys.exit(1)
106 106
107 107 # Show system information
108 108 dump_system()
109 109
110 110 print("\nStarting VCS...")
111 111 vcs = psutil.Popen(["vcsserver"])
112 112 time.sleep(1)
113 113 if not vcs.is_running():
114 114 print("VCS - Failed to start")
115 115 sys.exit(1)
116 116 print("VCS - Ok")
117 117
118 118 print("\nStarting RhodeCode...")
119 119 rc = psutil.Popen("RC_VCSSERVER_TEST_DISABLE=1 paster serve test.ini",
120 shell=True, stdin=subprocess32.PIPE)
120 shell=True, stdin=subprocess.PIPE)
121 121 time.sleep(1)
122 122 if not rc.is_running():
123 123 print("RC - Failed to start")
124 124 vcs.terminate()
125 125 sys.exit(1)
126 126
127 127 # Send command to create the databases
128 128 rc.stdin.write("y\n")
129 129
130 130 # Verify that the website is up
131 131 time.sleep(4)
132 132 try:
133 133 urllib.request.urlopen(RC_WEBSITE)
134 134 except IOError:
135 135 print("RC - Website not started")
136 136 vcs.terminate()
137 137 sys.exit(1)
138 138 print("RC - Ok")
139 139
140 140 print("\nProfiling...\n%s\n" % ("-"*80))
141 141 while True:
142 142 try:
143 143 dump_process(vcs, vcs_out)
144 144 dump_process(rc, rc_out)
145 145 time.sleep(PROFILING_INTERVAL)
146 146 except Exception:
147 147 print(traceback.format_exc())
148 148 break
149 149
150 150 # Finalize the profiling
151 151 vcs_out.close()
152 152 rc_out.close()
153 153
154 154 vcs.terminate()
155 155 rc.terminate()
@@ -1,306 +1,306 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2016-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 VCS Performance measurement tool
23 23
24 24 Usage:
25 25
26 26 - Check that required vcs keys can be found in ~/.hgrc and ~/.netrc
27 27
28 28 - Start a local instance of RhodeCode Enterprise
29 29
30 30 - Launch the script:
31 31
32 32 TMPDIR=/tmp python vcs_performance.py \
33 33 --host=http://vm:5000 \
34 34 --api-key=55c4a33688577da24183dcac5fde4dddfdbf18dc \
35 35 --commits=10 --repositories=100 --log-level=info
36 36 """
37 37
38 38 import argparse
39 39 import functools
40 40 import logging
41 41 import os
42 42 import shutil
43 import subprocess32
43 import subprocess
44 44 import tempfile
45 45 import time
46 46 from itertools import chain
47 47
48 48 from api import RCApi, ApiError
49 49
50 50
51 51 log = logging.getLogger(__name__)
52 52
53 53
54 54 def timed(f):
55 55 """Decorator that returns the time it took to execute the function."""
56 56 @functools.wraps(f)
57 57 def wrapped_f(*args, **kwargs):
58 58 start_time = time.time()
59 59 try:
60 60 f(*args, **kwargs)
61 61 finally:
62 62 return time.time() - start_time
63 63
64 64 return wrapped_f
65 65
66 66
67 67 def mean(container):
68 68 """Return the mean of the container."""
69 69 if not container:
70 70 return -1.0
71 71 return sum(container) / len(container)
72 72
73 73
74 74 class Config(object):
75 75 args = None
76 76
77 77 def __init__(self):
78 78 parser = argparse.ArgumentParser(description='Runs VCS load tests')
79 79 parser.add_argument(
80 80 '--host', dest='host', action='store', required=True,
81 81 help='RhodeCode Enterprise host')
82 82 parser.add_argument(
83 83 '--api-key', dest='api_key', action='store', required=True,
84 84 help='API Key')
85 85 parser.add_argument(
86 86 '--file-size', dest='file_size', action='store', required=False,
87 87 default=1, type=int, help='File size in MB')
88 88 parser.add_argument(
89 89 '--repositories', dest='repositories', action='store',
90 90 required=False, default=1, type=int,
91 91 help='Number of repositories')
92 92 parser.add_argument(
93 93 '--commits', dest='commits', action='store', required=False,
94 94 default=1, type=int, help='Number of commits')
95 95 parser.add_argument(
96 96 '--log-level', dest='log_level', action='store', required=False,
97 97 default='error', help='Logging level')
98 98 self.args = parser.parse_args()
99 99
100 100 def __getattr__(self, attr):
101 101 return getattr(self.args, attr)
102 102
103 103
104 104 class Repository(object):
105 105 FILE_NAME_TEMPLATE = "test_{:09d}.bin"
106 106
107 107 def __init__(self, name, base_path, api):
108 108 self.name = name
109 109 self.path = os.path.join(base_path, name)
110 110 self.api = api
111 111 self.url = None
112 112
113 113 def create(self):
114 114 self._create_filesystem_repo(self.path)
115 115 try:
116 116 self.url = self.api.create_repo(self.name, self.TYPE, 'Performance tests')
117 117 except ApiError as e:
118 118 log.error('api: {}'.format(e))
119 119
120 120 def delete(self):
121 121 self._delete_filesystem_repo()
122 122 try:
123 123 self.api.delete_repo(self.name)
124 124 except ApiError as e:
125 125 log.error('api: {}'.format(e))
126 126
127 127 def create_commits(self, number, file_size):
128 128 for i in range(number):
129 129 file_name = self.FILE_NAME_TEMPLATE.format(i)
130 130 log.debug("Create commit[{}] {}".format(self.name, file_name))
131 131 self._create_file(file_name, file_size)
132 132 self._create_commit(file_name)
133 133
134 134 @timed
135 135 def push(self):
136 136 raise NotImplementedError()
137 137
138 138 @timed
139 139 def clone(self, destination_path):
140 140 raise NotImplementedError()
141 141
142 142 @timed
143 143 def pull(self):
144 144 raise NotImplementedError()
145 145
146 146 def _run(self, *args):
147 147 command = [self.BASE_COMMAND] + list(args)
148 process = subprocess32.Popen(
149 command, stdout=subprocess32.PIPE, stderr=subprocess32.PIPE)
148 process = subprocess.Popen(
149 command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
150 150 return process.communicate()
151 151
152 152 def _create_file(self, name, size):
153 153 file_name = os.path.join(self.path, name)
154 154 with open(file_name, 'wb') as f:
155 155 f.write(os.urandom(1024))
156 156
157 157 def _delete_filesystem_repo(self):
158 158 shutil.rmtree(self.path)
159 159
160 160 def _create_filesystem_repo(self, path):
161 161 raise NotImplementedError()
162 162
163 163 def _create_commit(self, file_name):
164 164 raise NotImplementedError()
165 165
166 166
167 167 class GitRepository(Repository):
168 168 TYPE = 'git'
169 169 BASE_COMMAND = 'git'
170 170
171 171 @timed
172 172 def push(self):
173 173 os.chdir(self.path)
174 174 self._run('push', '--set-upstream', self.url, 'master')
175 175
176 176 @timed
177 177 def clone(self, destination_path):
178 178 self._run('clone', self.url, os.path.join(destination_path, self.name))
179 179
180 180 @timed
181 181 def pull(self, destination_path):
182 182 path = os.path.join(destination_path, self.name)
183 183 self._create_filesystem_repo(path)
184 184 os.chdir(path)
185 185 self._run('remote', 'add', 'origin', self.url)
186 186 self._run('pull', 'origin', 'master')
187 187
188 188 def _create_filesystem_repo(self, path):
189 189 self._run('init', path)
190 190
191 191 def _create_commit(self, file_name):
192 192 os.chdir(self.path)
193 193 self._run('add', file_name)
194 194 self._run('commit', file_name, '-m', '"Add {}"'.format(file_name))
195 195
196 196
197 197 class HgRepository(Repository):
198 198 TYPE = 'hg'
199 199 BASE_COMMAND = 'hg'
200 200
201 201 @timed
202 202 def push(self):
203 203 os.chdir(self.path)
204 204 self._run('push', self.url)
205 205
206 206 @timed
207 207 def clone(self, destination_path):
208 208 self._run('clone', self.url, os.path.join(destination_path, self.name))
209 209
210 210 @timed
211 211 def pull(self, destination_path):
212 212 path = os.path.join(destination_path, self.name)
213 213 self._create_filesystem_repo(path)
214 214 os.chdir(path)
215 215 self._run('pull', '-r', 'tip', self.url)
216 216
217 217 def _create_filesystem_repo(self, path):
218 218 self._run('init', path)
219 219
220 220 def _create_commit(self, file_name):
221 221 os.chdir(self.path)
222 222 self._run('add', file_name)
223 223 self._run('commit', file_name, '-m', '"Add {}"'.format(file_name))
224 224
225 225
226 226 class Benchmark(object):
227 227 REPO_CLASSES = {
228 228 'git': GitRepository,
229 229 'hg': HgRepository
230 230 }
231 231 REPO_NAME = '{}_performance_{:03d}'
232 232
233 233 def __init__(self, config):
234 234 self.api = RCApi(api_key=config.api_key, rc_endpoint=config.host)
235 235 self.source_path = tempfile.mkdtemp(suffix='vcsperformance')
236 236
237 237 self.config = config
238 238 self.git_repos = []
239 239 self.hg_repos = []
240 240
241 241 self._set_log_level()
242 242
243 243 def start(self):
244 244 self._create_repos()
245 245 repos = {
246 246 'git': self.git_repos,
247 247 'hg': self.hg_repos
248 248 }
249 249
250 250 clone_destination_path = tempfile.mkdtemp(suffix='clone')
251 251 pull_destination_path = tempfile.mkdtemp(suffix='pull')
252 252 operations = [
253 253 ('push', ),
254 254 ('clone', clone_destination_path),
255 255 ('pull', pull_destination_path)
256 256 ]
257 257
258 258 for operation in operations:
259 259 for type_ in repos:
260 260 times = self._measure(repos[type_], *operation)
261 261 print("Mean[of {}] {:5s} {:5s} time: {:.3f} sec.".format(
262 262 len(times), type_, operation[0], mean(times)))
263 263
264 264 def cleanup(self):
265 265 log.info("Cleaning up...")
266 266 for repo in chain(self.git_repos, self.hg_repos):
267 267 repo.delete()
268 268
269 269 def _measure(self, repos, operation, *args):
270 270 times = []
271 271 for repo in repos:
272 272 method = getattr(repo, operation)
273 273 times.append(method(*args))
274 274 return times
275 275
276 276 def _create_repos(self):
277 277 log.info("Creating repositories...")
278 278 for i in range(self.config.repositories):
279 279 self.git_repos.append(self._create_repo('git', i))
280 280 self.hg_repos.append(self._create_repo('hg', i))
281 281
282 282 def _create_repo(self, type_, id_):
283 283 RepoClass = self.REPO_CLASSES[type_]
284 284 repo = RepoClass(
285 285 self.REPO_NAME.format(type_, id_), self.source_path, self.api)
286 286 repo.create()
287 287 repo.create_commits(self.config.commits, self.config.file_size)
288 288 return repo
289 289
290 290 def _set_log_level(self):
291 291 try:
292 292 log_level = getattr(logging, config.log_level.upper())
293 293 except:
294 294 log_level = logging.ERROR
295 295 handler = logging.StreamHandler()
296 296 log.addHandler(handler)
297 297 log.setLevel(log_level)
298 298
299 299
300 300 if __name__ == '__main__':
301 301 config = Config()
302 302 benchmark = Benchmark(config)
303 303 try:
304 304 benchmark.start()
305 305 finally:
306 306 benchmark.cleanup()
@@ -1,1842 +1,1842 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import collections
22 22 import datetime
23 23 import hashlib
24 24 import os
25 25 import re
26 26 import pprint
27 27 import shutil
28 28 import socket
29 import subprocess32
29 import subprocess
30 30 import time
31 31 import uuid
32 32 import dateutil.tz
33 33 import logging
34 34
35 35 import mock
36 36 import pyramid.testing
37 37 import pytest
38 38 import colander
39 39 import requests
40 40 import pyramid.paster
41 41
42 42 import rhodecode
43 43 from rhodecode.lib.utils2 import AttributeDict
44 44 from rhodecode.model.changeset_status import ChangesetStatusModel
45 45 from rhodecode.model.comment import CommentsModel
46 46 from rhodecode.model.db import (
47 47 PullRequest, PullRequestReviewers, Repository, RhodeCodeSetting, ChangesetStatus,
48 48 RepoGroup, UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi)
49 49 from rhodecode.model.meta import Session
50 50 from rhodecode.model.pull_request import PullRequestModel
51 51 from rhodecode.model.repo import RepoModel
52 52 from rhodecode.model.repo_group import RepoGroupModel
53 53 from rhodecode.model.user import UserModel
54 54 from rhodecode.model.settings import VcsSettingsModel
55 55 from rhodecode.model.user_group import UserGroupModel
56 56 from rhodecode.model.integration import IntegrationModel
57 57 from rhodecode.integrations import integration_type_registry
58 58 from rhodecode.integrations.types.base import IntegrationTypeBase
59 59 from rhodecode.lib.utils import repo2db_mapper
60 60 from rhodecode.lib.vcs.backends import get_backend
61 61 from rhodecode.lib.vcs.nodes import FileNode
62 62 from rhodecode.tests import (
63 63 login_user_session, get_new_dir, utils, TESTS_TMP_PATH,
64 64 TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN,
65 65 TEST_USER_REGULAR_PASS)
66 66 from rhodecode.tests.utils import CustomTestApp, set_anonymous_access
67 67 from rhodecode.tests.fixture import Fixture
68 68 from rhodecode.config import utils as config_utils
69 69
70 70 log = logging.getLogger(__name__)
71 71
72 72 def _split_comma(value):
73 73 return value.split(',')
74 74
75 75
def pytest_addoption(parser):
    """Register RhodeCode-specific command line options with pytest."""
    parser.addoption(
        '--keep-tmp-path', action='store_true',
        help="Keep the test temporary directories")
    # Backend/database selection for parametrized tests.
    parser.addoption(
        '--backends', action='store', type=_split_comma,
        default=['git', 'hg', 'svn'],
        help="Select which backends to test for backend specific tests.")
    parser.addoption(
        '--dbs', action='store', type=_split_comma,
        default=['sqlite'],
        help="Select which database to test for database specific tests. "
             "Possible options are sqlite,postgres,mysql")
    # Appenlight statistics tracking.
    parser.addoption(
        '--appenlight', '--ae', action='store_true',
        help="Track statistics in appenlight.")
    parser.addoption(
        '--appenlight-api-key', '--ae-key',
        help="API key for Appenlight.")
    parser.addoption(
        '--appenlight-url', '--ae-url',
        default="https://ae.rhodecode.com",
        help="Appenlight service URL, defaults to https://ae.rhodecode.com")
    # Per-database connection strings for the --dbs test runs.
    parser.addoption(
        '--sqlite-connection-string', action='store',
        default='', help="Connection string for the dbs tests with SQLite")
    parser.addoption(
        '--postgres-connection-string', action='store',
        default='', help="Connection string for the dbs tests with Postgres")
    parser.addoption(
        '--mysql-connection-string', action='store',
        default='', help="Connection string for the dbs tests with MySQL")
    parser.addoption(
        '--repeat', type=int, default=100,
        help="Number of repetitions in performance tests.")
111 111
112 112
def pytest_configure(config):
    # NOTE(review): the imported name is unused; presumably the import is kept
    # for its module-level side effects (monkey patches) -- confirm before
    # removing.
    from rhodecode.config import patches
115 115
116 116
def pytest_collection_modifyitems(session, config, items):
    """Drop nose-style non-tests and push the slow suites to the end."""
    # Items explicitly marked ``__test__ = False`` (nose convention, kept
    # during the nose -> pytest transition) are filtered out in place.
    items[:] = [item for item in items if getattr(item.obj, '__test__', True)]

    # NOTE(marcink): custom test ordering; database and vcs_operations tests
    # are the slowest and run last for faster feedback from the rest.
    def _ordering_key(item):
        node_id = item._nodeid
        if node_id.startswith('rhodecode/tests/database'):
            return 1
        if node_id.startswith('rhodecode/tests/vcs_operations'):
            return 2
        return 0

    items.sort(key=_ordering_key)
136 136
137 137
def pytest_generate_tests(metafunc):
    """Parametrize backend tests; skip ones disabled via ``--backends``."""
    # Tests requesting the ``backend_alias`` fixture are generated once per
    # enabled backend.
    if 'backend_alias' in metafunc.fixturenames:
        enabled = get_backends_from_metafunc(metafunc)
        if not enabled:
            pytest.skip("Not enabled for any of selected backends")
        metafunc.parametrize('backend_alias', enabled, scope=None)

    # Tests carrying ``pytest.mark.backends`` are skipped entirely when none
    # of their declared backends is enabled.
    if metafunc.definition.get_closest_marker('backends'):
        if not get_backends_from_metafunc(metafunc):
            pytest.skip("Not enabled for any of selected backends")
154 154
155 155
def get_backends_from_metafunc(metafunc):
    """Return the set of backends enabled for *metafunc*.

    The candidate backends come from ``pytest.mark.backends``, from a legacy
    ``backend_alias`` class attribute, or from the full ``--backends`` option,
    in that order of precedence; the result is intersected with the backends
    requested on the command line.
    """
    selected = set(metafunc.config.getoption('--backends'))
    marker = metafunc.definition.get_closest_marker('backends')
    if marker:
        # Backends supported by this test, from pytest.mark.backends
        candidates = marker.args
    elif hasattr(metafunc.cls, 'backend_alias'):
        # Legacy class attribute for tests not yet using pytest.mark.backends
        candidates = [metafunc.cls.backend_alias]
    else:
        candidates = metafunc.config.getoption('--backends')
    return selected.intersection(candidates)
170 170
171 171
@pytest.fixture(scope='session', autouse=True)
def activate_example_rcextensions(request):
    """
    Patch in an example rcextensions module which verifies passed in kwargs.
    """
    from rhodecode.config import rcextensions

    previous_extensions = rhodecode.EXTENSIONS
    rhodecode.EXTENSIONS = rcextensions
    # Fresh per-session record of entry-point invocations.
    rhodecode.EXTENSIONS.calls = collections.defaultdict(list)

    def _restore():
        rhodecode.EXTENSIONS = previous_extensions

    request.addfinalizer(_restore)
186 186
187 187
@pytest.fixture()
def capture_rcextensions():
    """
    Returns the recorded calls to entry points in rcextensions.

    Clears any calls recorded by earlier tests before handing the mapping out.
    """
    calls = rhodecode.EXTENSIONS.calls
    calls.clear()
    # Note: At this moment, it is still the empty dict, but that will
    # be filled during the test run and since it is a reference this
    # is enough to make it work.
    return calls
199 199
200 200
@pytest.fixture(scope='session')
def http_environ_session():
    """
    Allow to use "http_environ" in session scope.
    """
    # Session-scoped twin of the function-scoped fixture; both delegate to
    # the same plain helper.
    return plain_http_environ()
207 207
208 208
def plain_http_host_stub():
    """
    Value of HTTP_HOST in the test run (host:port).
    """
    return 'example.com:80'
214 214
215 215
@pytest.fixture()
def http_host_stub():
    """
    Value of HTTP_HOST in the test run (host:port).
    """
    return plain_http_host_stub()
222 222
223 223
def plain_http_host_only_stub():
    """
    Host part (without port) of HTTP_HOST used in the test run.
    """
    host, _sep, _port = plain_http_host_stub().partition(':')
    return host
229 229
230 230
@pytest.fixture()
def http_host_only_stub():
    """
    Host part (without port) of HTTP_HOST used in the test run.
    """
    return plain_http_host_only_stub()
237 237
238 238
def plain_http_environ():
    """
    HTTP extra environ keys.

    Used by the test application and as well for setting up the pylons
    environment. In the case of the fixture "app" it should be possible
    to override this for a specific test case.
    """
    host_with_port = plain_http_host_stub()
    return {
        'SERVER_NAME': plain_http_host_only_stub(),
        'SERVER_PORT': host_with_port.split(':')[1],
        'HTTP_HOST': host_with_port,
        'HTTP_USER_AGENT': 'rc-test-agent',
        'REQUEST_METHOD': 'GET'
    }
254 254
255 255
@pytest.fixture()
def http_environ():
    """
    HTTP extra environ keys.

    Used by the test application and as well for setting up the pylons
    environment. In the case of the fixture "app" it should be possible
    to override this for a specific test case.
    """
    return plain_http_environ()
266 266
267 267
@pytest.fixture(scope='session')
def baseapp(ini_config, vcsserver, http_environ_session):
    """Session-scoped Pyramid application built from the test ini file.

    Depends on ``vcsserver`` so the backend service is up before the app
    is constructed.
    """
    from rhodecode.lib.pyramid_utils import get_app_config
    from rhodecode.config.middleware import make_pyramid_app

    log.info("Using the RhodeCode configuration:{}".format(ini_config))
    # Wire up logging as configured in the ini before the app starts.
    pyramid.paster.setup_logging(ini_config)

    settings = get_app_config(ini_config)
    app = make_pyramid_app({'__file__': ini_config}, **settings)

    return app
280 280
281 281
@pytest.fixture(scope='function')
def app(request, config_stub, baseapp, http_environ):
    """Function-scoped WSGI test application wrapping ``baseapp``."""
    test_app = CustomTestApp(baseapp, extra_environ=http_environ)
    # Expose the app on the test class so class-based tests can use self.app.
    if request.cls:
        request.cls.app = test_app
    return test_app
290 290
291 291
@pytest.fixture(scope='session')
def app_settings(baseapp, ini_config):
    """
    Settings dictionary used to create the app.

    Parses the ini file and passes the result through the sanitize and apply
    defaults mechanism in `rhodecode.config.middleware`.
    """
    return baseapp.config.get_settings()
301 301
302 302
@pytest.fixture(scope='session')
def db_connection(ini_settings):
    """Initialize the database connection once per test session."""
    config_utils.initialize_database(ini_settings)
307 307
308 308
309 309 LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user'))
310 310
311 311
def _autologin_user(app, *args):
    """Log a user in via *app*; return ``LoginData(csrf_token, user)``."""
    active_session = login_user_session(app, *args)
    token = rhodecode.lib.auth.get_csrf_token(active_session)
    return LoginData(token, active_session['rhodecode_user'])
316 316
317 317
@pytest.fixture()
def autologin_user(app):
    """
    Utility fixture which makes sure that the admin user is logged in
    """
    return _autologin_user(app)
324 324
325 325
@pytest.fixture()
def autologin_regular_user(app):
    """
    Utility fixture which makes sure that the regular user is logged in
    """
    return _autologin_user(
        app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
333 333
334 334
@pytest.fixture(scope='function')
def csrf_token(request, autologin_user):
    # CSRF token of the auto-logged-in admin session.
    return autologin_user.csrf_token
338 338
339 339
@pytest.fixture(scope='function')
def xhr_header(request):
    # Header making a request look like an XMLHttpRequest (AJAX) call.
    return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'}
343 343
344 344
@pytest.fixture()
def real_crypto_backend(monkeypatch):
    """
    Switch the production crypto backend on for this test.

    During the test run the crypto backend is replaced with a faster
    implementation based on the MD5 algorithm.
    """
    # is_test=False makes the app pick the real (slow) crypto backend.
    monkeypatch.setattr(rhodecode, 'is_test', False)
354 354
355 355
@pytest.fixture(scope='class')
def index_location(request, baseapp):
    """Search index location from the app settings, exposed per test class."""
    index_location = baseapp.config.get_settings()['search.location']
    if request.cls:
        request.cls.index_location = index_location
    return index_location
362 362
363 363
@pytest.fixture(scope='session', autouse=True)
def tests_tmp_path(request):
    """
    Create temporary directory to be used during the test session.
    """
    if not os.path.exists(TESTS_TMP_PATH):
        os.makedirs(TESTS_TMP_PATH)

    # --keep-tmp-path leaves the directory behind for post-mortem inspection.
    if not request.config.getoption('--keep-tmp-path'):
        def remove_tmp_path():
            shutil.rmtree(TESTS_TMP_PATH)

        request.addfinalizer(remove_tmp_path)

    return TESTS_TMP_PATH
378 378
379 379
@pytest.fixture()
def test_repo_group(request):
    """
    Create a temporary repository group, and destroy it after
    usage automatically
    """
    fixture = Fixture()
    unique_id = 'test_repo_group_%s' % str(time.time()).replace('.', '')
    repo_group = fixture.create_repo_group(unique_id)

    request.addfinalizer(lambda: fixture.destroy_repo_group(unique_id))
    return repo_group
395 395
396 396
@pytest.fixture()
def test_user_group(request):
    """
    Create a temporary user group, and destroy it after
    usage automatically
    """
    fixture = Fixture()
    unique_id = 'test_user_group_%s' % str(time.time()).replace('.', '')
    user_group = fixture.create_user_group(unique_id)

    # Note: destroy takes the group object (unlike test_repo_group's id).
    request.addfinalizer(lambda: fixture.destroy_user_group(user_group))
    return user_group
412 412
413 413
@pytest.fixture(scope='session')
def test_repo(request):
    """Session-wide container of read-only test repositories (see below)."""
    container = TestRepoContainer()
    request.addfinalizer(container._cleanup)
    return container
419 419
420 420
class TestRepoContainer(object):
    """
    Container for test repositories which are used read only.

    Repositories will be created on demand and re-used during the lifetime
    of this object.

    Usage to get the svn test repository "minimal"::

        test_repo = TestContainer()
        repo = test_repo('minimal', 'svn')

    """

    # Maps backend alias -> helper that extracts the dump into a repo on disk.
    dump_extractors = {
        'git': utils.extract_git_repo_from_dump,
        'hg': utils.extract_hg_repo_from_dump,
        'svn': utils.extract_svn_repo_from_dump,
    }

    def __init__(self):
        # Repo names to destroy in _cleanup, in creation order.
        self._cleanup_repos = []
        self._fixture = Fixture()
        # Cache: (dump_name, backend_alias) -> repo_id of the created repo.
        self._repos = {}

    def __call__(self, dump_name, backend_alias, config=None):
        """Return the DB Repository for *dump_name*, creating it on demand."""
        key = (dump_name, backend_alias)
        if key not in self._repos:
            repo = self._create_repo(dump_name, backend_alias, config)
            self._repos[key] = repo.repo_id
        return Repository.get(self._repos[key])

    def _create_repo(self, dump_name, backend_alias, config):
        """Extract the dump to disk and register it in the database."""
        repo_name = '%s-%s' % (backend_alias, dump_name)
        backend = get_backend(backend_alias)
        dump_extractor = self.dump_extractors[backend_alias]
        repo_path = dump_extractor(dump_name, repo_name)

        vcs_repo = backend(repo_path, config=config)
        # Make the on-disk repo known to the application database.
        repo2db_mapper({repo_name: vcs_repo})

        repo = RepoModel().get_by_repo_name(repo_name)
        self._cleanup_repos.append(repo_name)
        return repo

    def _cleanup(self):
        """Destroy all created repositories, newest first."""
        for repo_name in reversed(self._cleanup_repos):
            self._fixture.destroy_repo(repo_name)
469 469
470 470
def backend_base(request, backend_alias, baseapp, test_repo):
    """Build a ``Backend`` helper for *backend_alias*, honoring --backends."""
    if backend_alias not in request.config.getoption('--backends'):
        pytest.skip("Backend %s not selected." % (backend_alias, ))

    utils.check_xfail_backends(request.node, backend_alias)
    utils.check_skip_backends(request.node, backend_alias)

    backend = Backend(
        alias=backend_alias,
        repo_name='vcs_test_%s' % (backend_alias, ),
        test_name=request.node.name,
        test_repo_container=test_repo)
    request.addfinalizer(backend.cleanup)
    return backend
486 486
487 487
@pytest.fixture()
def backend(request, backend_alias, baseapp, test_repo):
    """
    Parametrized fixture which represents a single backend implementation.

    It respects the option `--backends` to focus the test run on specific
    backend implementations.

    It also supports `pytest.mark.xfail_backends` to mark tests as failing
    for specific backends. This is intended as a utility for incremental
    development of a new backend implementation.
    """
    return backend_base(request, backend_alias, baseapp, test_repo)
501 501
502 502
@pytest.fixture()
def backend_git(request, baseapp, test_repo):
    # Non-parametrized variant pinned to the git backend.
    return backend_base(request, 'git', baseapp, test_repo)
506 506
507 507
@pytest.fixture()
def backend_hg(request, baseapp, test_repo):
    # Non-parametrized variant pinned to the hg backend.
    return backend_base(request, 'hg', baseapp, test_repo)
511 511
512 512
@pytest.fixture()
def backend_svn(request, baseapp, test_repo):
    # Non-parametrized variant pinned to the svn backend.
    return backend_base(request, 'svn', baseapp, test_repo)
516 516
517 517
@pytest.fixture()
def backend_random(backend_git):
    """
    Use this to express that your tests need "a backend.

    A few of our tests need a backend, so that we can run the code. This
    fixture is intended to be used for such cases. It will pick one of the
    backends and run the tests.

    The fixture `backend` would run the test multiple times for each
    available backend which is a pure waste of time if the test is
    independent of the backend type.
    """
    # TODO: johbo: Change this to pick a random backend
    return backend_git
533 533
534 534
@pytest.fixture()
def backend_stub(backend_git):
    """
    Use this to express that your tests need a backend stub

    TODO: mikhail: Implement a real stub logic instead of returning
    a git backend
    """
    return backend_git
544 544
545 545
@pytest.fixture()
def repo_stub(backend_stub):
    """
    Use this to express that your tests need a repository stub
    """
    return backend_stub.create_repo()
552 552
553 553
class Backend(object):
    """
    Represents the test configuration for one supported backend

    Provides easy access to different test repositories based on
    `__getitem__`. Such repositories will only be created once per test
    session.
    """

    # Characters not allowed in generated repository names.
    invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
    # NOTE(review): class-level defaults; ``_commit_ids`` is a mutable class
    # attribute that is shared between instances until an instance reassigns
    # it in ``_add_commits_to_repo``.
    _master_repo = None
    _master_repo_path = ''
    _commit_ids = {}

    def __init__(self, alias, repo_name, test_name, test_repo_container):
        self.alias = alias
        self.repo_name = repo_name
        self._cleanup_repos = []
        self._test_name = test_name
        self._test_repo_container = test_repo_container
        # TODO: johbo: Used as a delegate interim. Not yet sure if Backend or
        # Fixture will survive in the end.
        self._fixture = Fixture()

    def __getitem__(self, key):
        # Read-only access to one of the shared dump-based test repositories.
        return self._test_repo_container(key, self.alias)

    def create_test_repo(self, key, config=None):
        """Create (or fetch the cached) dump-based test repo named *key*."""
        return self._test_repo_container(key, self.alias, config)

    @property
    def repo(self):
        """
        Returns the "current" repository. This is the vcs_test repo or the
        last repo which has been created with `create_repo`.
        """
        from rhodecode.model.db import Repository
        return Repository.get_by_repo_name(self.repo_name)

    @property
    def default_branch_name(self):
        # Default branch name of the underlying vcs backend class.
        VcsRepository = get_backend(self.alias)
        return VcsRepository.DEFAULT_BRANCH_NAME

    @property
    def default_head_id(self):
        """
        Returns the default head id of the underlying backend.

        This will be the default branch name in case the backend does have a
        default branch. In the other cases it will point to a valid head
        which can serve as the base to create a new commit on top of it.
        """
        vcsrepo = self.repo.scm_instance()
        head_id = (
            vcsrepo.DEFAULT_BRANCH_NAME or
            vcsrepo.commit_ids[-1])
        return head_id

    @property
    def commit_ids(self):
        """
        Returns the list of commits for the last created repository
        """
        return self._commit_ids

    def create_master_repo(self, commits):
        """
        Create a repository and remember it as a template.

        This allows to easily create derived repositories to construct
        more complex scenarios for diff, compare and pull requests.

        Returns a commit map which maps from commit message to raw_id.
        """
        self._master_repo = self.create_repo(commits=commits)
        self._master_repo_path = self._master_repo.repo_full_path

        return self._commit_ids

    def create_repo(
            self, commits=None, number_of_commits=0, heads=None,
            name_suffix=u'', bare=False, **kwargs):
        """
        Create a repository and record it for later cleanup.

        :param commits: Optional. A sequence of dict instances.
            Will add a commit per entry to the new repository.
        :param number_of_commits: Optional. If set to a number, this number of
            commits will be added to the new repository.
        :param heads: Optional. Can be set to a sequence of commit
            names which shall be pulled in from the master repository.
        :param name_suffix: adds special suffix to generated repo name
        :param bare: set a repo as bare (no checkout)
        """
        self.repo_name = self._next_repo_name() + name_suffix
        repo = self._fixture.create_repo(
            self.repo_name, repo_type=self.alias, bare=bare, **kwargs)
        self._cleanup_repos.append(repo.repo_name)

        commits = commits or [
            {'message': 'Commit %s of %s' % (x, self.repo_name)}
            for x in range(number_of_commits)]
        vcs_repo = repo.scm_instance()
        vcs_repo.count()
        self._add_commits_to_repo(vcs_repo, commits)
        if heads:
            self.pull_heads(repo, heads)

        return repo

    def pull_heads(self, repo, heads):
        """
        Make sure that repo contains all commits mentioned in `heads`
        """
        vcsrepo = repo.scm_instance()
        # Disable hooks so the pull does not trigger application callbacks.
        vcsrepo.config.clear_section('hooks')
        commit_ids = [self._commit_ids[h] for h in heads]
        vcsrepo.pull(self._master_repo_path, commit_ids=commit_ids)

    def create_fork(self):
        """Fork the current repo; the fork becomes the new current repo."""
        repo_to_fork = self.repo_name
        self.repo_name = self._next_repo_name()
        repo = self._fixture.create_fork(repo_to_fork, self.repo_name)
        self._cleanup_repos.append(self.repo_name)
        return repo

    def new_repo_name(self, suffix=u''):
        """Generate a fresh repo name and schedule it for cleanup."""
        self.repo_name = self._next_repo_name() + suffix
        self._cleanup_repos.append(self.repo_name)
        return self.repo_name

    def _next_repo_name(self):
        # Unique, filesystem-safe name derived from the test name and a
        # per-instance counter.
        return u"%s_%s" % (
            self.invalid_repo_name.sub(u'_', self._test_name), len(self._cleanup_repos))

    def ensure_file(self, filename, content='Test content\n'):
        """Commit *filename* with *content* into the current repository."""
        assert self._cleanup_repos, "Avoid writing into vcs_test repos"
        commits = [
            {'added': [
                FileNode(filename, content=content),
            ]},
        ]
        self._add_commits_to_repo(self.repo.scm_instance(), commits)

    def enable_downloads(self):
        """Turn on the downloads flag for the current repository."""
        repo = self.repo
        repo.enable_downloads = True
        Session().add(repo)
        Session().commit()

    def cleanup(self):
        """Destroy all repositories created by this instance, newest first."""
        for repo_name in reversed(self._cleanup_repos):
            self._fixture.destroy_repo(repo_name)

    def _add_commits_to_repo(self, repo, commits):
        """Create *commits* in *repo* and remember the message->id mapping."""
        commit_ids = _add_commits_to_repo(repo, commits)
        if not commit_ids:
            return
        self._commit_ids = commit_ids

        # Creating refs for Git to allow fetching them from remote repository
        if self.alias == 'git':
            refs = {}
            for message in self._commit_ids:
                # TODO: mikhail: do more special chars replacements
                ref_name = 'refs/test-refs/{}'.format(
                    message.replace(' ', ''))
                refs[ref_name] = self._commit_ids[message]
            self._create_refs(repo, refs)

    def _create_refs(self, repo, refs):
        # Write each ref name -> commit id pair into the git repository.
        for ref_name in refs:
            repo.set_refs(ref_name, refs[ref_name])
728 728
729 729
def vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo):
    """Build a ``VcsBackend`` helper for *backend_alias*, honoring --backends."""
    if backend_alias not in request.config.getoption('--backends'):
        pytest.skip("Backend %s not selected." % (backend_alias, ))

    utils.check_xfail_backends(request.node, backend_alias)
    utils.check_skip_backends(request.node, backend_alias)

    repo_name = 'vcs_test_%s' % (backend_alias, )
    backend = VcsBackend(
        alias=backend_alias,
        repo_path=os.path.join(tests_tmp_path, repo_name),
        test_name=request.node.name,
        test_repo_container=test_repo)
    request.addfinalizer(backend.cleanup)
    return backend
746 746
747 747
@pytest.fixture()
def vcsbackend(request, backend_alias, tests_tmp_path, baseapp, test_repo):
    """
    Parametrized fixture which represents a single vcs backend implementation.

    See the fixture `backend` for more details. This one implements the same
    concept, but on vcs level. So it does not provide model instances etc.

    Parameters are generated dynamically, see :func:`pytest_generate_tests`
    for how this works.
    """
    return vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo)
760 760
761 761
@pytest.fixture()
def vcsbackend_git(request, tests_tmp_path, baseapp, test_repo):
    # Non-parametrized variant pinned to the git vcs backend.
    return vcsbackend_base(request, 'git', tests_tmp_path, baseapp, test_repo)
765 765
766 766
@pytest.fixture()
def vcsbackend_hg(request, tests_tmp_path, baseapp, test_repo):
    # Non-parametrized variant pinned to the hg vcs backend.
    return vcsbackend_base(request, 'hg', tests_tmp_path, baseapp, test_repo)
770 770
771 771
@pytest.fixture()
def vcsbackend_svn(request, tests_tmp_path, baseapp, test_repo):
    # Non-parametrized variant pinned to the svn vcs backend.
    return vcsbackend_base(request, 'svn', tests_tmp_path, baseapp, test_repo)
775 775
776 776
@pytest.fixture()
def vcsbackend_stub(vcsbackend_git):
    """
    Use this to express that your test just needs a stub of a vcsbackend.

    Plan is to eventually implement an in-memory stub to speed tests up.
    """
    return vcsbackend_git
785 785
786 786
class VcsBackend(object):
    """
    Represents the test configuration for one supported vcs backend.
    """

    # Characters not allowed in generated repository names.
    invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')

    def __init__(self, alias, repo_path, test_name, test_repo_container):
        self.alias = alias
        self._repo_path = repo_path
        # vcs repo objects to remove from disk in cleanup()
        self._cleanup_repos = []
        self._test_name = test_name
        self._test_repo_container = test_repo_container

    def __getitem__(self, key):
        # scm_instance of one of the shared dump-based test repositories.
        return self._test_repo_container(key, self.alias).scm_instance()

    @property
    def repo(self):
        """
        Returns the "current" repository. This is the vcs_test repo of the last
        repo which has been created.
        """
        Repository = get_backend(self.alias)
        return Repository(self._repo_path)

    @property
    def backend(self):
        """
        Returns the backend implementation class.
        """
        return get_backend(self.alias)

    def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None,
                    bare=False):
        """Create a new on-disk repo, optionally cloned and pre-populated.

        :param commits: sequence of commit dicts (see _add_commits_to_repo).
        :param number_of_commits: generate this many default commits instead.
        :param _clone_repo: when given, clone from this repo's path.
        :param bare: create a bare repository (no checkout).
        """
        repo_name = self._next_repo_name()
        self._repo_path = get_new_dir(repo_name)
        repo_class = get_backend(self.alias)
        src_url = None
        if _clone_repo:
            src_url = _clone_repo.path
        repo = repo_class(self._repo_path, create=True, src_url=src_url, bare=bare)
        self._cleanup_repos.append(repo)

        commits = commits or [
            {'message': 'Commit %s of %s' % (x, repo_name)}
            for x in range(number_of_commits)]
        _add_commits_to_repo(repo, commits)
        return repo

    def clone_repo(self, repo):
        """Create a new repository as a clone of *repo*."""
        return self.create_repo(_clone_repo=repo)

    def cleanup(self):
        """Remove every created repository from disk."""
        for repo in self._cleanup_repos:
            shutil.rmtree(repo.path)

    def new_repo_path(self):
        """Reserve and return a fresh repository path (becomes current)."""
        repo_name = self._next_repo_name()
        self._repo_path = get_new_dir(repo_name)
        return self._repo_path

    def _next_repo_name(self):
        # Unique, filesystem-safe name from the test name and a counter.
        return "%s_%s" % (
            self.invalid_repo_name.sub('_', self._test_name),
            len(self._cleanup_repos))

    def add_file(self, repo, filename, content='Test content\n'):
        """Commit *filename* with *content* into *repo*."""
        imc = repo.in_memory_commit
        imc.add(FileNode(filename, content=content))
        imc.commit(
            message=u'Automatic commit from vcsbackend fixture',
            author=u'Automatic <automatic@rhodecode.com>')

    def ensure_file(self, filename, content='Test content\n'):
        """Commit *filename* into the current repository."""
        assert self._cleanup_repos, "Avoid writing into vcs_test repos"
        self.add_file(self.repo, filename, content)
864 864
865 865
def _add_commits_to_repo(vcs_repo, commits):
    """
    Create the given *commits* in *vcs_repo* via its in-memory commit API.

    Each entry of *commits* is a dict which may carry ``message``, ``added``,
    ``changed``, ``removed``, ``parents``, ``author``, ``date`` and ``branch``
    keys.

    :return: dict mapping commit message -> raw_id (empty for no commits).
    """
    commit_ids = {}
    if not commits:
        return commit_ids

    imc = vcs_repo.in_memory_commit
    commit = None

    for idx, commit in enumerate(commits):
        # NOTE: str() replaces the Python2-only ``unicode()`` builtin, which
        # raises NameError on Python 3 (python3 migration).
        message = str(commit.get('message', 'Commit %s' % idx))

        for node in commit.get('added', []):
            imc.add(FileNode(node.path, content=node.content))
        for node in commit.get('changed', []):
            imc.change(FileNode(node.path, content=node.content))
        for node in commit.get('removed', []):
            imc.remove(FileNode(node.path))

        # Parents are referenced by the message of a previously created commit.
        parents = [
            vcs_repo.get_commit(commit_id=commit_ids[p])
            for p in commit.get('parents', [])]

        # Guarantee each commit touches at least one file.
        operations = ('added', 'changed', 'removed')
        if not any((commit.get(o) for o in operations)):
            imc.add(FileNode('file_%s' % idx, content=message))

        commit = imc.commit(
            message=message,
            author=str(commit.get('author', 'Automatic <automatic@rhodecode.com>')),
            date=commit.get('date'),
            branch=commit.get('branch'),
            parents=parents)

        commit_ids[commit.message] = commit.raw_id

    return commit_ids
902 902
903 903
@pytest.fixture()
def reposerver(request):
    """
    Allows to serve a backend repository
    """
    server = RepoServer()
    request.addfinalizer(server.cleanup)
    return server
913 913
914 914
class RepoServer(object):
    """
    Utility to serve a local repository for the duration of a test case.

    Supports only Subversion so far.
    """

    # Base URL of the served repository; set by :meth:`serve`.
    url = None

    def __init__(self):
        # Popen handles of every server started, terminated in cleanup().
        self._cleanup_servers = []

    def serve(self, vcsrepo):
        """
        Start an `svnserve` daemon rooted at *vcsrepo* and expose it
        on ``svn://localhost``.

        :param vcsrepo: vcs repository instance; must have alias 'svn'.
        :raises TypeError: for any non-Subversion backend.
        """
        if vcsrepo.alias != 'svn':
            raise TypeError("Backend %s not supported" % vcsrepo.alias)

        proc = subprocess.Popen(
            ['svnserve', '-d', '--foreground', '--listen-host', 'localhost',
             '--root', vcsrepo.path])
        self._cleanup_servers.append(proc)
        self.url = 'svn://localhost'

    def cleanup(self):
        """Terminate and reap every server started via :meth:`serve`."""
        for proc in self._cleanup_servers:
            proc.terminate()
        # Wait after signalling all of them: reaping avoids zombie
        # svnserve processes piling up over a long test run.
        for proc in self._cleanup_servers:
            proc.wait()
940 940
941 941
@pytest.fixture()
def pr_util(backend, request, config_stub):
    """
    Utility for tests of models and for functional tests around pull requests.

    It gives an instance of :class:`PRTestUtility` which provides various
    utility methods around one pull request.

    This fixture uses `backend` and inherits its parameterization.
    """
    utility = PRTestUtility(backend)
    request.addfinalizer(utility.cleanup)
    return utility
957 957
958 958
class PRTestUtility(object):
    """
    Drives one pull request through its lifecycle for tests: creation,
    updates, comments, status votes and final cleanup.
    """

    # Per-instance state; class-level defaults keep attribute access safe
    # before create_pull_request() has run.
    pull_request = None
    pull_request_id = None
    mergeable_patcher = None
    mergeable_mock = None
    notification_patcher = None

    def __init__(self, backend):
        self.backend = backend

    def create_pull_request(
            self, commits=None, target_head=None, source_head=None,
            revisions=None, approved=False, author=None, mergeable=False,
            enable_notifications=True, name_suffix=u'', reviewers=None, observers=None,
            title=u"Test", description=u"Description"):
        """
        Create the pull request under test (idempotent: a second call
        returns the already created one).  Builds a master repo from
        *commits* plus a target and source clone pulled to the given heads.
        """
        self.set_mergeable(mergeable)
        if not enable_notifications:
            # mock notification side effect
            self.notification_patcher = mock.patch(
                'rhodecode.model.notification.NotificationModel.create')
            self.notification_patcher.start()

        if not self.pull_request:
            if not commits:
                # Default scenario: three commits, PR from c2 into c1.
                commits = [
                    {'message': 'c1'},
                    {'message': 'c2'},
                    {'message': 'c3'},
                ]
                target_head = 'c1'
                source_head = 'c2'
                revisions = ['c2']

            self.commit_ids = self.backend.create_master_repo(commits)
            self.target_repository = self.backend.create_repo(
                heads=[target_head], name_suffix=name_suffix)
            self.source_repository = self.backend.create_repo(
                heads=[source_head], name_suffix=name_suffix)
            self.author = author or UserModel().get_by_username(
                TEST_USER_ADMIN_LOGIN)

            model = PullRequestModel()
            self.create_parameters = {
                'created_by': self.author,
                'source_repo': self.source_repository.repo_name,
                'source_ref': self._default_branch_reference(source_head),
                'target_repo': self.target_repository.repo_name,
                'target_ref': self._default_branch_reference(target_head),
                'revisions': [self.commit_ids[r] for r in revisions],
                'reviewers': reviewers or self._get_reviewers(),
                'observers': observers or self._get_observers(),
                'title': title,
                'description': description,
            }
            self.pull_request = model.create(**self.create_parameters)
            # A freshly created PR must not have any versions yet.
            assert model.get_versions(self.pull_request) == []

            self.pull_request_id = self.pull_request.pull_request_id

            if approved:
                self.approve()

            Session().add(self.pull_request)
            Session().commit()

        return self.pull_request

    def approve(self):
        """Cast an 'approved' status vote for every reviewer."""
        self.create_status_votes(
            ChangesetStatus.STATUS_APPROVED,
            *self.pull_request.reviewers)

    def close(self):
        PullRequestModel().close_pull_request(self.pull_request, self.author)

    def _default_branch_reference(self, commit_message):
        # Reference format used by the PR model: 'branch:<name>:<commit_id>';
        # *commit_message* is the key into self.commit_ids.
        reference = '%s:%s:%s' % (
            'branch',
            self.backend.default_branch_name,
            self.commit_ids[commit_message])
        return reference

    def _get_reviewers(self):
        # Tuples of (username, reasons, mandatory, role, rules).
        role = PullRequestReviewers.ROLE_REVIEWER
        return [
            (TEST_USER_REGULAR_LOGIN, ['default1'], False, role, []),
            (TEST_USER_REGULAR2_LOGIN, ['default2'], False, role, []),
        ]

    def _get_observers(self):
        # No observers by default.
        return [

        ]

    def update_source_repository(self, head=None):
        """Pull *head* (default 'c3') into the source repository."""
        heads = [head or 'c3']
        self.backend.pull_heads(self.source_repository, heads=heads)

    def add_one_commit(self, head=None):
        """Add one commit to the source repo and return its commit id."""
        self.update_source_repository(head=head)
        old_commit_ids = set(self.pull_request.revisions)
        PullRequestModel().update_commits(self.pull_request, self.pull_request.author)
        commit_ids = set(self.pull_request.revisions)
        new_commit_ids = commit_ids - old_commit_ids
        assert len(new_commit_ids) == 1
        return new_commit_ids.pop()

    def remove_one_commit(self):
        """Strip the tip commit of the source repo; return its id."""
        assert len(self.pull_request.revisions) == 2
        source_vcs = self.source_repository.scm_instance()
        removed_commit_id = source_vcs.commit_ids[-1]

        # TODO: johbo: Git and Mercurial have an inconsistent vcs api here,
        # remove the if once that's sorted out.
        if self.backend.alias == "git":
            kwargs = {'branch_name': self.backend.default_branch_name}
        else:
            kwargs = {}
        source_vcs.strip(removed_commit_id, **kwargs)

        PullRequestModel().update_commits(self.pull_request, self.pull_request.author)
        assert len(self.pull_request.revisions) == 1
        return removed_commit_id

    def create_comment(self, linked_to=None):
        """Create a general PR comment; optionally link it to a version."""
        comment = CommentsModel().create(
            text=u"Test comment",
            repo=self.target_repository.repo_name,
            user=self.author,
            pull_request=self.pull_request)
        assert comment.pull_request_version_id is None

        if linked_to:
            PullRequestModel()._link_comments_to_version(linked_to)

        return comment

    def create_inline_comment(
            self, linked_to=None, line_no=u'n1', file_path='file_1'):
        """Create an inline comment on *file_path* at *line_no*."""
        comment = CommentsModel().create(
            text=u"Test comment",
            repo=self.target_repository.repo_name,
            user=self.author,
            line_no=line_no,
            f_path=file_path,
            pull_request=self.pull_request)
        assert comment.pull_request_version_id is None

        if linked_to:
            PullRequestModel()._link_comments_to_version(linked_to)

        return comment

    def create_version_of_pull_request(self):
        """Create the PR (if needed) and snapshot it into a version."""
        pull_request = self.create_pull_request()
        version = PullRequestModel()._create_version_from_snapshot(
            pull_request)
        return version

    def create_status_votes(self, status, *reviewers):
        """Set *status* on the PR for each given reviewer."""
        for reviewer in reviewers:
            ChangesetStatusModel().set_status(
                repo=self.pull_request.target_repo,
                status=status,
                user=reviewer.user_id,
                pull_request=self.pull_request)

    def set_mergeable(self, value):
        """Patch the merge-enabled setting so merge checks return *value*."""
        if not self.mergeable_patcher:
            self.mergeable_patcher = mock.patch.object(
                VcsSettingsModel, 'get_general_settings')
            self.mergeable_mock = self.mergeable_patcher.start()
        self.mergeable_mock.return_value = {
            'rhodecode_pr_merge_enabled': value}

    def cleanup(self):
        """Delete the PR (if it still exists) and stop active patchers."""
        # In case the source repository is already cleaned up, the pull
        # request will already be deleted.
        pull_request = PullRequest().get(self.pull_request_id)
        if pull_request:
            PullRequestModel().delete(pull_request, pull_request.author)
            Session().commit()

        if self.notification_patcher:
            self.notification_patcher.stop()

        if self.mergeable_patcher:
            self.mergeable_patcher.stop()
1148 1148
1149 1149
@pytest.fixture()
def user_admin(baseapp):
    """
    Provides the default admin test user as an instance of `db.User`.
    """
    return UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1157 1157
1158 1158
@pytest.fixture()
def user_regular(baseapp):
    """
    Provides the default regular test user as an instance of `db.User`.
    """
    return UserModel().get_by_username(TEST_USER_REGULAR_LOGIN)
1166 1166
1167 1167
@pytest.fixture()
def user_util(request, db_connection):
    """
    Provides a wired instance of `UserUtility` with integrated cleanup.
    """
    util = UserUtility(test_name=request.node.name)
    request.addfinalizer(util.cleanup)
    return util
1176 1176
1177 1177
1178 1178 # TODO: johbo: Split this up into utilities per domain or something similar
class UserUtility(object):
    """
    Creates users, repos, repo groups, user groups and permissions for a
    test and destroys them again in reverse dependency order on cleanup().
    """

    def __init__(self, test_name="test"):
        self._test_name = self._sanitize_name(test_name)
        self.fixture = Fixture()
        # Ids of every entity created with auto_cleanup, destroyed later.
        self.repo_group_ids = []
        self.repos_ids = []
        self.user_ids = []
        self.user_group_ids = []
        # (target_id, grantee_id) pairs for each permission flavour.
        self.user_repo_permission_ids = []
        self.user_group_repo_permission_ids = []
        self.user_repo_group_permission_ids = []
        self.user_group_repo_group_permission_ids = []
        self.user_user_group_permission_ids = []
        self.user_group_user_group_permission_ids = []
        self.user_permissions = []

    def _sanitize_name(self, name):
        # Square brackets (from parametrized test ids) are not valid in
        # entity names, replace them.
        for char in ['[', ']']:
            name = name.replace(char, '_')
        return name

    def create_repo_group(
            self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True):
        """Create a repo group with a unique, test-derived name."""
        group_name = "{prefix}_repogroup_{count}".format(
            prefix=self._test_name,
            count=len(self.repo_group_ids))
        repo_group = self.fixture.create_repo_group(
            group_name, cur_user=owner)
        if auto_cleanup:
            self.repo_group_ids.append(repo_group.group_id)
        return repo_group

    def create_repo(self, owner=TEST_USER_ADMIN_LOGIN, parent=None,
                    auto_cleanup=True, repo_type='hg', bare=False):
        """Create a repository, optionally inside repo group *parent*."""
        repo_name = "{prefix}_repository_{count}".format(
            prefix=self._test_name,
            count=len(self.repos_ids))

        repository = self.fixture.create_repo(
            repo_name, cur_user=owner, repo_group=parent, repo_type=repo_type, bare=bare)
        if auto_cleanup:
            self.repos_ids.append(repository.repo_id)
        return repository

    def create_user(self, auto_cleanup=True, **kwargs):
        """Create a user with a unique, test-derived username."""
        user_name = "{prefix}_user_{count}".format(
            prefix=self._test_name,
            count=len(self.user_ids))
        user = self.fixture.create_user(user_name, **kwargs)
        if auto_cleanup:
            self.user_ids.append(user.user_id)
        return user

    def create_additional_user_email(self, user, email):
        """Attach an extra email address to *user*."""
        uem = self.fixture.create_additional_user_email(user=user, email=email)
        return uem

    def create_user_with_group(self):
        """Create a user plus a user group containing that user."""
        user = self.create_user()
        user_group = self.create_user_group(members=[user])
        return user, user_group

    def create_user_group(self, owner=TEST_USER_ADMIN_LOGIN, members=None,
                          auto_cleanup=True, **kwargs):
        """Create a user group; *members* are added to it if given."""
        group_name = "{prefix}_usergroup_{count}".format(
            prefix=self._test_name,
            count=len(self.user_group_ids))
        user_group = self.fixture.create_user_group(
            group_name, cur_user=owner, **kwargs)

        if auto_cleanup:
            self.user_group_ids.append(user_group.users_group_id)
        if members:
            for user in members:
                UserGroupModel().add_user_to_group(user_group, user)
        return user_group

    def grant_user_permission(self, user_name, permission_name):
        # Disable inherited defaults so the explicit grant takes effect;
        # the actual grant is recorded and applied lazily via revoke list.
        self.inherit_default_user_permissions(user_name, False)
        self.user_permissions.append((user_name, permission_name))

    def grant_user_permission_to_repo_group(
            self, repo_group, user, permission_name):
        permission = RepoGroupModel().grant_user_permission(
            repo_group, user, permission_name)
        self.user_repo_group_permission_ids.append(
            (repo_group.group_id, user.user_id))
        return permission

    def grant_user_group_permission_to_repo_group(
            self, repo_group, user_group, permission_name):
        permission = RepoGroupModel().grant_user_group_permission(
            repo_group, user_group, permission_name)
        self.user_group_repo_group_permission_ids.append(
            (repo_group.group_id, user_group.users_group_id))
        return permission

    def grant_user_permission_to_repo(
            self, repo, user, permission_name):
        permission = RepoModel().grant_user_permission(
            repo, user, permission_name)
        self.user_repo_permission_ids.append(
            (repo.repo_id, user.user_id))
        return permission

    def grant_user_group_permission_to_repo(
            self, repo, user_group, permission_name):
        permission = RepoModel().grant_user_group_permission(
            repo, user_group, permission_name)
        self.user_group_repo_permission_ids.append(
            (repo.repo_id, user_group.users_group_id))
        return permission

    def grant_user_permission_to_user_group(
            self, target_user_group, user, permission_name):
        permission = UserGroupModel().grant_user_permission(
            target_user_group, user, permission_name)
        self.user_user_group_permission_ids.append(
            (target_user_group.users_group_id, user.user_id))
        return permission

    def grant_user_group_permission_to_user_group(
            self, target_user_group, user_group, permission_name):
        permission = UserGroupModel().grant_user_group_permission(
            target_user_group, user_group, permission_name)
        self.user_group_user_group_permission_ids.append(
            (target_user_group.users_group_id, user_group.users_group_id))
        return permission

    def revoke_user_permission(self, user_name, permission_name):
        # Restore inherited defaults before removing the explicit grant.
        self.inherit_default_user_permissions(user_name, True)
        UserModel().revoke_perm(user_name, permission_name)

    def inherit_default_user_permissions(self, user_name, value):
        """Toggle 'inherit default permissions' for *user_name*."""
        user = UserModel().get_by_username(user_name)
        user.inherit_default_permissions = value
        Session().add(user)
        Session().commit()

    def cleanup(self):
        """Destroy everything created, permissions first, users last."""
        self._cleanup_permissions()
        self._cleanup_repos()
        self._cleanup_repo_groups()
        self._cleanup_user_groups()
        self._cleanup_users()

    def _cleanup_permissions(self):
        if self.user_permissions:
            for user_name, permission_name in self.user_permissions:
                self.revoke_user_permission(user_name, permission_name)

        for permission in self.user_repo_permission_ids:
            RepoModel().revoke_user_permission(*permission)

        for permission in self.user_group_repo_permission_ids:
            RepoModel().revoke_user_group_permission(*permission)

        for permission in self.user_repo_group_permission_ids:
            RepoGroupModel().revoke_user_permission(*permission)

        for permission in self.user_group_repo_group_permission_ids:
            RepoGroupModel().revoke_user_group_permission(*permission)

        for permission in self.user_user_group_permission_ids:
            UserGroupModel().revoke_user_permission(*permission)

        for permission in self.user_group_user_group_permission_ids:
            UserGroupModel().revoke_user_group_permission(*permission)

    def _cleanup_repo_groups(self):
        def _repo_group_compare(first_group_id, second_group_id):
            """
            Gives higher priority to the groups with the most complex paths
            """
            first_group = RepoGroup.get(first_group_id)
            second_group = RepoGroup.get(second_group_id)
            first_group_parts = (
                len(first_group.group_name.split('/')) if first_group else 0)
            second_group_parts = (
                len(second_group.group_name.split('/')) if second_group else 0)
            # NOTE(review): `cmp` and sorted(cmp=...) are Python 2 only;
            # this needs functools.cmp_to_key / a key= function on Python 3.
            return cmp(second_group_parts, first_group_parts)

        sorted_repo_group_ids = sorted(
            self.repo_group_ids, cmp=_repo_group_compare)
        for repo_group_id in sorted_repo_group_ids:
            self.fixture.destroy_repo_group(repo_group_id)

    def _cleanup_repos(self):
        sorted_repos_ids = sorted(self.repos_ids)
        for repo_id in sorted_repos_ids:
            self.fixture.destroy_repo(repo_id)

    def _cleanup_user_groups(self):
        def _user_group_compare(first_group_id, second_group_id):
            """
            Gives higher priority to the groups with the most complex paths
            """
            first_group = UserGroup.get(first_group_id)
            second_group = UserGroup.get(second_group_id)
            first_group_parts = (
                len(first_group.users_group_name.split('/'))
                if first_group else 0)
            second_group_parts = (
                len(second_group.users_group_name.split('/'))
                if second_group else 0)
            # NOTE(review): Python 2 only, see _repo_group_compare above.
            return cmp(second_group_parts, first_group_parts)

        sorted_user_group_ids = sorted(
            self.user_group_ids, cmp=_user_group_compare)
        for user_group_id in sorted_user_group_ids:
            self.fixture.destroy_user_group(user_group_id)

    def _cleanup_users(self):
        for user_id in self.user_ids:
            self.fixture.destroy_user(user_id)
1395 1395
1396 1396
1397 1397 # TODO: Think about moving this into a pytest-pyro package and make it a
1398 1398 # pytest plugin
@pytest.hookimpl(tryfirst=True, hookwrapper=True)
def pytest_runtest_makereport(item, call):
    """
    Adding the remote traceback if the exception has this information.

    VCSServer attaches this information as the attribute `_vcs_server_traceback`
    to the exception instance.
    """
    hook_outcome = yield
    test_report = hook_outcome.get_result()
    if call.excinfo:
        _add_vcsserver_remote_traceback(test_report, call.excinfo.value)
1411 1411
1412 1412
def _add_vcsserver_remote_traceback(report, exc):
    """Append the VCSServer-side traceback of *exc*, if any, to *report*."""
    remote_tb = getattr(exc, '_vcs_server_traceback', None)
    if remote_tb:
        section_title = 'VCSServer remote traceback ' + report.when
        report.sections.append((section_title, remote_tb))
1419 1419
1420 1420
@pytest.fixture(scope='session')
def testrun():
    """Session-wide identifying metadata for this test run."""
    run_info = {
        'uuid': uuid.uuid4(),
        'start': datetime.datetime.utcnow().isoformat(),
        'timestamp': int(time.time()),
    }
    return run_info
1428 1428
1429 1429
class AppenlightClient(object):
    """Collects test metrics/tags and ships them to an Appenlight server."""

    url_template = '{url}?protocol_version=0.5'

    def __init__(
            self, url, api_key, add_server=True, add_timestamp=True,
            namespace=None, request=None, testrun=None):
        self.url = self.url_template.format(url=url)
        self.api_key = api_key
        self.add_server = add_server
        self.add_timestamp = add_timestamp
        self.namespace = namespace
        self.request = request
        self.server = socket.getfqdn(socket.gethostname())
        self.tags_before = {}
        self.tags_after = {}
        self.stats = []
        self.testrun = testrun or {}

    def tag_before(self, tag, value):
        """Record the value of *tag* before the measured section."""
        self.tags_before[tag] = value

    def tag_after(self, tag, value):
        """Record the value of *tag* after the measured section."""
        self.tags_after[tag] = value

    def collect(self, data):
        """Queue one stats record, filling in default metadata fields."""
        if self.add_server:
            data.setdefault('server', self.server)
        if self.add_timestamp:
            data.setdefault('date', datetime.datetime.utcnow().isoformat())
        if self.namespace:
            data.setdefault('namespace', self.namespace)
        if self.request:
            data.setdefault('request', self.request)
        self.stats.append(data)

    def send_stats(self):
        """Flush all queued stats, plus a tag summary record, via HTTP POST."""
        tags = [
            ('testrun', self.request),
            ('testrun.start', self.testrun['start']),
            ('testrun.timestamp', self.testrun['timestamp']),
            ('test', self.namespace),
        ]
        for key, value in self.tags_before.items():
            tags.append((key + '.before', value))
            try:
                # A delta only makes sense for numeric before/after pairs.
                tags.append((key + '.delta', self.tags_after[key] - value))
            except Exception:
                pass
        for key, value in self.tags_after.items():
            tags.append((key + '.after', value))
        self.collect({
            'message': "Collected tags",
            'tags': tags,
        })

        response = requests.post(
            self.url,
            headers={
                'X-appenlight-api-key': self.api_key},
            json=self.stats,
        )

        if response.status_code != 200:
            pprint.pprint(self.stats)
            print(response.headers)
            print(response.text)
            raise Exception('Sending to appenlight failed')
1499 1499
1500 1500
@pytest.fixture()
def gist_util(request, db_connection):
    """
    Provides a wired instance of `GistUtility` with integrated cleanup.
    """
    util = GistUtility()
    request.addfinalizer(util.cleanup)
    return util
1509 1509
1510 1510
class GistUtility(object):
    """Creates gists and destroys them again on :meth:`cleanup`."""

    def __init__(self):
        self.fixture = Fixture()
        self.gist_ids = []

    def create_gist(self, **kwargs):
        """Create a gist and remember its id for cleanup."""
        new_gist = self.fixture.create_gist(**kwargs)
        self.gist_ids.append(new_gist.gist_id)
        return new_gist

    def cleanup(self):
        """Destroy every gist created through this utility."""
        for gist_id in self.gist_ids:
            self.fixture.destroy_gists(str(gist_id))
1524 1524
1525 1525
@pytest.fixture()
def enabled_backends(request):
    """Copy of the backend aliases enabled via the ``--backends`` option."""
    return list(request.config.option.backends)
1530 1530
1531 1531
@pytest.fixture()
def settings_util(request, db_connection):
    """
    Provides a wired instance of `SettingsUtility` with integrated cleanup.
    """
    util = SettingsUtility()
    request.addfinalizer(util.cleanup)
    return util
1540 1540
1541 1541
class SettingsUtility(object):
    """
    Creates global and per-repo ui/settings rows and deletes the tracked
    ones again on cleanup().
    """

    def __init__(self):
        # Ids of settings created with cleanup=True, removed in cleanup().
        self.rhodecode_ui_ids = []
        self.rhodecode_setting_ids = []
        self.repo_rhodecode_ui_ids = []
        self.repo_rhodecode_setting_ids = []

    def create_repo_rhodecode_ui(
            self, repo, section, value, key=None, active=True, cleanup=True):
        """Create a per-repository ui row; key defaults to a content hash."""
        # NOTE(review): sha1 of a str works on Python 2 only; Python 3
        # requires bytes here — confirm before migrating.
        key = key or hashlib.sha1(
            '{}{}{}'.format(section, value, repo.repo_id)).hexdigest()

        setting = RepoRhodeCodeUi()
        setting.repository_id = repo.repo_id
        setting.ui_section = section
        setting.ui_value = value
        setting.ui_key = key
        setting.ui_active = active
        Session().add(setting)
        Session().commit()

        if cleanup:
            self.repo_rhodecode_ui_ids.append(setting.ui_id)
        return setting

    def create_rhodecode_ui(
            self, section, value, key=None, active=True, cleanup=True):
        """Create a global ui row; key defaults to a content hash."""
        key = key or hashlib.sha1('{}{}'.format(section, value)).hexdigest()

        setting = RhodeCodeUi()
        setting.ui_section = section
        setting.ui_value = value
        setting.ui_key = key
        setting.ui_active = active
        Session().add(setting)
        Session().commit()

        if cleanup:
            self.rhodecode_ui_ids.append(setting.ui_id)
        return setting

    def create_repo_rhodecode_setting(
            self, repo, name, value, type_, cleanup=True):
        """Create a per-repository application setting."""
        setting = RepoRhodeCodeSetting(
            repo.repo_id, key=name, val=value, type=type_)
        Session().add(setting)
        Session().commit()

        if cleanup:
            self.repo_rhodecode_setting_ids.append(setting.app_settings_id)
        return setting

    def create_rhodecode_setting(self, name, value, type_, cleanup=True):
        """Create a global application setting."""
        setting = RhodeCodeSetting(key=name, val=value, type=type_)
        Session().add(setting)
        Session().commit()

        if cleanup:
            self.rhodecode_setting_ids.append(setting.app_settings_id)

        return setting

    def cleanup(self):
        """Delete every tracked setting row and commit once at the end."""
        for id_ in self.rhodecode_ui_ids:
            setting = RhodeCodeUi.get(id_)
            Session().delete(setting)

        for id_ in self.rhodecode_setting_ids:
            setting = RhodeCodeSetting.get(id_)
            Session().delete(setting)

        for id_ in self.repo_rhodecode_ui_ids:
            setting = RepoRhodeCodeUi.get(id_)
            Session().delete(setting)

        for id_ in self.repo_rhodecode_setting_ids:
            setting = RepoRhodeCodeSetting.get(id_)
            Session().delete(setting)

        Session().commit()
1622 1622
1623 1623
@pytest.fixture()
def no_notifications(request):
    """Silence notification creation for the duration of a test."""
    patcher = mock.patch(
        'rhodecode.model.notification.NotificationModel.create')
    patcher.start()
    request.addfinalizer(patcher.stop)
1630 1630
1631 1631
@pytest.fixture(scope='session')
def repeat(request):
    """
    The number of repetitions is based on this fixture.

    Slower calls may divide it by 10 or 100. It is chosen in a way so that the
    tests are not too slow in our default test suite.
    """
    repetitions = request.config.getoption('--repeat')
    return repetitions
1641 1641
1642 1642
@pytest.fixture()
def rhodecode_fixtures():
    """Function-scoped :class:`Fixture` helper instance."""
    return Fixture()
1646 1646
1647 1647
@pytest.fixture()
def context_stub():
    """
    Stub context object.
    """
    return pyramid.testing.DummyResource()
1655 1655
1656 1656
@pytest.fixture()
def request_stub():
    """
    Stub request object.
    """
    from rhodecode.lib.base import bootstrap_request
    return bootstrap_request(scheme='https')
1665 1665
1666 1666
@pytest.fixture()
def config_stub(request, request_stub):
    """
    Set up pyramid.testing and return the Configurator.
    """
    from rhodecode.lib.base import bootstrap_config
    config = bootstrap_config(request=request_stub)

    # Tear the pyramid testing registry down when the test is over.
    request.addfinalizer(pyramid.testing.tearDown)

    return config
1680 1680
1681 1681
@pytest.fixture()
def StubIntegrationType():
    """
    Provides (and registers in the global registry) a minimal integration
    type class that records every event it is asked to send.
    """
    class _StubIntegrationType(IntegrationTypeBase):
        """ Test integration type class """

        key = 'test'
        display_name = 'Test integration type'
        description = 'A test integration type for testing'

        @classmethod
        def icon(cls):
            return 'test_icon_html_image'

        def __init__(self, settings):
            super(_StubIntegrationType, self).__init__(settings)
            self.sent_events = []  # for testing

        def send_event(self, event):
            # Events are only recorded, never delivered anywhere.
            self.sent_events.append(event)

        def settings_schema(self):
            # Colander schema matching the `stub_integration_settings`
            # fixture payload.
            class SettingsSchema(colander.Schema):
                test_string_field = colander.SchemaNode(
                    colander.String(),
                    missing=colander.required,
                    title='test string field',
                )
                test_int_field = colander.SchemaNode(
                    colander.Int(),
                    title='some integer setting',
                )
            return SettingsSchema()


    integration_type_registry.register_integration_type(_StubIntegrationType)
    return _StubIntegrationType
1718 1718
@pytest.fixture()
def stub_integration_settings():
    """Valid settings payload for the stub integration type."""
    return {
        'test_string_field': 'some data',
        'test_int_field': 100,
    }
1725 1725
1726 1726
@pytest.fixture()
def repo_integration_stub(request, repo_stub, StubIntegrationType,
                          stub_integration_settings):
    """Repo-scoped stub integration, deleted again after the test."""
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test repo integration',
        repo=repo_stub, repo_group=None, child_repos_only=None)

    request.addfinalizer(lambda: IntegrationModel().delete(integration))

    return integration
1740 1740
1741 1741
@pytest.fixture()
def repogroup_integration_stub(request, test_repo_group, StubIntegrationType,
                               stub_integration_settings):
    """Repo-group-scoped stub integration (direct children only)."""
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test repogroup integration',
        repo=None, repo_group=test_repo_group, child_repos_only=True)

    request.addfinalizer(lambda: IntegrationModel().delete(integration))

    return integration
1755 1755
1756 1756
@pytest.fixture()
def repogroup_recursive_integration_stub(request, test_repo_group,
                                         StubIntegrationType, stub_integration_settings):
    """Repo-group-scoped stub integration applying to all descendants."""
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test recursive repogroup integration',
        repo=None, repo_group=test_repo_group, child_repos_only=False)

    request.addfinalizer(lambda: IntegrationModel().delete(integration))

    return integration
1770 1770
1771 1771
@pytest.fixture()
def global_integration_stub(request, StubIntegrationType,
                            stub_integration_settings):
    """Globally scoped stub integration."""
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test global integration',
        repo=None, repo_group=None, child_repos_only=None)

    request.addfinalizer(lambda: IntegrationModel().delete(integration))

    return integration
1785 1785
1786 1786
@pytest.fixture()
def root_repos_integration_stub(request, StubIntegrationType,
                                stub_integration_settings):
    """Stub integration scoped to root-level repositories only."""
    # NOTE(review): the display name duplicates global_integration_stub's
    # 'test global integration' — looks like a copy-paste; confirm intent.
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test global integration',
        repo=None, repo_group=None, child_repos_only=True)

    request.addfinalizer(lambda: IntegrationModel().delete(integration))

    return integration
1800 1800
1801 1801
@pytest.fixture()
def local_dt_to_utc():
    """Factory converting a naive local datetime into a naive UTC one."""
    def _factory(dt):
        localized = dt.replace(tzinfo=dateutil.tz.tzlocal())
        return localized.astimezone(dateutil.tz.tzutc()).replace(tzinfo=None)
    return _factory
1808 1808
1809 1809
@pytest.fixture()
def disable_anonymous_user(request, baseapp):
    """Turn anonymous access off for the duration of a test."""
    set_anonymous_access(False)
    request.addfinalizer(lambda: set_anonymous_access(True))
1817 1817
1818 1818
@pytest.fixture(scope='module')
def rc_fixture(request):
    """Module-scoped :class:`Fixture` helper instance."""
    return Fixture()
1822 1822
1823 1823
@pytest.fixture()
def repo_groups(request):
    """Create a (zombie, parent, parent/child) repo group triple."""
    fixture = Fixture()

    session = Session()
    zombie_group = fixture.create_repo_group('zombie')
    parent_group = fixture.create_repo_group('parent')
    child_group = fixture.create_repo_group('parent/child')

    # Sanity-check the created hierarchy before handing it to the test.
    groups_in_db = session.query(RepoGroup).all()
    assert len(groups_in_db) == 3
    assert child_group.group_parent_id == parent_group.group_id

    def _destroy_groups():
        # Children before parents, so the parent group is empty on delete.
        for group in (zombie_group, child_group, parent_group):
            fixture.destroy_repo_group(group)
    request.addfinalizer(_destroy_groups)

    return zombie_group, parent_group, child_group
@@ -1,203 +1,203 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Test suite for making push/pull operations
23 23 """
24 24
25 25 import os
26 26 import sys
27 27 import shutil
28 28 import logging
29 29 from os.path import join as jn
30 30 from os.path import dirname as dn
31 31
32 32 from tempfile import _RandomNameSequence
33 from subprocess32 import Popen, PIPE
33 from subprocess import Popen, PIPE
34 34
35 35 from rhodecode.lib.utils2 import engine_from_config
36 36 from rhodecode.lib.auth import get_crypt_password
37 37 from rhodecode.model import init_model
38 38 from rhodecode.model import meta
39 39 from rhodecode.model.db import User, Repository
40 40
41 41 from rhodecode.tests import TESTS_TMP_PATH, HG_REPO
42 42
43 43 rel_path = dn(dn(dn(dn(os.path.abspath(__file__)))))
44 44
45 45
46 46 USER = 'test_admin'
47 47 PASS = 'test12'
48 48 HOST = 'rc.local'
49 49 METHOD = 'pull'
50 50 DEBUG = True
51 51 log = logging.getLogger(__name__)
52 52
53 53
54 54 class Command(object):
55 55
56 56 def __init__(self, cwd):
57 57 self.cwd = cwd
58 58
59 59 def execute(self, cmd, *args):
60 60 """Runs command on the system with given ``args``.
61 61 """
62 62
63 63 command = cmd + ' ' + ' '.join(args)
64 64 log.debug('Executing %s', command)
65 65 if DEBUG:
66 66 print(command)
67 67 p = Popen(command, shell=True, stdout=PIPE, stderr=PIPE, cwd=self.cwd)
68 68 stdout, stderr = p.communicate()
69 69 if DEBUG:
70 70 print('{} {}'.format(stdout, stderr))
71 71 return stdout, stderr
72 72
73 73
74 74 def get_session():
75 75 conf = {}
76 76 engine = engine_from_config(conf, 'sqlalchemy.db1.')
77 77 init_model(engine)
78 78 sa = meta.Session
79 79 return sa
80 80
81 81
82 82 def create_test_user(force=True):
83 83 print('creating test user')
84 84 sa = get_session()
85 85
86 86 user = sa.query(User).filter(User.username == USER).scalar()
87 87
88 88 if force and user is not None:
89 89 print('removing current user')
90 90 for repo in sa.query(Repository).filter(Repository.user == user).all():
91 91 sa.delete(repo)
92 92 sa.delete(user)
93 93 sa.commit()
94 94
95 95 if user is None or force:
96 96 print('creating new one')
97 97 new_usr = User()
98 98 new_usr.username = USER
99 99 new_usr.password = get_crypt_password(PASS)
100 100 new_usr.email = 'mail@mail.com'
101 101 new_usr.name = 'test'
102 102 new_usr.lastname = 'lasttestname'
103 103 new_usr.active = True
104 104 new_usr.admin = True
105 105 sa.add(new_usr)
106 106 sa.commit()
107 107
108 108 print('done')
109 109
110 110
111 111 def create_test_repo(force=True):
112 112 print('creating test repo')
113 113 from rhodecode.model.repo import RepoModel
114 114 sa = get_session()
115 115
116 116 user = sa.query(User).filter(User.username == USER).scalar()
117 117 if user is None:
118 118 raise Exception('user not found')
119 119
120 120 repo = sa.query(Repository).filter(Repository.repo_name == HG_REPO).scalar()
121 121
122 122 if repo is None:
123 123 print('repo not found creating')
124 124
125 125 form_data = {'repo_name': HG_REPO,
126 126 'repo_type': 'hg',
127 127 'private':False,
128 128 'clone_uri': '' }
129 129 rm = RepoModel(sa)
130 130 rm.base_path = '/home/hg'
131 131 rm.create(form_data, user)
132 132
133 133 print('done')
134 134
135 135
136 136 def get_anonymous_access():
137 137 sa = get_session()
138 138 return sa.query(User).filter(User.username == 'default').one().active
139 139
140 140
141 141 #==============================================================================
142 142 # TESTS
143 143 #==============================================================================
144 144 def test_clone_with_credentials(repo=HG_REPO, method=METHOD,
145 145 seq=None, backend='hg', check_output=True):
146 146 cwd = path = jn(TESTS_TMP_PATH, repo)
147 147
148 148 if seq is None:
149 149 seq = _RandomNameSequence().next()
150 150
151 151 try:
152 152 shutil.rmtree(path, ignore_errors=True)
153 153 os.makedirs(path)
154 154 except OSError:
155 155 raise
156 156
157 157 clone_url = 'http://%(user)s:%(pass)s@%(host)s/%(cloned_repo)s' % \
158 158 {'user': USER,
159 159 'pass': PASS,
160 160 'host': HOST,
161 161 'cloned_repo': repo, }
162 162
163 163 dest = path + seq
164 164 if method == 'pull':
165 165 stdout, stderr = Command(cwd).execute(backend, method, '--cwd', dest, clone_url)
166 166 else:
167 167 stdout, stderr = Command(cwd).execute(backend, method, clone_url, dest)
168 168 if check_output:
169 169 if backend == 'hg':
170 170 assert """adding file changes""" in stdout, 'no messages about cloning'
171 171 assert """abort""" not in stderr, 'got error from clone'
172 172 elif backend == 'git':
173 173 assert """Cloning into""" in stdout, 'no messages about cloning'
174 174
175 175
176 176 if __name__ == '__main__':
177 177 try:
178 178 create_test_user(force=False)
179 179 seq = None
180 180 import time
181 181
182 182 try:
183 183 METHOD = sys.argv[3]
184 184 except Exception:
185 185 pass
186 186
187 187 try:
188 188 backend = sys.argv[4]
189 189 except Exception:
190 190 backend = 'hg'
191 191
192 192 if METHOD == 'pull':
193 193 seq = _RandomNameSequence().next()
194 194 test_clone_with_credentials(repo=sys.argv[1], method='clone',
195 195 seq=seq, backend=backend)
196 196 s = time.time()
197 197 for i in range(1, int(sys.argv[2]) + 1):
198 198 print('take {}'.format(i))
199 199 test_clone_with_credentials(repo=sys.argv[1], method=METHOD,
200 200 seq=seq, backend=backend)
201 201 print('time taken %.4f' % (time.time() - s))
202 202 except Exception as e:
203 203 sys.exit('stop on %s' % e)
@@ -1,200 +1,200 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 import os
23 23 import time
24 24 import tempfile
25 25 import pytest
26 import subprocess32
26 import subprocess
27 27 import configobj
28 28 import logging
29 29 from urllib.request import urlopen
30 30 from urllib.error import URLError
31 31 from pyramid.compat import configparser
32 32
33 33
34 34 from rhodecode.tests import TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS
35 35 from rhodecode.tests.utils import is_url_reachable
36 36
37 37 log = logging.getLogger(__name__)
38 38
39 39
40 40 def get_port(pyramid_config):
41 41 config = configparser.ConfigParser()
42 42 config.read(pyramid_config)
43 43 return config.get('server:main', 'port')
44 44
45 45
46 46 def get_host_url(pyramid_config):
47 47 """Construct the host url using the port in the test configuration."""
48 48 return '127.0.0.1:%s' % get_port(pyramid_config)
49 49
50 50
51 51 def assert_no_running_instance(url):
52 52 if is_url_reachable(url):
53 53 print("Hint: Usually this means another instance of server "
54 54 "is running in the background at %s." % url)
55 55 pytest.fail(
56 56 "Port is not free at %s, cannot start server at" % url)
57 57
58 58
59 59 class ServerBase(object):
60 60 _args = []
61 61 log_file_name = 'NOT_DEFINED.log'
62 62 status_url_tmpl = 'http://{host}:{port}'
63 63
64 64 def __init__(self, config_file, log_file):
65 65 self.config_file = config_file
66 66 config_data = configobj.ConfigObj(config_file)
67 67 self._config = config_data['server:main']
68 68
69 69 self._args = []
70 70 self.log_file = log_file or os.path.join(
71 71 tempfile.gettempdir(), self.log_file_name)
72 72 self.process = None
73 73 self.server_out = None
74 74 log.info("Using the {} configuration:{}".format(
75 75 self.__class__.__name__, config_file))
76 76
77 77 if not os.path.isfile(config_file):
78 78 raise RuntimeError('Failed to get config at {}'.format(config_file))
79 79
80 80 @property
81 81 def command(self):
82 82 return ' '.join(self._args)
83 83
84 84 @property
85 85 def http_url(self):
86 86 template = 'http://{host}:{port}/'
87 87 return template.format(**self._config)
88 88
89 89 def host_url(self):
90 90 return 'http://' + get_host_url(self.config_file)
91 91
92 92 def get_rc_log(self):
93 93 with open(self.log_file) as f:
94 94 return f.read()
95 95
96 96 def wait_until_ready(self, timeout=30):
97 97 host = self._config['host']
98 98 port = self._config['port']
99 99 status_url = self.status_url_tmpl.format(host=host, port=port)
100 100 start = time.time()
101 101
102 102 while time.time() - start < timeout:
103 103 try:
104 104 urlopen(status_url)
105 105 break
106 106 except URLError:
107 107 time.sleep(0.2)
108 108 else:
109 109 pytest.fail(
110 110 "Starting the {} failed or took more than {} "
111 111 "seconds. cmd: `{}`".format(
112 112 self.__class__.__name__, timeout, self.command))
113 113
114 114 log.info('Server of {} ready at url {}'.format(
115 115 self.__class__.__name__, status_url))
116 116
117 117 def shutdown(self):
118 118 self.process.kill()
119 119 self.server_out.flush()
120 120 self.server_out.close()
121 121
122 122 def get_log_file_with_port(self):
123 123 log_file = list(self.log_file.partition('.log'))
124 124 log_file.insert(1, get_port(self.config_file))
125 125 log_file = ''.join(log_file)
126 126 return log_file
127 127
128 128
129 129 class RcVCSServer(ServerBase):
130 130 """
131 131 Represents a running VCSServer instance.
132 132 """
133 133
134 134 log_file_name = 'rc-vcsserver.log'
135 135 status_url_tmpl = 'http://{host}:{port}/status'
136 136
137 137 def __init__(self, config_file, log_file=None):
138 138 super(RcVCSServer, self).__init__(config_file, log_file)
139 139 self._args = ['gunicorn', '--paste', self.config_file]
140 140
141 141 def start(self):
142 142 env = os.environ.copy()
143 143
144 144 self.log_file = self.get_log_file_with_port()
145 145 self.server_out = open(self.log_file, 'w')
146 146
147 147 host_url = self.host_url()
148 148 assert_no_running_instance(host_url)
149 149
150 150 log.info('rhodecode-vcsserver start command: {}'.format(' '.join(self._args)))
151 151 log.info('rhodecode-vcsserver starting at: {}'.format(host_url))
152 152 log.info('rhodecode-vcsserver command: {}'.format(self.command))
153 153 log.info('rhodecode-vcsserver logfile: {}'.format(self.log_file))
154 154
155 self.process = subprocess32.Popen(
155 self.process = subprocess.Popen(
156 156 self._args, bufsize=0, env=env,
157 157 stdout=self.server_out, stderr=self.server_out)
158 158
159 159
160 160 class RcWebServer(ServerBase):
161 161 """
162 162 Represents a running RCE web server used as a test fixture.
163 163 """
164 164
165 165 log_file_name = 'rc-web.log'
166 166 status_url_tmpl = 'http://{host}:{port}/_admin/ops/ping'
167 167
168 168 def __init__(self, config_file, log_file=None):
169 169 super(RcWebServer, self).__init__(config_file, log_file)
170 170 self._args = [
171 171 'gunicorn', '--worker-class', 'gevent', '--paste', config_file]
172 172
173 173 def start(self):
174 174 env = os.environ.copy()
175 175 env['RC_NO_TMP_PATH'] = '1'
176 176
177 177 self.log_file = self.get_log_file_with_port()
178 178 self.server_out = open(self.log_file, 'w')
179 179
180 180 host_url = self.host_url()
181 181 assert_no_running_instance(host_url)
182 182
183 183 log.info('rhodecode-web starting at: {}'.format(host_url))
184 184 log.info('rhodecode-web command: {}'.format(self.command))
185 185 log.info('rhodecode-web logfile: {}'.format(self.log_file))
186 186
187 self.process = subprocess32.Popen(
187 self.process = subprocess.Popen(
188 188 self._args, bufsize=0, env=env,
189 189 stdout=self.server_out, stderr=self.server_out)
190 190
191 191 def repo_clone_url(self, repo_name, **kwargs):
192 192 params = {
193 193 'user': TEST_USER_ADMIN_LOGIN,
194 194 'passwd': TEST_USER_ADMIN_PASS,
195 195 'host': get_host_url(self.config_file),
196 196 'cloned_repo': repo_name,
197 197 }
198 198 params.update(**kwargs)
199 199 _url = 'http://%(user)s:%(passwd)s@%(host)s/%(cloned_repo)s' % params
200 200 return _url
@@ -1,468 +1,468 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import threading
22 22 import time
23 23 import logging
24 24 import os.path
25 import subprocess32
25 import subprocess
26 26 import tempfile
27 27 import urllib.request, urllib.error, urllib.parse
28 28 from lxml.html import fromstring, tostring
29 29 from lxml.cssselect import CSSSelector
30 30 import urllib.parse.urlparse
31 31 from urllib.parse import unquote_plus
32 32 import webob
33 33
34 34 from webtest.app import TestResponse, TestApp
35 35 from webtest.compat import print_stderr
36 36
37 37 import pytest
38 38 import rc_testdata
39 39
40 40 from rhodecode.model.db import User, Repository
41 41 from rhodecode.model.meta import Session
42 42 from rhodecode.model.scm import ScmModel
43 43 from rhodecode.lib.vcs.backends.svn.repository import SubversionRepository
44 44 from rhodecode.lib.vcs.backends.base import EmptyCommit
45 45 from rhodecode.tests import login_user_session
46 46
47 47 log = logging.getLogger(__name__)
48 48
49 49
50 50 class CustomTestResponse(TestResponse):
51 51
52 52 def _save_output(self, out):
53 53 f = tempfile.NamedTemporaryFile(delete=False, prefix='rc-test-', suffix='.html')
54 54 f.write(out)
55 55 return f.name
56 56
57 57 def mustcontain(self, *strings, **kw):
58 58 """
59 59 Assert that the response contains all of the strings passed
60 60 in as arguments.
61 61
62 62 Equivalent to::
63 63
64 64 assert string in res
65 65 """
66 66 print_body = kw.pop('print_body', False)
67 67 if 'no' in kw:
68 68 no = kw['no']
69 69 del kw['no']
70 70 if isinstance(no, str):
71 71 no = [no]
72 72 else:
73 73 no = []
74 74 if kw:
75 75 raise TypeError(
76 76 "The only keyword argument allowed is 'no' got %s" % kw)
77 77
78 78 f = self._save_output(str(self))
79 79
80 80 for s in strings:
81 81 if not s in self:
82 82 print_stderr("Actual response (no %r):" % s)
83 83 print_stderr("body output saved as `%s`" % f)
84 84 if print_body:
85 85 print_stderr(str(self))
86 86 raise IndexError(
87 87 "Body does not contain string %r, body output saved as %s" % (s, f))
88 88
89 89 for no_s in no:
90 90 if no_s in self:
91 91 print_stderr("Actual response (has %r)" % no_s)
92 92 print_stderr("body output saved as `%s`" % f)
93 93 if print_body:
94 94 print_stderr(str(self))
95 95 raise IndexError(
96 96 "Body contains bad string %r, body output saved as %s" % (no_s, f))
97 97
98 98 def assert_response(self):
99 99 return AssertResponse(self)
100 100
101 101 def get_session_from_response(self):
102 102 """
103 103 This returns the session from a response object.
104 104 """
105 105 from rhodecode.lib.rc_beaker import session_factory_from_settings
106 106 session = session_factory_from_settings(self.test_app._pyramid_settings)
107 107 return session(self.request)
108 108
109 109
110 110 class TestRequest(webob.BaseRequest):
111 111
112 112 # for py.test
113 113 disabled = True
114 114 ResponseClass = CustomTestResponse
115 115
116 116 def add_response_callback(self, callback):
117 117 pass
118 118
119 119
120 120 class CustomTestApp(TestApp):
121 121 """
122 122 Custom app to make mustcontain more Useful, and extract special methods
123 123 """
124 124 RequestClass = TestRequest
125 125 rc_login_data = {}
126 126 rc_current_session = None
127 127
128 128 def login(self, username=None, password=None):
129 129 from rhodecode.lib import auth
130 130
131 131 if username and password:
132 132 session = login_user_session(self, username, password)
133 133 else:
134 134 session = login_user_session(self)
135 135
136 136 self.rc_login_data['csrf_token'] = auth.get_csrf_token(session)
137 137 self.rc_current_session = session
138 138 return session['rhodecode_user']
139 139
140 140 @property
141 141 def csrf_token(self):
142 142 return self.rc_login_data['csrf_token']
143 143
144 144 @property
145 145 def _pyramid_registry(self):
146 146 return self.app.config.registry
147 147
148 148 @property
149 149 def _pyramid_settings(self):
150 150 return self._pyramid_registry.settings
151 151
152 152
153 153 def set_anonymous_access(enabled):
154 154 """(Dis)allows anonymous access depending on parameter `enabled`"""
155 155 user = User.get_default_user()
156 156 user.active = enabled
157 157 Session().add(user)
158 158 Session().commit()
159 159 time.sleep(1.5) # must sleep for cache (1s to expire)
160 160 log.info('anonymous access is now: %s', enabled)
161 161 assert enabled == User.get_default_user().active, (
162 162 'Cannot set anonymous access')
163 163
164 164
165 165 def check_xfail_backends(node, backend_alias):
166 166 # Using "xfail_backends" here intentionally, since this marks work
167 167 # which is "to be done" soon.
168 168 skip_marker = node.get_closest_marker('xfail_backends')
169 169 if skip_marker and backend_alias in skip_marker.args:
170 170 msg = "Support for backend %s to be developed." % (backend_alias, )
171 171 msg = skip_marker.kwargs.get('reason', msg)
172 172 pytest.xfail(msg)
173 173
174 174
175 175 def check_skip_backends(node, backend_alias):
176 176 # Using "skip_backends" here intentionally, since this marks work which is
177 177 # not supported.
178 178 skip_marker = node.get_closest_marker('skip_backends')
179 179 if skip_marker and backend_alias in skip_marker.args:
180 180 msg = "Feature not supported for backend %s." % (backend_alias, )
181 181 msg = skip_marker.kwargs.get('reason', msg)
182 182 pytest.skip(msg)
183 183
184 184
185 185 def extract_git_repo_from_dump(dump_name, repo_name):
186 186 """Create git repo `repo_name` from dump `dump_name`."""
187 187 repos_path = ScmModel().repos_path
188 188 target_path = os.path.join(repos_path, repo_name)
189 189 rc_testdata.extract_git_dump(dump_name, target_path)
190 190 return target_path
191 191
192 192
193 193 def extract_hg_repo_from_dump(dump_name, repo_name):
194 194 """Create hg repo `repo_name` from dump `dump_name`."""
195 195 repos_path = ScmModel().repos_path
196 196 target_path = os.path.join(repos_path, repo_name)
197 197 rc_testdata.extract_hg_dump(dump_name, target_path)
198 198 return target_path
199 199
200 200
201 201 def extract_svn_repo_from_dump(dump_name, repo_name):
202 202 """Create a svn repo `repo_name` from dump `dump_name`."""
203 203 repos_path = ScmModel().repos_path
204 204 target_path = os.path.join(repos_path, repo_name)
205 205 SubversionRepository(target_path, create=True)
206 206 _load_svn_dump_into_repo(dump_name, target_path)
207 207 return target_path
208 208
209 209
210 210 def assert_message_in_log(log_records, message, levelno, module):
211 211 messages = [
212 212 r.message for r in log_records
213 213 if r.module == module and r.levelno == levelno
214 214 ]
215 215 assert message in messages
216 216
217 217
218 218 def _load_svn_dump_into_repo(dump_name, repo_path):
219 219 """
220 220 Utility to populate a svn repository with a named dump
221 221
222 222 Currently the dumps are in rc_testdata. They might later on be
223 223 integrated with the main repository once they stabilize more.
224 224 """
225 225 dump = rc_testdata.load_svn_dump(dump_name)
226 load_dump = subprocess32.Popen(
226 load_dump = subprocess.Popen(
227 227 ['svnadmin', 'load', repo_path],
228 stdin=subprocess32.PIPE, stdout=subprocess32.PIPE,
229 stderr=subprocess32.PIPE)
228 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
229 stderr=subprocess.PIPE)
230 230 out, err = load_dump.communicate(dump)
231 231 if load_dump.returncode != 0:
232 232 log.error("Output of load_dump command: %s", out)
233 233 log.error("Error output of load_dump command: %s", err)
234 234 raise Exception(
235 235 'Failed to load dump "%s" into repository at path "%s".'
236 236 % (dump_name, repo_path))
237 237
238 238
239 239 class AssertResponse(object):
240 240 """
241 241 Utility that helps to assert things about a given HTML response.
242 242 """
243 243
244 244 def __init__(self, response):
245 245 self.response = response
246 246
247 247 def get_imports(self):
248 248 return fromstring, tostring, CSSSelector
249 249
250 250 def one_element_exists(self, css_selector):
251 251 self.get_element(css_selector)
252 252
253 253 def no_element_exists(self, css_selector):
254 254 assert not self._get_elements(css_selector)
255 255
256 256 def element_equals_to(self, css_selector, expected_content):
257 257 element = self.get_element(css_selector)
258 258 element_text = self._element_to_string(element)
259 259 assert expected_content in element_text
260 260
261 261 def element_contains(self, css_selector, expected_content):
262 262 element = self.get_element(css_selector)
263 263 assert expected_content in element.text_content()
264 264
265 265 def element_value_contains(self, css_selector, expected_content):
266 266 element = self.get_element(css_selector)
267 267 assert expected_content in element.value
268 268
269 269 def contains_one_link(self, link_text, href):
270 270 fromstring, tostring, CSSSelector = self.get_imports()
271 271 doc = fromstring(self.response.body)
272 272 sel = CSSSelector('a[href]')
273 273 elements = [
274 274 e for e in sel(doc) if e.text_content().strip() == link_text]
275 275 assert len(elements) == 1, "Did not find link or found multiple links"
276 276 self._ensure_url_equal(elements[0].attrib.get('href'), href)
277 277
278 278 def contains_one_anchor(self, anchor_id):
279 279 fromstring, tostring, CSSSelector = self.get_imports()
280 280 doc = fromstring(self.response.body)
281 281 sel = CSSSelector('#' + anchor_id)
282 282 elements = sel(doc)
283 283 assert len(elements) == 1, 'cannot find 1 element {}'.format(anchor_id)
284 284
285 285 def _ensure_url_equal(self, found, expected):
286 286 assert _Url(found) == _Url(expected)
287 287
288 288 def get_element(self, css_selector):
289 289 elements = self._get_elements(css_selector)
290 290 assert len(elements) == 1, 'cannot find 1 element {}'.format(css_selector)
291 291 return elements[0]
292 292
293 293 def get_elements(self, css_selector):
294 294 return self._get_elements(css_selector)
295 295
296 296 def _get_elements(self, css_selector):
297 297 fromstring, tostring, CSSSelector = self.get_imports()
298 298 doc = fromstring(self.response.body)
299 299 sel = CSSSelector(css_selector)
300 300 elements = sel(doc)
301 301 return elements
302 302
303 303 def _element_to_string(self, element):
304 304 fromstring, tostring, CSSSelector = self.get_imports()
305 305 return tostring(element)
306 306
307 307
308 308 class _Url(object):
309 309 """
310 310 A url object that can be compared with other url orbjects
311 311 without regard to the vagaries of encoding, escaping, and ordering
312 312 of parameters in query strings.
313 313
314 314 Inspired by
315 315 http://stackoverflow.com/questions/5371992/comparing-two-urls-in-python
316 316 """
317 317
318 318 def __init__(self, url):
319 319 parts = urllib.parse.urlparse(url)
320 320 _query = frozenset(urllib.parse.parse_qsl(parts.query))
321 321 _path = unquote_plus(parts.path)
322 322 parts = parts._replace(query=_query, path=_path)
323 323 self.parts = parts
324 324
325 325 def __eq__(self, other):
326 326 return self.parts == other.parts
327 327
328 328 def __hash__(self):
329 329 return hash(self.parts)
330 330
331 331
332 332 def run_test_concurrently(times, raise_catched_exc=True):
333 333 """
334 334 Add this decorator to small pieces of code that you want to test
335 335 concurrently
336 336
337 337 ex:
338 338
339 339 @test_concurrently(25)
340 340 def my_test_function():
341 341 ...
342 342 """
343 343 def test_concurrently_decorator(test_func):
344 344 def wrapper(*args, **kwargs):
345 345 exceptions = []
346 346
347 347 def call_test_func():
348 348 try:
349 349 test_func(*args, **kwargs)
350 350 except Exception as e:
351 351 exceptions.append(e)
352 352 if raise_catched_exc:
353 353 raise
354 354 threads = []
355 355 for i in range(times):
356 356 threads.append(threading.Thread(target=call_test_func))
357 357 for t in threads:
358 358 t.start()
359 359 for t in threads:
360 360 t.join()
361 361 if exceptions:
362 362 raise Exception(
363 363 'test_concurrently intercepted %s exceptions: %s' % (
364 364 len(exceptions), exceptions))
365 365 return wrapper
366 366 return test_concurrently_decorator
367 367
368 368
369 369 def wait_for_url(url, timeout=10):
370 370 """
371 371 Wait until URL becomes reachable.
372 372
373 373 It polls the URL until the timeout is reached or it became reachable.
374 374 If will call to `py.test.fail` in case the URL is not reachable.
375 375 """
376 376 timeout = time.time() + timeout
377 377 last = 0
378 378 wait = 0.1
379 379
380 380 while timeout > last:
381 381 last = time.time()
382 382 if is_url_reachable(url):
383 383 break
384 384 elif (last + wait) > time.time():
385 385 # Go to sleep because not enough time has passed since last check.
386 386 time.sleep(wait)
387 387 else:
388 388 pytest.fail("Timeout while waiting for URL {}".format(url))
389 389
390 390
391 391 def is_url_reachable(url):
392 392 try:
393 393 urllib.request.urlopen(url)
394 394 except urllib.error.URLError:
395 395 log.exception('URL `{}` reach error'.format(url))
396 396 return False
397 397 return True
398 398
399 399
400 400 def repo_on_filesystem(repo_name):
401 401 from rhodecode.lib import vcs
402 402 from rhodecode.tests import TESTS_TMP_PATH
403 403 repo = vcs.get_vcs_instance(
404 404 os.path.join(TESTS_TMP_PATH, repo_name), create=False)
405 405 return repo is not None
406 406
407 407
408 408 def commit_change(
409 409 repo, filename, content, message, vcs_type, parent=None, newfile=False):
410 410 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
411 411
412 412 repo = Repository.get_by_repo_name(repo)
413 413 _commit = parent
414 414 if not parent:
415 415 _commit = EmptyCommit(alias=vcs_type)
416 416
417 417 if newfile:
418 418 nodes = {
419 419 filename: {
420 420 'content': content
421 421 }
422 422 }
423 423 commit = ScmModel().create_nodes(
424 424 user=TEST_USER_ADMIN_LOGIN, repo=repo,
425 425 message=message,
426 426 nodes=nodes,
427 427 parent_commit=_commit,
428 428 author='{} <admin@rhodecode.com>'.format(TEST_USER_ADMIN_LOGIN),
429 429 )
430 430 else:
431 431 commit = ScmModel().commit_change(
432 432 repo=repo.scm_instance(), repo_name=repo.repo_name,
433 433 commit=parent, user=TEST_USER_ADMIN_LOGIN,
434 434 author='{} <admin@rhodecode.com>'.format(TEST_USER_ADMIN_LOGIN),
435 435 message=message,
436 436 content=content,
437 437 f_path=filename
438 438 )
439 439 return commit
440 440
441 441
442 442 def permission_update_data_generator(csrf_token, default=None, grant=None, revoke=None):
443 443 if not default:
444 444 raise ValueError('Permission for default user must be given')
445 445 form_data = [(
446 446 'csrf_token', csrf_token
447 447 )]
448 448 # add default
449 449 form_data.extend([
450 450 ('u_perm_1', default)
451 451 ])
452 452
453 453 if grant:
454 454 for cnt, (obj_id, perm, obj_name, obj_type) in enumerate(grant, 1):
455 455 form_data.extend([
456 456 ('perm_new_member_perm_new{}'.format(cnt), perm),
457 457 ('perm_new_member_id_new{}'.format(cnt), obj_id),
458 458 ('perm_new_member_name_new{}'.format(cnt), obj_name),
459 459 ('perm_new_member_type_new{}'.format(cnt), obj_type),
460 460
461 461 ])
462 462 if revoke:
463 463 for obj_id, obj_type in revoke:
464 464 form_data.extend([
465 465 ('perm_del_member_id_{}'.format(obj_id), obj_id),
466 466 ('perm_del_member_type_{}'.format(obj_id), obj_type),
467 467 ])
468 468 return form_data
@@ -1,195 +1,195 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import os
22 22 import datetime
23 import subprocess32
23 import subprocess
24 24
25 25 import pytest
26 26
27 27 from rhodecode.lib.vcs.exceptions import VCSError
28 28 from rhodecode.lib.vcs.utils import author_email, author_name
29 29 from rhodecode.lib.vcs.utils.helpers import get_scm
30 30 from rhodecode.lib.vcs.utils.helpers import get_scms_for_path
31 31 from rhodecode.lib.vcs.utils.helpers import parse_datetime
32 32 from rhodecode.lib.vcs.utils.paths import get_dirs_for_path
33 33
34 34
@pytest.mark.usefixtures("baseapp")
class TestPaths(object):
    """Tests for path helpers: get_dirs_for_path and get_scms_for_path."""

    def _test_get_dirs_for_path(self, path, expected):
        """
        Assert that get_dirs_for_path(path) matches expected (order-insensitive).
        """
        expected = sorted(expected)
        result = sorted(get_dirs_for_path(path))
        assert result == expected, (
            "%s != %s which was expected result for path %s"
            % (result, expected, path))

    def test_get_dirs_for_path(self):
        cases = (
            ('foo/bar/baz/file', ['foo', 'foo/bar', 'foo/bar/baz']),
            ('foo/bar/', ['foo', 'foo/bar']),
            ('foo/bar', ['foo']),
        )
        for path, expected in cases:
            self._test_get_dirs_for_path(path, expected)

    def test_get_scms_for_path(self, tmpdir):
        # an empty directory holds no scm control dirs
        new = tmpdir.strpath
        assert get_scms_for_path(new) == []

        # unknown control dirs are ignored
        os.mkdir(os.path.join(new, '.tux'))
        assert get_scms_for_path(new) == []

        os.mkdir(os.path.join(new, '.git'))
        assert set(get_scms_for_path(new)) == {'git'}

        # both scms are reported once both control dirs exist
        os.mkdir(os.path.join(new, '.hg'))
        assert set(get_scms_for_path(new)) == {'git', 'hg'}
70 70
71 71
class TestGetScm(object):
    """Tests for the ``get_scm`` helper."""

    def test_existing_repository(self, vcs_repository_support):
        alias, repo = vcs_repository_support
        assert get_scm(repo.path) == (alias, repo.path)

    def test_raises_if_path_is_empty(self, tmpdir):
        with pytest.raises(VCSError):
            get_scm(str(tmpdir))

    def test_get_scm_error_path(self):
        with pytest.raises(VCSError):
            get_scm('err')

    def test_get_two_scms_for_path(self, tmpdir):
        # a path that is both an hg and a git repo is ambiguous
        repo_path = str(tmpdir)
        subprocess.check_call(['hg', 'init', repo_path])
        subprocess.check_call(['git', 'init', repo_path])
        with pytest.raises(VCSError):
            get_scm(repo_path)

    def test_ignores_svn_working_copy(self, tmpdir):
        tmpdir.mkdir('.svn')
        with pytest.raises(VCSError):
            get_scm(tmpdir.strpath)
99 99
100 100
class TestParseDatetime(object):
    """Tests for ``parse_datetime`` covering absolute and relative specs."""

    def test_datetime_text(self):
        expected = datetime.datetime(2010, 4, 7, 21, 29, 41)
        assert parse_datetime('2010-04-07 21:29:41') == expected

    def test_no_seconds(self):
        expected = datetime.datetime(2010, 4, 7, 21, 29)
        assert parse_datetime('2010-04-07 21:29') == expected

    def test_date_only(self):
        assert parse_datetime('2010-04-07') == datetime.datetime(2010, 4, 7)

    def test_another_format(self):
        expected = datetime.datetime(2010, 4, 7, 21, 29, 41)
        assert parse_datetime('04/07/10 21:29:41') == expected

    def test_now(self):
        delta = parse_datetime('now') - datetime.datetime.now()
        assert delta < datetime.timedelta(seconds=1)

    def test_today(self):
        today = datetime.date.today()
        assert parse_datetime('today') == datetime.datetime(*today.timetuple()[:3])

    def test_yesterday(self):
        yesterday = datetime.date.today() - datetime.timedelta(days=1)
        expected = datetime.datetime(*yesterday.timetuple()[:3])
        assert parse_datetime('yesterday') == expected

    def test_tomorrow(self):
        # 'tomorrow' resolves to the very end of the next day
        tomorrow = datetime.date.today() + datetime.timedelta(days=1)
        expected = datetime.datetime(*(tomorrow.timetuple()[:3] + (23, 59, 59)))
        assert parse_datetime('tomorrow') == expected

    def test_days(self):
        anchor = datetime.datetime.today() - datetime.timedelta(days=3)
        expected = datetime.datetime(*(anchor.timetuple()[:3] + (0, 0, 0, 0)))
        for spec in ('3d', '3 d', '3 day', '3 days'):
            assert parse_datetime(spec) == expected

    def test_weeks(self):
        anchor = datetime.datetime.today() - datetime.timedelta(days=3 * 7)
        expected = datetime.datetime(*(anchor.timetuple()[:3] + (0, 0, 0, 0)))
        for spec in ('3w', '3 w', '3 week', '3 weeks'):
            assert parse_datetime(spec) == expected

    def test_mixed(self):
        anchor = datetime.datetime.today() - datetime.timedelta(days=2 * 7 + 3)
        expected = datetime.datetime(*(anchor.timetuple()[:3] + (0, 0, 0, 0)))
        for spec in ('2w3d', '2w 3d', '2w 3 days', '2 weeks 3 days'):
            assert parse_datetime(spec) == expected
165 165
166 166
@pytest.mark.parametrize("test_str, name, email", [
    ('Marcin Kuzminski <marcin@python-works.com>',
     'Marcin Kuzminski', 'marcin@python-works.com'),
    ('Marcin Kuzminski Spaces < marcin@python-works.com >',
     'Marcin Kuzminski Spaces', 'marcin@python-works.com'),
    ('Marcin Kuzminski <marcin.kuzminski@python-works.com>',
     'Marcin Kuzminski', 'marcin.kuzminski@python-works.com'),
    ('mrf RFC_SPEC <marcin+kuzminski@python-works.com>',
     'mrf RFC_SPEC', 'marcin+kuzminski@python-works.com'),
    ('username <user@email.com>',
     'username', 'user@email.com'),
    ('username <user@email.com',
     'username', 'user@email.com'),
    ('broken missing@email.com',
     'broken', 'missing@email.com'),
    ('<justemail@mail.com>',
     '', 'justemail@mail.com'),
    ('justname',
     'justname', ''),
    ('Mr Double Name withemail@email.com ',
     'Mr Double Name', 'withemail@email.com'),
])
class TestAuthorExtractors(object):
    """Checks that name and email are split out of assorted author strings."""

    def test_author_email(self, test_str, name, email):
        assert author_email(test_str) == email

    def test_author_name(self, test_str, name, email):
        assert author_name(test_str) == name
@@ -1,118 +1,118 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Utilities for tests only. These are not or should not be used normally -
23 23 functions here are crafted as we don't want to use ``vcs`` to verify tests.
24 24 """
25 25
26 26 import os
27 27 import re
28 28 import sys
29 29
30 from subprocess32 import Popen
30 from subprocess import Popen
31 31
32 32
class VCSTestError(Exception):
    """Raised when a VCS test helper cannot complete its task."""
35 35
36 36
def run_command(cmd, args):
    """
    Runs command on the system with given ``args``.

    :return: the raw status word from ``os.waitpid`` (0 on success).
    """
    proc = Popen(' '.join((cmd, args)), shell=True)
    # NOTE: returns the waitpid status word, not Popen.returncode
    return os.waitpid(proc.pid, 0)[1]
45 45
46 46
def eprint(msg):
    """
    Prints given ``msg`` into sys.stderr as nose test runner hides all output
    from sys.stdout by default and if we want to pipe stream somewhere we don't
    need those verbose messages anyway.
    Appends line break.
    """
    stream = sys.stderr
    stream.write(msg)
    stream.write('\n')
56 56
57 57
58 58 # TODO: Revisit once we have CI running, if this is not helping us, remove it
class SCMFetcher(object):
    """Fetches a test repository from a remote location when missing locally."""

    def __init__(self, alias, test_repo_path):
        """
        :param alias: scm alias of the repository
        :param test_repo_path: local destination path for the test repository

        NOTE(review): ``remote_repo`` and ``clone_cmd`` (used by fetch_repo)
        are expected to be provided by subclasses/callers - not set here.
        """
        self.alias = alias
        self.test_repo_path = test_repo_path

    def setup(self):
        # clone only when the destination directory does not exist yet
        if not os.path.isdir(self.test_repo_path):
            self.fetch_repo()

    def fetch_repo(self):
        """
        Tries to fetch repository from remote path.
        """
        remote = self.remote_repo
        eprint(
            "Fetching repository %s into %s" % (remote, self.test_repo_path))
        run_command(self.clone_cmd, '%s %s' % (remote, self.test_repo_path))
82 82
83 83
def get_normalized_path(path):
    """
    Return a filesystem path that does not exist yet.

    If ``path`` does not exist it is returned unchanged.  Otherwise a numeric
    ``-NNNNN`` suffix is appended to the basename (before the extension) and
    incremented until a free name is found.  Assumes that there would be no
    more than 10000 same named files.

    :param path: candidate path
    :return: ``path`` itself, or a new non-existing variant of it
    :raises VCSTestError: when no free path could be computed
    """
    if not os.path.exists(path):
        return path

    # renamed from `dir` to avoid shadowing the builtin
    dirname, basename = os.path.split(path)
    name_parts = basename.split('.')
    if len(name_parts) > 1:
        ext = name_parts[-1]
    else:
        ext = None
    name = '.'.join(name_parts[:-1])

    matcher = re.compile(r'^.*-(\d{5})$')
    match = matcher.match(name)
    if not match:
        # no numeric suffix yet - try the first candidate and recurse
        candidate = os.path.join(dirname, '%s-00000' % name)
        if ext:
            candidate = '.'.join((candidate, ext))
        return get_normalized_path(candidate)

    # name already ends with -NNNNN: keep the trailing dash, bump the number
    start = int(match.group(1)[-5:]) + 1
    for idx in range(start, 10000):
        candidate = os.path.join(dirname, name[:-5] + '%05d' % idx)
        if ext:
            candidate = '.'.join((candidate, ext))
        if not os.path.exists(candidate):
            return candidate
    raise VCSTestError("Couldn't compute new path for %s" % path)
@@ -1,193 +1,193 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Base for test suite for making push/pull operations.
23 23
24 24 .. important::
25 25
26 26 You must have git >= 1.8.5 for tests to work fine. With 68b939b git started
27 27 to redirect things to stderr instead of stdout.
28 28 """
29 29
30 30 from os.path import join as jn
31 from subprocess32 import Popen, PIPE
31 from subprocess import Popen, PIPE
32 32 import logging
33 33 import os
34 34 import tempfile
35 35
36 36 from rhodecode.tests import GIT_REPO, HG_REPO
37 37
# Toggle verbose command logging in Command.execute below.
DEBUG = True
# Log file of the RhodeCode instance under test.
RC_LOG = os.path.join(tempfile.gettempdir(), 'rc.log')
# Grouped variants of the fixture repositories used by push/pull tests.
REPO_GROUP = 'a_repo_group'
HG_REPO_WITH_GROUP = '%s/%s' % (REPO_GROUP, HG_REPO)
GIT_REPO_WITH_GROUP = '%s/%s' % (REPO_GROUP, GIT_REPO)

log = logging.getLogger(__name__)
45 45
46 46
class Command(object):
    """Runs shell commands inside ``cwd`` and captures their output."""

    def __init__(self, cwd):
        self.cwd = cwd
        # last Popen instance, populated by execute()
        self.process = None

    def execute(self, cmd, *args):
        """
        Runs command on the system with given ``args``.

        :return: ``(stdout, stderr)`` of the finished process
        """
        command = cmd + ' ' + ' '.join(args)
        if DEBUG:
            log.debug('*** CMD %s ***', command)

        env = dict(os.environ)
        # Delete coverage variables, as they make the test fail for Mercurial.
        # Iterate over a snapshot of the keys: deleting entries while
        # iterating the live dict view raises RuntimeError on Python 3.
        for key in list(env.keys()):
            if key.startswith('COV_CORE_'):
                del env[key]

        self.process = Popen(command, shell=True, stdout=PIPE, stderr=PIPE,
                             cwd=self.cwd, env=env)
        stdout, stderr = self.process.communicate()
        if DEBUG:
            log.debug('STDOUT:%s', stdout)
            log.debug('STDERR:%s', stderr)
        return stdout, stderr

    def assert_returncode_success(self):
        assert self.process.returncode == 0
78 78
79 79
def _add_files(vcs, dest, clone_url=None, tags=None, target_branch=None, new_branch=False, **kwargs):
    """
    Create a new file with a few commits (and optionally tags) in the
    working copy at ``dest``.

    :param vcs: 'hg' or 'git' - selects the command set used
    :param dest: path of the working copy to commit into
    :param tags: list of dicts with 'name' and 'commit' (tag message) keys
    :param kwargs: ``files_no`` controls how many commits are made (default 3)
    """
    git_ident = "git config user.name {} && git config user.email {}".format(
        'Marcin KuΕΊminski', 'me@email.com')
    cwd = path = jn(dest)

    tags = tags or []
    # use the next() builtin - the Python 2-only .next() method is gone in py3
    added_file = jn(path, '%s_setup.py' % next(tempfile._RandomNameSequence()))
    Command(cwd).execute('touch %s' % added_file)
    Command(cwd).execute('%s add %s' % (vcs, added_file))
    author_str = 'Marcin KuΕΊminski <me@email.com>'

    for i in range(kwargs.get('files_no', 3)):
        cmd = """echo 'added_line%s' >> %s""" % (i, added_file)
        Command(cwd).execute(cmd)

        if vcs == 'hg':
            cmd = """hg commit -m 'committed new %s' -u '%s' %s """ % (
                i, author_str, added_file
            )
        elif vcs == 'git':
            cmd = """%s && git commit -m 'committed new %s' %s""" % (
                git_ident, i, added_file)
        Command(cwd).execute(cmd)

    for tag in tags:
        if vcs == 'hg':
            Command(cwd).execute(
                'hg tag -m "{}" -u "{}" '.format(tag['commit'], author_str), tag['name'])
        elif vcs == 'git':
            if tag['commit']:
                # annotated tag
                _stdout, _stderr = Command(cwd).execute(
                    """%s && git tag -a %s -m "%s" """ % (
                        git_ident, tag['name'], tag['commit']))
            else:
                # lightweight tag
                _stdout, _stderr = Command(cwd).execute(
                    """%s && git tag %s""" % (
                        git_ident, tag['name']))
119 119
120 120
def _add_files_and_push(vcs, dest, clone_url=None, tags=None, target_branch=None,
                        new_branch=False, **kwargs):
    """
    Generate some files, add it to DEST repo and push back
    vcs is git or hg and defines what VCS we want to make those files for
    """
    git_ident = "git config user.name {} && git config user.email {}".format(
        'Marcin KuΕΊminski', 'me@email.com')
    cwd = path = jn(dest)

    # commit some stuff into this repo
    _add_files(vcs, dest, clone_url, tags, target_branch, new_branch, **kwargs)

    # fall back to the scm's conventional default branch
    target_branch = target_branch or {
        'git': 'master',
        'hg': 'default',
    }.get(vcs)

    # PUSH it back
    stdout = stderr = None
    if vcs == 'hg':
        maybe_new_branch = '--new-branch' if new_branch else ''
        stdout, stderr = Command(cwd).execute(
            'hg push --verbose {} -r {} {}'.format(maybe_new_branch, target_branch, clone_url)
        )
    elif vcs == 'git':
        stdout, stderr = Command(cwd).execute(
            """{} &&
            git push --verbose --tags {} {}""".format(git_ident, clone_url, target_branch)
        )

    return stdout, stderr
157 157
158 158
159 159 def _check_proper_git_push(
160 160 stdout, stderr, branch='master', should_set_default_branch=False):
161 161 # Note: Git is writing most information to stderr intentionally
162 162 assert 'fatal' not in stderr
163 163 assert 'rejected' not in stderr
164 164 assert 'Pushing to' in stderr
165 165 assert '%s -> %s' % (branch, branch) in stderr
166 166
167 167 if should_set_default_branch:
168 168 assert "Setting default branch to %s" % branch in stderr
169 169 else:
170 170 assert "Setting default branch" not in stderr
171 171
172 172
173 173 def _check_proper_hg_push(stdout, stderr, branch='default'):
174 174 assert 'pushing to' in stdout
175 175 assert 'searching for changes' in stdout
176 176
177 177 assert 'abort:' not in stderr
178 178
179 179
180 180 def _check_proper_clone(stdout, stderr, vcs):
181 181 if vcs == 'hg':
182 182 assert 'requesting all changes' in stdout
183 183 assert 'adding changesets' in stdout
184 184 assert 'adding manifests' in stdout
185 185 assert 'adding file changes' in stdout
186 186
187 187 assert stderr == ''
188 188
189 189 if vcs == 'git':
190 190 assert '' == stdout
191 191 assert 'Cloning into' in stderr
192 192 assert 'abort:' not in stderr
193 193 assert 'fatal:' not in stderr
General Comments 0
You need to be logged in to leave comments. Login now