##// END OF EJS Templates
python3: remove usage of subprocess32
super-admin -
r4926:cf2cc324 default
parent child Browse files
Show More
@@ -1,398 +1,398 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2020 RhodeCode GmbH
3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 import io
20 import io
21 import shlex
21 import shlex
22
22
23 import math
23 import math
24 import re
24 import re
25 import os
25 import os
26 import datetime
26 import datetime
27 import logging
27 import logging
28 import Queue
28 import Queue
29 import subprocess32
29 import subprocess
30
30
31
31
32 from dateutil.parser import parse
32 from dateutil.parser import parse
33 from pyramid.threadlocal import get_current_request
33 from pyramid.threadlocal import get_current_request
34 from pyramid.interfaces import IRoutesMapper
34 from pyramid.interfaces import IRoutesMapper
35 from pyramid.settings import asbool
35 from pyramid.settings import asbool
36 from pyramid.path import AssetResolver
36 from pyramid.path import AssetResolver
37 from threading import Thread
37 from threading import Thread
38
38
39 from rhodecode.config.jsroutes import generate_jsroutes_content
39 from rhodecode.config.jsroutes import generate_jsroutes_content
40 from rhodecode.lib.base import get_auth_user
40 from rhodecode.lib.base import get_auth_user
41
41
42 import rhodecode
42 import rhodecode
43
43
44
44
45 log = logging.getLogger(__name__)
45 log = logging.getLogger(__name__)
46
46
47
47
def add_renderer_globals(event):
    """Inject translation callables and the helpers module into the
    template rendering context."""
    from rhodecode.lib import helpers

    # TODO: When executed in pyramid view context the request is not available
    # in the event. Find a better solution to get the request.
    req = event['request'] or get_current_request()

    # '_' / '_ungettext' expose Pyramid translation, 'h' the helpers module.
    event.update({
        '_': req.translate,
        '_ungettext': req.plularize,
        'h': helpers,
    })
59
59
60
60
def set_user_lang(event):
    """Switch the request locale to the authenticated user's stored
    language preference, if one is set."""
    cur_user = getattr(event.request, 'user', None)
    if not cur_user:
        return

    user_lang = cur_user.get_instance().user_data.get('language')
    if not user_lang:
        return

    log.debug('lang: setting current user:%s language to: %s', cur_user, user_lang)
    event.request._LOCALE_ = user_lang
70
70
71
71
def update_celery_conf(event):
    """Refresh the celery configuration from the incoming request."""
    from rhodecode.lib.celerylib.loader import set_celery_conf

    log.debug('Setting celery config from new request')
    request = event.request
    set_celery_conf(request=request, registry=request.registry)
76
76
77
77
def add_request_user_context(event):
    """
    Adds auth user into request context
    """
    request = event.request
    # access req_id as soon as possible
    req_id = request.req_id

    # vcs and api requests carry their own auth handling; skip them here
    if hasattr(request, 'vcs_call') or hasattr(request, 'rpc_method'):
        return

    auth_user, auth_token = get_auth_user(request)
    request.user = auth_user
    request.user_auth_token = auth_token

    environ = request.environ
    environ['rc_auth_user'] = auth_user
    environ['rc_auth_user_id'] = auth_user.user_id
    environ['rc_req_id'] = req_id
100
100
101
101
def reset_log_bucket(event):
    """
    reset the log bucket on new request
    """
    event.request.req_id_records_init()
108
108
109
109
def scan_repositories_if_enabled(event):
    """
    This is subscribed to the `pyramid.events.ApplicationCreated` event. It
    does a repository scan if enabled in the settings.
    """
    settings = event.app.registry.settings
    if not (settings['vcs.server.enable'] and settings['startup.import_repos']):
        return

    from rhodecode.model.scm import ScmModel
    from rhodecode.lib.utils import repo2db_mapper, get_rhodecode_base_path
    found = ScmModel().repo_scan(get_rhodecode_base_path())
    repo2db_mapper(found, remove_obsolete=False)
123
123
124
124
def write_metadata_if_needed(event):
    """
    Writes upgrade metadata next to the ini file, refreshing it at most
    once per 24 hours. Failures are deliberately best-effort (never break
    application startup over metadata).
    """
    import rhodecode
    from rhodecode.lib import system_info
    from rhodecode.lib import ext_json

    fname = '.rcmetadata.json'
    ini_loc = os.path.dirname(rhodecode.CONFIG.get('__file__'))
    metadata_destination = os.path.join(ini_loc, fname)

    def get_update_age():
        # age of the existing metadata file in minutes; 0 when unknown
        now = datetime.datetime.utcnow()

        with open(metadata_destination, 'rb') as f:
            data = ext_json.json.loads(f.read())
            if 'created_on' in data:
                update_date = parse(data['created_on'])
                diff = now - update_date
                return diff.total_seconds() / 60.0

        return 0

    def write():
        configuration = system_info.SysInfo(
            system_info.rhodecode_config)()['value']
        license_token = configuration['config']['license_token']

        setup = dict(
            workers=configuration['config']['server:main'].get(
                'workers', '?'),
            worker_type=configuration['config']['server:main'].get(
                'worker_class', 'sync'),
        )
        dbinfo = system_info.SysInfo(system_info.database_info)()['value']
        # the connection url may embed credentials; never persist it
        del dbinfo['url']

        metadata = dict(
            desc='upgrade metadata info',
            license_token=license_token,
            created_on=datetime.datetime.utcnow().isoformat(),
            usage=system_info.SysInfo(system_info.usage_info)()['value'],
            platform=system_info.SysInfo(system_info.platform_type)()['value'],
            database=dbinfo,
            cpu=system_info.SysInfo(system_info.cpu)()['value'],
            memory=system_info.SysInfo(system_info.memory)()['value'],
            setup=setup
        )

        with open(metadata_destination, 'wb') as f:
            f.write(ext_json.json.dumps(metadata))

    settings = event.app.registry.settings
    if settings.get('metadata.skip'):
        return

    # only write this every 24h, workers restart caused unwanted delays
    try:
        age_in_min = get_update_age()
    except Exception:
        age_in_min = 0

    # BUG FIX: the previous check `age_in_min > 60 * 60 * 24` was inverted
    # (it skipped writing once the file was *old*) and compared minutes
    # against a seconds-valued constant. Skip only while the existing
    # metadata is younger than 24 hours, mirroring write_usage_data().
    if age_in_min and age_in_min < 60 * 24:
        return

    try:
        write()
    except Exception:
        pass
195
195
196
196
def write_usage_data(event):
    """
    Writes usage metadata into a per-6-hour-bucket file under `.rcusage`,
    skipping the write while the current bucket's file is still fresh.
    Enabled via the `metadata.write_usage` setting.
    """
    import rhodecode
    from rhodecode.lib import system_info
    from rhodecode.lib import ext_json

    settings = event.app.registry.settings
    instance_tag = settings.get('metadata.write_usage_tag')
    if not settings.get('metadata.write_usage'):
        return

    def _file_age_minutes(dest_file):
        # age of an existing usage file in minutes; 0 when unknown
        now = datetime.datetime.utcnow()

        with open(dest_file, 'rb') as f:
            data = ext_json.json.loads(f.read())
            if 'created_on' in data:
                update_date = parse(data['created_on'])
                diff = now - update_date
                return math.ceil(diff.total_seconds() / 60.0)

        return 0

    now_utc = datetime.datetime.utcnow()
    # bucket index within the day: one bucket per 6 hours
    hour_quarter = int(math.ceil((now_utc.hour + now_utc.minute/60.0) / 6.))
    fname = '.rc_usage_{date.year}{date.month:02d}{date.day:02d}_{hour}.json'.format(
        date=now_utc, hour=hour_quarter)

    ini_loc = os.path.dirname(rhodecode.CONFIG.get('__file__'))
    usage_dir = os.path.join(ini_loc, '.rcusage')
    if not os.path.isdir(usage_dir):
        os.makedirs(usage_dir)
    usage_file_path = os.path.join(usage_dir, fname)

    try:
        age_in_min = _file_age_minutes(usage_file_path)
    except Exception:
        age_in_min = 0

    # write every 6th hour
    if age_in_min and age_in_min < 60 * 6:
        log.debug('Usage file created %s minutes ago, skipping (threshold: %s minutes)...',
                  age_in_min, 60 * 6)
        return

    def _dump_usage(dest_file):
        configuration = system_info.SysInfo(system_info.rhodecode_config)()['value']
        license_token = configuration['config']['license_token']

        metadata = dict(
            desc='Usage data',
            instance_tag=instance_tag,
            license_token=license_token,
            created_on=datetime.datetime.utcnow().isoformat(),
            usage=system_info.SysInfo(system_info.usage_info)()['value'],
        )

        with open(dest_file, 'wb') as f:
            f.write(ext_json.json.dumps(metadata, indent=2, sort_keys=True))

    # best effort: a failed usage write must never break the request
    try:
        log.debug('Writing usage file at: %s', usage_file_path)
        _dump_usage(usage_file_path)
    except Exception:
        pass
261
261
262
262
def write_js_routes_if_enabled(event):
    """
    Generates `rhodecode:public/js/rhodecode/routes.js` from the pyramid
    route mapper when the `generate_js_files` setting is enabled.
    """
    registry = event.app.registry

    mapper = registry.queryUtility(IRoutesMapper)
    # FIX: raw string — '\{' and '\(' are invalid escape sequences in a
    # normal string literal (DeprecationWarning on py3, error eventually)
    _argument_prog = re.compile(r'\{(.*?)\}|:\((.*)\)')

    def _extract_route_information(route):
        """
        Convert a route into tuple(name, path, args), eg:
        ('show_user', '/profile/%(username)s', ['username'])
        """
        routepath = route.pattern
        pattern = route.pattern

        def replace(matchobj):
            # group(1): '{name}' placeholders, group(2): ':(name)' style
            if matchobj.group(1):
                return "%%(%s)s" % matchobj.group(1).split(':')[0]
            else:
                return "%%(%s)s" % matchobj.group(2)

        routepath = _argument_prog.sub(replace, routepath)

        if not routepath.startswith('/'):
            routepath = '/' + routepath

        return (
            route.name,
            routepath,
            [(arg[0].split(':')[0] if arg[0] != '' else arg[1])
             for arg in _argument_prog.findall(pattern)]
        )

    def get_routes():
        # pyramid routes; double-underscore names are internal
        for route in mapper.get_routes():
            if not route.name.startswith('__'):
                yield _extract_route_information(route)

    if asbool(registry.settings.get('generate_js_files', 'false')):
        static_path = AssetResolver().resolve('rhodecode:public').abspath()
        jsroutes = get_routes()
        jsroutes_file_content = generate_jsroutes_content(jsroutes)
        jsroutes_file_path = os.path.join(
            static_path, 'js', 'rhodecode', 'routes.js')

        try:
            with io.open(jsroutes_file_path, 'w', encoding='utf-8') as f:
                f.write(jsroutes_file_content)
        except Exception:
            log.exception('Failed to write routes.js into %s', jsroutes_file_path)
314
314
315
315
class Subscriber(object):
    """
    Base class for subscribers to the pyramid event system.

    Subclasses implement :meth:`run`; instances are callables so they can
    be registered directly as pyramid subscribers.
    """

    def __call__(self, event):
        # delegate so subclasses only have to implement one hook
        self.run(event)

    def run(self, event):
        raise NotImplementedError('Subclass has to implement this.')
325
325
326
326
class AsyncSubscriber(Subscriber):
    """
    Subscriber that handles the execution of events in a separate task to not
    block the execution of the code which triggers the event. It puts the
    received events into a queue from which the worker process takes them in
    order.
    """
    def __init__(self):
        self._stop = False
        self._eventq = Queue.Queue()
        self._worker = self.create_worker()
        self._worker.start()

    def __call__(self, event):
        # queue the event; the worker thread processes it asynchronously
        self._eventq.put(event)

    def create_worker(self):
        # daemon thread so a forgotten subscriber never blocks shutdown
        worker = Thread(target=self.do_work)
        worker.daemon = True
        return worker

    def stop_worker(self):
        # BUG FIX: this previously set `self._stop = False`, which kept the
        # worker loop alive so `join()` below blocked forever. The flag must
        # be True; the `None` sentinel wakes the worker so it re-checks it.
        self._stop = True
        self._eventq.put(None)
        self._worker.join()

    def do_work(self):
        while not self._stop:
            event = self._eventq.get()
            if event is not None:
                self.run(event)
358
358
359
359
class AsyncSubprocessSubscriber(AsyncSubscriber):
    """
    Subscriber that uses the subprocess module to execute a command if an
    event is received. Events are handled asynchronously::

        subscriber = AsyncSubprocessSubscriber('ls -la', timeout=10)
        subscriber(dummyEvent) # running __call__(event)

    """

    def __init__(self, cmd, timeout=None):
        # accept either an argv list/tuple or a shell-like command string
        if not isinstance(cmd, (list, tuple)):
            cmd = shlex.split(cmd)
        super(AsyncSubprocessSubscriber, self).__init__()
        self._cmd = cmd
        self._timeout = timeout

    def run(self, event):
        # executed on the worker thread; failures are logged, never raised
        cmd, timeout = self._cmd, self._timeout
        log.debug('Executing command %s.', cmd)

        try:
            captured = subprocess.check_output(
                cmd, timeout=timeout, stderr=subprocess.STDOUT)
            log.debug('Command finished %s', cmd)
            if captured:
                log.debug('Command output: %s', captured)
        except subprocess.TimeoutExpired as e:
            log.exception('Timeout while executing command.')
            if e.output:
                log.error('Command output: %s', e.output)
        except subprocess.CalledProcessError as e:
            log.exception('Error while executing command.')
            if e.output:
                log.error('Command output: %s', e.output)
        except Exception:
            log.exception(
                'Exception while executing command %s.', cmd)
@@ -1,293 +1,293 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2020 RhodeCode GmbH
3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 from subprocess32 import Popen, PIPE
21 from subprocess import Popen, PIPE
22 import os
22 import os
23 import shutil
23 import shutil
24 import sys
24 import sys
25 import tempfile
25 import tempfile
26
26
27 import pytest
27 import pytest
28 from sqlalchemy.engine import url
28 from sqlalchemy.engine import url
29
29
30 from rhodecode.tests.fixture import TestINI
30 from rhodecode.tests.fixture import TestINI
31
31
32
32
33 def _get_dbs_from_metafunc(metafunc):
33 def _get_dbs_from_metafunc(metafunc):
34 dbs_mark = metafunc.definition.get_closest_marker('dbs')
34 dbs_mark = metafunc.definition.get_closest_marker('dbs')
35
35
36 if dbs_mark:
36 if dbs_mark:
37 # Supported backends by this test function, created from pytest.mark.dbs
37 # Supported backends by this test function, created from pytest.mark.dbs
38 backends = dbs_mark.args
38 backends = dbs_mark.args
39 else:
39 else:
40 backends = metafunc.config.getoption('--dbs')
40 backends = metafunc.config.getoption('--dbs')
41 return backends
41 return backends
42
42
43
43
def pytest_generate_tests(metafunc):
    # Support test generation based on --dbs parameter
    if 'db_backend' not in metafunc.fixturenames:
        return

    requested_backends = set(metafunc.config.getoption('--dbs'))
    selected = requested_backends.intersection(
        _get_dbs_from_metafunc(metafunc))
    # TODO: johbo: Disabling a backend did not work out with
    # parametrization, find better way to achieve this.
    if not selected:
        metafunc.function._skip = True
    metafunc.parametrize('db_backend_name', selected)
55
55
56
56
def pytest_collection_modifyitems(session, config, items):
    # drop tests flagged with `_skip` during generation
    # (see pytest_generate_tests)
    items[:] = [item for item in items
                if not getattr(item.obj, '_skip', False)]
61
61
62
62
@pytest.fixture()
def db_backend(
        request, db_backend_name, ini_config, tmpdir_factory):
    """Build the DB backend helper object for the parametrized backend."""
    backend_class = _get_backend(db_backend_name)
    basetemp = tmpdir_factory.getbasetemp().strpath

    # per-backend connection string override, e.g. --mysql-connection-string
    conn_str_option = '--{}-connection-string'.format(db_backend_name)
    connection_string = request.config.getoption(conn_str_option) or None

    return backend_class(
        config_file=ini_config,
        basetemp=basetemp,
        connection_string=connection_string)
75
75
76
76
def _get_backend(backend_type):
    """Map a backend type name ('sqlite', 'postgres', 'mysql', '') to its
    DBBackend class; raises KeyError on unknown types."""
    backends = {
        'sqlite': SQLiteDBBackend,
        'postgres': PostgresDBBackend,
        'mysql': MySQLDBBackend,
        '': EmptyDBBackend,
    }
    return backends[backend_type]
84
84
85
85
86 class DBBackend(object):
86 class DBBackend(object):
87 _store = os.path.dirname(os.path.abspath(__file__))
87 _store = os.path.dirname(os.path.abspath(__file__))
88 _type = None
88 _type = None
89 _base_ini_config = [{'app:main': {'vcs.start_server': 'false',
89 _base_ini_config = [{'app:main': {'vcs.start_server': 'false',
90 'startup.import_repos': 'false',
90 'startup.import_repos': 'false',
91 'is_test': 'False'}}]
91 'is_test': 'False'}}]
92 _db_url = [{'app:main': {'sqlalchemy.db1.url': ''}}]
92 _db_url = [{'app:main': {'sqlalchemy.db1.url': ''}}]
93 _base_db_name = 'rhodecode_test_db_backend'
93 _base_db_name = 'rhodecode_test_db_backend'
94
94
95 def __init__(
95 def __init__(
96 self, config_file, db_name=None, basetemp=None,
96 self, config_file, db_name=None, basetemp=None,
97 connection_string=None):
97 connection_string=None):
98
98
99 from rhodecode.lib.vcs.backends.hg import largefiles_store
99 from rhodecode.lib.vcs.backends.hg import largefiles_store
100 from rhodecode.lib.vcs.backends.git import lfs_store
100 from rhodecode.lib.vcs.backends.git import lfs_store
101
101
102 self.fixture_store = os.path.join(self._store, self._type)
102 self.fixture_store = os.path.join(self._store, self._type)
103 self.db_name = db_name or self._base_db_name
103 self.db_name = db_name or self._base_db_name
104 self._base_ini_file = config_file
104 self._base_ini_file = config_file
105 self.stderr = ''
105 self.stderr = ''
106 self.stdout = ''
106 self.stdout = ''
107 self._basetemp = basetemp or tempfile.gettempdir()
107 self._basetemp = basetemp or tempfile.gettempdir()
108 self._repos_location = os.path.join(self._basetemp, 'rc_test_repos')
108 self._repos_location = os.path.join(self._basetemp, 'rc_test_repos')
109 self._repos_hg_largefiles_store = largefiles_store(self._basetemp)
109 self._repos_hg_largefiles_store = largefiles_store(self._basetemp)
110 self._repos_git_lfs_store = lfs_store(self._basetemp)
110 self._repos_git_lfs_store = lfs_store(self._basetemp)
111 self.connection_string = connection_string
111 self.connection_string = connection_string
112
112
113 @property
113 @property
114 def connection_string(self):
114 def connection_string(self):
115 return self._connection_string
115 return self._connection_string
116
116
117 @connection_string.setter
117 @connection_string.setter
118 def connection_string(self, new_connection_string):
118 def connection_string(self, new_connection_string):
119 if not new_connection_string:
119 if not new_connection_string:
120 new_connection_string = self.get_default_connection_string()
120 new_connection_string = self.get_default_connection_string()
121 else:
121 else:
122 new_connection_string = new_connection_string.format(
122 new_connection_string = new_connection_string.format(
123 db_name=self.db_name)
123 db_name=self.db_name)
124 url_parts = url.make_url(new_connection_string)
124 url_parts = url.make_url(new_connection_string)
125 self._connection_string = new_connection_string
125 self._connection_string = new_connection_string
126 self.user = url_parts.username
126 self.user = url_parts.username
127 self.password = url_parts.password
127 self.password = url_parts.password
128 self.host = url_parts.host
128 self.host = url_parts.host
129
129
130 def get_default_connection_string(self):
130 def get_default_connection_string(self):
131 raise NotImplementedError('default connection_string is required.')
131 raise NotImplementedError('default connection_string is required.')
132
132
133 def execute(self, cmd, env=None, *args):
133 def execute(self, cmd, env=None, *args):
134 """
134 """
135 Runs command on the system with given ``args``.
135 Runs command on the system with given ``args``.
136 """
136 """
137
137
138 command = cmd + ' ' + ' '.join(args)
138 command = cmd + ' ' + ' '.join(args)
139 sys.stdout.write(command)
139 sys.stdout.write(command)
140
140
141 # Tell Python to use UTF-8 encoding out stdout
141 # Tell Python to use UTF-8 encoding out stdout
142 _env = os.environ.copy()
142 _env = os.environ.copy()
143 _env['PYTHONIOENCODING'] = 'UTF-8'
143 _env['PYTHONIOENCODING'] = 'UTF-8'
144 if env:
144 if env:
145 _env.update(env)
145 _env.update(env)
146 self.p = Popen(command, shell=True, stdout=PIPE, stderr=PIPE, env=_env)
146 self.p = Popen(command, shell=True, stdout=PIPE, stderr=PIPE, env=_env)
147 self.stdout, self.stderr = self.p.communicate()
147 self.stdout, self.stderr = self.p.communicate()
148 sys.stdout.write('COMMAND:'+command+'\n')
148 sys.stdout.write('COMMAND:'+command+'\n')
149 sys.stdout.write(self.stdout)
149 sys.stdout.write(self.stdout)
150 return self.stdout, self.stderr
150 return self.stdout, self.stderr
151
151
152 def assert_returncode_success(self):
152 def assert_returncode_success(self):
153 if not self.p.returncode == 0:
153 if not self.p.returncode == 0:
154 print(self.stderr)
154 print(self.stderr)
155 raise AssertionError('non 0 retcode:{}'.format(self.p.returncode))
155 raise AssertionError('non 0 retcode:{}'.format(self.p.returncode))
156
156
157 def assert_correct_output(self, stdout, version):
157 def assert_correct_output(self, stdout, version):
158 assert 'UPGRADE FOR STEP {} COMPLETED'.format(version) in stdout
158 assert 'UPGRADE FOR STEP {} COMPLETED'.format(version) in stdout
159
159
160 def setup_rhodecode_db(self, ini_params=None, env=None):
160 def setup_rhodecode_db(self, ini_params=None, env=None):
161 if not ini_params:
161 if not ini_params:
162 ini_params = self._base_ini_config
162 ini_params = self._base_ini_config
163
163
164 ini_params.extend(self._db_url)
164 ini_params.extend(self._db_url)
165 with TestINI(self._base_ini_file, ini_params,
165 with TestINI(self._base_ini_file, ini_params,
166 self._type, destroy=True) as _ini_file:
166 self._type, destroy=True) as _ini_file:
167
167
168 if not os.path.isdir(self._repos_location):
168 if not os.path.isdir(self._repos_location):
169 os.makedirs(self._repos_location)
169 os.makedirs(self._repos_location)
170 if not os.path.isdir(self._repos_hg_largefiles_store):
170 if not os.path.isdir(self._repos_hg_largefiles_store):
171 os.makedirs(self._repos_hg_largefiles_store)
171 os.makedirs(self._repos_hg_largefiles_store)
172 if not os.path.isdir(self._repos_git_lfs_store):
172 if not os.path.isdir(self._repos_git_lfs_store):
173 os.makedirs(self._repos_git_lfs_store)
173 os.makedirs(self._repos_git_lfs_store)
174
174
175 return self.execute(
175 return self.execute(
176 "rc-setup-app {0} --user=marcink "
176 "rc-setup-app {0} --user=marcink "
177 "--email=marcin@rhodeocode.com --password={1} "
177 "--email=marcin@rhodeocode.com --password={1} "
178 "--repos={2} --force-yes".format(
178 "--repos={2} --force-yes".format(
179 _ini_file, 'qweqwe', self._repos_location), env=env)
179 _ini_file, 'qweqwe', self._repos_location), env=env)
180
180
181 def upgrade_database(self, ini_params=None):
181 def upgrade_database(self, ini_params=None):
182 if not ini_params:
182 if not ini_params:
183 ini_params = self._base_ini_config
183 ini_params = self._base_ini_config
184 ini_params.extend(self._db_url)
184 ini_params.extend(self._db_url)
185
185
186 test_ini = TestINI(
186 test_ini = TestINI(
187 self._base_ini_file, ini_params, self._type, destroy=True)
187 self._base_ini_file, ini_params, self._type, destroy=True)
188 with test_ini as ini_file:
188 with test_ini as ini_file:
189 if not os.path.isdir(self._repos_location):
189 if not os.path.isdir(self._repos_location):
190 os.makedirs(self._repos_location)
190 os.makedirs(self._repos_location)
191
191
192 return self.execute(
192 return self.execute(
193 "rc-upgrade-db {0} --force-yes".format(ini_file))
193 "rc-upgrade-db {0} --force-yes".format(ini_file))
194
194
195 def setup_db(self):
195 def setup_db(self):
196 raise NotImplementedError
196 raise NotImplementedError
197
197
198 def teardown_db(self):
198 def teardown_db(self):
199 raise NotImplementedError
199 raise NotImplementedError
200
200
201 def import_dump(self, dumpname):
201 def import_dump(self, dumpname):
202 raise NotImplementedError
202 raise NotImplementedError
203
203
204
204
205 class EmptyDBBackend(DBBackend):
205 class EmptyDBBackend(DBBackend):
206 _type = ''
206 _type = ''
207
207
208 def setup_db(self):
208 def setup_db(self):
209 pass
209 pass
210
210
211 def teardown_db(self):
211 def teardown_db(self):
212 pass
212 pass
213
213
214 def import_dump(self, dumpname):
214 def import_dump(self, dumpname):
215 pass
215 pass
216
216
217 def assert_returncode_success(self):
217 def assert_returncode_success(self):
218 assert True
218 assert True
219
219
220
220
221 class SQLiteDBBackend(DBBackend):
221 class SQLiteDBBackend(DBBackend):
222 _type = 'sqlite'
222 _type = 'sqlite'
223
223
224 def get_default_connection_string(self):
224 def get_default_connection_string(self):
225 return 'sqlite:///{}/{}.sqlite'.format(self._basetemp, self.db_name)
225 return 'sqlite:///{}/{}.sqlite'.format(self._basetemp, self.db_name)
226
226
227 def setup_db(self):
227 def setup_db(self):
228 # dump schema for tests
228 # dump schema for tests
229 # cp -v $TEST_DB_NAME
229 # cp -v $TEST_DB_NAME
230 self._db_url = [{'app:main': {
230 self._db_url = [{'app:main': {
231 'sqlalchemy.db1.url': self.connection_string}}]
231 'sqlalchemy.db1.url': self.connection_string}}]
232
232
233 def import_dump(self, dumpname):
233 def import_dump(self, dumpname):
234 dump = os.path.join(self.fixture_store, dumpname)
234 dump = os.path.join(self.fixture_store, dumpname)
235 target = os.path.join(self._basetemp, '{0.db_name}.sqlite'.format(self))
235 target = os.path.join(self._basetemp, '{0.db_name}.sqlite'.format(self))
236 return self.execute('cp -v {} {}'.format(dump, target))
236 return self.execute('cp -v {} {}'.format(dump, target))
237
237
238 def teardown_db(self):
238 def teardown_db(self):
239 return self.execute("rm -rf {}.sqlite".format(
239 return self.execute("rm -rf {}.sqlite".format(
240 os.path.join(self._basetemp, self.db_name)))
240 os.path.join(self._basetemp, self.db_name)))
241
241
242
242
243 class MySQLDBBackend(DBBackend):
243 class MySQLDBBackend(DBBackend):
244 _type = 'mysql'
244 _type = 'mysql'
245
245
246 def get_default_connection_string(self):
246 def get_default_connection_string(self):
247 return 'mysql://root:qweqwe@127.0.0.1/{}'.format(self.db_name)
247 return 'mysql://root:qweqwe@127.0.0.1/{}'.format(self.db_name)
248
248
249 def setup_db(self):
249 def setup_db(self):
250 # dump schema for tests
250 # dump schema for tests
251 # mysqldump -uroot -pqweqwe $TEST_DB_NAME
251 # mysqldump -uroot -pqweqwe $TEST_DB_NAME
252 self._db_url = [{'app:main': {
252 self._db_url = [{'app:main': {
253 'sqlalchemy.db1.url': self.connection_string}}]
253 'sqlalchemy.db1.url': self.connection_string}}]
254 return self.execute("mysql -v -u{} -p{} -e 'create database '{}';'".format(
254 return self.execute("mysql -v -u{} -p{} -e 'create database '{}';'".format(
255 self.user, self.password, self.db_name))
255 self.user, self.password, self.db_name))
256
256
257 def import_dump(self, dumpname):
257 def import_dump(self, dumpname):
258 dump = os.path.join(self.fixture_store, dumpname)
258 dump = os.path.join(self.fixture_store, dumpname)
259 return self.execute("mysql -u{} -p{} {} < {}".format(
259 return self.execute("mysql -u{} -p{} {} < {}".format(
260 self.user, self.password, self.db_name, dump))
260 self.user, self.password, self.db_name, dump))
261
261
262 def teardown_db(self):
262 def teardown_db(self):
263 return self.execute("mysql -v -u{} -p{} -e 'drop database '{}';'".format(
263 return self.execute("mysql -v -u{} -p{} -e 'drop database '{}';'".format(
264 self.user, self.password, self.db_name))
264 self.user, self.password, self.db_name))
265
265
266
266
267 class PostgresDBBackend(DBBackend):
267 class PostgresDBBackend(DBBackend):
268 _type = 'postgres'
268 _type = 'postgres'
269
269
270 def get_default_connection_string(self):
270 def get_default_connection_string(self):
271 return 'postgresql://postgres:qweqwe@localhost/{}'.format(self.db_name)
271 return 'postgresql://postgres:qweqwe@localhost/{}'.format(self.db_name)
272
272
273 def setup_db(self):
273 def setup_db(self):
274 # dump schema for tests
274 # dump schema for tests
275 # pg_dump -U postgres -h localhost $TEST_DB_NAME
275 # pg_dump -U postgres -h localhost $TEST_DB_NAME
276 self._db_url = [{'app:main': {
276 self._db_url = [{'app:main': {
277 'sqlalchemy.db1.url':
277 'sqlalchemy.db1.url':
278 self.connection_string}}]
278 self.connection_string}}]
279 return self.execute("PGPASSWORD={} psql -U {} -h localhost "
279 return self.execute("PGPASSWORD={} psql -U {} -h localhost "
280 "-c 'create database '{}';'".format(
280 "-c 'create database '{}';'".format(
281 self.password, self.user, self.db_name))
281 self.password, self.user, self.db_name))
282
282
283 def teardown_db(self):
283 def teardown_db(self):
284 return self.execute("PGPASSWORD={} psql -U {} -h localhost "
284 return self.execute("PGPASSWORD={} psql -U {} -h localhost "
285 "-c 'drop database if exists '{}';'".format(
285 "-c 'drop database if exists '{}';'".format(
286 self.password, self.user, self.db_name))
286 self.password, self.user, self.db_name))
287
287
288 def import_dump(self, dumpname):
288 def import_dump(self, dumpname):
289 dump = os.path.join(self.fixture_store, dumpname)
289 dump = os.path.join(self.fixture_store, dumpname)
290 return self.execute(
290 return self.execute(
291 "PGPASSWORD={} psql -U {} -h localhost -d {} -1 "
291 "PGPASSWORD={} psql -U {} -h localhost -d {} -1 "
292 "-f {}".format(
292 "-f {}".format(
293 self.password, self.user, self.db_name, dump))
293 self.password, self.user, self.db_name, dump))
@@ -1,136 +1,136 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2016-2020 RhodeCode GmbH
3 # Copyright (C) 2016-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Checking the chunked data transfer via HTTP
22 Checking the chunked data transfer via HTTP
23 """
23 """
24
24
25 import os
25 import os
26 import time
26 import time
27 import subprocess32
27 import subprocess
28
28
29 import pytest
29 import pytest
30 import requests
30 import requests
31
31
32 from rhodecode.lib.middleware.utils import scm_app_http
32 from rhodecode.lib.middleware.utils import scm_app_http
33 from rhodecode.tests.utils import wait_for_url
33 from rhodecode.tests.utils import wait_for_url
34
34
35
35
36 def test_does_chunked_end_to_end_transfer(scm_app):
36 def test_does_chunked_end_to_end_transfer(scm_app):
37 response = requests.post(scm_app, data='', stream=True)
37 response = requests.post(scm_app, data='', stream=True)
38 assert response.headers['Transfer-Encoding'] == 'chunked'
38 assert response.headers['Transfer-Encoding'] == 'chunked'
39 times = [time.time() for chunk in response.raw.read_chunked()]
39 times = [time.time() for chunk in response.raw.read_chunked()]
40 assert times[1] - times[0] > 0.1, "Chunks arrived at the same time"
40 assert times[1] - times[0] > 0.1, "Chunks arrived at the same time"
41
41
42
42
43 @pytest.fixture()
43 @pytest.fixture()
44 def echo_app_chunking(request, available_port_factory):
44 def echo_app_chunking(request, available_port_factory):
45 """
45 """
46 Run the EchoApp via Waitress in a subprocess.
46 Run the EchoApp via Waitress in a subprocess.
47
47
48 Return the URL endpoint to reach the app.
48 Return the URL endpoint to reach the app.
49 """
49 """
50 port = available_port_factory()
50 port = available_port_factory()
51 command = (
51 command = (
52 'waitress-serve --send-bytes 1 --port {port} --call '
52 'waitress-serve --send-bytes 1 --port {port} --call '
53 'rhodecode.tests.lib.middleware.utils.test_scm_app_http_chunking'
53 'rhodecode.tests.lib.middleware.utils.test_scm_app_http_chunking'
54 ':create_echo_app')
54 ':create_echo_app')
55 command = command.format(port=port)
55 command = command.format(port=port)
56 proc = subprocess32.Popen(command.split(' '), bufsize=0)
56 proc = subprocess.Popen(command.split(' '), bufsize=0)
57 echo_app_url = 'http://localhost:' + str(port)
57 echo_app_url = 'http://localhost:' + str(port)
58
58
59 @request.addfinalizer
59 @request.addfinalizer
60 def stop_echo_app():
60 def stop_echo_app():
61 proc.kill()
61 proc.kill()
62
62
63 return echo_app_url
63 return echo_app_url
64
64
65
65
66 @pytest.fixture()
66 @pytest.fixture()
67 def scm_app(request, available_port_factory, echo_app_chunking):
67 def scm_app(request, available_port_factory, echo_app_chunking):
68 """
68 """
69 Run the scm_app in Waitress.
69 Run the scm_app in Waitress.
70
70
71 Returns the URL endpoint where this app can be reached.
71 Returns the URL endpoint where this app can be reached.
72 """
72 """
73 port = available_port_factory()
73 port = available_port_factory()
74 command = (
74 command = (
75 'waitress-serve --send-bytes 1 --port {port} --call '
75 'waitress-serve --send-bytes 1 --port {port} --call '
76 'rhodecode.tests.lib.middleware.utils.test_scm_app_http_chunking'
76 'rhodecode.tests.lib.middleware.utils.test_scm_app_http_chunking'
77 ':create_scm_app')
77 ':create_scm_app')
78 command = command.format(port=port)
78 command = command.format(port=port)
79 env = os.environ.copy()
79 env = os.environ.copy()
80 env["RC_ECHO_URL"] = echo_app_chunking
80 env["RC_ECHO_URL"] = echo_app_chunking
81 proc = subprocess32.Popen(command.split(' '), bufsize=0, env=env)
81 proc = subprocess.Popen(command.split(' '), bufsize=0, env=env)
82 scm_app_url = 'http://localhost:' + str(port)
82 scm_app_url = 'http://localhost:' + str(port)
83 wait_for_url(scm_app_url)
83 wait_for_url(scm_app_url)
84
84
85 @request.addfinalizer
85 @request.addfinalizer
86 def stop_echo_app():
86 def stop_echo_app():
87 proc.kill()
87 proc.kill()
88
88
89 return scm_app_url
89 return scm_app_url
90
90
91
91
92 class EchoApp(object):
92 class EchoApp(object):
93 """
93 """
94 Stub WSGI application which returns a chunked response to every request.
94 Stub WSGI application which returns a chunked response to every request.
95 """
95 """
96
96
97 def __init__(self, repo_path, repo_name, config):
97 def __init__(self, repo_path, repo_name, config):
98 self._repo_path = repo_path
98 self._repo_path = repo_path
99
99
100 def __call__(self, environ, start_response):
100 def __call__(self, environ, start_response):
101 environ['wsgi.input'].read()
101 environ['wsgi.input'].read()
102 status = '200 OK'
102 status = '200 OK'
103 headers = []
103 headers = []
104 start_response(status, headers)
104 start_response(status, headers)
105 return result_generator()
105 return result_generator()
106
106
107
107
108 def result_generator():
108 def result_generator():
109 """
109 """
110 Simulate chunked results.
110 Simulate chunked results.
111
111
112 The intended usage is to simulate a chunked response as we would get it
112 The intended usage is to simulate a chunked response as we would get it
113 out of a vcs operation during a call to "hg clone".
113 out of a vcs operation during a call to "hg clone".
114 """
114 """
115 yield 'waiting 2 seconds'
115 yield 'waiting 2 seconds'
116 # Wait long enough so that the first chunk can go out
116 # Wait long enough so that the first chunk can go out
117 time.sleep(2)
117 time.sleep(2)
118 yield 'final chunk'
118 yield 'final chunk'
119 # Another small wait, otherwise they go together
119 # Another small wait, otherwise they go together
120 time.sleep(0.1)
120 time.sleep(0.1)
121
121
122
122
123 def create_echo_app():
123 def create_echo_app():
124 """
124 """
125 Create EchoApp filled with stub data.
125 Create EchoApp filled with stub data.
126 """
126 """
127 return EchoApp('stub_path', 'repo_name', {})
127 return EchoApp('stub_path', 'repo_name', {})
128
128
129
129
130 def create_scm_app():
130 def create_scm_app():
131 """
131 """
132 Create a scm_app hooked up to speak to EchoApp.
132 Create a scm_app hooked up to speak to EchoApp.
133 """
133 """
134 echo_app_url = os.environ["RC_ECHO_URL"]
134 echo_app_url = os.environ["RC_ECHO_URL"]
135 return scm_app_http.VcsHttpProxy(
135 return scm_app_http.VcsHttpProxy(
136 echo_app_url, 'stub_path', 'stub_name', None)
136 echo_app_url, 'stub_path', 'stub_name', None)
@@ -1,463 +1,463 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2020 RhodeCode GmbH
3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Module to test the performance of pull, push and clone operations.
22 Module to test the performance of pull, push and clone operations.
23
23
24 It works by replaying a group of commits to the repo.
24 It works by replaying a group of commits to the repo.
25 """
25 """
26
26
27 import argparse
27 import argparse
28 import collections
28 import collections
29 import ConfigParser
29 import ConfigParser
30 import functools
30 import functools
31 import itertools
31 import itertools
32 import os
32 import os
33 import pprint
33 import pprint
34 import shutil
34 import shutil
35 import subprocess32
35 import subprocess
36 import sys
36 import sys
37 import time
37 import time
38
38
39 import api
39 import api
40
40
41
41
42 def mean(container):
42 def mean(container):
43 """Return the mean of the container."""
43 """Return the mean of the container."""
44 if not container:
44 if not container:
45 return -1.0
45 return -1.0
46 return sum(container) / len(container)
46 return sum(container) / len(container)
47
47
48
48
49 def keep_cwd(f):
49 def keep_cwd(f):
50 """Decorator that keeps track of the starting working directory."""
50 """Decorator that keeps track of the starting working directory."""
51 @functools.wraps(f)
51 @functools.wraps(f)
52 def wrapped_f(*args, **kwargs):
52 def wrapped_f(*args, **kwargs):
53 cur_dir = os.getcwd()
53 cur_dir = os.getcwd()
54 try:
54 try:
55 return f(*args, **kwargs)
55 return f(*args, **kwargs)
56 finally:
56 finally:
57 os.chdir(cur_dir)
57 os.chdir(cur_dir)
58
58
59 return wrapped_f
59 return wrapped_f
60
60
61
61
62 def timed(f):
62 def timed(f):
63 """Decorator that returns the time it took to execute the function."""
63 """Decorator that returns the time it took to execute the function."""
64 @functools.wraps(f)
64 @functools.wraps(f)
65 def wrapped_f(*args, **kwargs):
65 def wrapped_f(*args, **kwargs):
66 start_time = time.time()
66 start_time = time.time()
67 try:
67 try:
68 f(*args, **kwargs)
68 f(*args, **kwargs)
69 finally:
69 finally:
70 return time.time() - start_time
70 return time.time() - start_time
71
71
72 return wrapped_f
72 return wrapped_f
73
73
74
74
75 def execute(*popenargs, **kwargs):
75 def execute(*popenargs, **kwargs):
76 """Extension of subprocess.check_output to support writing to stdin."""
76 """Extension of subprocess.check_output to support writing to stdin."""
77 input = kwargs.pop('stdin', None)
77 input = kwargs.pop('stdin', None)
78 stdin = None
78 stdin = None
79 if input:
79 if input:
80 stdin = subprocess32.PIPE
80 stdin = subprocess.PIPE
81 #if 'stderr' not in kwargs:
81 #if 'stderr' not in kwargs:
82 # kwargs['stderr'] = subprocess32.PIPE
82 # kwargs['stderr'] = subprocess.PIPE
83 if 'stdout' in kwargs:
83 if 'stdout' in kwargs:
84 raise ValueError('stdout argument not allowed, it will be overridden.')
84 raise ValueError('stdout argument not allowed, it will be overridden.')
85 process = subprocess32.Popen(stdin=stdin, stdout=subprocess32.PIPE,
85 process = subprocess.Popen(stdin=stdin, stdout=subprocess.PIPE,
86 *popenargs, **kwargs)
86 *popenargs, **kwargs)
87 output, error = process.communicate(input=input)
87 output, error = process.communicate(input=input)
88 retcode = process.poll()
88 retcode = process.poll()
89 if retcode:
89 if retcode:
90 cmd = kwargs.get("args")
90 cmd = kwargs.get("args")
91 if cmd is None:
91 if cmd is None:
92 cmd = popenargs[0]
92 cmd = popenargs[0]
93 print('{} {} {} '.format(cmd, output, error))
93 print('{} {} {} '.format(cmd, output, error))
94 raise subprocess32.CalledProcessError(retcode, cmd, output=output)
94 raise subprocess.CalledProcessError(retcode, cmd, output=output)
95 return output
95 return output
96
96
97
97
98 def get_repo_name(repo_url):
98 def get_repo_name(repo_url):
99 """Extract the repo name from its url."""
99 """Extract the repo name from its url."""
100 repo_url = repo_url.rstrip('/')
100 repo_url = repo_url.rstrip('/')
101 return repo_url.split('/')[-1].split('.')[0]
101 return repo_url.split('/')[-1].split('.')[0]
102
102
103
103
104 class TestPerformanceBase(object):
104 class TestPerformanceBase(object):
105 def __init__(self, base_dir, repo_url, n_commits, max_commits,
105 def __init__(self, base_dir, repo_url, n_commits, max_commits,
106 skip_commits):
106 skip_commits):
107 self.repo_url = repo_url
107 self.repo_url = repo_url
108 self.repo_name = get_repo_name(self.repo_url)
108 self.repo_name = get_repo_name(self.repo_url)
109 self.upstream_repo_name = '%s_upstream' % self.repo_name
109 self.upstream_repo_name = '%s_upstream' % self.repo_name
110 self.base_dir = os.path.abspath(base_dir)
110 self.base_dir = os.path.abspath(base_dir)
111 self.n_commits = n_commits
111 self.n_commits = n_commits
112 self.max_commits = max_commits
112 self.max_commits = max_commits
113 self.skip_commits = skip_commits
113 self.skip_commits = skip_commits
114 self.push_times = []
114 self.push_times = []
115 self.pull_times = []
115 self.pull_times = []
116 self.empty_pull_times = []
116 self.empty_pull_times = []
117 self.clone_time = -1.0
117 self.clone_time = -1.0
118 self.last_commit = None
118 self.last_commit = None
119
119
120 self.cloned_repo = ''
120 self.cloned_repo = ''
121 self.pull_repo = ''
121 self.pull_repo = ''
122 self.orig_repo = ''
122 self.orig_repo = ''
123
123
124 def run(self):
124 def run(self):
125 try:
125 try:
126 self.test()
126 self.test()
127 except Exception as error:
127 except Exception as error:
128 print(error)
128 print(error)
129 finally:
129 finally:
130 self.cleanup()
130 self.cleanup()
131
131
132 print('Clone time :{}'.format(self.clone_time))
132 print('Clone time :{}'.format(self.clone_time))
133 print('Push time :{}'.format(mean(self.push_times)))
133 print('Push time :{}'.format(mean(self.push_times)))
134 print('Pull time :{}'.format(mean(self.pull_times)))
134 print('Pull time :{}'.format(mean(self.pull_times)))
135 print('Empty pull time:{}'.format(mean(self.empty_pull_times)))
135 print('Empty pull time:{}'.format(mean(self.empty_pull_times)))
136
136
137 return {
137 return {
138 'clone': self.clone_time,
138 'clone': self.clone_time,
139 'push': mean(self.push_times),
139 'push': mean(self.push_times),
140 'pull': mean(self.pull_times),
140 'pull': mean(self.pull_times),
141 'empty_pull': mean(self.empty_pull_times),
141 'empty_pull': mean(self.empty_pull_times),
142 }
142 }
143
143
144 @keep_cwd
144 @keep_cwd
145 def test(self):
145 def test(self):
146 os.chdir(self.base_dir)
146 os.chdir(self.base_dir)
147
147
148 self.orig_repo = os.path.join(self.base_dir, self.repo_name)
148 self.orig_repo = os.path.join(self.base_dir, self.repo_name)
149 if not os.path.exists(self.orig_repo):
149 if not os.path.exists(self.orig_repo):
150 self.clone_repo(self.repo_url, default_only=True)
150 self.clone_repo(self.repo_url, default_only=True)
151
151
152 upstream_url = self.create_repo(self.upstream_repo_name, self.repo_type)
152 upstream_url = self.create_repo(self.upstream_repo_name, self.repo_type)
153
153
154 self.add_remote(self.orig_repo, upstream_url)
154 self.add_remote(self.orig_repo, upstream_url)
155
155
156 self.pull_repo = os.path.join(self.base_dir, '%s_pull' % self.repo_name)
156 self.pull_repo = os.path.join(self.base_dir, '%s_pull' % self.repo_name)
157 self.clone_repo(upstream_url, self.pull_repo)
157 self.clone_repo(upstream_url, self.pull_repo)
158
158
159 commits = self.get_commits(self.orig_repo)
159 commits = self.get_commits(self.orig_repo)
160 self.last_commit = commits[-1]
160 self.last_commit = commits[-1]
161 if self.skip_commits:
161 if self.skip_commits:
162 self.push(
162 self.push(
163 self.orig_repo, commits[self.skip_commits - 1], 'upstream')
163 self.orig_repo, commits[self.skip_commits - 1], 'upstream')
164 commits = commits[self.skip_commits:self.max_commits]
164 commits = commits[self.skip_commits:self.max_commits]
165
165
166 print('Working with %d commits' % len(commits))
166 print('Working with %d commits' % len(commits))
167 for i in range(self.n_commits - 1, len(commits), self.n_commits):
167 for i in range(self.n_commits - 1, len(commits), self.n_commits):
168 commit = commits[i]
168 commit = commits[i]
169 print('Processing commit %s (%d)' % (commit, i + 1))
169 print('Processing commit %s (%d)' % (commit, i + 1))
170 self.push_times.append(
170 self.push_times.append(
171 self.push(self.orig_repo, commit, 'upstream'))
171 self.push(self.orig_repo, commit, 'upstream'))
172 self.check_remote_last_commit_is(commit, upstream_url)
172 self.check_remote_last_commit_is(commit, upstream_url)
173
173
174 self.pull_times.append(self.pull(self.pull_repo))
174 self.pull_times.append(self.pull(self.pull_repo))
175 self.check_local_last_commit_is(commit, self.pull_repo)
175 self.check_local_last_commit_is(commit, self.pull_repo)
176
176
177 self.empty_pull_times.append(self.pull(self.pull_repo))
177 self.empty_pull_times.append(self.pull(self.pull_repo))
178
178
179 self.cloned_repo = os.path.join(self.base_dir,
179 self.cloned_repo = os.path.join(self.base_dir,
180 '%s_clone' % self.repo_name)
180 '%s_clone' % self.repo_name)
181 self.clone_time = self.clone_repo(upstream_url, self.cloned_repo)
181 self.clone_time = self.clone_repo(upstream_url, self.cloned_repo)
182
182
183 def cleanup(self):
183 def cleanup(self):
184 try:
184 try:
185 self.delete_repo(self.upstream_repo_name)
185 self.delete_repo(self.upstream_repo_name)
186 except api.ApiError:
186 except api.ApiError:
187 # Continue in case we could not delete the repo. Maybe we did not
187 # Continue in case we could not delete the repo. Maybe we did not
188 # create it in the first place.
188 # create it in the first place.
189 pass
189 pass
190
190
191 shutil.rmtree(self.pull_repo, ignore_errors=True)
191 shutil.rmtree(self.pull_repo, ignore_errors=True)
192 shutil.rmtree(self.cloned_repo, ignore_errors=True)
192 shutil.rmtree(self.cloned_repo, ignore_errors=True)
193
193
194 if os.path.exists(self.orig_repo):
194 if os.path.exists(self.orig_repo):
195 self.remove_remote(self.orig_repo)
195 self.remove_remote(self.orig_repo)
196
196
197
197
198 class RhodeCodeMixin(object):
198 class RhodeCodeMixin(object):
199 """Mixin providing the methods to create and delete repos in RhodeCode."""
199 """Mixin providing the methods to create and delete repos in RhodeCode."""
200 def __init__(self, api_key):
200 def __init__(self, api_key):
201 self.api = api.RCApi(api_key=api_key)
201 self.api = api.RCApi(api_key=api_key)
202
202
203 def create_repo(self, repo_name, repo_type):
203 def create_repo(self, repo_name, repo_type):
204 return self.api.create_repo(repo_name, repo_type,
204 return self.api.create_repo(repo_name, repo_type,
205 'Repo for perfomance testing')
205 'Repo for perfomance testing')
206
206
207 def delete_repo(self, repo_name):
207 def delete_repo(self, repo_name):
208 return self.api.delete_repo(repo_name)
208 return self.api.delete_repo(repo_name)
209
209
210
210
211 class GitMixin(object):
211 class GitMixin(object):
212 """Mixin providing the git operations."""
212 """Mixin providing the git operations."""
213 @timed
213 @timed
214 def clone_repo(self, repo_url, destination=None, default_only=False):
214 def clone_repo(self, repo_url, destination=None, default_only=False):
215 args = ['git', 'clone']
215 args = ['git', 'clone']
216 if default_only:
216 if default_only:
217 args.extend(['--branch', 'master', '--single-branch'])
217 args.extend(['--branch', 'master', '--single-branch'])
218 args.append(repo_url)
218 args.append(repo_url)
219 if destination:
219 if destination:
220 args.append(destination)
220 args.append(destination)
221 execute(args)
221 execute(args)
222
222
223 @keep_cwd
223 @keep_cwd
224 def add_remote(self, repo, remote_url, remote_name='upstream'):
224 def add_remote(self, repo, remote_url, remote_name='upstream'):
225 self.remove_remote(repo, remote_name)
225 self.remove_remote(repo, remote_name)
226 os.chdir(repo)
226 os.chdir(repo)
227 execute(['git', 'remote', 'add', remote_name, remote_url])
227 execute(['git', 'remote', 'add', remote_name, remote_url])
228
228
229 @keep_cwd
229 @keep_cwd
230 def remove_remote(self, repo, remote_name='upstream'):
230 def remove_remote(self, repo, remote_name='upstream'):
231 os.chdir(repo)
231 os.chdir(repo)
232 remotes = execute(['git', 'remote']).split('\n')
232 remotes = execute(['git', 'remote']).split('\n')
233 if remote_name in remotes:
233 if remote_name in remotes:
234 execute(['git', 'remote', 'remove', remote_name])
234 execute(['git', 'remote', 'remove', remote_name])
235
235
236 @keep_cwd
236 @keep_cwd
237 def get_commits(self, repo, branch='master'):
237 def get_commits(self, repo, branch='master'):
238 os.chdir(repo)
238 os.chdir(repo)
239 commits_list = execute(
239 commits_list = execute(
240 ['git', 'log', '--first-parent', branch, '--pretty=%H'])
240 ['git', 'log', '--first-parent', branch, '--pretty=%H'])
241 return commits_list.strip().split('\n')[::-1]
241 return commits_list.strip().split('\n')[::-1]
242
242
243 @timed
243 @timed
244 def push(self, repo, commit, remote_name=None):
244 def push(self, repo, commit, remote_name=None):
245 os.chdir(repo)
245 os.chdir(repo)
246 try:
246 try:
247 execute(['git', 'reset', '--soft', commit])
247 execute(['git', 'reset', '--soft', commit])
248 args = ['git', 'push']
248 args = ['git', 'push']
249 if remote_name:
249 if remote_name:
250 args.append(remote_name)
250 args.append(remote_name)
251 execute(args)
251 execute(args)
252 finally:
252 finally:
253 execute(['git', 'reset', '--soft', 'HEAD@{1}'])
253 execute(['git', 'reset', '--soft', 'HEAD@{1}'])
254
254
255 @timed
255 @timed
256 def pull(self, repo):
256 def pull(self, repo):
257 os.chdir(repo)
257 os.chdir(repo)
258 execute(['git', 'pull'])
258 execute(['git', 'pull'])
259
259
260 def _remote_last_commit(self, repo_url):
260 def _remote_last_commit(self, repo_url):
261 output = execute(['git', 'ls-remote', repo_url, 'HEAD'])
261 output = execute(['git', 'ls-remote', repo_url, 'HEAD'])
262 return output.split()[0]
262 return output.split()[0]
263
263
264 def check_remote_last_commit_is(self, commit, repo_url):
264 def check_remote_last_commit_is(self, commit, repo_url):
265 last_remote_commit = self._remote_last_commit(repo_url)
265 last_remote_commit = self._remote_last_commit(repo_url)
266 if last_remote_commit != commit:
266 if last_remote_commit != commit:
267 raise Exception('Push did not work, expected commit %s but got %s' %
267 raise Exception('Push did not work, expected commit %s but got %s' %
268 (commit, last_remote_commit))
268 (commit, last_remote_commit))
269
269
270 @keep_cwd
270 @keep_cwd
271 def _local_last_commit(self, repo):
271 def _local_last_commit(self, repo):
272 os.chdir(repo)
272 os.chdir(repo)
273 return execute(['git', 'rev-parse', 'HEAD']).strip()
273 return execute(['git', 'rev-parse', 'HEAD']).strip()
274
274
275 def check_local_last_commit_is(self, commit, repo):
275 def check_local_last_commit_is(self, commit, repo):
276 last_local_commit = self._local_last_commit(repo)
276 last_local_commit = self._local_last_commit(repo)
277 if last_local_commit != commit:
277 if last_local_commit != commit:
278 raise Exception('Pull did not work, expected commit %s but got %s' %
278 raise Exception('Pull did not work, expected commit %s but got %s' %
279 (commit, last_local_commit))
279 (commit, last_local_commit))
280
280
281
281
282 class HgMixin(object):
282 class HgMixin(object):
283 """Mixin providing the mercurial operations."""
283 """Mixin providing the mercurial operations."""
284 @timed
284 @timed
285 def clone_repo(self, repo_url, destination=None, default_only=False):
285 def clone_repo(self, repo_url, destination=None, default_only=False):
286 args = ['hg', 'clone']
286 args = ['hg', 'clone']
287 if default_only:
287 if default_only:
288 args.extend(['--branch', 'default'])
288 args.extend(['--branch', 'default'])
289 args.append(repo_url)
289 args.append(repo_url)
290 if destination:
290 if destination:
291 args.append(destination)
291 args.append(destination)
292 execute(args)
292 execute(args)
293
293
294 @keep_cwd
294 @keep_cwd
295 def add_remote(self, repo, remote_url, remote_name='upstream'):
295 def add_remote(self, repo, remote_url, remote_name='upstream'):
296 self.remove_remote(repo, remote_name)
296 self.remove_remote(repo, remote_name)
297 os.chdir(repo)
297 os.chdir(repo)
298 hgrc = ConfigParser.RawConfigParser()
298 hgrc = ConfigParser.RawConfigParser()
299 hgrc.read('.hg/hgrc')
299 hgrc.read('.hg/hgrc')
300 hgrc.set('paths', remote_name, remote_url)
300 hgrc.set('paths', remote_name, remote_url)
301 with open('.hg/hgrc', 'w') as f:
301 with open('.hg/hgrc', 'w') as f:
302 hgrc.write(f)
302 hgrc.write(f)
303
303
304 @keep_cwd
304 @keep_cwd
305 def remove_remote(self, repo, remote_name='upstream'):
305 def remove_remote(self, repo, remote_name='upstream'):
306 os.chdir(repo)
306 os.chdir(repo)
307 hgrc = ConfigParser.RawConfigParser()
307 hgrc = ConfigParser.RawConfigParser()
308 hgrc.read('.hg/hgrc')
308 hgrc.read('.hg/hgrc')
309 hgrc.remove_option('paths', remote_name)
309 hgrc.remove_option('paths', remote_name)
310 with open('.hg/hgrc', 'w') as f:
310 with open('.hg/hgrc', 'w') as f:
311 hgrc.write(f)
311 hgrc.write(f)
312
312
313 @keep_cwd
313 @keep_cwd
314 def get_commits(self, repo, branch='default'):
314 def get_commits(self, repo, branch='default'):
315 os.chdir(repo)
315 os.chdir(repo)
316 # See http://stackoverflow.com/questions/15376649/is-there-a-mercurial-equivalent-to-git-log-first-parent
316 # See http://stackoverflow.com/questions/15376649/is-there-a-mercurial-equivalent-to-git-log-first-parent
317 commits_list = execute(['hg', 'log', '--branch', branch, '--template',
317 commits_list = execute(['hg', 'log', '--branch', branch, '--template',
318 '{node}\n', '--follow-first'])
318 '{node}\n', '--follow-first'])
319 return commits_list.strip().split('\n')[::-1]
319 return commits_list.strip().split('\n')[::-1]
320
320
321 @timed
321 @timed
322 def push(self, repo, commit, remote_name=None):
322 def push(self, repo, commit, remote_name=None):
323 os.chdir(repo)
323 os.chdir(repo)
324 args = ['hg', 'push', '--rev', commit, '--new-branch']
324 args = ['hg', 'push', '--rev', commit, '--new-branch']
325 if remote_name:
325 if remote_name:
326 args.append(remote_name)
326 args.append(remote_name)
327 execute(args)
327 execute(args)
328
328
329 @timed
329 @timed
330 def pull(self, repo):
330 def pull(self, repo):
331 os.chdir(repo)
331 os.chdir(repo)
332 execute(['hg', '--config', 'alias.pull=pull', 'pull', '-u'])
332 execute(['hg', '--config', 'alias.pull=pull', 'pull', '-u'])
333
333
334 def _remote_last_commit(self, repo_url):
334 def _remote_last_commit(self, repo_url):
335 return execute(['hg', 'identify', repo_url])[:12]
335 return execute(['hg', 'identify', repo_url])[:12]
336
336
337 def check_remote_last_commit_is(self, commit, repo_url):
337 def check_remote_last_commit_is(self, commit, repo_url):
338 last_remote_commit = self._remote_last_commit(repo_url)
338 last_remote_commit = self._remote_last_commit(repo_url)
339 if not commit.startswith(last_remote_commit):
339 if not commit.startswith(last_remote_commit):
340 raise Exception('Push did not work, expected commit %s but got %s' %
340 raise Exception('Push did not work, expected commit %s but got %s' %
341 (commit, last_remote_commit))
341 (commit, last_remote_commit))
342
342
343 @keep_cwd
343 @keep_cwd
344 def _local_last_commit(self, repo):
344 def _local_last_commit(self, repo):
345 os.chdir(repo)
345 os.chdir(repo)
346 return execute(['hg', 'identify'])[:12]
346 return execute(['hg', 'identify'])[:12]
347
347
348 def check_local_last_commit_is(self, commit, repo):
348 def check_local_last_commit_is(self, commit, repo):
349 last_local_commit = self._local_last_commit(repo)
349 last_local_commit = self._local_last_commit(repo)
350 if not commit.startswith(last_local_commit):
350 if not commit.startswith(last_local_commit):
351 raise Exception('Pull did not work, expected commit %s but got %s' %
351 raise Exception('Pull did not work, expected commit %s but got %s' %
352 (commit, last_local_commit))
352 (commit, last_local_commit))
353
353
354
354
355 class GitTestPerformance(GitMixin, RhodeCodeMixin, TestPerformanceBase):
355 class GitTestPerformance(GitMixin, RhodeCodeMixin, TestPerformanceBase):
356 def __init__(self, base_dir, repo_url, n_commits, max_commits, skip_commits,
356 def __init__(self, base_dir, repo_url, n_commits, max_commits, skip_commits,
357 api_key):
357 api_key):
358 TestPerformanceBase.__init__(self, base_dir, repo_url, n_commits,
358 TestPerformanceBase.__init__(self, base_dir, repo_url, n_commits,
359 max_commits, skip_commits)
359 max_commits, skip_commits)
360 RhodeCodeMixin.__init__(self, api_key)
360 RhodeCodeMixin.__init__(self, api_key)
361 self.repo_type = 'git'
361 self.repo_type = 'git'
362
362
363
363
364 class HgTestPerformance(HgMixin, RhodeCodeMixin, TestPerformanceBase):
364 class HgTestPerformance(HgMixin, RhodeCodeMixin, TestPerformanceBase):
365 def __init__(self, base_dir, repo_url, n_commits, max_commits, skip_commits,
365 def __init__(self, base_dir, repo_url, n_commits, max_commits, skip_commits,
366 api_key):
366 api_key):
367 TestPerformanceBase.__init__(self, base_dir, repo_url, n_commits,
367 TestPerformanceBase.__init__(self, base_dir, repo_url, n_commits,
368 max_commits, skip_commits)
368 max_commits, skip_commits)
369 RhodeCodeMixin.__init__(self, api_key)
369 RhodeCodeMixin.__init__(self, api_key)
370 self.repo_type = 'hg'
370 self.repo_type = 'hg'
371
371
372
372
373 def get_test(base_dir, repo_url, repo_type, step, max_commits, skip_commits,
373 def get_test(base_dir, repo_url, repo_type, step, max_commits, skip_commits,
374 api_key):
374 api_key):
375 max_commits = min(10 * step,
375 max_commits = min(10 * step,
376 int((max_commits - skip_commits) / step) * step)
376 int((max_commits - skip_commits) / step) * step)
377 max_commits += skip_commits
377 max_commits += skip_commits
378 if repo_type == 'git':
378 if repo_type == 'git':
379 return GitTestPerformance(
379 return GitTestPerformance(
380 base_dir, repo_url, step, max_commits, skip_commits, api_key)
380 base_dir, repo_url, step, max_commits, skip_commits, api_key)
381 elif repo_type == 'hg':
381 elif repo_type == 'hg':
382 return HgTestPerformance(
382 return HgTestPerformance(
383 base_dir, repo_url, step, max_commits, skip_commits, api_key)
383 base_dir, repo_url, step, max_commits, skip_commits, api_key)
384
384
385
385
386 def main(argv):
386 def main(argv):
387 parser = argparse.ArgumentParser(
387 parser = argparse.ArgumentParser(
388 description='Performance tests for push/pull/clone for git and ' +
388 description='Performance tests for push/pull/clone for git and ' +
389 'mercurial repos.')
389 'mercurial repos.')
390 parser.add_argument(
390 parser.add_argument(
391 '--tests', dest='tests', action='store', required=False, default='all',
391 '--tests', dest='tests', action='store', required=False, default='all',
392 help='The tests to run. Default: all. But could be any comma ' +
392 help='The tests to run. Default: all. But could be any comma ' +
393 'separated list with python, hg, kernel or git')
393 'separated list with python, hg, kernel or git')
394 parser.add_argument(
394 parser.add_argument(
395 '--sizes', dest='sizes', action='store', required=False,
395 '--sizes', dest='sizes', action='store', required=False,
396 default='1,10,100,1000,2500',
396 default='1,10,100,1000,2500',
397 help='The sizes to use. Default: 1,10,100,1000,2500')
397 help='The sizes to use. Default: 1,10,100,1000,2500')
398 parser.add_argument(
398 parser.add_argument(
399 '--dir', dest='dir', action='store', required=True,
399 '--dir', dest='dir', action='store', required=True,
400 help='The dir where to store the repos')
400 help='The dir where to store the repos')
401 parser.add_argument(
401 parser.add_argument(
402 '--api-key', dest='api_key', action='store', required=True,
402 '--api-key', dest='api_key', action='store', required=True,
403 help='The api key of RhodeCode')
403 help='The api key of RhodeCode')
404 options = parser.parse_args(argv[1:])
404 options = parser.parse_args(argv[1:])
405 print(options)
405 print(options)
406
406
407 test_config = {
407 test_config = {
408 'python': {
408 'python': {
409 'url': 'https://hg.python.org/cpython/',
409 'url': 'https://hg.python.org/cpython/',
410 'limit': 23322,
410 'limit': 23322,
411 'type': 'hg',
411 'type': 'hg',
412 # Do not time the first commit, as it is HUGE!
412 # Do not time the first commit, as it is HUGE!
413 'skip': 1,
413 'skip': 1,
414 },
414 },
415 'hg': {
415 'hg': {
416 'url': 'http://selenic.com/hg',
416 'url': 'http://selenic.com/hg',
417 'limit': 14396,
417 'limit': 14396,
418 'type': 'hg',
418 'type': 'hg',
419 },
419 },
420 'kernel': {
420 'kernel': {
421 'url': 'https://github.com/torvalds/linux.git',
421 'url': 'https://github.com/torvalds/linux.git',
422 'limit': 46271,
422 'limit': 46271,
423 'type': 'git',
423 'type': 'git',
424 },
424 },
425 'git': {
425 'git': {
426 'url': 'https://github.com/git/git.git',
426 'url': 'https://github.com/git/git.git',
427 'limit': 13525,
427 'limit': 13525,
428 'type': 'git',
428 'type': 'git',
429 }
429 }
430
430
431 }
431 }
432
432
433 test_names = options.tests.split(',')
433 test_names = options.tests.split(',')
434 if test_names == ['all']:
434 if test_names == ['all']:
435 test_names = test_config.keys()
435 test_names = test_config.keys()
436 if not set(test_names) <= set(test_config.keys()):
436 if not set(test_names) <= set(test_config.keys()):
437 print('Invalid tests: only %s are valid but specified %s' %
437 print('Invalid tests: only %s are valid but specified %s' %
438 (test_config.keys(), test_names))
438 (test_config.keys(), test_names))
439 return 1
439 return 1
440
440
441 sizes = options.sizes.split(',')
441 sizes = options.sizes.split(',')
442 sizes = map(int, sizes)
442 sizes = map(int, sizes)
443
443
444 base_dir = options.dir
444 base_dir = options.dir
445 api_key = options.api_key
445 api_key = options.api_key
446 results = collections.defaultdict(dict)
446 results = collections.defaultdict(dict)
447 for test_name, size in itertools.product(test_names, sizes):
447 for test_name, size in itertools.product(test_names, sizes):
448 test = get_test(base_dir,
448 test = get_test(base_dir,
449 test_config[test_name]['url'],
449 test_config[test_name]['url'],
450 test_config[test_name]['type'],
450 test_config[test_name]['type'],
451 size,
451 size,
452 test_config[test_name]['limit'],
452 test_config[test_name]['limit'],
453 test_config[test_name].get('skip', 0),
453 test_config[test_name].get('skip', 0),
454 api_key)
454 api_key)
455 print('*' * 80)
455 print('*' * 80)
456 print('Running performance test: %s with size %d' % (test_name, size))
456 print('Running performance test: %s with size %d' % (test_name, size))
457 print('*' * 80)
457 print('*' * 80)
458 results[test_name][size] = test.run()
458 results[test_name][size] = test.run()
459 pprint.pprint(dict(results))
459 pprint.pprint(dict(results))
460
460
461
461
462 if __name__ == '__main__':
462 if __name__ == '__main__':
463 sys.exit(main(sys.argv))
463 sys.exit(main(sys.argv))
@@ -1,155 +1,155 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2020 RhodeCode GmbH
3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 This is a standalone script which will start VCS and RC.
22 This is a standalone script which will start VCS and RC.
23
23
24 Performance numbers will be written on each interval to:
24 Performance numbers will be written on each interval to:
25 vcs_profileX.csv
25 vcs_profileX.csv
26 rc_profileX.csv
26 rc_profileX.csv
27
27
28 To stop the script by press Ctrl-C
28 To stop the script by press Ctrl-C
29 """
29 """
30
30
31 import datetime
31 import datetime
32 import os
32 import os
33 import psutil
33 import psutil
34 import subprocess32
34 import subprocess
35 import sys
35 import sys
36 import time
36 import time
37 import traceback
37 import traceback
38 import urllib.request, urllib.parse, urllib.error
38 import urllib.request, urllib.parse, urllib.error
39
39
40 PROFILING_INTERVAL = 5
40 PROFILING_INTERVAL = 5
41 RC_WEBSITE = "http://localhost:5001/"
41 RC_WEBSITE = "http://localhost:5001/"
42
42
43
43
44 def get_file(prefix):
44 def get_file(prefix):
45 out_file = None
45 out_file = None
46 for i in range(100):
46 for i in range(100):
47 file_path = "%s_profile%.3d.csv" % (prefix, i)
47 file_path = "%s_profile%.3d.csv" % (prefix, i)
48 if os.path.exists(file_path):
48 if os.path.exists(file_path):
49 continue
49 continue
50 out_file = open(file_path, "w")
50 out_file = open(file_path, "w")
51 out_file.write("Time; CPU %; Memory (MB); Total FDs; Dulwich FDs; Threads\n")
51 out_file.write("Time; CPU %; Memory (MB); Total FDs; Dulwich FDs; Threads\n")
52 break
52 break
53 return out_file
53 return out_file
54
54
55
55
56 def dump_system():
56 def dump_system():
57 print("System Overview...")
57 print("System Overview...")
58 print("\nCPU Count: %d (%d real)" %
58 print("\nCPU Count: %d (%d real)" %
59 (psutil.cpu_count(), psutil.cpu_count(logical=False)))
59 (psutil.cpu_count(), psutil.cpu_count(logical=False)))
60 print("\nDisk:")
60 print("\nDisk:")
61 print(psutil.disk_usage(os.sep))
61 print(psutil.disk_usage(os.sep))
62 print("\nMemory:")
62 print("\nMemory:")
63 print(psutil.virtual_memory())
63 print(psutil.virtual_memory())
64 print("\nMemory (swap):")
64 print("\nMemory (swap):")
65 print(psutil.swap_memory())
65 print(psutil.swap_memory())
66
66
67
67
68 def count_dulwich_fds(proc):
68 def count_dulwich_fds(proc):
69 p = subprocess32.Popen(["lsof", "-p", proc.pid], stdout=subprocess32.PIPE)
69 p = subprocess.Popen(["lsof", "-p", proc.pid], stdout=subprocess.PIPE)
70 out, err = p.communicate()
70 out, err = p.communicate()
71
71
72 count = 0
72 count = 0
73 for line in out.splitlines():
73 for line in out.splitlines():
74 content = line.split()
74 content = line.split()
75 # http://git-scm.com/book/en/Git-Internals-Packfiles
75 # http://git-scm.com/book/en/Git-Internals-Packfiles
76 if content[-1].endswith(".idx"):
76 if content[-1].endswith(".idx"):
77 count += 1
77 count += 1
78
78
79 return count
79 return count
80
80
81 def dump_process(pid, out_file):
81 def dump_process(pid, out_file):
82 now = datetime.datetime.now()
82 now = datetime.datetime.now()
83 cpu = pid.cpu_percent()
83 cpu = pid.cpu_percent()
84 mem = pid.memory_info()
84 mem = pid.memory_info()
85 fds = pid.num_fds()
85 fds = pid.num_fds()
86 dulwich_fds = count_dulwich_fds(pid)
86 dulwich_fds = count_dulwich_fds(pid)
87 threads = pid.num_threads()
87 threads = pid.num_threads()
88
88
89 content = [now.strftime('%m/%d/%y %H:%M:%S'),
89 content = [now.strftime('%m/%d/%y %H:%M:%S'),
90 cpu,
90 cpu,
91 "%.2f" % (mem[0]/1024.0/1024.0),
91 "%.2f" % (mem[0]/1024.0/1024.0),
92 fds, dulwich_fds, threads]
92 fds, dulwich_fds, threads]
93 out_file.write("; ".join([str(item) for item in content]))
93 out_file.write("; ".join([str(item) for item in content]))
94 out_file.write("\n")
94 out_file.write("\n")
95
95
96
96
97 # Open output files
97 # Open output files
98 vcs_out = get_file("vcs")
98 vcs_out = get_file("vcs")
99 if vcs_out is None:
99 if vcs_out is None:
100 print("Unable to enumerate output file for VCS")
100 print("Unable to enumerate output file for VCS")
101 sys.exit(1)
101 sys.exit(1)
102 rc_out = get_file("rc")
102 rc_out = get_file("rc")
103 if rc_out is None:
103 if rc_out is None:
104 print("Unable to enumerate output file for RC")
104 print("Unable to enumerate output file for RC")
105 sys.exit(1)
105 sys.exit(1)
106
106
107 # Show system information
107 # Show system information
108 dump_system()
108 dump_system()
109
109
110 print("\nStarting VCS...")
110 print("\nStarting VCS...")
111 vcs = psutil.Popen(["vcsserver"])
111 vcs = psutil.Popen(["vcsserver"])
112 time.sleep(1)
112 time.sleep(1)
113 if not vcs.is_running():
113 if not vcs.is_running():
114 print("VCS - Failed to start")
114 print("VCS - Failed to start")
115 sys.exit(1)
115 sys.exit(1)
116 print("VCS - Ok")
116 print("VCS - Ok")
117
117
118 print("\nStarting RhodeCode...")
118 print("\nStarting RhodeCode...")
119 rc = psutil.Popen("RC_VCSSERVER_TEST_DISABLE=1 paster serve test.ini",
119 rc = psutil.Popen("RC_VCSSERVER_TEST_DISABLE=1 paster serve test.ini",
120 shell=True, stdin=subprocess32.PIPE)
120 shell=True, stdin=subprocess.PIPE)
121 time.sleep(1)
121 time.sleep(1)
122 if not rc.is_running():
122 if not rc.is_running():
123 print("RC - Failed to start")
123 print("RC - Failed to start")
124 vcs.terminate()
124 vcs.terminate()
125 sys.exit(1)
125 sys.exit(1)
126
126
127 # Send command to create the databases
127 # Send command to create the databases
128 rc.stdin.write("y\n")
128 rc.stdin.write("y\n")
129
129
130 # Verify that the website is up
130 # Verify that the website is up
131 time.sleep(4)
131 time.sleep(4)
132 try:
132 try:
133 urllib.request.urlopen(RC_WEBSITE)
133 urllib.request.urlopen(RC_WEBSITE)
134 except IOError:
134 except IOError:
135 print("RC - Website not started")
135 print("RC - Website not started")
136 vcs.terminate()
136 vcs.terminate()
137 sys.exit(1)
137 sys.exit(1)
138 print("RC - Ok")
138 print("RC - Ok")
139
139
140 print("\nProfiling...\n%s\n" % ("-"*80))
140 print("\nProfiling...\n%s\n" % ("-"*80))
141 while True:
141 while True:
142 try:
142 try:
143 dump_process(vcs, vcs_out)
143 dump_process(vcs, vcs_out)
144 dump_process(rc, rc_out)
144 dump_process(rc, rc_out)
145 time.sleep(PROFILING_INTERVAL)
145 time.sleep(PROFILING_INTERVAL)
146 except Exception:
146 except Exception:
147 print(traceback.format_exc())
147 print(traceback.format_exc())
148 break
148 break
149
149
150 # Finalize the profiling
150 # Finalize the profiling
151 vcs_out.close()
151 vcs_out.close()
152 rc_out.close()
152 rc_out.close()
153
153
154 vcs.terminate()
154 vcs.terminate()
155 rc.terminate()
155 rc.terminate()
@@ -1,306 +1,306 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2016-2020 RhodeCode GmbH
3 # Copyright (C) 2016-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 VCS Performance measurement tool
22 VCS Performance measurement tool
23
23
24 Usage:
24 Usage:
25
25
26 - Check that required vcs keys can be found in ~/.hgrc and ~/.netrc
26 - Check that required vcs keys can be found in ~/.hgrc and ~/.netrc
27
27
28 - Start a local instance of RhodeCode Enterprise
28 - Start a local instance of RhodeCode Enterprise
29
29
30 - Launch the script:
30 - Launch the script:
31
31
32 TMPDIR=/tmp python vcs_performance.py \
32 TMPDIR=/tmp python vcs_performance.py \
33 --host=http://vm:5000 \
33 --host=http://vm:5000 \
34 --api-key=55c4a33688577da24183dcac5fde4dddfdbf18dc \
34 --api-key=55c4a33688577da24183dcac5fde4dddfdbf18dc \
35 --commits=10 --repositories=100 --log-level=info
35 --commits=10 --repositories=100 --log-level=info
36 """
36 """
37
37
38 import argparse
38 import argparse
39 import functools
39 import functools
40 import logging
40 import logging
41 import os
41 import os
42 import shutil
42 import shutil
43 import subprocess32
43 import subprocess
44 import tempfile
44 import tempfile
45 import time
45 import time
46 from itertools import chain
46 from itertools import chain
47
47
48 from api import RCApi, ApiError
48 from api import RCApi, ApiError
49
49
50
50
51 log = logging.getLogger(__name__)
51 log = logging.getLogger(__name__)
52
52
53
53
54 def timed(f):
54 def timed(f):
55 """Decorator that returns the time it took to execute the function."""
55 """Decorator that returns the time it took to execute the function."""
56 @functools.wraps(f)
56 @functools.wraps(f)
57 def wrapped_f(*args, **kwargs):
57 def wrapped_f(*args, **kwargs):
58 start_time = time.time()
58 start_time = time.time()
59 try:
59 try:
60 f(*args, **kwargs)
60 f(*args, **kwargs)
61 finally:
61 finally:
62 return time.time() - start_time
62 return time.time() - start_time
63
63
64 return wrapped_f
64 return wrapped_f
65
65
66
66
67 def mean(container):
67 def mean(container):
68 """Return the mean of the container."""
68 """Return the mean of the container."""
69 if not container:
69 if not container:
70 return -1.0
70 return -1.0
71 return sum(container) / len(container)
71 return sum(container) / len(container)
72
72
73
73
74 class Config(object):
74 class Config(object):
75 args = None
75 args = None
76
76
77 def __init__(self):
77 def __init__(self):
78 parser = argparse.ArgumentParser(description='Runs VCS load tests')
78 parser = argparse.ArgumentParser(description='Runs VCS load tests')
79 parser.add_argument(
79 parser.add_argument(
80 '--host', dest='host', action='store', required=True,
80 '--host', dest='host', action='store', required=True,
81 help='RhodeCode Enterprise host')
81 help='RhodeCode Enterprise host')
82 parser.add_argument(
82 parser.add_argument(
83 '--api-key', dest='api_key', action='store', required=True,
83 '--api-key', dest='api_key', action='store', required=True,
84 help='API Key')
84 help='API Key')
85 parser.add_argument(
85 parser.add_argument(
86 '--file-size', dest='file_size', action='store', required=False,
86 '--file-size', dest='file_size', action='store', required=False,
87 default=1, type=int, help='File size in MB')
87 default=1, type=int, help='File size in MB')
88 parser.add_argument(
88 parser.add_argument(
89 '--repositories', dest='repositories', action='store',
89 '--repositories', dest='repositories', action='store',
90 required=False, default=1, type=int,
90 required=False, default=1, type=int,
91 help='Number of repositories')
91 help='Number of repositories')
92 parser.add_argument(
92 parser.add_argument(
93 '--commits', dest='commits', action='store', required=False,
93 '--commits', dest='commits', action='store', required=False,
94 default=1, type=int, help='Number of commits')
94 default=1, type=int, help='Number of commits')
95 parser.add_argument(
95 parser.add_argument(
96 '--log-level', dest='log_level', action='store', required=False,
96 '--log-level', dest='log_level', action='store', required=False,
97 default='error', help='Logging level')
97 default='error', help='Logging level')
98 self.args = parser.parse_args()
98 self.args = parser.parse_args()
99
99
100 def __getattr__(self, attr):
100 def __getattr__(self, attr):
101 return getattr(self.args, attr)
101 return getattr(self.args, attr)
102
102
103
103
104 class Repository(object):
104 class Repository(object):
105 FILE_NAME_TEMPLATE = "test_{:09d}.bin"
105 FILE_NAME_TEMPLATE = "test_{:09d}.bin"
106
106
107 def __init__(self, name, base_path, api):
107 def __init__(self, name, base_path, api):
108 self.name = name
108 self.name = name
109 self.path = os.path.join(base_path, name)
109 self.path = os.path.join(base_path, name)
110 self.api = api
110 self.api = api
111 self.url = None
111 self.url = None
112
112
113 def create(self):
113 def create(self):
114 self._create_filesystem_repo(self.path)
114 self._create_filesystem_repo(self.path)
115 try:
115 try:
116 self.url = self.api.create_repo(self.name, self.TYPE, 'Performance tests')
116 self.url = self.api.create_repo(self.name, self.TYPE, 'Performance tests')
117 except ApiError as e:
117 except ApiError as e:
118 log.error('api: {}'.format(e))
118 log.error('api: {}'.format(e))
119
119
120 def delete(self):
120 def delete(self):
121 self._delete_filesystem_repo()
121 self._delete_filesystem_repo()
122 try:
122 try:
123 self.api.delete_repo(self.name)
123 self.api.delete_repo(self.name)
124 except ApiError as e:
124 except ApiError as e:
125 log.error('api: {}'.format(e))
125 log.error('api: {}'.format(e))
126
126
127 def create_commits(self, number, file_size):
127 def create_commits(self, number, file_size):
128 for i in range(number):
128 for i in range(number):
129 file_name = self.FILE_NAME_TEMPLATE.format(i)
129 file_name = self.FILE_NAME_TEMPLATE.format(i)
130 log.debug("Create commit[{}] {}".format(self.name, file_name))
130 log.debug("Create commit[{}] {}".format(self.name, file_name))
131 self._create_file(file_name, file_size)
131 self._create_file(file_name, file_size)
132 self._create_commit(file_name)
132 self._create_commit(file_name)
133
133
134 @timed
134 @timed
135 def push(self):
135 def push(self):
136 raise NotImplementedError()
136 raise NotImplementedError()
137
137
138 @timed
138 @timed
139 def clone(self, destination_path):
139 def clone(self, destination_path):
140 raise NotImplementedError()
140 raise NotImplementedError()
141
141
142 @timed
142 @timed
143 def pull(self):
143 def pull(self):
144 raise NotImplementedError()
144 raise NotImplementedError()
145
145
146 def _run(self, *args):
146 def _run(self, *args):
147 command = [self.BASE_COMMAND] + list(args)
147 command = [self.BASE_COMMAND] + list(args)
148 process = subprocess32.Popen(
148 process = subprocess.Popen(
149 command, stdout=subprocess32.PIPE, stderr=subprocess32.PIPE)
149 command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
150 return process.communicate()
150 return process.communicate()
151
151
152 def _create_file(self, name, size):
152 def _create_file(self, name, size):
153 file_name = os.path.join(self.path, name)
153 file_name = os.path.join(self.path, name)
154 with open(file_name, 'wb') as f:
154 with open(file_name, 'wb') as f:
155 f.write(os.urandom(1024))
155 f.write(os.urandom(1024))
156
156
157 def _delete_filesystem_repo(self):
157 def _delete_filesystem_repo(self):
158 shutil.rmtree(self.path)
158 shutil.rmtree(self.path)
159
159
160 def _create_filesystem_repo(self, path):
160 def _create_filesystem_repo(self, path):
161 raise NotImplementedError()
161 raise NotImplementedError()
162
162
163 def _create_commit(self, file_name):
163 def _create_commit(self, file_name):
164 raise NotImplementedError()
164 raise NotImplementedError()
165
165
166
166
167 class GitRepository(Repository):
167 class GitRepository(Repository):
168 TYPE = 'git'
168 TYPE = 'git'
169 BASE_COMMAND = 'git'
169 BASE_COMMAND = 'git'
170
170
171 @timed
171 @timed
172 def push(self):
172 def push(self):
173 os.chdir(self.path)
173 os.chdir(self.path)
174 self._run('push', '--set-upstream', self.url, 'master')
174 self._run('push', '--set-upstream', self.url, 'master')
175
175
176 @timed
176 @timed
177 def clone(self, destination_path):
177 def clone(self, destination_path):
178 self._run('clone', self.url, os.path.join(destination_path, self.name))
178 self._run('clone', self.url, os.path.join(destination_path, self.name))
179
179
180 @timed
180 @timed
181 def pull(self, destination_path):
181 def pull(self, destination_path):
182 path = os.path.join(destination_path, self.name)
182 path = os.path.join(destination_path, self.name)
183 self._create_filesystem_repo(path)
183 self._create_filesystem_repo(path)
184 os.chdir(path)
184 os.chdir(path)
185 self._run('remote', 'add', 'origin', self.url)
185 self._run('remote', 'add', 'origin', self.url)
186 self._run('pull', 'origin', 'master')
186 self._run('pull', 'origin', 'master')
187
187
188 def _create_filesystem_repo(self, path):
188 def _create_filesystem_repo(self, path):
189 self._run('init', path)
189 self._run('init', path)
190
190
191 def _create_commit(self, file_name):
191 def _create_commit(self, file_name):
192 os.chdir(self.path)
192 os.chdir(self.path)
193 self._run('add', file_name)
193 self._run('add', file_name)
194 self._run('commit', file_name, '-m', '"Add {}"'.format(file_name))
194 self._run('commit', file_name, '-m', '"Add {}"'.format(file_name))
195
195
196
196
197 class HgRepository(Repository):
197 class HgRepository(Repository):
198 TYPE = 'hg'
198 TYPE = 'hg'
199 BASE_COMMAND = 'hg'
199 BASE_COMMAND = 'hg'
200
200
201 @timed
201 @timed
202 def push(self):
202 def push(self):
203 os.chdir(self.path)
203 os.chdir(self.path)
204 self._run('push', self.url)
204 self._run('push', self.url)
205
205
206 @timed
206 @timed
207 def clone(self, destination_path):
207 def clone(self, destination_path):
208 self._run('clone', self.url, os.path.join(destination_path, self.name))
208 self._run('clone', self.url, os.path.join(destination_path, self.name))
209
209
210 @timed
210 @timed
211 def pull(self, destination_path):
211 def pull(self, destination_path):
212 path = os.path.join(destination_path, self.name)
212 path = os.path.join(destination_path, self.name)
213 self._create_filesystem_repo(path)
213 self._create_filesystem_repo(path)
214 os.chdir(path)
214 os.chdir(path)
215 self._run('pull', '-r', 'tip', self.url)
215 self._run('pull', '-r', 'tip', self.url)
216
216
217 def _create_filesystem_repo(self, path):
217 def _create_filesystem_repo(self, path):
218 self._run('init', path)
218 self._run('init', path)
219
219
220 def _create_commit(self, file_name):
220 def _create_commit(self, file_name):
221 os.chdir(self.path)
221 os.chdir(self.path)
222 self._run('add', file_name)
222 self._run('add', file_name)
223 self._run('commit', file_name, '-m', '"Add {}"'.format(file_name))
223 self._run('commit', file_name, '-m', '"Add {}"'.format(file_name))
224
224
225
225
226 class Benchmark(object):
226 class Benchmark(object):
227 REPO_CLASSES = {
227 REPO_CLASSES = {
228 'git': GitRepository,
228 'git': GitRepository,
229 'hg': HgRepository
229 'hg': HgRepository
230 }
230 }
231 REPO_NAME = '{}_performance_{:03d}'
231 REPO_NAME = '{}_performance_{:03d}'
232
232
233 def __init__(self, config):
233 def __init__(self, config):
234 self.api = RCApi(api_key=config.api_key, rc_endpoint=config.host)
234 self.api = RCApi(api_key=config.api_key, rc_endpoint=config.host)
235 self.source_path = tempfile.mkdtemp(suffix='vcsperformance')
235 self.source_path = tempfile.mkdtemp(suffix='vcsperformance')
236
236
237 self.config = config
237 self.config = config
238 self.git_repos = []
238 self.git_repos = []
239 self.hg_repos = []
239 self.hg_repos = []
240
240
241 self._set_log_level()
241 self._set_log_level()
242
242
243 def start(self):
243 def start(self):
244 self._create_repos()
244 self._create_repos()
245 repos = {
245 repos = {
246 'git': self.git_repos,
246 'git': self.git_repos,
247 'hg': self.hg_repos
247 'hg': self.hg_repos
248 }
248 }
249
249
250 clone_destination_path = tempfile.mkdtemp(suffix='clone')
250 clone_destination_path = tempfile.mkdtemp(suffix='clone')
251 pull_destination_path = tempfile.mkdtemp(suffix='pull')
251 pull_destination_path = tempfile.mkdtemp(suffix='pull')
252 operations = [
252 operations = [
253 ('push', ),
253 ('push', ),
254 ('clone', clone_destination_path),
254 ('clone', clone_destination_path),
255 ('pull', pull_destination_path)
255 ('pull', pull_destination_path)
256 ]
256 ]
257
257
258 for operation in operations:
258 for operation in operations:
259 for type_ in repos:
259 for type_ in repos:
260 times = self._measure(repos[type_], *operation)
260 times = self._measure(repos[type_], *operation)
261 print("Mean[of {}] {:5s} {:5s} time: {:.3f} sec.".format(
261 print("Mean[of {}] {:5s} {:5s} time: {:.3f} sec.".format(
262 len(times), type_, operation[0], mean(times)))
262 len(times), type_, operation[0], mean(times)))
263
263
264 def cleanup(self):
264 def cleanup(self):
265 log.info("Cleaning up...")
265 log.info("Cleaning up...")
266 for repo in chain(self.git_repos, self.hg_repos):
266 for repo in chain(self.git_repos, self.hg_repos):
267 repo.delete()
267 repo.delete()
268
268
269 def _measure(self, repos, operation, *args):
269 def _measure(self, repos, operation, *args):
270 times = []
270 times = []
271 for repo in repos:
271 for repo in repos:
272 method = getattr(repo, operation)
272 method = getattr(repo, operation)
273 times.append(method(*args))
273 times.append(method(*args))
274 return times
274 return times
275
275
276 def _create_repos(self):
276 def _create_repos(self):
277 log.info("Creating repositories...")
277 log.info("Creating repositories...")
278 for i in range(self.config.repositories):
278 for i in range(self.config.repositories):
279 self.git_repos.append(self._create_repo('git', i))
279 self.git_repos.append(self._create_repo('git', i))
280 self.hg_repos.append(self._create_repo('hg', i))
280 self.hg_repos.append(self._create_repo('hg', i))
281
281
282 def _create_repo(self, type_, id_):
282 def _create_repo(self, type_, id_):
283 RepoClass = self.REPO_CLASSES[type_]
283 RepoClass = self.REPO_CLASSES[type_]
284 repo = RepoClass(
284 repo = RepoClass(
285 self.REPO_NAME.format(type_, id_), self.source_path, self.api)
285 self.REPO_NAME.format(type_, id_), self.source_path, self.api)
286 repo.create()
286 repo.create()
287 repo.create_commits(self.config.commits, self.config.file_size)
287 repo.create_commits(self.config.commits, self.config.file_size)
288 return repo
288 return repo
289
289
290 def _set_log_level(self):
290 def _set_log_level(self):
291 try:
291 try:
292 log_level = getattr(logging, config.log_level.upper())
292 log_level = getattr(logging, config.log_level.upper())
293 except:
293 except:
294 log_level = logging.ERROR
294 log_level = logging.ERROR
295 handler = logging.StreamHandler()
295 handler = logging.StreamHandler()
296 log.addHandler(handler)
296 log.addHandler(handler)
297 log.setLevel(log_level)
297 log.setLevel(log_level)
298
298
299
299
300 if __name__ == '__main__':
300 if __name__ == '__main__':
301 config = Config()
301 config = Config()
302 benchmark = Benchmark(config)
302 benchmark = Benchmark(config)
303 try:
303 try:
304 benchmark.start()
304 benchmark.start()
305 finally:
305 finally:
306 benchmark.cleanup()
306 benchmark.cleanup()
@@ -1,1842 +1,1842 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2020 RhodeCode GmbH
3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import collections
21 import collections
22 import datetime
22 import datetime
23 import hashlib
23 import hashlib
24 import os
24 import os
25 import re
25 import re
26 import pprint
26 import pprint
27 import shutil
27 import shutil
28 import socket
28 import socket
29 import subprocess32
29 import subprocess
30 import time
30 import time
31 import uuid
31 import uuid
32 import dateutil.tz
32 import dateutil.tz
33 import logging
33 import logging
34
34
35 import mock
35 import mock
36 import pyramid.testing
36 import pyramid.testing
37 import pytest
37 import pytest
38 import colander
38 import colander
39 import requests
39 import requests
40 import pyramid.paster
40 import pyramid.paster
41
41
42 import rhodecode
42 import rhodecode
43 from rhodecode.lib.utils2 import AttributeDict
43 from rhodecode.lib.utils2 import AttributeDict
44 from rhodecode.model.changeset_status import ChangesetStatusModel
44 from rhodecode.model.changeset_status import ChangesetStatusModel
45 from rhodecode.model.comment import CommentsModel
45 from rhodecode.model.comment import CommentsModel
46 from rhodecode.model.db import (
46 from rhodecode.model.db import (
47 PullRequest, PullRequestReviewers, Repository, RhodeCodeSetting, ChangesetStatus,
47 PullRequest, PullRequestReviewers, Repository, RhodeCodeSetting, ChangesetStatus,
48 RepoGroup, UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi)
48 RepoGroup, UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi)
49 from rhodecode.model.meta import Session
49 from rhodecode.model.meta import Session
50 from rhodecode.model.pull_request import PullRequestModel
50 from rhodecode.model.pull_request import PullRequestModel
51 from rhodecode.model.repo import RepoModel
51 from rhodecode.model.repo import RepoModel
52 from rhodecode.model.repo_group import RepoGroupModel
52 from rhodecode.model.repo_group import RepoGroupModel
53 from rhodecode.model.user import UserModel
53 from rhodecode.model.user import UserModel
54 from rhodecode.model.settings import VcsSettingsModel
54 from rhodecode.model.settings import VcsSettingsModel
55 from rhodecode.model.user_group import UserGroupModel
55 from rhodecode.model.user_group import UserGroupModel
56 from rhodecode.model.integration import IntegrationModel
56 from rhodecode.model.integration import IntegrationModel
57 from rhodecode.integrations import integration_type_registry
57 from rhodecode.integrations import integration_type_registry
58 from rhodecode.integrations.types.base import IntegrationTypeBase
58 from rhodecode.integrations.types.base import IntegrationTypeBase
59 from rhodecode.lib.utils import repo2db_mapper
59 from rhodecode.lib.utils import repo2db_mapper
60 from rhodecode.lib.vcs.backends import get_backend
60 from rhodecode.lib.vcs.backends import get_backend
61 from rhodecode.lib.vcs.nodes import FileNode
61 from rhodecode.lib.vcs.nodes import FileNode
62 from rhodecode.tests import (
62 from rhodecode.tests import (
63 login_user_session, get_new_dir, utils, TESTS_TMP_PATH,
63 login_user_session, get_new_dir, utils, TESTS_TMP_PATH,
64 TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN,
64 TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN,
65 TEST_USER_REGULAR_PASS)
65 TEST_USER_REGULAR_PASS)
66 from rhodecode.tests.utils import CustomTestApp, set_anonymous_access
66 from rhodecode.tests.utils import CustomTestApp, set_anonymous_access
67 from rhodecode.tests.fixture import Fixture
67 from rhodecode.tests.fixture import Fixture
68 from rhodecode.config import utils as config_utils
68 from rhodecode.config import utils as config_utils
69
69
70 log = logging.getLogger(__name__)
70 log = logging.getLogger(__name__)
71
71
72 def _split_comma(value):
72 def _split_comma(value):
73 return value.split(',')
73 return value.split(',')
74
74
75
75
76 def pytest_addoption(parser):
76 def pytest_addoption(parser):
77 parser.addoption(
77 parser.addoption(
78 '--keep-tmp-path', action='store_true',
78 '--keep-tmp-path', action='store_true',
79 help="Keep the test temporary directories")
79 help="Keep the test temporary directories")
80 parser.addoption(
80 parser.addoption(
81 '--backends', action='store', type=_split_comma,
81 '--backends', action='store', type=_split_comma,
82 default=['git', 'hg', 'svn'],
82 default=['git', 'hg', 'svn'],
83 help="Select which backends to test for backend specific tests.")
83 help="Select which backends to test for backend specific tests.")
84 parser.addoption(
84 parser.addoption(
85 '--dbs', action='store', type=_split_comma,
85 '--dbs', action='store', type=_split_comma,
86 default=['sqlite'],
86 default=['sqlite'],
87 help="Select which database to test for database specific tests. "
87 help="Select which database to test for database specific tests. "
88 "Possible options are sqlite,postgres,mysql")
88 "Possible options are sqlite,postgres,mysql")
89 parser.addoption(
89 parser.addoption(
90 '--appenlight', '--ae', action='store_true',
90 '--appenlight', '--ae', action='store_true',
91 help="Track statistics in appenlight.")
91 help="Track statistics in appenlight.")
92 parser.addoption(
92 parser.addoption(
93 '--appenlight-api-key', '--ae-key',
93 '--appenlight-api-key', '--ae-key',
94 help="API key for Appenlight.")
94 help="API key for Appenlight.")
95 parser.addoption(
95 parser.addoption(
96 '--appenlight-url', '--ae-url',
96 '--appenlight-url', '--ae-url',
97 default="https://ae.rhodecode.com",
97 default="https://ae.rhodecode.com",
98 help="Appenlight service URL, defaults to https://ae.rhodecode.com")
98 help="Appenlight service URL, defaults to https://ae.rhodecode.com")
99 parser.addoption(
99 parser.addoption(
100 '--sqlite-connection-string', action='store',
100 '--sqlite-connection-string', action='store',
101 default='', help="Connection string for the dbs tests with SQLite")
101 default='', help="Connection string for the dbs tests with SQLite")
102 parser.addoption(
102 parser.addoption(
103 '--postgres-connection-string', action='store',
103 '--postgres-connection-string', action='store',
104 default='', help="Connection string for the dbs tests with Postgres")
104 default='', help="Connection string for the dbs tests with Postgres")
105 parser.addoption(
105 parser.addoption(
106 '--mysql-connection-string', action='store',
106 '--mysql-connection-string', action='store',
107 default='', help="Connection string for the dbs tests with MySQL")
107 default='', help="Connection string for the dbs tests with MySQL")
108 parser.addoption(
108 parser.addoption(
109 '--repeat', type=int, default=100,
109 '--repeat', type=int, default=100,
110 help="Number of repetitions in performance tests.")
110 help="Number of repetitions in performance tests.")
111
111
112
112
113 def pytest_configure(config):
113 def pytest_configure(config):
114 from rhodecode.config import patches
114 from rhodecode.config import patches
115
115
116
116
117 def pytest_collection_modifyitems(session, config, items):
117 def pytest_collection_modifyitems(session, config, items):
118 # nottest marked, compare nose, used for transition from nose to pytest
118 # nottest marked, compare nose, used for transition from nose to pytest
119 remaining = [
119 remaining = [
120 i for i in items if getattr(i.obj, '__test__', True)]
120 i for i in items if getattr(i.obj, '__test__', True)]
121 items[:] = remaining
121 items[:] = remaining
122
122
123 # NOTE(marcink): custom test ordering, db tests and vcstests are slowes and should
123 # NOTE(marcink): custom test ordering, db tests and vcstests are slowes and should
124 # be executed at the end for faster test feedback
124 # be executed at the end for faster test feedback
125 def sorter(item):
125 def sorter(item):
126 pos = 0
126 pos = 0
127 key = item._nodeid
127 key = item._nodeid
128 if key.startswith('rhodecode/tests/database'):
128 if key.startswith('rhodecode/tests/database'):
129 pos = 1
129 pos = 1
130 elif key.startswith('rhodecode/tests/vcs_operations'):
130 elif key.startswith('rhodecode/tests/vcs_operations'):
131 pos = 2
131 pos = 2
132
132
133 return pos
133 return pos
134
134
135 items.sort(key=sorter)
135 items.sort(key=sorter)
136
136
137
137
138 def pytest_generate_tests(metafunc):
138 def pytest_generate_tests(metafunc):
139
139
140 # Support test generation based on --backend parameter
140 # Support test generation based on --backend parameter
141 if 'backend_alias' in metafunc.fixturenames:
141 if 'backend_alias' in metafunc.fixturenames:
142 backends = get_backends_from_metafunc(metafunc)
142 backends = get_backends_from_metafunc(metafunc)
143 scope = None
143 scope = None
144 if not backends:
144 if not backends:
145 pytest.skip("Not enabled for any of selected backends")
145 pytest.skip("Not enabled for any of selected backends")
146
146
147 metafunc.parametrize('backend_alias', backends, scope=scope)
147 metafunc.parametrize('backend_alias', backends, scope=scope)
148
148
149 backend_mark = metafunc.definition.get_closest_marker('backends')
149 backend_mark = metafunc.definition.get_closest_marker('backends')
150 if backend_mark:
150 if backend_mark:
151 backends = get_backends_from_metafunc(metafunc)
151 backends = get_backends_from_metafunc(metafunc)
152 if not backends:
152 if not backends:
153 pytest.skip("Not enabled for any of selected backends")
153 pytest.skip("Not enabled for any of selected backends")
154
154
155
155
156 def get_backends_from_metafunc(metafunc):
156 def get_backends_from_metafunc(metafunc):
157 requested_backends = set(metafunc.config.getoption('--backends'))
157 requested_backends = set(metafunc.config.getoption('--backends'))
158 backend_mark = metafunc.definition.get_closest_marker('backends')
158 backend_mark = metafunc.definition.get_closest_marker('backends')
159 if backend_mark:
159 if backend_mark:
160 # Supported backends by this test function, created from
160 # Supported backends by this test function, created from
161 # pytest.mark.backends
161 # pytest.mark.backends
162 backends = backend_mark.args
162 backends = backend_mark.args
163 elif hasattr(metafunc.cls, 'backend_alias'):
163 elif hasattr(metafunc.cls, 'backend_alias'):
164 # Support class attribute "backend_alias", this is mainly
164 # Support class attribute "backend_alias", this is mainly
165 # for legacy reasons for tests not yet using pytest.mark.backends
165 # for legacy reasons for tests not yet using pytest.mark.backends
166 backends = [metafunc.cls.backend_alias]
166 backends = [metafunc.cls.backend_alias]
167 else:
167 else:
168 backends = metafunc.config.getoption('--backends')
168 backends = metafunc.config.getoption('--backends')
169 return requested_backends.intersection(backends)
169 return requested_backends.intersection(backends)
170
170
171
171
172 @pytest.fixture(scope='session', autouse=True)
172 @pytest.fixture(scope='session', autouse=True)
173 def activate_example_rcextensions(request):
173 def activate_example_rcextensions(request):
174 """
174 """
175 Patch in an example rcextensions module which verifies passed in kwargs.
175 Patch in an example rcextensions module which verifies passed in kwargs.
176 """
176 """
177 from rhodecode.config import rcextensions
177 from rhodecode.config import rcextensions
178
178
179 old_extensions = rhodecode.EXTENSIONS
179 old_extensions = rhodecode.EXTENSIONS
180 rhodecode.EXTENSIONS = rcextensions
180 rhodecode.EXTENSIONS = rcextensions
181 rhodecode.EXTENSIONS.calls = collections.defaultdict(list)
181 rhodecode.EXTENSIONS.calls = collections.defaultdict(list)
182
182
183 @request.addfinalizer
183 @request.addfinalizer
184 def cleanup():
184 def cleanup():
185 rhodecode.EXTENSIONS = old_extensions
185 rhodecode.EXTENSIONS = old_extensions
186
186
187
187
188 @pytest.fixture()
188 @pytest.fixture()
189 def capture_rcextensions():
189 def capture_rcextensions():
190 """
190 """
191 Returns the recorded calls to entry points in rcextensions.
191 Returns the recorded calls to entry points in rcextensions.
192 """
192 """
193 calls = rhodecode.EXTENSIONS.calls
193 calls = rhodecode.EXTENSIONS.calls
194 calls.clear()
194 calls.clear()
195 # Note: At this moment, it is still the empty dict, but that will
195 # Note: At this moment, it is still the empty dict, but that will
196 # be filled during the test run and since it is a reference this
196 # be filled during the test run and since it is a reference this
197 # is enough to make it work.
197 # is enough to make it work.
198 return calls
198 return calls
199
199
200
200
201 @pytest.fixture(scope='session')
201 @pytest.fixture(scope='session')
202 def http_environ_session():
202 def http_environ_session():
203 """
203 """
204 Allow to use "http_environ" in session scope.
204 Allow to use "http_environ" in session scope.
205 """
205 """
206 return plain_http_environ()
206 return plain_http_environ()
207
207
208
208
209 def plain_http_host_stub():
209 def plain_http_host_stub():
210 """
210 """
211 Value of HTTP_HOST in the test run.
211 Value of HTTP_HOST in the test run.
212 """
212 """
213 return 'example.com:80'
213 return 'example.com:80'
214
214
215
215
216 @pytest.fixture()
216 @pytest.fixture()
217 def http_host_stub():
217 def http_host_stub():
218 """
218 """
219 Value of HTTP_HOST in the test run.
219 Value of HTTP_HOST in the test run.
220 """
220 """
221 return plain_http_host_stub()
221 return plain_http_host_stub()
222
222
223
223
224 def plain_http_host_only_stub():
224 def plain_http_host_only_stub():
225 """
225 """
226 Value of HTTP_HOST in the test run.
226 Value of HTTP_HOST in the test run.
227 """
227 """
228 return plain_http_host_stub().split(':')[0]
228 return plain_http_host_stub().split(':')[0]
229
229
230
230
231 @pytest.fixture()
231 @pytest.fixture()
232 def http_host_only_stub():
232 def http_host_only_stub():
233 """
233 """
234 Value of HTTP_HOST in the test run.
234 Value of HTTP_HOST in the test run.
235 """
235 """
236 return plain_http_host_only_stub()
236 return plain_http_host_only_stub()
237
237
238
238
239 def plain_http_environ():
239 def plain_http_environ():
240 """
240 """
241 HTTP extra environ keys.
241 HTTP extra environ keys.
242
242
243 User by the test application and as well for setting up the pylons
243 User by the test application and as well for setting up the pylons
244 environment. In the case of the fixture "app" it should be possible
244 environment. In the case of the fixture "app" it should be possible
245 to override this for a specific test case.
245 to override this for a specific test case.
246 """
246 """
247 return {
247 return {
248 'SERVER_NAME': plain_http_host_only_stub(),
248 'SERVER_NAME': plain_http_host_only_stub(),
249 'SERVER_PORT': plain_http_host_stub().split(':')[1],
249 'SERVER_PORT': plain_http_host_stub().split(':')[1],
250 'HTTP_HOST': plain_http_host_stub(),
250 'HTTP_HOST': plain_http_host_stub(),
251 'HTTP_USER_AGENT': 'rc-test-agent',
251 'HTTP_USER_AGENT': 'rc-test-agent',
252 'REQUEST_METHOD': 'GET'
252 'REQUEST_METHOD': 'GET'
253 }
253 }
254
254
255
255
256 @pytest.fixture()
256 @pytest.fixture()
257 def http_environ():
257 def http_environ():
258 """
258 """
259 HTTP extra environ keys.
259 HTTP extra environ keys.
260
260
261 User by the test application and as well for setting up the pylons
261 User by the test application and as well for setting up the pylons
262 environment. In the case of the fixture "app" it should be possible
262 environment. In the case of the fixture "app" it should be possible
263 to override this for a specific test case.
263 to override this for a specific test case.
264 """
264 """
265 return plain_http_environ()
265 return plain_http_environ()
266
266
267
267
268 @pytest.fixture(scope='session')
268 @pytest.fixture(scope='session')
269 def baseapp(ini_config, vcsserver, http_environ_session):
269 def baseapp(ini_config, vcsserver, http_environ_session):
270 from rhodecode.lib.pyramid_utils import get_app_config
270 from rhodecode.lib.pyramid_utils import get_app_config
271 from rhodecode.config.middleware import make_pyramid_app
271 from rhodecode.config.middleware import make_pyramid_app
272
272
273 log.info("Using the RhodeCode configuration:{}".format(ini_config))
273 log.info("Using the RhodeCode configuration:{}".format(ini_config))
274 pyramid.paster.setup_logging(ini_config)
274 pyramid.paster.setup_logging(ini_config)
275
275
276 settings = get_app_config(ini_config)
276 settings = get_app_config(ini_config)
277 app = make_pyramid_app({'__file__': ini_config}, **settings)
277 app = make_pyramid_app({'__file__': ini_config}, **settings)
278
278
279 return app
279 return app
280
280
281
281
282 @pytest.fixture(scope='function')
282 @pytest.fixture(scope='function')
283 def app(request, config_stub, baseapp, http_environ):
283 def app(request, config_stub, baseapp, http_environ):
284 app = CustomTestApp(
284 app = CustomTestApp(
285 baseapp,
285 baseapp,
286 extra_environ=http_environ)
286 extra_environ=http_environ)
287 if request.cls:
287 if request.cls:
288 request.cls.app = app
288 request.cls.app = app
289 return app
289 return app
290
290
291
291
292 @pytest.fixture(scope='session')
292 @pytest.fixture(scope='session')
293 def app_settings(baseapp, ini_config):
293 def app_settings(baseapp, ini_config):
294 """
294 """
295 Settings dictionary used to create the app.
295 Settings dictionary used to create the app.
296
296
297 Parses the ini file and passes the result through the sanitize and apply
297 Parses the ini file and passes the result through the sanitize and apply
298 defaults mechanism in `rhodecode.config.middleware`.
298 defaults mechanism in `rhodecode.config.middleware`.
299 """
299 """
300 return baseapp.config.get_settings()
300 return baseapp.config.get_settings()
301
301
302
302
303 @pytest.fixture(scope='session')
303 @pytest.fixture(scope='session')
304 def db_connection(ini_settings):
304 def db_connection(ini_settings):
305 # Initialize the database connection.
305 # Initialize the database connection.
306 config_utils.initialize_database(ini_settings)
306 config_utils.initialize_database(ini_settings)
307
307
308
308
309 LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user'))
309 LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user'))
310
310
311
311
312 def _autologin_user(app, *args):
312 def _autologin_user(app, *args):
313 session = login_user_session(app, *args)
313 session = login_user_session(app, *args)
314 csrf_token = rhodecode.lib.auth.get_csrf_token(session)
314 csrf_token = rhodecode.lib.auth.get_csrf_token(session)
315 return LoginData(csrf_token, session['rhodecode_user'])
315 return LoginData(csrf_token, session['rhodecode_user'])
316
316
317
317
318 @pytest.fixture()
318 @pytest.fixture()
319 def autologin_user(app):
319 def autologin_user(app):
320 """
320 """
321 Utility fixture which makes sure that the admin user is logged in
321 Utility fixture which makes sure that the admin user is logged in
322 """
322 """
323 return _autologin_user(app)
323 return _autologin_user(app)
324
324
325
325
@pytest.fixture()
def autologin_regular_user(app):
    """Ensure the regular (non-admin) test user is logged in."""
    return _autologin_user(
        app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
333
333
334
334
@pytest.fixture(scope='function')
def csrf_token(request, autologin_user):
    """CSRF token of the automatically logged-in admin user."""
    return autologin_user.csrf_token
338
338
339
339
@pytest.fixture(scope='function')
def xhr_header(request):
    """WSGI environ entry that marks a request as an XMLHttpRequest."""
    return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'}
343
343
344
344
@pytest.fixture()
def real_crypto_backend(monkeypatch):
    """
    Enable the production crypto backend for a single test.

    The test run normally replaces the crypto backend with a faster
    MD5-based implementation; flipping ``rhodecode.is_test`` off for the
    duration of the test restores the real one.
    """
    monkeypatch.setattr(rhodecode, 'is_test', False)
354
354
355
355
@pytest.fixture(scope='class')
def index_location(request, baseapp):
    """Expose the configured search index location, also on the test class."""
    location = baseapp.config.get_settings()['search.location']
    cls = request.cls
    if cls:
        cls.index_location = location
    return location
362
362
363
363
@pytest.fixture(scope='session', autouse=True)
def tests_tmp_path(request):
    """
    Create the temporary directory used during the test session.

    The directory is removed at session end unless ``--keep-tmp-path``
    was given on the command line.
    """
    # exist_ok avoids the check-then-create race of the previous
    # os.path.exists() + os.makedirs() sequence (two parallel sessions
    # could both pass the check and one makedirs would then fail).
    os.makedirs(TESTS_TMP_PATH, exist_ok=True)

    if not request.config.getoption('--keep-tmp-path'):
        @request.addfinalizer
        def remove_tmp_path():
            shutil.rmtree(TESTS_TMP_PATH)

    return TESTS_TMP_PATH
378
378
379
379
@pytest.fixture()
def test_repo_group(request):
    """
    Temporary repository group, destroyed automatically after the test.
    """
    fixture = Fixture()
    # timestamp (dots stripped) keeps the generated name unique per test run
    group_id = 'test_repo_group_%s' % str(time.time()).replace('.', '')
    group = fixture.create_repo_group(group_id)

    request.addfinalizer(lambda: fixture.destroy_repo_group(group_id))
    return group
395
395
396
396
@pytest.fixture()
def test_user_group(request):
    """
    Temporary user group, destroyed automatically after the test.
    """
    fixture = Fixture()
    # timestamp (dots stripped) keeps the generated name unique per test run
    group_id = 'test_user_group_%s' % str(time.time()).replace('.', '')
    group = fixture.create_user_group(group_id)

    request.addfinalizer(lambda: fixture.destroy_user_group(group))
    return group
412
412
413
413
@pytest.fixture(scope='session')
def test_repo(request):
    """Session-wide container of lazily created, read-only test repositories."""
    repos = TestRepoContainer()
    request.addfinalizer(repos._cleanup)
    return repos
419
419
420
420
class TestRepoContainer(object):
    """
    Cache of test repositories which are used read only.

    Repositories are extracted from their dumps on first access and then
    re-used for the lifetime of this object.

    Usage to get the svn test repository "minimal"::

        test_repo = TestRepoContainer()
        repo = test_repo('minimal', 'svn')

    """

    # One dump extractor per supported backend alias.
    dump_extractors = {
        'git': utils.extract_git_repo_from_dump,
        'hg': utils.extract_hg_repo_from_dump,
        'svn': utils.extract_svn_repo_from_dump,
    }

    def __init__(self):
        self._cleanup_repos = []
        self._fixture = Fixture()
        # (dump_name, backend_alias) -> repo_id of the created repository
        self._repos = {}

    def __call__(self, dump_name, backend_alias, config=None):
        """Return the cached repository, creating it on first access."""
        cache_key = (dump_name, backend_alias)
        try:
            repo_id = self._repos[cache_key]
        except KeyError:
            repo_id = self._create_repo(
                dump_name, backend_alias, config).repo_id
            self._repos[cache_key] = repo_id
        return Repository.get(repo_id)

    def _create_repo(self, dump_name, backend_alias, config):
        """Extract the dump, register the repo in the DB and track cleanup."""
        repo_name = '%s-%s' % (backend_alias, dump_name)
        extract = self.dump_extractors[backend_alias]
        repo_path = extract(dump_name, repo_name)

        vcs_repo = get_backend(backend_alias)(repo_path, config=config)
        repo2db_mapper({repo_name: vcs_repo})

        self._cleanup_repos.append(repo_name)
        return RepoModel().get_by_repo_name(repo_name)

    def _cleanup(self):
        """Destroy all created repositories, newest first."""
        for tracked_name in reversed(self._cleanup_repos):
            self._fixture.destroy_repo(tracked_name)
469
469
470
470
def backend_base(request, backend_alias, baseapp, test_repo):
    """
    Build a `Backend` for `backend_alias`, honouring the ``--backends``
    option and the xfail/skip backend markers on the test node.
    """
    selected = request.config.getoption('--backends')
    if backend_alias not in selected:
        pytest.skip("Backend %s not selected." % (backend_alias, ))

    utils.check_xfail_backends(request.node, backend_alias)
    utils.check_skip_backends(request.node, backend_alias)

    backend = Backend(
        alias=backend_alias,
        repo_name='vcs_test_%s' % (backend_alias, ),
        test_name=request.node.name,
        test_repo_container=test_repo)
    request.addfinalizer(backend.cleanup)
    return backend
486
486
487
487
@pytest.fixture()
def backend(request, backend_alias, baseapp, test_repo):
    """
    Parametrized fixture yielding one `Backend` per selected backend.

    The ``--backends`` option restricts which implementations are
    exercised, and `pytest.mark.xfail_backends` allows marking a test as
    failing for specific backends — useful while a new backend
    implementation is developed incrementally.
    """
    return backend_base(request, backend_alias, baseapp, test_repo)
501
501
502
502
@pytest.fixture()
def backend_git(request, baseapp, test_repo):
    """Git-only variant of the `backend` fixture."""
    alias = 'git'
    return backend_base(request, alias, baseapp, test_repo)
506
506
507
507
@pytest.fixture()
def backend_hg(request, baseapp, test_repo):
    """Mercurial-only variant of the `backend` fixture."""
    alias = 'hg'
    return backend_base(request, alias, baseapp, test_repo)
511
511
512
512
@pytest.fixture()
def backend_svn(request, baseapp, test_repo):
    """Subversion-only variant of the `backend` fixture."""
    alias = 'svn'
    return backend_base(request, alias, baseapp, test_repo)
516
516
517
517
@pytest.fixture()
def backend_random(backend_git):
    """
    Use this when a test needs "a" backend but is independent of its type.

    Unlike the `backend` fixture, this one does not multiply the test
    across every available backend implementation, avoiding redundant
    runs for backend-agnostic tests.
    """
    # TODO: johbo: Change this to pick a random backend
    return backend_git
533
533
534
534
@pytest.fixture()
def backend_stub(backend_git):
    """
    Use this when a test merely needs a backend stub.

    TODO: mikhail: Implement a real stub logic instead of returning
    a git backend
    """
    return backend_git
544
544
545
545
@pytest.fixture()
def repo_stub(backend_stub):
    """Repository stub created through the backend stub."""
    return backend_stub.create_repo()
552
552
553
553
class Backend(object):
    """
    Represents the test configuration for one supported backend

    Provides easy access to different test repositories based on
    `__getitem__`. Such repositories will only be created once per test
    session.
    """

    # Runs of characters outside [0-9a-zA-Z]; collapsed to '_' when
    # deriving repository names from the test name.
    invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
    # Template repository created by `create_master_repo` and its path.
    _master_repo = None
    _master_repo_path = ''
    # Map of commit message -> raw_id for the last created repository
    # (filled by `_add_commits_to_repo`). Class-level default; instances
    # rebind rather than mutate it.
    _commit_ids = {}

    def __init__(self, alias, repo_name, test_name, test_repo_container):
        self.alias = alias
        self.repo_name = repo_name
        self._cleanup_repos = []
        self._test_name = test_name
        self._test_repo_container = test_repo_container
        # TODO: johbo: Used as a delegate interim. Not yet sure if Backend or
        # Fixture will survive in the end.
        self._fixture = Fixture()

    def __getitem__(self, key):
        # Delegates to the session-wide repo container (dump-based repos).
        return self._test_repo_container(key, self.alias)

    def create_test_repo(self, key, config=None):
        # Like `__getitem__` but allows passing an explicit vcs config.
        return self._test_repo_container(key, self.alias, config)

    @property
    def repo(self):
        """
        Returns the "current" repository. This is the vcs_test repo or the
        last repo which has been created with `create_repo`.
        """
        from rhodecode.model.db import Repository
        return Repository.get_by_repo_name(self.repo_name)

    @property
    def default_branch_name(self):
        # Default branch name of the backend class (e.g. per vcs type).
        VcsRepository = get_backend(self.alias)
        return VcsRepository.DEFAULT_BRANCH_NAME

    @property
    def default_head_id(self):
        """
        Returns the default head id of the underlying backend.

        This will be the default branch name in case the backend does have a
        default branch. In the other cases it will point to a valid head
        which can serve as the base to create a new commit on top of it.
        """
        vcsrepo = self.repo.scm_instance()
        head_id = (
            vcsrepo.DEFAULT_BRANCH_NAME or
            vcsrepo.commit_ids[-1])
        return head_id

    @property
    def commit_ids(self):
        """
        Returns the list of commits for the last created repository
        """
        return self._commit_ids

    def create_master_repo(self, commits):
        """
        Create a repository and remember it as a template.

        This allows to easily create derived repositories to construct
        more complex scenarios for diff, compare and pull requests.

        Returns a commit map which maps from commit message to raw_id.
        """
        self._master_repo = self.create_repo(commits=commits)
        self._master_repo_path = self._master_repo.repo_full_path

        return self._commit_ids

    def create_repo(
            self, commits=None, number_of_commits=0, heads=None,
            name_suffix=u'', bare=False, **kwargs):
        """
        Create a repository and record it for later cleanup.

        :param commits: Optional. A sequence of dict instances.
            Will add a commit per entry to the new repository.
        :param number_of_commits: Optional. If set to a number, this number of
            commits will be added to the new repository.
        :param heads: Optional. Can be set to a sequence of of commit
            names which shall be pulled in from the master repository.
        :param name_suffix: adds special suffix to generated repo name
        :param bare: set a repo as bare (no checkout)
        """
        self.repo_name = self._next_repo_name() + name_suffix
        repo = self._fixture.create_repo(
            self.repo_name, repo_type=self.alias, bare=bare, **kwargs)
        self._cleanup_repos.append(repo.repo_name)

        # Fall back to generated placeholder commits when none were given.
        commits = commits or [
            {'message': 'Commit %s of %s' % (x, self.repo_name)}
            for x in range(number_of_commits)]
        vcs_repo = repo.scm_instance()
        vcs_repo.count()
        self._add_commits_to_repo(vcs_repo, commits)
        if heads:
            self.pull_heads(repo, heads)

        return repo

    def pull_heads(self, repo, heads):
        """
        Make sure that repo contains all commits mentioned in `heads`
        """
        vcsrepo = repo.scm_instance()
        # NOTE(review): hooks are cleared before pulling — presumably to avoid
        # triggering them during fixture setup; confirm if relied upon.
        vcsrepo.config.clear_section('hooks')
        commit_ids = [self._commit_ids[h] for h in heads]
        vcsrepo.pull(self._master_repo_path, commit_ids=commit_ids)

    def create_fork(self):
        # Forks the *current* repo and makes the fork the new current repo.
        repo_to_fork = self.repo_name
        self.repo_name = self._next_repo_name()
        repo = self._fixture.create_fork(repo_to_fork, self.repo_name)
        self._cleanup_repos.append(self.repo_name)
        return repo

    def new_repo_name(self, suffix=u''):
        # Reserves a fresh repo name (registered for cleanup) without
        # actually creating a repository.
        self.repo_name = self._next_repo_name() + suffix
        self._cleanup_repos.append(self.repo_name)
        return self.repo_name

    def _next_repo_name(self):
        # Unique name: sanitized test name + count of repos created so far.
        return u"%s_%s" % (
            self.invalid_repo_name.sub(u'_', self._test_name), len(self._cleanup_repos))

    def ensure_file(self, filename, content='Test content\n'):
        # Commits a single new file to the current repository.
        assert self._cleanup_repos, "Avoid writing into vcs_test repos"
        commits = [
            {'added': [
                FileNode(filename, content=content),
            ]},
        ]
        self._add_commits_to_repo(self.repo.scm_instance(), commits)

    def enable_downloads(self):
        # Persists the enable_downloads flag on the current repository.
        repo = self.repo
        repo.enable_downloads = True
        Session().add(repo)
        Session().commit()

    def cleanup(self):
        # Destroy created repos newest-first (forks before their origins).
        for repo_name in reversed(self._cleanup_repos):
            self._fixture.destroy_repo(repo_name)

    def _add_commits_to_repo(self, repo, commits):
        # Delegates to the module-level helper and keeps the resulting
        # message -> raw_id map for later lookups (e.g. `pull_heads`).
        commit_ids = _add_commits_to_repo(repo, commits)
        if not commit_ids:
            return
        self._commit_ids = commit_ids

        # Creating refs for Git to allow fetching them from remote repository
        if self.alias == 'git':
            refs = {}
            for message in self._commit_ids:
                # TODO: mikhail: do more special chars replacements
                ref_name = 'refs/test-refs/{}'.format(
                    message.replace(' ', ''))
                refs[ref_name] = self._commit_ids[message]
            self._create_refs(repo, refs)

    def _create_refs(self, repo, refs):
        # Writes each ref name -> commit id pair into the repository.
        for ref_name in refs:
            repo.set_refs(ref_name, refs[ref_name])
728
728
729
729
def vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo):
    """
    Build a `VcsBackend` for `backend_alias`, honouring the ``--backends``
    option and the xfail/skip backend markers on the test node.
    """
    selected = request.config.getoption('--backends')
    if backend_alias not in selected:
        pytest.skip("Backend %s not selected." % (backend_alias, ))

    utils.check_xfail_backends(request.node, backend_alias)
    utils.check_skip_backends(request.node, backend_alias)

    repo_name = 'vcs_test_%s' % (backend_alias, )
    backend = VcsBackend(
        alias=backend_alias,
        repo_path=os.path.join(tests_tmp_path, repo_name),
        test_name=request.node.name,
        test_repo_container=test_repo)
    request.addfinalizer(backend.cleanup)
    return backend
746
746
747
747
@pytest.fixture()
def vcsbackend(request, backend_alias, tests_tmp_path, baseapp, test_repo):
    """
    Parametrized fixture yielding one vcs backend implementation.

    Mirrors the `backend` fixture but works purely on the vcs level, so no
    model instances are involved. Parameters are generated dynamically —
    see :func:`pytest_generate_tests` for the mechanism.
    """
    return vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo)
760
760
761
761
@pytest.fixture()
def vcsbackend_git(request, tests_tmp_path, baseapp, test_repo):
    """Git-only variant of the `vcsbackend` fixture."""
    alias = 'git'
    return vcsbackend_base(request, alias, tests_tmp_path, baseapp, test_repo)
765
765
766
766
@pytest.fixture()
def vcsbackend_hg(request, tests_tmp_path, baseapp, test_repo):
    """Mercurial-only variant of the `vcsbackend` fixture."""
    alias = 'hg'
    return vcsbackend_base(request, alias, tests_tmp_path, baseapp, test_repo)
770
770
771
771
@pytest.fixture()
def vcsbackend_svn(request, tests_tmp_path, baseapp, test_repo):
    """Subversion-only variant of the `vcsbackend` fixture."""
    alias = 'svn'
    return vcsbackend_base(request, alias, tests_tmp_path, baseapp, test_repo)
775
775
776
776
@pytest.fixture()
def vcsbackend_stub(vcsbackend_git):
    """
    Use this to express that your test just needs a stub of a vcsbackend.

    Plan is to eventually implement an in-memory stub to speed tests up.
    """
    return vcsbackend_git
785
785
786
786
787 class VcsBackend(object):
787 class VcsBackend(object):
788 """
788 """
789 Represents the test configuration for one supported vcs backend.
789 Represents the test configuration for one supported vcs backend.
790 """
790 """
791
791
792 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
792 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
793
793
794 def __init__(self, alias, repo_path, test_name, test_repo_container):
794 def __init__(self, alias, repo_path, test_name, test_repo_container):
795 self.alias = alias
795 self.alias = alias
796 self._repo_path = repo_path
796 self._repo_path = repo_path
797 self._cleanup_repos = []
797 self._cleanup_repos = []
798 self._test_name = test_name
798 self._test_name = test_name
799 self._test_repo_container = test_repo_container
799 self._test_repo_container = test_repo_container
800
800
801 def __getitem__(self, key):
801 def __getitem__(self, key):
802 return self._test_repo_container(key, self.alias).scm_instance()
802 return self._test_repo_container(key, self.alias).scm_instance()
803
803
804 @property
804 @property
805 def repo(self):
805 def repo(self):
806 """
806 """
807 Returns the "current" repository. This is the vcs_test repo of the last
807 Returns the "current" repository. This is the vcs_test repo of the last
808 repo which has been created.
808 repo which has been created.
809 """
809 """
810 Repository = get_backend(self.alias)
810 Repository = get_backend(self.alias)
811 return Repository(self._repo_path)
811 return Repository(self._repo_path)
812
812
813 @property
813 @property
814 def backend(self):
814 def backend(self):
815 """
815 """
816 Returns the backend implementation class.
816 Returns the backend implementation class.
817 """
817 """
818 return get_backend(self.alias)
818 return get_backend(self.alias)
819
819
820 def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None,
820 def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None,
821 bare=False):
821 bare=False):
822 repo_name = self._next_repo_name()
822 repo_name = self._next_repo_name()
823 self._repo_path = get_new_dir(repo_name)
823 self._repo_path = get_new_dir(repo_name)
824 repo_class = get_backend(self.alias)
824 repo_class = get_backend(self.alias)
825 src_url = None
825 src_url = None
826 if _clone_repo:
826 if _clone_repo:
827 src_url = _clone_repo.path
827 src_url = _clone_repo.path
828 repo = repo_class(self._repo_path, create=True, src_url=src_url, bare=bare)
828 repo = repo_class(self._repo_path, create=True, src_url=src_url, bare=bare)
829 self._cleanup_repos.append(repo)
829 self._cleanup_repos.append(repo)
830
830
831 commits = commits or [
831 commits = commits or [
832 {'message': 'Commit %s of %s' % (x, repo_name)}
832 {'message': 'Commit %s of %s' % (x, repo_name)}
833 for x in range(number_of_commits)]
833 for x in range(number_of_commits)]
834 _add_commits_to_repo(repo, commits)
834 _add_commits_to_repo(repo, commits)
835 return repo
835 return repo
836
836
837 def clone_repo(self, repo):
837 def clone_repo(self, repo):
838 return self.create_repo(_clone_repo=repo)
838 return self.create_repo(_clone_repo=repo)
839
839
840 def cleanup(self):
840 def cleanup(self):
841 for repo in self._cleanup_repos:
841 for repo in self._cleanup_repos:
842 shutil.rmtree(repo.path)
842 shutil.rmtree(repo.path)
843
843
def new_repo_path(self):
    """Reserve and return a new, unique repository path on disk."""
    self._repo_path = get_new_dir(self._next_repo_name())
    return self._repo_path
848
848
849 def _next_repo_name(self):
849 def _next_repo_name(self):
850 return "%s_%s" % (
850 return "%s_%s" % (
851 self.invalid_repo_name.sub('_', self._test_name),
851 self.invalid_repo_name.sub('_', self._test_name),
852 len(self._cleanup_repos))
852 len(self._cleanup_repos))
853
853
def add_file(self, repo, filename, content='Test content\n'):
    """Commit *filename* with *content* to *repo* via its in-memory commit."""
    in_memory = repo.in_memory_commit
    in_memory.add(FileNode(filename, content=content))
    in_memory.commit(
        message=u'Automatic commit from vcsbackend fixture',
        author=u'Automatic <automatic@rhodecode.com>')
860
860
def ensure_file(self, filename, content='Test content\n'):
    """Add a file to the most recently created repository of this fixture."""
    # guard: refuse to mutate the shared, pre-generated vcs_test repos
    assert self._cleanup_repos, "Avoid writing into vcs_test repos"
    self.add_file(self.repo, filename, content)
864
864
865
865
866 def _add_commits_to_repo(vcs_repo, commits):
866 def _add_commits_to_repo(vcs_repo, commits):
867 commit_ids = {}
867 commit_ids = {}
868 if not commits:
868 if not commits:
869 return commit_ids
869 return commit_ids
870
870
871 imc = vcs_repo.in_memory_commit
871 imc = vcs_repo.in_memory_commit
872 commit = None
872 commit = None
873
873
874 for idx, commit in enumerate(commits):
874 for idx, commit in enumerate(commits):
875 message = unicode(commit.get('message', 'Commit %s' % idx))
875 message = unicode(commit.get('message', 'Commit %s' % idx))
876
876
877 for node in commit.get('added', []):
877 for node in commit.get('added', []):
878 imc.add(FileNode(node.path, content=node.content))
878 imc.add(FileNode(node.path, content=node.content))
879 for node in commit.get('changed', []):
879 for node in commit.get('changed', []):
880 imc.change(FileNode(node.path, content=node.content))
880 imc.change(FileNode(node.path, content=node.content))
881 for node in commit.get('removed', []):
881 for node in commit.get('removed', []):
882 imc.remove(FileNode(node.path))
882 imc.remove(FileNode(node.path))
883
883
884 parents = [
884 parents = [
885 vcs_repo.get_commit(commit_id=commit_ids[p])
885 vcs_repo.get_commit(commit_id=commit_ids[p])
886 for p in commit.get('parents', [])]
886 for p in commit.get('parents', [])]
887
887
888 operations = ('added', 'changed', 'removed')
888 operations = ('added', 'changed', 'removed')
889 if not any((commit.get(o) for o in operations)):
889 if not any((commit.get(o) for o in operations)):
890 imc.add(FileNode('file_%s' % idx, content=message))
890 imc.add(FileNode('file_%s' % idx, content=message))
891
891
892 commit = imc.commit(
892 commit = imc.commit(
893 message=message,
893 message=message,
894 author=unicode(commit.get('author', 'Automatic <automatic@rhodecode.com>')),
894 author=unicode(commit.get('author', 'Automatic <automatic@rhodecode.com>')),
895 date=commit.get('date'),
895 date=commit.get('date'),
896 branch=commit.get('branch'),
896 branch=commit.get('branch'),
897 parents=parents)
897 parents=parents)
898
898
899 commit_ids[commit.message] = commit.raw_id
899 commit_ids[commit.message] = commit.raw_id
900
900
901 return commit_ids
901 return commit_ids
902
902
903
903
@pytest.fixture()
def reposerver(request):
    """
    Allows to serve a backend repository
    """
    server = RepoServer()
    request.addfinalizer(server.cleanup)
    return server
913
913
914
914
class RepoServer(object):
    """
    Utility to serve a local repository for the duration of a test case.

    Supports only Subversion so far.
    """

    # URL of the last served repository; None until serve() succeeds
    url = None

    def __init__(self):
        self._cleanup_servers = []

    def serve(self, vcsrepo):
        """
        Start ``svnserve`` for *vcsrepo* and expose its URL on ``self.url``.

        :raises TypeError: for any backend other than Subversion.
        """
        if vcsrepo.alias != 'svn':
            raise TypeError("Backend %s not supported" % vcsrepo.alias)

        # stdlib subprocess replaces the py2-only subprocess32 backport
        proc = subprocess.Popen(
            ['svnserve', '-d', '--foreground', '--listen-host', 'localhost',
             '--root', vcsrepo.path])
        self._cleanup_servers.append(proc)
        self.url = 'svn://localhost'

    def cleanup(self):
        """Terminate every server process started by :meth:`serve`."""
        for proc in self._cleanup_servers:
            proc.terminate()
940
940
941
941
@pytest.fixture()
def pr_util(backend, request, config_stub):
    """
    Utility for tests of models and for functional tests around pull requests.

    It gives an instance of :class:`PRTestUtility` which provides various
    utility methods around one pull request.

    This fixture uses `backend` and inherits its parameterization.
    """
    pr_helper = PRTestUtility(backend)
    request.addfinalizer(pr_helper.cleanup)
    return pr_helper
957
957
958
958
class PRTestUtility(object):
    """Helper around one pull request, used through the ``pr_util`` fixture."""

    pull_request = None
    pull_request_id = None
    mergeable_patcher = None
    mergeable_mock = None
    notification_patcher = None

    def __init__(self, backend):
        self.backend = backend

    def create_pull_request(
            self, commits=None, target_head=None, source_head=None,
            revisions=None, approved=False, author=None, mergeable=False,
            enable_notifications=True, name_suffix=u'', reviewers=None, observers=None,
            title=u"Test", description=u"Description"):
        """
        Create (once) and return the pull request managed by this utility.

        Subsequent calls return the already created pull request.
        """
        self.set_mergeable(mergeable)
        if not enable_notifications:
            # mock notification side effect
            self.notification_patcher = mock.patch(
                'rhodecode.model.notification.NotificationModel.create')
            self.notification_patcher.start()

        if not self.pull_request:
            if not commits:
                # default three-commit layout: PR brings c2 on top of c1
                commits = [
                    {'message': 'c1'},
                    {'message': 'c2'},
                    {'message': 'c3'},
                ]
                target_head = 'c1'
                source_head = 'c2'
                revisions = ['c2']

            self.commit_ids = self.backend.create_master_repo(commits)
            self.target_repository = self.backend.create_repo(
                heads=[target_head], name_suffix=name_suffix)
            self.source_repository = self.backend.create_repo(
                heads=[source_head], name_suffix=name_suffix)
            self.author = author or UserModel().get_by_username(
                TEST_USER_ADMIN_LOGIN)

            pr_model = PullRequestModel()
            self.create_parameters = {
                'created_by': self.author,
                'source_repo': self.source_repository.repo_name,
                'source_ref': self._default_branch_reference(source_head),
                'target_repo': self.target_repository.repo_name,
                'target_ref': self._default_branch_reference(target_head),
                'revisions': [self.commit_ids[r] for r in revisions],
                'reviewers': reviewers or self._get_reviewers(),
                'observers': observers or self._get_observers(),
                'title': title,
                'description': description,
            }
            self.pull_request = pr_model.create(**self.create_parameters)
            # a freshly created PR must not carry any versions yet
            assert pr_model.get_versions(self.pull_request) == []

            self.pull_request_id = self.pull_request.pull_request_id

            if approved:
                self.approve()

            Session().add(self.pull_request)
            Session().commit()

        return self.pull_request

    def approve(self):
        """Record an APPROVED vote for every reviewer of the pull request."""
        self.create_status_votes(
            ChangesetStatus.STATUS_APPROVED,
            *self.pull_request.reviewers)

    def close(self):
        """Close the managed pull request."""
        PullRequestModel().close_pull_request(self.pull_request, self.author)

    def _default_branch_reference(self, commit_message):
        """Build a ``branch:<name>:<commit_id>`` reference string."""
        return '%s:%s:%s' % (
            'branch',
            self.backend.default_branch_name,
            self.commit_ids[commit_message])

    def _get_reviewers(self):
        """Default reviewer tuples for the two regular test users."""
        role = PullRequestReviewers.ROLE_REVIEWER
        return [
            (TEST_USER_REGULAR_LOGIN, ['default1'], False, role, []),
            (TEST_USER_REGULAR2_LOGIN, ['default2'], False, role, []),
        ]

    def _get_observers(self):
        """No observers by default."""
        return []

    def update_source_repository(self, head=None):
        """Pull *head* (default ``c3``) into the source repository."""
        self.backend.pull_heads(self.source_repository, heads=[head or 'c3'])

    def add_one_commit(self, head=None):
        """Add one commit to the source side and return its new commit id."""
        self.update_source_repository(head=head)
        previous_ids = set(self.pull_request.revisions)
        PullRequestModel().update_commits(self.pull_request, self.pull_request.author)
        fresh_ids = set(self.pull_request.revisions) - previous_ids
        assert len(fresh_ids) == 1
        return fresh_ids.pop()

    def remove_one_commit(self):
        """Strip the tip commit from the source side; return its id."""
        assert len(self.pull_request.revisions) == 2
        source_vcs = self.source_repository.scm_instance()
        removed_commit_id = source_vcs.commit_ids[-1]

        # TODO: johbo: Git and Mercurial have an inconsistent vcs api here,
        # remove the if once that's sorted out.
        if self.backend.alias == "git":
            kwargs = {'branch_name': self.backend.default_branch_name}
        else:
            kwargs = {}
        source_vcs.strip(removed_commit_id, **kwargs)

        PullRequestModel().update_commits(self.pull_request, self.pull_request.author)
        assert len(self.pull_request.revisions) == 1
        return removed_commit_id

    def create_comment(self, linked_to=None):
        """Create a general PR comment, optionally linked to a PR version."""
        comment = CommentsModel().create(
            text=u"Test comment",
            repo=self.target_repository.repo_name,
            user=self.author,
            pull_request=self.pull_request)
        assert comment.pull_request_version_id is None

        if linked_to:
            PullRequestModel()._link_comments_to_version(linked_to)

        return comment

    def create_inline_comment(
            self, linked_to=None, line_no=u'n1', file_path='file_1'):
        """Create an inline PR comment, optionally linked to a PR version."""
        comment = CommentsModel().create(
            text=u"Test comment",
            repo=self.target_repository.repo_name,
            user=self.author,
            line_no=line_no,
            f_path=file_path,
            pull_request=self.pull_request)
        assert comment.pull_request_version_id is None

        if linked_to:
            PullRequestModel()._link_comments_to_version(linked_to)

        return comment

    def create_version_of_pull_request(self):
        """Create the pull request and snapshot it into a new version."""
        pull_request = self.create_pull_request()
        return PullRequestModel()._create_version_from_snapshot(pull_request)

    def create_status_votes(self, status, *reviewers):
        """Set *status* on the pull request for each given reviewer."""
        for reviewer in reviewers:
            ChangesetStatusModel().set_status(
                repo=self.pull_request.target_repo,
                status=status,
                user=reviewer.user_id,
                pull_request=self.pull_request)

    def set_mergeable(self, value):
        """Patch global VCS settings so PR merging appears enabled/disabled."""
        if not self.mergeable_patcher:
            self.mergeable_patcher = mock.patch.object(
                VcsSettingsModel, 'get_general_settings')
            self.mergeable_mock = self.mergeable_patcher.start()
        self.mergeable_mock.return_value = {
            'rhodecode_pr_merge_enabled': value}

    def cleanup(self):
        """Delete the pull request (if still present) and stop all patchers."""
        # In case the source repository is already cleaned up, the pull
        # request will already be deleted.
        pull_request = PullRequest().get(self.pull_request_id)
        if pull_request:
            PullRequestModel().delete(pull_request, pull_request.author)
            Session().commit()

        if self.notification_patcher:
            self.notification_patcher.stop()

        if self.mergeable_patcher:
            self.mergeable_patcher.stop()
1148
1148
1149
1149
@pytest.fixture()
def user_admin(baseapp):
    """
    Provides the default admin test user as an instance of `db.User`.
    """
    return UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1157
1157
1158
1158
@pytest.fixture()
def user_regular(baseapp):
    """
    Provides the default regular test user as an instance of `db.User`.
    """
    return UserModel().get_by_username(TEST_USER_REGULAR_LOGIN)
1166
1166
1167
1167
@pytest.fixture()
def user_util(request, db_connection):
    """
    Provides a wired instance of `UserUtility` with integrated cleanup.
    """
    helper = UserUtility(test_name=request.node.name)
    request.addfinalizer(helper.cleanup)
    return helper
1176
1176
1177
1177
1178 # TODO: johbo: Split this up into utilities per domain or something similar
1178 # TODO: johbo: Split this up into utilities per domain or something similar
1179 class UserUtility(object):
1179 class UserUtility(object):
1180
1180
def __init__(self, test_name="test"):
    """Set up per-test bookkeeping lists that cleanup() drains later."""
    self._test_name = self._sanitize_name(test_name)
    self.fixture = Fixture()
    # ids of entities created through this utility
    self.repo_group_ids = []
    self.repos_ids = []
    self.user_ids = []
    self.user_group_ids = []
    # (target_id, grantee_id) pairs per permission flavour
    self.user_repo_permission_ids = []
    self.user_group_repo_permission_ids = []
    self.user_repo_group_permission_ids = []
    self.user_group_repo_group_permission_ids = []
    self.user_user_group_permission_ids = []
    self.user_group_user_group_permission_ids = []
    # (user_name, permission_name) pairs for global permissions
    self.user_permissions = []
1195
1195
1196 def _sanitize_name(self, name):
1196 def _sanitize_name(self, name):
1197 for char in ['[', ']']:
1197 for char in ['[', ']']:
1198 name = name.replace(char, '_')
1198 name = name.replace(char, '_')
1199 return name
1199 return name
1200
1200
def create_repo_group(
        self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True):
    """Create a repo group named after the test; track it for cleanup."""
    group_name = "{prefix}_repogroup_{count}".format(
        prefix=self._test_name,
        count=len(self.repo_group_ids))
    new_group = self.fixture.create_repo_group(group_name, cur_user=owner)
    if auto_cleanup:
        self.repo_group_ids.append(new_group.group_id)
    return new_group
1211
1211
def create_repo(self, owner=TEST_USER_ADMIN_LOGIN, parent=None,
                auto_cleanup=True, repo_type='hg', bare=False):
    """Create a repository named after the test; track it for cleanup."""
    repo_name = "{prefix}_repository_{count}".format(
        prefix=self._test_name,
        count=len(self.repos_ids))

    new_repo = self.fixture.create_repo(
        repo_name, cur_user=owner, repo_group=parent, repo_type=repo_type, bare=bare)
    if auto_cleanup:
        self.repos_ids.append(new_repo.repo_id)
    return new_repo
1223
1223
def create_user(self, auto_cleanup=True, **kwargs):
    """Create a user named after the test; track it for cleanup."""
    user_name = "{prefix}_user_{count}".format(
        prefix=self._test_name,
        count=len(self.user_ids))
    new_user = self.fixture.create_user(user_name, **kwargs)
    if auto_cleanup:
        self.user_ids.append(new_user.user_id)
    return new_user
1232
1232
def create_additional_user_email(self, user, email):
    """Attach an extra email address to *user* and return the record."""
    return self.fixture.create_additional_user_email(user=user, email=email)
1236
1236
def create_user_with_group(self):
    """Create a user plus a user group containing that user."""
    member = self.create_user()
    member_group = self.create_user_group(members=[member])
    return member, member_group
1241
1241
def create_user_group(self, owner=TEST_USER_ADMIN_LOGIN, members=None,
                      auto_cleanup=True, **kwargs):
    """Create a user group named after the test; optionally add *members*."""
    group_name = "{prefix}_usergroup_{count}".format(
        prefix=self._test_name,
        count=len(self.user_group_ids))
    new_group = self.fixture.create_user_group(
        group_name, cur_user=owner, **kwargs)

    if auto_cleanup:
        self.user_group_ids.append(new_group.users_group_id)
    for member in (members or []):
        UserGroupModel().add_user_to_group(new_group, member)
    return new_group
1256
1256
def grant_user_permission(self, user_name, permission_name):
    """Record a global permission grant; cleanup will revoke it again."""
    # stop inheriting defaults so the explicit grant takes effect
    self.inherit_default_user_permissions(user_name, False)
    self.user_permissions.append((user_name, permission_name))
1260
1260
def grant_user_permission_to_repo_group(
        self, repo_group, user, permission_name):
    """Grant *user* a permission on *repo_group*; tracked for cleanup."""
    granted = RepoGroupModel().grant_user_permission(
        repo_group, user, permission_name)
    self.user_repo_group_permission_ids.append(
        (repo_group.group_id, user.user_id))
    return granted
1268
1268
def grant_user_group_permission_to_repo_group(
        self, repo_group, user_group, permission_name):
    """Grant *user_group* a permission on *repo_group*; tracked for cleanup."""
    granted = RepoGroupModel().grant_user_group_permission(
        repo_group, user_group, permission_name)
    self.user_group_repo_group_permission_ids.append(
        (repo_group.group_id, user_group.users_group_id))
    return granted
1276
1276
def grant_user_permission_to_repo(
        self, repo, user, permission_name):
    """Grant *user* a permission on *repo*; tracked for cleanup."""
    granted = RepoModel().grant_user_permission(
        repo, user, permission_name)
    self.user_repo_permission_ids.append(
        (repo.repo_id, user.user_id))
    return granted
1284
1284
def grant_user_group_permission_to_repo(
        self, repo, user_group, permission_name):
    """Grant *user_group* a permission on *repo*; tracked for cleanup."""
    granted = RepoModel().grant_user_group_permission(
        repo, user_group, permission_name)
    self.user_group_repo_permission_ids.append(
        (repo.repo_id, user_group.users_group_id))
    return granted
1292
1292
def grant_user_permission_to_user_group(
        self, target_user_group, user, permission_name):
    """Grant *user* a permission on *target_user_group*; tracked for cleanup."""
    granted = UserGroupModel().grant_user_permission(
        target_user_group, user, permission_name)
    self.user_user_group_permission_ids.append(
        (target_user_group.users_group_id, user.user_id))
    return granted
1300
1300
def grant_user_group_permission_to_user_group(
        self, target_user_group, user_group, permission_name):
    """Grant *user_group* a permission on *target_user_group*; tracked."""
    granted = UserGroupModel().grant_user_group_permission(
        target_user_group, user_group, permission_name)
    self.user_group_user_group_permission_ids.append(
        (target_user_group.users_group_id, user_group.users_group_id))
    return granted
1308
1308
def revoke_user_permission(self, user_name, permission_name):
    """Re-enable inherited defaults, then revoke the explicit permission."""
    self.inherit_default_user_permissions(user_name, True)
    UserModel().revoke_perm(user_name, permission_name)
1312
1312
def inherit_default_user_permissions(self, user_name, value):
    """Toggle whether *user_name* inherits the default user's permissions."""
    target_user = UserModel().get_by_username(user_name)
    target_user.inherit_default_permissions = value
    Session().add(target_user)
    Session().commit()
1318
1318
def cleanup(self):
    """Remove everything created through this utility, permissions first."""
    # permissions must go before the entities they reference
    self._cleanup_permissions()
    self._cleanup_repos()
    self._cleanup_repo_groups()
    self._cleanup_user_groups()
    self._cleanup_users()
1325
1325
1326 def _cleanup_permissions(self):
1326 def _cleanup_permissions(self):
1327 if self.user_permissions:
1327 if self.user_permissions:
1328 for user_name, permission_name in self.user_permissions:
1328 for user_name, permission_name in self.user_permissions:
1329 self.revoke_user_permission(user_name, permission_name)
1329 self.revoke_user_permission(user_name, permission_name)
1330
1330
1331 for permission in self.user_repo_permission_ids:
1331 for permission in self.user_repo_permission_ids:
1332 RepoModel().revoke_user_permission(*permission)
1332 RepoModel().revoke_user_permission(*permission)
1333
1333
1334 for permission in self.user_group_repo_permission_ids:
1334 for permission in self.user_group_repo_permission_ids:
1335 RepoModel().revoke_user_group_permission(*permission)
1335 RepoModel().revoke_user_group_permission(*permission)
1336
1336
1337 for permission in self.user_repo_group_permission_ids:
1337 for permission in self.user_repo_group_permission_ids:
1338 RepoGroupModel().revoke_user_permission(*permission)
1338 RepoGroupModel().revoke_user_permission(*permission)
1339
1339
1340 for permission in self.user_group_repo_group_permission_ids:
1340 for permission in self.user_group_repo_group_permission_ids:
1341 RepoGroupModel().revoke_user_group_permission(*permission)
1341 RepoGroupModel().revoke_user_group_permission(*permission)
1342
1342
1343 for permission in self.user_user_group_permission_ids:
1343 for permission in self.user_user_group_permission_ids:
1344 UserGroupModel().revoke_user_permission(*permission)
1344 UserGroupModel().revoke_user_permission(*permission)
1345
1345
1346 for permission in self.user_group_user_group_permission_ids:
1346 for permission in self.user_group_user_group_permission_ids:
1347 UserGroupModel().revoke_user_group_permission(*permission)
1347 UserGroupModel().revoke_user_group_permission(*permission)
1348
1348
1349 def _cleanup_repo_groups(self):
1349 def _cleanup_repo_groups(self):
1350 def _repo_group_compare(first_group_id, second_group_id):
1350 def _repo_group_compare(first_group_id, second_group_id):
1351 """
1351 """
1352 Gives higher priority to the groups with the most complex paths
1352 Gives higher priority to the groups with the most complex paths
1353 """
1353 """
1354 first_group = RepoGroup.get(first_group_id)
1354 first_group = RepoGroup.get(first_group_id)
1355 second_group = RepoGroup.get(second_group_id)
1355 second_group = RepoGroup.get(second_group_id)
1356 first_group_parts = (
1356 first_group_parts = (
1357 len(first_group.group_name.split('/')) if first_group else 0)
1357 len(first_group.group_name.split('/')) if first_group else 0)
1358 second_group_parts = (
1358 second_group_parts = (
1359 len(second_group.group_name.split('/')) if second_group else 0)
1359 len(second_group.group_name.split('/')) if second_group else 0)
1360 return cmp(second_group_parts, first_group_parts)
1360 return cmp(second_group_parts, first_group_parts)
1361
1361
1362 sorted_repo_group_ids = sorted(
1362 sorted_repo_group_ids = sorted(
1363 self.repo_group_ids, cmp=_repo_group_compare)
1363 self.repo_group_ids, cmp=_repo_group_compare)
1364 for repo_group_id in sorted_repo_group_ids:
1364 for repo_group_id in sorted_repo_group_ids:
1365 self.fixture.destroy_repo_group(repo_group_id)
1365 self.fixture.destroy_repo_group(repo_group_id)
1366
1366
1367 def _cleanup_repos(self):
1367 def _cleanup_repos(self):
1368 sorted_repos_ids = sorted(self.repos_ids)
1368 sorted_repos_ids = sorted(self.repos_ids)
1369 for repo_id in sorted_repos_ids:
1369 for repo_id in sorted_repos_ids:
1370 self.fixture.destroy_repo(repo_id)
1370 self.fixture.destroy_repo(repo_id)
1371
1371
1372 def _cleanup_user_groups(self):
1372 def _cleanup_user_groups(self):
1373 def _user_group_compare(first_group_id, second_group_id):
1373 def _user_group_compare(first_group_id, second_group_id):
1374 """
1374 """
1375 Gives higher priority to the groups with the most complex paths
1375 Gives higher priority to the groups with the most complex paths
1376 """
1376 """
1377 first_group = UserGroup.get(first_group_id)
1377 first_group = UserGroup.get(first_group_id)
1378 second_group = UserGroup.get(second_group_id)
1378 second_group = UserGroup.get(second_group_id)
1379 first_group_parts = (
1379 first_group_parts = (
1380 len(first_group.users_group_name.split('/'))
1380 len(first_group.users_group_name.split('/'))
1381 if first_group else 0)
1381 if first_group else 0)
1382 second_group_parts = (
1382 second_group_parts = (
1383 len(second_group.users_group_name.split('/'))
1383 len(second_group.users_group_name.split('/'))
1384 if second_group else 0)
1384 if second_group else 0)
1385 return cmp(second_group_parts, first_group_parts)
1385 return cmp(second_group_parts, first_group_parts)
1386
1386
1387 sorted_user_group_ids = sorted(
1387 sorted_user_group_ids = sorted(
1388 self.user_group_ids, cmp=_user_group_compare)
1388 self.user_group_ids, cmp=_user_group_compare)
1389 for user_group_id in sorted_user_group_ids:
1389 for user_group_id in sorted_user_group_ids:
1390 self.fixture.destroy_user_group(user_group_id)
1390 self.fixture.destroy_user_group(user_group_id)
1391
1391
1392 def _cleanup_users(self):
1392 def _cleanup_users(self):
1393 for user_id in self.user_ids:
1393 for user_id in self.user_ids:
1394 self.fixture.destroy_user(user_id)
1394 self.fixture.destroy_user(user_id)
1395
1395
1396
1396
1397 # TODO: Think about moving this into a pytest-pyro package and make it a
1397 # TODO: Think about moving this into a pytest-pyro package and make it a
1398 # pytest plugin
1398 # pytest plugin
1399 @pytest.hookimpl(tryfirst=True, hookwrapper=True)
1399 @pytest.hookimpl(tryfirst=True, hookwrapper=True)
1400 def pytest_runtest_makereport(item, call):
1400 def pytest_runtest_makereport(item, call):
1401 """
1401 """
1402 Adding the remote traceback if the exception has this information.
1402 Adding the remote traceback if the exception has this information.
1403
1403
1404 VCSServer attaches this information as the attribute `_vcs_server_traceback`
1404 VCSServer attaches this information as the attribute `_vcs_server_traceback`
1405 to the exception instance.
1405 to the exception instance.
1406 """
1406 """
1407 outcome = yield
1407 outcome = yield
1408 report = outcome.get_result()
1408 report = outcome.get_result()
1409 if call.excinfo:
1409 if call.excinfo:
1410 _add_vcsserver_remote_traceback(report, call.excinfo.value)
1410 _add_vcsserver_remote_traceback(report, call.excinfo.value)
1411
1411
1412
1412
1413 def _add_vcsserver_remote_traceback(report, exc):
1413 def _add_vcsserver_remote_traceback(report, exc):
1414 vcsserver_traceback = getattr(exc, '_vcs_server_traceback', None)
1414 vcsserver_traceback = getattr(exc, '_vcs_server_traceback', None)
1415
1415
1416 if vcsserver_traceback:
1416 if vcsserver_traceback:
1417 section = 'VCSServer remote traceback ' + report.when
1417 section = 'VCSServer remote traceback ' + report.when
1418 report.sections.append((section, vcsserver_traceback))
1418 report.sections.append((section, vcsserver_traceback))
1419
1419
1420
1420
1421 @pytest.fixture(scope='session')
1421 @pytest.fixture(scope='session')
1422 def testrun():
1422 def testrun():
1423 return {
1423 return {
1424 'uuid': uuid.uuid4(),
1424 'uuid': uuid.uuid4(),
1425 'start': datetime.datetime.utcnow().isoformat(),
1425 'start': datetime.datetime.utcnow().isoformat(),
1426 'timestamp': int(time.time()),
1426 'timestamp': int(time.time()),
1427 }
1427 }
1428
1428
1429
1429
1430 class AppenlightClient(object):
1430 class AppenlightClient(object):
1431
1431
1432 url_template = '{url}?protocol_version=0.5'
1432 url_template = '{url}?protocol_version=0.5'
1433
1433
1434 def __init__(
1434 def __init__(
1435 self, url, api_key, add_server=True, add_timestamp=True,
1435 self, url, api_key, add_server=True, add_timestamp=True,
1436 namespace=None, request=None, testrun=None):
1436 namespace=None, request=None, testrun=None):
1437 self.url = self.url_template.format(url=url)
1437 self.url = self.url_template.format(url=url)
1438 self.api_key = api_key
1438 self.api_key = api_key
1439 self.add_server = add_server
1439 self.add_server = add_server
1440 self.add_timestamp = add_timestamp
1440 self.add_timestamp = add_timestamp
1441 self.namespace = namespace
1441 self.namespace = namespace
1442 self.request = request
1442 self.request = request
1443 self.server = socket.getfqdn(socket.gethostname())
1443 self.server = socket.getfqdn(socket.gethostname())
1444 self.tags_before = {}
1444 self.tags_before = {}
1445 self.tags_after = {}
1445 self.tags_after = {}
1446 self.stats = []
1446 self.stats = []
1447 self.testrun = testrun or {}
1447 self.testrun = testrun or {}
1448
1448
1449 def tag_before(self, tag, value):
1449 def tag_before(self, tag, value):
1450 self.tags_before[tag] = value
1450 self.tags_before[tag] = value
1451
1451
1452 def tag_after(self, tag, value):
1452 def tag_after(self, tag, value):
1453 self.tags_after[tag] = value
1453 self.tags_after[tag] = value
1454
1454
1455 def collect(self, data):
1455 def collect(self, data):
1456 if self.add_server:
1456 if self.add_server:
1457 data.setdefault('server', self.server)
1457 data.setdefault('server', self.server)
1458 if self.add_timestamp:
1458 if self.add_timestamp:
1459 data.setdefault('date', datetime.datetime.utcnow().isoformat())
1459 data.setdefault('date', datetime.datetime.utcnow().isoformat())
1460 if self.namespace:
1460 if self.namespace:
1461 data.setdefault('namespace', self.namespace)
1461 data.setdefault('namespace', self.namespace)
1462 if self.request:
1462 if self.request:
1463 data.setdefault('request', self.request)
1463 data.setdefault('request', self.request)
1464 self.stats.append(data)
1464 self.stats.append(data)
1465
1465
1466 def send_stats(self):
1466 def send_stats(self):
1467 tags = [
1467 tags = [
1468 ('testrun', self.request),
1468 ('testrun', self.request),
1469 ('testrun.start', self.testrun['start']),
1469 ('testrun.start', self.testrun['start']),
1470 ('testrun.timestamp', self.testrun['timestamp']),
1470 ('testrun.timestamp', self.testrun['timestamp']),
1471 ('test', self.namespace),
1471 ('test', self.namespace),
1472 ]
1472 ]
1473 for key, value in self.tags_before.items():
1473 for key, value in self.tags_before.items():
1474 tags.append((key + '.before', value))
1474 tags.append((key + '.before', value))
1475 try:
1475 try:
1476 delta = self.tags_after[key] - value
1476 delta = self.tags_after[key] - value
1477 tags.append((key + '.delta', delta))
1477 tags.append((key + '.delta', delta))
1478 except Exception:
1478 except Exception:
1479 pass
1479 pass
1480 for key, value in self.tags_after.items():
1480 for key, value in self.tags_after.items():
1481 tags.append((key + '.after', value))
1481 tags.append((key + '.after', value))
1482 self.collect({
1482 self.collect({
1483 'message': "Collected tags",
1483 'message': "Collected tags",
1484 'tags': tags,
1484 'tags': tags,
1485 })
1485 })
1486
1486
1487 response = requests.post(
1487 response = requests.post(
1488 self.url,
1488 self.url,
1489 headers={
1489 headers={
1490 'X-appenlight-api-key': self.api_key},
1490 'X-appenlight-api-key': self.api_key},
1491 json=self.stats,
1491 json=self.stats,
1492 )
1492 )
1493
1493
1494 if not response.status_code == 200:
1494 if not response.status_code == 200:
1495 pprint.pprint(self.stats)
1495 pprint.pprint(self.stats)
1496 print(response.headers)
1496 print(response.headers)
1497 print(response.text)
1497 print(response.text)
1498 raise Exception('Sending to appenlight failed')
1498 raise Exception('Sending to appenlight failed')
1499
1499
1500
1500
1501 @pytest.fixture()
1501 @pytest.fixture()
1502 def gist_util(request, db_connection):
1502 def gist_util(request, db_connection):
1503 """
1503 """
1504 Provides a wired instance of `GistUtility` with integrated cleanup.
1504 Provides a wired instance of `GistUtility` with integrated cleanup.
1505 """
1505 """
1506 utility = GistUtility()
1506 utility = GistUtility()
1507 request.addfinalizer(utility.cleanup)
1507 request.addfinalizer(utility.cleanup)
1508 return utility
1508 return utility
1509
1509
1510
1510
1511 class GistUtility(object):
1511 class GistUtility(object):
1512 def __init__(self):
1512 def __init__(self):
1513 self.fixture = Fixture()
1513 self.fixture = Fixture()
1514 self.gist_ids = []
1514 self.gist_ids = []
1515
1515
1516 def create_gist(self, **kwargs):
1516 def create_gist(self, **kwargs):
1517 gist = self.fixture.create_gist(**kwargs)
1517 gist = self.fixture.create_gist(**kwargs)
1518 self.gist_ids.append(gist.gist_id)
1518 self.gist_ids.append(gist.gist_id)
1519 return gist
1519 return gist
1520
1520
1521 def cleanup(self):
1521 def cleanup(self):
1522 for id_ in self.gist_ids:
1522 for id_ in self.gist_ids:
1523 self.fixture.destroy_gists(str(id_))
1523 self.fixture.destroy_gists(str(id_))
1524
1524
1525
1525
1526 @pytest.fixture()
1526 @pytest.fixture()
1527 def enabled_backends(request):
1527 def enabled_backends(request):
1528 backends = request.config.option.backends
1528 backends = request.config.option.backends
1529 return backends[:]
1529 return backends[:]
1530
1530
1531
1531
1532 @pytest.fixture()
1532 @pytest.fixture()
1533 def settings_util(request, db_connection):
1533 def settings_util(request, db_connection):
1534 """
1534 """
1535 Provides a wired instance of `SettingsUtility` with integrated cleanup.
1535 Provides a wired instance of `SettingsUtility` with integrated cleanup.
1536 """
1536 """
1537 utility = SettingsUtility()
1537 utility = SettingsUtility()
1538 request.addfinalizer(utility.cleanup)
1538 request.addfinalizer(utility.cleanup)
1539 return utility
1539 return utility
1540
1540
1541
1541
1542 class SettingsUtility(object):
1542 class SettingsUtility(object):
1543 def __init__(self):
1543 def __init__(self):
1544 self.rhodecode_ui_ids = []
1544 self.rhodecode_ui_ids = []
1545 self.rhodecode_setting_ids = []
1545 self.rhodecode_setting_ids = []
1546 self.repo_rhodecode_ui_ids = []
1546 self.repo_rhodecode_ui_ids = []
1547 self.repo_rhodecode_setting_ids = []
1547 self.repo_rhodecode_setting_ids = []
1548
1548
1549 def create_repo_rhodecode_ui(
1549 def create_repo_rhodecode_ui(
1550 self, repo, section, value, key=None, active=True, cleanup=True):
1550 self, repo, section, value, key=None, active=True, cleanup=True):
1551 key = key or hashlib.sha1(
1551 key = key or hashlib.sha1(
1552 '{}{}{}'.format(section, value, repo.repo_id)).hexdigest()
1552 '{}{}{}'.format(section, value, repo.repo_id)).hexdigest()
1553
1553
1554 setting = RepoRhodeCodeUi()
1554 setting = RepoRhodeCodeUi()
1555 setting.repository_id = repo.repo_id
1555 setting.repository_id = repo.repo_id
1556 setting.ui_section = section
1556 setting.ui_section = section
1557 setting.ui_value = value
1557 setting.ui_value = value
1558 setting.ui_key = key
1558 setting.ui_key = key
1559 setting.ui_active = active
1559 setting.ui_active = active
1560 Session().add(setting)
1560 Session().add(setting)
1561 Session().commit()
1561 Session().commit()
1562
1562
1563 if cleanup:
1563 if cleanup:
1564 self.repo_rhodecode_ui_ids.append(setting.ui_id)
1564 self.repo_rhodecode_ui_ids.append(setting.ui_id)
1565 return setting
1565 return setting
1566
1566
1567 def create_rhodecode_ui(
1567 def create_rhodecode_ui(
1568 self, section, value, key=None, active=True, cleanup=True):
1568 self, section, value, key=None, active=True, cleanup=True):
1569 key = key or hashlib.sha1('{}{}'.format(section, value)).hexdigest()
1569 key = key or hashlib.sha1('{}{}'.format(section, value)).hexdigest()
1570
1570
1571 setting = RhodeCodeUi()
1571 setting = RhodeCodeUi()
1572 setting.ui_section = section
1572 setting.ui_section = section
1573 setting.ui_value = value
1573 setting.ui_value = value
1574 setting.ui_key = key
1574 setting.ui_key = key
1575 setting.ui_active = active
1575 setting.ui_active = active
1576 Session().add(setting)
1576 Session().add(setting)
1577 Session().commit()
1577 Session().commit()
1578
1578
1579 if cleanup:
1579 if cleanup:
1580 self.rhodecode_ui_ids.append(setting.ui_id)
1580 self.rhodecode_ui_ids.append(setting.ui_id)
1581 return setting
1581 return setting
1582
1582
1583 def create_repo_rhodecode_setting(
1583 def create_repo_rhodecode_setting(
1584 self, repo, name, value, type_, cleanup=True):
1584 self, repo, name, value, type_, cleanup=True):
1585 setting = RepoRhodeCodeSetting(
1585 setting = RepoRhodeCodeSetting(
1586 repo.repo_id, key=name, val=value, type=type_)
1586 repo.repo_id, key=name, val=value, type=type_)
1587 Session().add(setting)
1587 Session().add(setting)
1588 Session().commit()
1588 Session().commit()
1589
1589
1590 if cleanup:
1590 if cleanup:
1591 self.repo_rhodecode_setting_ids.append(setting.app_settings_id)
1591 self.repo_rhodecode_setting_ids.append(setting.app_settings_id)
1592 return setting
1592 return setting
1593
1593
1594 def create_rhodecode_setting(self, name, value, type_, cleanup=True):
1594 def create_rhodecode_setting(self, name, value, type_, cleanup=True):
1595 setting = RhodeCodeSetting(key=name, val=value, type=type_)
1595 setting = RhodeCodeSetting(key=name, val=value, type=type_)
1596 Session().add(setting)
1596 Session().add(setting)
1597 Session().commit()
1597 Session().commit()
1598
1598
1599 if cleanup:
1599 if cleanup:
1600 self.rhodecode_setting_ids.append(setting.app_settings_id)
1600 self.rhodecode_setting_ids.append(setting.app_settings_id)
1601
1601
1602 return setting
1602 return setting
1603
1603
1604 def cleanup(self):
1604 def cleanup(self):
1605 for id_ in self.rhodecode_ui_ids:
1605 for id_ in self.rhodecode_ui_ids:
1606 setting = RhodeCodeUi.get(id_)
1606 setting = RhodeCodeUi.get(id_)
1607 Session().delete(setting)
1607 Session().delete(setting)
1608
1608
1609 for id_ in self.rhodecode_setting_ids:
1609 for id_ in self.rhodecode_setting_ids:
1610 setting = RhodeCodeSetting.get(id_)
1610 setting = RhodeCodeSetting.get(id_)
1611 Session().delete(setting)
1611 Session().delete(setting)
1612
1612
1613 for id_ in self.repo_rhodecode_ui_ids:
1613 for id_ in self.repo_rhodecode_ui_ids:
1614 setting = RepoRhodeCodeUi.get(id_)
1614 setting = RepoRhodeCodeUi.get(id_)
1615 Session().delete(setting)
1615 Session().delete(setting)
1616
1616
1617 for id_ in self.repo_rhodecode_setting_ids:
1617 for id_ in self.repo_rhodecode_setting_ids:
1618 setting = RepoRhodeCodeSetting.get(id_)
1618 setting = RepoRhodeCodeSetting.get(id_)
1619 Session().delete(setting)
1619 Session().delete(setting)
1620
1620
1621 Session().commit()
1621 Session().commit()
1622
1622
1623
1623
1624 @pytest.fixture()
1624 @pytest.fixture()
1625 def no_notifications(request):
1625 def no_notifications(request):
1626 notification_patcher = mock.patch(
1626 notification_patcher = mock.patch(
1627 'rhodecode.model.notification.NotificationModel.create')
1627 'rhodecode.model.notification.NotificationModel.create')
1628 notification_patcher.start()
1628 notification_patcher.start()
1629 request.addfinalizer(notification_patcher.stop)
1629 request.addfinalizer(notification_patcher.stop)
1630
1630
1631
1631
1632 @pytest.fixture(scope='session')
1632 @pytest.fixture(scope='session')
1633 def repeat(request):
1633 def repeat(request):
1634 """
1634 """
1635 The number of repetitions is based on this fixture.
1635 The number of repetitions is based on this fixture.
1636
1636
1637 Slower calls may divide it by 10 or 100. It is chosen in a way so that the
1637 Slower calls may divide it by 10 or 100. It is chosen in a way so that the
1638 tests are not too slow in our default test suite.
1638 tests are not too slow in our default test suite.
1639 """
1639 """
1640 return request.config.getoption('--repeat')
1640 return request.config.getoption('--repeat')
1641
1641
1642
1642
1643 @pytest.fixture()
1643 @pytest.fixture()
1644 def rhodecode_fixtures():
1644 def rhodecode_fixtures():
1645 return Fixture()
1645 return Fixture()
1646
1646
1647
1647
1648 @pytest.fixture()
1648 @pytest.fixture()
1649 def context_stub():
1649 def context_stub():
1650 """
1650 """
1651 Stub context object.
1651 Stub context object.
1652 """
1652 """
1653 context = pyramid.testing.DummyResource()
1653 context = pyramid.testing.DummyResource()
1654 return context
1654 return context
1655
1655
1656
1656
1657 @pytest.fixture()
1657 @pytest.fixture()
1658 def request_stub():
1658 def request_stub():
1659 """
1659 """
1660 Stub request object.
1660 Stub request object.
1661 """
1661 """
1662 from rhodecode.lib.base import bootstrap_request
1662 from rhodecode.lib.base import bootstrap_request
1663 request = bootstrap_request(scheme='https')
1663 request = bootstrap_request(scheme='https')
1664 return request
1664 return request
1665
1665
1666
1666
1667 @pytest.fixture()
1667 @pytest.fixture()
1668 def config_stub(request, request_stub):
1668 def config_stub(request, request_stub):
1669 """
1669 """
1670 Set up pyramid.testing and return the Configurator.
1670 Set up pyramid.testing and return the Configurator.
1671 """
1671 """
1672 from rhodecode.lib.base import bootstrap_config
1672 from rhodecode.lib.base import bootstrap_config
1673 config = bootstrap_config(request=request_stub)
1673 config = bootstrap_config(request=request_stub)
1674
1674
1675 @request.addfinalizer
1675 @request.addfinalizer
1676 def cleanup():
1676 def cleanup():
1677 pyramid.testing.tearDown()
1677 pyramid.testing.tearDown()
1678
1678
1679 return config
1679 return config
1680
1680
1681
1681
1682 @pytest.fixture()
1682 @pytest.fixture()
1683 def StubIntegrationType():
1683 def StubIntegrationType():
1684 class _StubIntegrationType(IntegrationTypeBase):
1684 class _StubIntegrationType(IntegrationTypeBase):
1685 """ Test integration type class """
1685 """ Test integration type class """
1686
1686
1687 key = 'test'
1687 key = 'test'
1688 display_name = 'Test integration type'
1688 display_name = 'Test integration type'
1689 description = 'A test integration type for testing'
1689 description = 'A test integration type for testing'
1690
1690
1691 @classmethod
1691 @classmethod
1692 def icon(cls):
1692 def icon(cls):
1693 return 'test_icon_html_image'
1693 return 'test_icon_html_image'
1694
1694
1695 def __init__(self, settings):
1695 def __init__(self, settings):
1696 super(_StubIntegrationType, self).__init__(settings)
1696 super(_StubIntegrationType, self).__init__(settings)
1697 self.sent_events = [] # for testing
1697 self.sent_events = [] # for testing
1698
1698
1699 def send_event(self, event):
1699 def send_event(self, event):
1700 self.sent_events.append(event)
1700 self.sent_events.append(event)
1701
1701
1702 def settings_schema(self):
1702 def settings_schema(self):
1703 class SettingsSchema(colander.Schema):
1703 class SettingsSchema(colander.Schema):
1704 test_string_field = colander.SchemaNode(
1704 test_string_field = colander.SchemaNode(
1705 colander.String(),
1705 colander.String(),
1706 missing=colander.required,
1706 missing=colander.required,
1707 title='test string field',
1707 title='test string field',
1708 )
1708 )
1709 test_int_field = colander.SchemaNode(
1709 test_int_field = colander.SchemaNode(
1710 colander.Int(),
1710 colander.Int(),
1711 title='some integer setting',
1711 title='some integer setting',
1712 )
1712 )
1713 return SettingsSchema()
1713 return SettingsSchema()
1714
1714
1715
1715
1716 integration_type_registry.register_integration_type(_StubIntegrationType)
1716 integration_type_registry.register_integration_type(_StubIntegrationType)
1717 return _StubIntegrationType
1717 return _StubIntegrationType
1718
1718
1719 @pytest.fixture()
1719 @pytest.fixture()
1720 def stub_integration_settings():
1720 def stub_integration_settings():
1721 return {
1721 return {
1722 'test_string_field': 'some data',
1722 'test_string_field': 'some data',
1723 'test_int_field': 100,
1723 'test_int_field': 100,
1724 }
1724 }
1725
1725
1726
1726
1727 @pytest.fixture()
1727 @pytest.fixture()
1728 def repo_integration_stub(request, repo_stub, StubIntegrationType,
1728 def repo_integration_stub(request, repo_stub, StubIntegrationType,
1729 stub_integration_settings):
1729 stub_integration_settings):
1730 integration = IntegrationModel().create(
1730 integration = IntegrationModel().create(
1731 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1731 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1732 name='test repo integration',
1732 name='test repo integration',
1733 repo=repo_stub, repo_group=None, child_repos_only=None)
1733 repo=repo_stub, repo_group=None, child_repos_only=None)
1734
1734
1735 @request.addfinalizer
1735 @request.addfinalizer
1736 def cleanup():
1736 def cleanup():
1737 IntegrationModel().delete(integration)
1737 IntegrationModel().delete(integration)
1738
1738
1739 return integration
1739 return integration
1740
1740
1741
1741
1742 @pytest.fixture()
1742 @pytest.fixture()
1743 def repogroup_integration_stub(request, test_repo_group, StubIntegrationType,
1743 def repogroup_integration_stub(request, test_repo_group, StubIntegrationType,
1744 stub_integration_settings):
1744 stub_integration_settings):
1745 integration = IntegrationModel().create(
1745 integration = IntegrationModel().create(
1746 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1746 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1747 name='test repogroup integration',
1747 name='test repogroup integration',
1748 repo=None, repo_group=test_repo_group, child_repos_only=True)
1748 repo=None, repo_group=test_repo_group, child_repos_only=True)
1749
1749
1750 @request.addfinalizer
1750 @request.addfinalizer
1751 def cleanup():
1751 def cleanup():
1752 IntegrationModel().delete(integration)
1752 IntegrationModel().delete(integration)
1753
1753
1754 return integration
1754 return integration
1755
1755
1756
1756
1757 @pytest.fixture()
1757 @pytest.fixture()
1758 def repogroup_recursive_integration_stub(request, test_repo_group,
1758 def repogroup_recursive_integration_stub(request, test_repo_group,
1759 StubIntegrationType, stub_integration_settings):
1759 StubIntegrationType, stub_integration_settings):
1760 integration = IntegrationModel().create(
1760 integration = IntegrationModel().create(
1761 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1761 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1762 name='test recursive repogroup integration',
1762 name='test recursive repogroup integration',
1763 repo=None, repo_group=test_repo_group, child_repos_only=False)
1763 repo=None, repo_group=test_repo_group, child_repos_only=False)
1764
1764
1765 @request.addfinalizer
1765 @request.addfinalizer
1766 def cleanup():
1766 def cleanup():
1767 IntegrationModel().delete(integration)
1767 IntegrationModel().delete(integration)
1768
1768
1769 return integration
1769 return integration
1770
1770
1771
1771
1772 @pytest.fixture()
1772 @pytest.fixture()
1773 def global_integration_stub(request, StubIntegrationType,
1773 def global_integration_stub(request, StubIntegrationType,
1774 stub_integration_settings):
1774 stub_integration_settings):
1775 integration = IntegrationModel().create(
1775 integration = IntegrationModel().create(
1776 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1776 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1777 name='test global integration',
1777 name='test global integration',
1778 repo=None, repo_group=None, child_repos_only=None)
1778 repo=None, repo_group=None, child_repos_only=None)
1779
1779
1780 @request.addfinalizer
1780 @request.addfinalizer
1781 def cleanup():
1781 def cleanup():
1782 IntegrationModel().delete(integration)
1782 IntegrationModel().delete(integration)
1783
1783
1784 return integration
1784 return integration
1785
1785
1786
1786
1787 @pytest.fixture()
1787 @pytest.fixture()
1788 def root_repos_integration_stub(request, StubIntegrationType,
1788 def root_repos_integration_stub(request, StubIntegrationType,
1789 stub_integration_settings):
1789 stub_integration_settings):
1790 integration = IntegrationModel().create(
1790 integration = IntegrationModel().create(
1791 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1791 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1792 name='test global integration',
1792 name='test global integration',
1793 repo=None, repo_group=None, child_repos_only=True)
1793 repo=None, repo_group=None, child_repos_only=True)
1794
1794
1795 @request.addfinalizer
1795 @request.addfinalizer
1796 def cleanup():
1796 def cleanup():
1797 IntegrationModel().delete(integration)
1797 IntegrationModel().delete(integration)
1798
1798
1799 return integration
1799 return integration
1800
1800
1801
1801
1802 @pytest.fixture()
1802 @pytest.fixture()
1803 def local_dt_to_utc():
1803 def local_dt_to_utc():
1804 def _factory(dt):
1804 def _factory(dt):
1805 return dt.replace(tzinfo=dateutil.tz.tzlocal()).astimezone(
1805 return dt.replace(tzinfo=dateutil.tz.tzlocal()).astimezone(
1806 dateutil.tz.tzutc()).replace(tzinfo=None)
1806 dateutil.tz.tzutc()).replace(tzinfo=None)
1807 return _factory
1807 return _factory
1808
1808
1809
1809
1810 @pytest.fixture()
1810 @pytest.fixture()
1811 def disable_anonymous_user(request, baseapp):
1811 def disable_anonymous_user(request, baseapp):
1812 set_anonymous_access(False)
1812 set_anonymous_access(False)
1813
1813
1814 @request.addfinalizer
1814 @request.addfinalizer
1815 def cleanup():
1815 def cleanup():
1816 set_anonymous_access(True)
1816 set_anonymous_access(True)
1817
1817
1818
1818
1819 @pytest.fixture(scope='module')
1819 @pytest.fixture(scope='module')
1820 def rc_fixture(request):
1820 def rc_fixture(request):
1821 return Fixture()
1821 return Fixture()
1822
1822
1823
1823
1824 @pytest.fixture()
1824 @pytest.fixture()
1825 def repo_groups(request):
1825 def repo_groups(request):
1826 fixture = Fixture()
1826 fixture = Fixture()
1827
1827
1828 session = Session()
1828 session = Session()
1829 zombie_group = fixture.create_repo_group('zombie')
1829 zombie_group = fixture.create_repo_group('zombie')
1830 parent_group = fixture.create_repo_group('parent')
1830 parent_group = fixture.create_repo_group('parent')
1831 child_group = fixture.create_repo_group('parent/child')
1831 child_group = fixture.create_repo_group('parent/child')
1832 groups_in_db = session.query(RepoGroup).all()
1832 groups_in_db = session.query(RepoGroup).all()
1833 assert len(groups_in_db) == 3
1833 assert len(groups_in_db) == 3
1834 assert child_group.group_parent_id == parent_group.group_id
1834 assert child_group.group_parent_id == parent_group.group_id
1835
1835
1836 @request.addfinalizer
1836 @request.addfinalizer
1837 def cleanup():
1837 def cleanup():
1838 fixture.destroy_repo_group(zombie_group)
1838 fixture.destroy_repo_group(zombie_group)
1839 fixture.destroy_repo_group(child_group)
1839 fixture.destroy_repo_group(child_group)
1840 fixture.destroy_repo_group(parent_group)
1840 fixture.destroy_repo_group(parent_group)
1841
1841
1842 return zombie_group, parent_group, child_group
1842 return zombie_group, parent_group, child_group
@@ -1,203 +1,203 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2020 RhodeCode GmbH
3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Test suite for making push/pull operations
22 Test suite for making push/pull operations
23 """
23 """
24
24
25 import os
25 import os
26 import sys
26 import sys
27 import shutil
27 import shutil
28 import logging
28 import logging
29 from os.path import join as jn
29 from os.path import join as jn
30 from os.path import dirname as dn
30 from os.path import dirname as dn
31
31
32 from tempfile import _RandomNameSequence
32 from tempfile import _RandomNameSequence
33 from subprocess32 import Popen, PIPE
33 from subprocess import Popen, PIPE
34
34
35 from rhodecode.lib.utils2 import engine_from_config
35 from rhodecode.lib.utils2 import engine_from_config
36 from rhodecode.lib.auth import get_crypt_password
36 from rhodecode.lib.auth import get_crypt_password
37 from rhodecode.model import init_model
37 from rhodecode.model import init_model
38 from rhodecode.model import meta
38 from rhodecode.model import meta
39 from rhodecode.model.db import User, Repository
39 from rhodecode.model.db import User, Repository
40
40
41 from rhodecode.tests import TESTS_TMP_PATH, HG_REPO
41 from rhodecode.tests import TESTS_TMP_PATH, HG_REPO
42
42
43 rel_path = dn(dn(dn(dn(os.path.abspath(__file__)))))
43 rel_path = dn(dn(dn(dn(os.path.abspath(__file__)))))
44
44
45
45
46 USER = 'test_admin'
46 USER = 'test_admin'
47 PASS = 'test12'
47 PASS = 'test12'
48 HOST = 'rc.local'
48 HOST = 'rc.local'
49 METHOD = 'pull'
49 METHOD = 'pull'
50 DEBUG = True
50 DEBUG = True
51 log = logging.getLogger(__name__)
51 log = logging.getLogger(__name__)
52
52
53
53
54 class Command(object):
54 class Command(object):
55
55
56 def __init__(self, cwd):
56 def __init__(self, cwd):
57 self.cwd = cwd
57 self.cwd = cwd
58
58
59 def execute(self, cmd, *args):
59 def execute(self, cmd, *args):
60 """Runs command on the system with given ``args``.
60 """Runs command on the system with given ``args``.
61 """
61 """
62
62
63 command = cmd + ' ' + ' '.join(args)
63 command = cmd + ' ' + ' '.join(args)
64 log.debug('Executing %s', command)
64 log.debug('Executing %s', command)
65 if DEBUG:
65 if DEBUG:
66 print(command)
66 print(command)
67 p = Popen(command, shell=True, stdout=PIPE, stderr=PIPE, cwd=self.cwd)
67 p = Popen(command, shell=True, stdout=PIPE, stderr=PIPE, cwd=self.cwd)
68 stdout, stderr = p.communicate()
68 stdout, stderr = p.communicate()
69 if DEBUG:
69 if DEBUG:
70 print('{} {}'.format(stdout, stderr))
70 print('{} {}'.format(stdout, stderr))
71 return stdout, stderr
71 return stdout, stderr
72
72
73
73
74 def get_session():
74 def get_session():
75 conf = {}
75 conf = {}
76 engine = engine_from_config(conf, 'sqlalchemy.db1.')
76 engine = engine_from_config(conf, 'sqlalchemy.db1.')
77 init_model(engine)
77 init_model(engine)
78 sa = meta.Session
78 sa = meta.Session
79 return sa
79 return sa
80
80
81
81
82 def create_test_user(force=True):
82 def create_test_user(force=True):
83 print('creating test user')
83 print('creating test user')
84 sa = get_session()
84 sa = get_session()
85
85
86 user = sa.query(User).filter(User.username == USER).scalar()
86 user = sa.query(User).filter(User.username == USER).scalar()
87
87
88 if force and user is not None:
88 if force and user is not None:
89 print('removing current user')
89 print('removing current user')
90 for repo in sa.query(Repository).filter(Repository.user == user).all():
90 for repo in sa.query(Repository).filter(Repository.user == user).all():
91 sa.delete(repo)
91 sa.delete(repo)
92 sa.delete(user)
92 sa.delete(user)
93 sa.commit()
93 sa.commit()
94
94
95 if user is None or force:
95 if user is None or force:
96 print('creating new one')
96 print('creating new one')
97 new_usr = User()
97 new_usr = User()
98 new_usr.username = USER
98 new_usr.username = USER
99 new_usr.password = get_crypt_password(PASS)
99 new_usr.password = get_crypt_password(PASS)
100 new_usr.email = 'mail@mail.com'
100 new_usr.email = 'mail@mail.com'
101 new_usr.name = 'test'
101 new_usr.name = 'test'
102 new_usr.lastname = 'lasttestname'
102 new_usr.lastname = 'lasttestname'
103 new_usr.active = True
103 new_usr.active = True
104 new_usr.admin = True
104 new_usr.admin = True
105 sa.add(new_usr)
105 sa.add(new_usr)
106 sa.commit()
106 sa.commit()
107
107
108 print('done')
108 print('done')
109
109
110
110
111 def create_test_repo(force=True):
111 def create_test_repo(force=True):
112 print('creating test repo')
112 print('creating test repo')
113 from rhodecode.model.repo import RepoModel
113 from rhodecode.model.repo import RepoModel
114 sa = get_session()
114 sa = get_session()
115
115
116 user = sa.query(User).filter(User.username == USER).scalar()
116 user = sa.query(User).filter(User.username == USER).scalar()
117 if user is None:
117 if user is None:
118 raise Exception('user not found')
118 raise Exception('user not found')
119
119
120 repo = sa.query(Repository).filter(Repository.repo_name == HG_REPO).scalar()
120 repo = sa.query(Repository).filter(Repository.repo_name == HG_REPO).scalar()
121
121
122 if repo is None:
122 if repo is None:
123 print('repo not found creating')
123 print('repo not found creating')
124
124
125 form_data = {'repo_name': HG_REPO,
125 form_data = {'repo_name': HG_REPO,
126 'repo_type': 'hg',
126 'repo_type': 'hg',
127 'private':False,
127 'private':False,
128 'clone_uri': '' }
128 'clone_uri': '' }
129 rm = RepoModel(sa)
129 rm = RepoModel(sa)
130 rm.base_path = '/home/hg'
130 rm.base_path = '/home/hg'
131 rm.create(form_data, user)
131 rm.create(form_data, user)
132
132
133 print('done')
133 print('done')
134
134
135
135
136 def get_anonymous_access():
136 def get_anonymous_access():
137 sa = get_session()
137 sa = get_session()
138 return sa.query(User).filter(User.username == 'default').one().active
138 return sa.query(User).filter(User.username == 'default').one().active
139
139
140
140
141 #==============================================================================
141 #==============================================================================
142 # TESTS
142 # TESTS
143 #==============================================================================
143 #==============================================================================
144 def test_clone_with_credentials(repo=HG_REPO, method=METHOD,
144 def test_clone_with_credentials(repo=HG_REPO, method=METHOD,
145 seq=None, backend='hg', check_output=True):
145 seq=None, backend='hg', check_output=True):
146 cwd = path = jn(TESTS_TMP_PATH, repo)
146 cwd = path = jn(TESTS_TMP_PATH, repo)
147
147
148 if seq is None:
148 if seq is None:
149 seq = _RandomNameSequence().next()
149 seq = _RandomNameSequence().next()
150
150
151 try:
151 try:
152 shutil.rmtree(path, ignore_errors=True)
152 shutil.rmtree(path, ignore_errors=True)
153 os.makedirs(path)
153 os.makedirs(path)
154 except OSError:
154 except OSError:
155 raise
155 raise
156
156
157 clone_url = 'http://%(user)s:%(pass)s@%(host)s/%(cloned_repo)s' % \
157 clone_url = 'http://%(user)s:%(pass)s@%(host)s/%(cloned_repo)s' % \
158 {'user': USER,
158 {'user': USER,
159 'pass': PASS,
159 'pass': PASS,
160 'host': HOST,
160 'host': HOST,
161 'cloned_repo': repo, }
161 'cloned_repo': repo, }
162
162
163 dest = path + seq
163 dest = path + seq
164 if method == 'pull':
164 if method == 'pull':
165 stdout, stderr = Command(cwd).execute(backend, method, '--cwd', dest, clone_url)
165 stdout, stderr = Command(cwd).execute(backend, method, '--cwd', dest, clone_url)
166 else:
166 else:
167 stdout, stderr = Command(cwd).execute(backend, method, clone_url, dest)
167 stdout, stderr = Command(cwd).execute(backend, method, clone_url, dest)
168 if check_output:
168 if check_output:
169 if backend == 'hg':
169 if backend == 'hg':
170 assert """adding file changes""" in stdout, 'no messages about cloning'
170 assert """adding file changes""" in stdout, 'no messages about cloning'
171 assert """abort""" not in stderr, 'got error from clone'
171 assert """abort""" not in stderr, 'got error from clone'
172 elif backend == 'git':
172 elif backend == 'git':
173 assert """Cloning into""" in stdout, 'no messages about cloning'
173 assert """Cloning into""" in stdout, 'no messages about cloning'
174
174
175
175
176 if __name__ == '__main__':
176 if __name__ == '__main__':
177 try:
177 try:
178 create_test_user(force=False)
178 create_test_user(force=False)
179 seq = None
179 seq = None
180 import time
180 import time
181
181
182 try:
182 try:
183 METHOD = sys.argv[3]
183 METHOD = sys.argv[3]
184 except Exception:
184 except Exception:
185 pass
185 pass
186
186
187 try:
187 try:
188 backend = sys.argv[4]
188 backend = sys.argv[4]
189 except Exception:
189 except Exception:
190 backend = 'hg'
190 backend = 'hg'
191
191
192 if METHOD == 'pull':
192 if METHOD == 'pull':
193 seq = _RandomNameSequence().next()
193 seq = _RandomNameSequence().next()
194 test_clone_with_credentials(repo=sys.argv[1], method='clone',
194 test_clone_with_credentials(repo=sys.argv[1], method='clone',
195 seq=seq, backend=backend)
195 seq=seq, backend=backend)
196 s = time.time()
196 s = time.time()
197 for i in range(1, int(sys.argv[2]) + 1):
197 for i in range(1, int(sys.argv[2]) + 1):
198 print('take {}'.format(i))
198 print('take {}'.format(i))
199 test_clone_with_credentials(repo=sys.argv[1], method=METHOD,
199 test_clone_with_credentials(repo=sys.argv[1], method=METHOD,
200 seq=seq, backend=backend)
200 seq=seq, backend=backend)
201 print('time taken %.4f' % (time.time() - s))
201 print('time taken %.4f' % (time.time() - s))
202 except Exception as e:
202 except Exception as e:
203 sys.exit('stop on %s' % e)
203 sys.exit('stop on %s' % e)
@@ -1,200 +1,200 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2020 RhodeCode GmbH
3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 import os
22 import os
23 import time
23 import time
24 import tempfile
24 import tempfile
25 import pytest
25 import pytest
26 import subprocess32
26 import subprocess
27 import configobj
27 import configobj
28 import logging
28 import logging
29 from urllib.request import urlopen
29 from urllib.request import urlopen
30 from urllib.error import URLError
30 from urllib.error import URLError
31 from pyramid.compat import configparser
31 from pyramid.compat import configparser
32
32
33
33
34 from rhodecode.tests import TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS
34 from rhodecode.tests import TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS
35 from rhodecode.tests.utils import is_url_reachable
35 from rhodecode.tests.utils import is_url_reachable
36
36
37 log = logging.getLogger(__name__)
37 log = logging.getLogger(__name__)
38
38
39
39
40 def get_port(pyramid_config):
40 def get_port(pyramid_config):
41 config = configparser.ConfigParser()
41 config = configparser.ConfigParser()
42 config.read(pyramid_config)
42 config.read(pyramid_config)
43 return config.get('server:main', 'port')
43 return config.get('server:main', 'port')
44
44
45
45
46 def get_host_url(pyramid_config):
46 def get_host_url(pyramid_config):
47 """Construct the host url using the port in the test configuration."""
47 """Construct the host url using the port in the test configuration."""
48 return '127.0.0.1:%s' % get_port(pyramid_config)
48 return '127.0.0.1:%s' % get_port(pyramid_config)
49
49
50
50
51 def assert_no_running_instance(url):
51 def assert_no_running_instance(url):
52 if is_url_reachable(url):
52 if is_url_reachable(url):
53 print("Hint: Usually this means another instance of server "
53 print("Hint: Usually this means another instance of server "
54 "is running in the background at %s." % url)
54 "is running in the background at %s." % url)
55 pytest.fail(
55 pytest.fail(
56 "Port is not free at %s, cannot start server at" % url)
56 "Port is not free at %s, cannot start server at" % url)
57
57
58
58
59 class ServerBase(object):
59 class ServerBase(object):
60 _args = []
60 _args = []
61 log_file_name = 'NOT_DEFINED.log'
61 log_file_name = 'NOT_DEFINED.log'
62 status_url_tmpl = 'http://{host}:{port}'
62 status_url_tmpl = 'http://{host}:{port}'
63
63
64 def __init__(self, config_file, log_file):
64 def __init__(self, config_file, log_file):
65 self.config_file = config_file
65 self.config_file = config_file
66 config_data = configobj.ConfigObj(config_file)
66 config_data = configobj.ConfigObj(config_file)
67 self._config = config_data['server:main']
67 self._config = config_data['server:main']
68
68
69 self._args = []
69 self._args = []
70 self.log_file = log_file or os.path.join(
70 self.log_file = log_file or os.path.join(
71 tempfile.gettempdir(), self.log_file_name)
71 tempfile.gettempdir(), self.log_file_name)
72 self.process = None
72 self.process = None
73 self.server_out = None
73 self.server_out = None
74 log.info("Using the {} configuration:{}".format(
74 log.info("Using the {} configuration:{}".format(
75 self.__class__.__name__, config_file))
75 self.__class__.__name__, config_file))
76
76
77 if not os.path.isfile(config_file):
77 if not os.path.isfile(config_file):
78 raise RuntimeError('Failed to get config at {}'.format(config_file))
78 raise RuntimeError('Failed to get config at {}'.format(config_file))
79
79
80 @property
80 @property
81 def command(self):
81 def command(self):
82 return ' '.join(self._args)
82 return ' '.join(self._args)
83
83
84 @property
84 @property
85 def http_url(self):
85 def http_url(self):
86 template = 'http://{host}:{port}/'
86 template = 'http://{host}:{port}/'
87 return template.format(**self._config)
87 return template.format(**self._config)
88
88
89 def host_url(self):
89 def host_url(self):
90 return 'http://' + get_host_url(self.config_file)
90 return 'http://' + get_host_url(self.config_file)
91
91
92 def get_rc_log(self):
92 def get_rc_log(self):
93 with open(self.log_file) as f:
93 with open(self.log_file) as f:
94 return f.read()
94 return f.read()
95
95
96 def wait_until_ready(self, timeout=30):
96 def wait_until_ready(self, timeout=30):
97 host = self._config['host']
97 host = self._config['host']
98 port = self._config['port']
98 port = self._config['port']
99 status_url = self.status_url_tmpl.format(host=host, port=port)
99 status_url = self.status_url_tmpl.format(host=host, port=port)
100 start = time.time()
100 start = time.time()
101
101
102 while time.time() - start < timeout:
102 while time.time() - start < timeout:
103 try:
103 try:
104 urlopen(status_url)
104 urlopen(status_url)
105 break
105 break
106 except URLError:
106 except URLError:
107 time.sleep(0.2)
107 time.sleep(0.2)
108 else:
108 else:
109 pytest.fail(
109 pytest.fail(
110 "Starting the {} failed or took more than {} "
110 "Starting the {} failed or took more than {} "
111 "seconds. cmd: `{}`".format(
111 "seconds. cmd: `{}`".format(
112 self.__class__.__name__, timeout, self.command))
112 self.__class__.__name__, timeout, self.command))
113
113
114 log.info('Server of {} ready at url {}'.format(
114 log.info('Server of {} ready at url {}'.format(
115 self.__class__.__name__, status_url))
115 self.__class__.__name__, status_url))
116
116
117 def shutdown(self):
117 def shutdown(self):
118 self.process.kill()
118 self.process.kill()
119 self.server_out.flush()
119 self.server_out.flush()
120 self.server_out.close()
120 self.server_out.close()
121
121
122 def get_log_file_with_port(self):
122 def get_log_file_with_port(self):
123 log_file = list(self.log_file.partition('.log'))
123 log_file = list(self.log_file.partition('.log'))
124 log_file.insert(1, get_port(self.config_file))
124 log_file.insert(1, get_port(self.config_file))
125 log_file = ''.join(log_file)
125 log_file = ''.join(log_file)
126 return log_file
126 return log_file
127
127
128
128
129 class RcVCSServer(ServerBase):
129 class RcVCSServer(ServerBase):
130 """
130 """
131 Represents a running VCSServer instance.
131 Represents a running VCSServer instance.
132 """
132 """
133
133
134 log_file_name = 'rc-vcsserver.log'
134 log_file_name = 'rc-vcsserver.log'
135 status_url_tmpl = 'http://{host}:{port}/status'
135 status_url_tmpl = 'http://{host}:{port}/status'
136
136
137 def __init__(self, config_file, log_file=None):
137 def __init__(self, config_file, log_file=None):
138 super(RcVCSServer, self).__init__(config_file, log_file)
138 super(RcVCSServer, self).__init__(config_file, log_file)
139 self._args = ['gunicorn', '--paste', self.config_file]
139 self._args = ['gunicorn', '--paste', self.config_file]
140
140
141 def start(self):
141 def start(self):
142 env = os.environ.copy()
142 env = os.environ.copy()
143
143
144 self.log_file = self.get_log_file_with_port()
144 self.log_file = self.get_log_file_with_port()
145 self.server_out = open(self.log_file, 'w')
145 self.server_out = open(self.log_file, 'w')
146
146
147 host_url = self.host_url()
147 host_url = self.host_url()
148 assert_no_running_instance(host_url)
148 assert_no_running_instance(host_url)
149
149
150 log.info('rhodecode-vcsserver start command: {}'.format(' '.join(self._args)))
150 log.info('rhodecode-vcsserver start command: {}'.format(' '.join(self._args)))
151 log.info('rhodecode-vcsserver starting at: {}'.format(host_url))
151 log.info('rhodecode-vcsserver starting at: {}'.format(host_url))
152 log.info('rhodecode-vcsserver command: {}'.format(self.command))
152 log.info('rhodecode-vcsserver command: {}'.format(self.command))
153 log.info('rhodecode-vcsserver logfile: {}'.format(self.log_file))
153 log.info('rhodecode-vcsserver logfile: {}'.format(self.log_file))
154
154
155 self.process = subprocess32.Popen(
155 self.process = subprocess.Popen(
156 self._args, bufsize=0, env=env,
156 self._args, bufsize=0, env=env,
157 stdout=self.server_out, stderr=self.server_out)
157 stdout=self.server_out, stderr=self.server_out)
158
158
159
159
160 class RcWebServer(ServerBase):
160 class RcWebServer(ServerBase):
161 """
161 """
162 Represents a running RCE web server used as a test fixture.
162 Represents a running RCE web server used as a test fixture.
163 """
163 """
164
164
165 log_file_name = 'rc-web.log'
165 log_file_name = 'rc-web.log'
166 status_url_tmpl = 'http://{host}:{port}/_admin/ops/ping'
166 status_url_tmpl = 'http://{host}:{port}/_admin/ops/ping'
167
167
168 def __init__(self, config_file, log_file=None):
168 def __init__(self, config_file, log_file=None):
169 super(RcWebServer, self).__init__(config_file, log_file)
169 super(RcWebServer, self).__init__(config_file, log_file)
170 self._args = [
170 self._args = [
171 'gunicorn', '--worker-class', 'gevent', '--paste', config_file]
171 'gunicorn', '--worker-class', 'gevent', '--paste', config_file]
172
172
173 def start(self):
173 def start(self):
174 env = os.environ.copy()
174 env = os.environ.copy()
175 env['RC_NO_TMP_PATH'] = '1'
175 env['RC_NO_TMP_PATH'] = '1'
176
176
177 self.log_file = self.get_log_file_with_port()
177 self.log_file = self.get_log_file_with_port()
178 self.server_out = open(self.log_file, 'w')
178 self.server_out = open(self.log_file, 'w')
179
179
180 host_url = self.host_url()
180 host_url = self.host_url()
181 assert_no_running_instance(host_url)
181 assert_no_running_instance(host_url)
182
182
183 log.info('rhodecode-web starting at: {}'.format(host_url))
183 log.info('rhodecode-web starting at: {}'.format(host_url))
184 log.info('rhodecode-web command: {}'.format(self.command))
184 log.info('rhodecode-web command: {}'.format(self.command))
185 log.info('rhodecode-web logfile: {}'.format(self.log_file))
185 log.info('rhodecode-web logfile: {}'.format(self.log_file))
186
186
187 self.process = subprocess32.Popen(
187 self.process = subprocess.Popen(
188 self._args, bufsize=0, env=env,
188 self._args, bufsize=0, env=env,
189 stdout=self.server_out, stderr=self.server_out)
189 stdout=self.server_out, stderr=self.server_out)
190
190
191 def repo_clone_url(self, repo_name, **kwargs):
191 def repo_clone_url(self, repo_name, **kwargs):
192 params = {
192 params = {
193 'user': TEST_USER_ADMIN_LOGIN,
193 'user': TEST_USER_ADMIN_LOGIN,
194 'passwd': TEST_USER_ADMIN_PASS,
194 'passwd': TEST_USER_ADMIN_PASS,
195 'host': get_host_url(self.config_file),
195 'host': get_host_url(self.config_file),
196 'cloned_repo': repo_name,
196 'cloned_repo': repo_name,
197 }
197 }
198 params.update(**kwargs)
198 params.update(**kwargs)
199 _url = 'http://%(user)s:%(passwd)s@%(host)s/%(cloned_repo)s' % params
199 _url = 'http://%(user)s:%(passwd)s@%(host)s/%(cloned_repo)s' % params
200 return _url
200 return _url
@@ -1,468 +1,468 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2020 RhodeCode GmbH
3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import threading
21 import threading
22 import time
22 import time
23 import logging
23 import logging
24 import os.path
24 import os.path
25 import subprocess32
25 import subprocess
26 import tempfile
26 import tempfile
27 import urllib.request, urllib.error, urllib.parse
27 import urllib.request, urllib.error, urllib.parse
28 from lxml.html import fromstring, tostring
28 from lxml.html import fromstring, tostring
29 from lxml.cssselect import CSSSelector
29 from lxml.cssselect import CSSSelector
30 import urllib.parse.urlparse
30 import urllib.parse.urlparse
31 from urllib.parse import unquote_plus
31 from urllib.parse import unquote_plus
32 import webob
32 import webob
33
33
34 from webtest.app import TestResponse, TestApp
34 from webtest.app import TestResponse, TestApp
35 from webtest.compat import print_stderr
35 from webtest.compat import print_stderr
36
36
37 import pytest
37 import pytest
38 import rc_testdata
38 import rc_testdata
39
39
40 from rhodecode.model.db import User, Repository
40 from rhodecode.model.db import User, Repository
41 from rhodecode.model.meta import Session
41 from rhodecode.model.meta import Session
42 from rhodecode.model.scm import ScmModel
42 from rhodecode.model.scm import ScmModel
43 from rhodecode.lib.vcs.backends.svn.repository import SubversionRepository
43 from rhodecode.lib.vcs.backends.svn.repository import SubversionRepository
44 from rhodecode.lib.vcs.backends.base import EmptyCommit
44 from rhodecode.lib.vcs.backends.base import EmptyCommit
45 from rhodecode.tests import login_user_session
45 from rhodecode.tests import login_user_session
46
46
47 log = logging.getLogger(__name__)
47 log = logging.getLogger(__name__)
48
48
49
49
50 class CustomTestResponse(TestResponse):
50 class CustomTestResponse(TestResponse):
51
51
52 def _save_output(self, out):
52 def _save_output(self, out):
53 f = tempfile.NamedTemporaryFile(delete=False, prefix='rc-test-', suffix='.html')
53 f = tempfile.NamedTemporaryFile(delete=False, prefix='rc-test-', suffix='.html')
54 f.write(out)
54 f.write(out)
55 return f.name
55 return f.name
56
56
57 def mustcontain(self, *strings, **kw):
57 def mustcontain(self, *strings, **kw):
58 """
58 """
59 Assert that the response contains all of the strings passed
59 Assert that the response contains all of the strings passed
60 in as arguments.
60 in as arguments.
61
61
62 Equivalent to::
62 Equivalent to::
63
63
64 assert string in res
64 assert string in res
65 """
65 """
66 print_body = kw.pop('print_body', False)
66 print_body = kw.pop('print_body', False)
67 if 'no' in kw:
67 if 'no' in kw:
68 no = kw['no']
68 no = kw['no']
69 del kw['no']
69 del kw['no']
70 if isinstance(no, str):
70 if isinstance(no, str):
71 no = [no]
71 no = [no]
72 else:
72 else:
73 no = []
73 no = []
74 if kw:
74 if kw:
75 raise TypeError(
75 raise TypeError(
76 "The only keyword argument allowed is 'no' got %s" % kw)
76 "The only keyword argument allowed is 'no' got %s" % kw)
77
77
78 f = self._save_output(str(self))
78 f = self._save_output(str(self))
79
79
80 for s in strings:
80 for s in strings:
81 if not s in self:
81 if not s in self:
82 print_stderr("Actual response (no %r):" % s)
82 print_stderr("Actual response (no %r):" % s)
83 print_stderr("body output saved as `%s`" % f)
83 print_stderr("body output saved as `%s`" % f)
84 if print_body:
84 if print_body:
85 print_stderr(str(self))
85 print_stderr(str(self))
86 raise IndexError(
86 raise IndexError(
87 "Body does not contain string %r, body output saved as %s" % (s, f))
87 "Body does not contain string %r, body output saved as %s" % (s, f))
88
88
89 for no_s in no:
89 for no_s in no:
90 if no_s in self:
90 if no_s in self:
91 print_stderr("Actual response (has %r)" % no_s)
91 print_stderr("Actual response (has %r)" % no_s)
92 print_stderr("body output saved as `%s`" % f)
92 print_stderr("body output saved as `%s`" % f)
93 if print_body:
93 if print_body:
94 print_stderr(str(self))
94 print_stderr(str(self))
95 raise IndexError(
95 raise IndexError(
96 "Body contains bad string %r, body output saved as %s" % (no_s, f))
96 "Body contains bad string %r, body output saved as %s" % (no_s, f))
97
97
98 def assert_response(self):
98 def assert_response(self):
99 return AssertResponse(self)
99 return AssertResponse(self)
100
100
101 def get_session_from_response(self):
101 def get_session_from_response(self):
102 """
102 """
103 This returns the session from a response object.
103 This returns the session from a response object.
104 """
104 """
105 from rhodecode.lib.rc_beaker import session_factory_from_settings
105 from rhodecode.lib.rc_beaker import session_factory_from_settings
106 session = session_factory_from_settings(self.test_app._pyramid_settings)
106 session = session_factory_from_settings(self.test_app._pyramid_settings)
107 return session(self.request)
107 return session(self.request)
108
108
109
109
110 class TestRequest(webob.BaseRequest):
110 class TestRequest(webob.BaseRequest):
111
111
112 # for py.test
112 # for py.test
113 disabled = True
113 disabled = True
114 ResponseClass = CustomTestResponse
114 ResponseClass = CustomTestResponse
115
115
116 def add_response_callback(self, callback):
116 def add_response_callback(self, callback):
117 pass
117 pass
118
118
119
119
class CustomTestApp(TestApp):
    """
    Custom app to make mustcontain more Useful, and extract special methods
    """
    RequestClass = TestRequest
    rc_login_data = {}
    rc_current_session = None

    def login(self, username=None, password=None):
        """Log a user in, remember the CSRF token and session, and
        return the `rhodecode_user` stored in that session."""
        from rhodecode.lib import auth

        if username and password:
            new_session = login_user_session(self, username, password)
        else:
            new_session = login_user_session(self)

        self.rc_login_data['csrf_token'] = auth.get_csrf_token(new_session)
        self.rc_current_session = new_session
        return new_session['rhodecode_user']

    @property
    def csrf_token(self):
        # CSRF token captured during the last `login` call.
        return self.rc_login_data['csrf_token']

    @property
    def _pyramid_registry(self):
        return self.app.config.registry

    @property
    def _pyramid_settings(self):
        return self._pyramid_registry.settings
151
151
152
152
def set_anonymous_access(enabled):
    """(Dis)allows anonymous access depending on parameter `enabled`"""
    default_user = User.get_default_user()
    default_user.active = enabled
    Session().add(default_user)
    Session().commit()
    time.sleep(1.5)  # must sleep for cache (1s to expire)
    log.info('anonymous access is now: %s', enabled)
    assert enabled == User.get_default_user().active, (
        'Cannot set anonymous access')
163
163
164
164
def check_xfail_backends(node, backend_alias):
    # Using "xfail_backends" here intentionally, since this marks work
    # which is "to be done" soon.
    marker = node.get_closest_marker('xfail_backends')
    if marker and backend_alias in marker.args:
        default_msg = "Support for backend %s to be developed." % (backend_alias, )
        pytest.xfail(marker.kwargs.get('reason', default_msg))
173
173
174
174
def check_skip_backends(node, backend_alias):
    # Using "skip_backends" here intentionally, since this marks work which is
    # not supported.
    marker = node.get_closest_marker('skip_backends')
    if marker and backend_alias in marker.args:
        default_msg = "Feature not supported for backend %s." % (backend_alias, )
        pytest.skip(marker.kwargs.get('reason', default_msg))
183
183
184
184
def extract_git_repo_from_dump(dump_name, repo_name):
    """Create git repo `repo_name` from dump `dump_name`."""
    destination = os.path.join(ScmModel().repos_path, repo_name)
    rc_testdata.extract_git_dump(dump_name, destination)
    return destination
191
191
192
192
def extract_hg_repo_from_dump(dump_name, repo_name):
    """Create hg repo `repo_name` from dump `dump_name`."""
    destination = os.path.join(ScmModel().repos_path, repo_name)
    rc_testdata.extract_hg_dump(dump_name, destination)
    return destination
199
199
200
200
def extract_svn_repo_from_dump(dump_name, repo_name):
    """Create a svn repo `repo_name` from dump `dump_name`."""
    destination = os.path.join(ScmModel().repos_path, repo_name)
    # An empty repository must exist before `svnadmin load` can populate it.
    SubversionRepository(destination, create=True)
    _load_svn_dump_into_repo(dump_name, destination)
    return destination
208
208
209
209
def assert_message_in_log(log_records, message, levelno, module):
    """Assert that `message` was logged from `module` at level `levelno`.

    `log_records` is an iterable of objects exposing `message`, `module`
    and `levelno` attributes (e.g. captured logging records).
    """
    matching = []
    for record in log_records:
        if record.module == module and record.levelno == levelno:
            matching.append(record.message)
    assert message in matching
216
216
217
217
def _load_svn_dump_into_repo(dump_name, repo_path):
    """
    Utility to populate a svn repository with a named dump

    Currently the dumps are in rc_testdata. They might later on be
    integrated with the main repository once they stabilize more.

    :param dump_name: name of the dump known to `rc_testdata`.
    :param repo_path: filesystem path of an already-created svn repository.
    :raises Exception: if `svnadmin load` exits with a non-zero status.
    """
    dump = rc_testdata.load_svn_dump(dump_name)
    # python3: the py2-only `subprocess32` backport is gone; the stdlib
    # `subprocess` module provides the identical Popen/PIPE API.
    load_dump = subprocess.Popen(
        ['svnadmin', 'load', repo_path],
        stdin=subprocess.PIPE, stdout=subprocess.PIPE,
        stderr=subprocess.PIPE)
    out, err = load_dump.communicate(dump)
    if load_dump.returncode != 0:
        log.error("Output of load_dump command: %s", out)
        log.error("Error output of load_dump command: %s", err)
        raise Exception(
            'Failed to load dump "%s" into repository at path "%s".'
            % (dump_name, repo_path))
237
237
238
238
class AssertResponse(object):
    """
    Utility that helps to assert things about a given HTML response.
    """

    def __init__(self, response):
        self.response = response

    def get_imports(self):
        # lxml helpers come from module-level imports; expose them so the
        # assertion helpers can unpack all three in one line.
        return fromstring, tostring, CSSSelector

    def one_element_exists(self, css_selector):
        self.get_element(css_selector)

    def no_element_exists(self, css_selector):
        assert not self._get_elements(css_selector)

    def element_equals_to(self, css_selector, expected_content):
        found = self.get_element(css_selector)
        assert expected_content in self._element_to_string(found)

    def element_contains(self, css_selector, expected_content):
        found = self.get_element(css_selector)
        assert expected_content in found.text_content()

    def element_value_contains(self, css_selector, expected_content):
        found = self.get_element(css_selector)
        assert expected_content in found.value

    def contains_one_link(self, link_text, href):
        fromstring, tostring, CSSSelector = self.get_imports()
        doc = fromstring(self.response.body)
        anchors = CSSSelector('a[href]')(doc)
        matches = [
            anchor for anchor in anchors
            if anchor.text_content().strip() == link_text]
        assert len(matches) == 1, "Did not find link or found multiple links"
        self._ensure_url_equal(matches[0].attrib.get('href'), href)

    def contains_one_anchor(self, anchor_id):
        fromstring, tostring, CSSSelector = self.get_imports()
        doc = fromstring(self.response.body)
        matches = CSSSelector('#' + anchor_id)(doc)
        assert len(matches) == 1, 'cannot find 1 element {}'.format(anchor_id)

    def _ensure_url_equal(self, found, expected):
        # Compare via _Url so encoding/ordering differences do not matter.
        assert _Url(found) == _Url(expected)

    def get_element(self, css_selector):
        matches = self._get_elements(css_selector)
        assert len(matches) == 1, 'cannot find 1 element {}'.format(css_selector)
        return matches[0]

    def get_elements(self, css_selector):
        return self._get_elements(css_selector)

    def _get_elements(self, css_selector):
        fromstring, tostring, CSSSelector = self.get_imports()
        doc = fromstring(self.response.body)
        return CSSSelector(css_selector)(doc)

    def _element_to_string(self, element):
        fromstring, tostring, CSSSelector = self.get_imports()
        return tostring(element)
306
306
307
307
308 class _Url(object):
308 class _Url(object):
309 """
309 """
310 A url object that can be compared with other url orbjects
310 A url object that can be compared with other url orbjects
311 without regard to the vagaries of encoding, escaping, and ordering
311 without regard to the vagaries of encoding, escaping, and ordering
312 of parameters in query strings.
312 of parameters in query strings.
313
313
314 Inspired by
314 Inspired by
315 http://stackoverflow.com/questions/5371992/comparing-two-urls-in-python
315 http://stackoverflow.com/questions/5371992/comparing-two-urls-in-python
316 """
316 """
317
317
318 def __init__(self, url):
318 def __init__(self, url):
319 parts = urllib.parse.urlparse(url)
319 parts = urllib.parse.urlparse(url)
320 _query = frozenset(urllib.parse.parse_qsl(parts.query))
320 _query = frozenset(urllib.parse.parse_qsl(parts.query))
321 _path = unquote_plus(parts.path)
321 _path = unquote_plus(parts.path)
322 parts = parts._replace(query=_query, path=_path)
322 parts = parts._replace(query=_query, path=_path)
323 self.parts = parts
323 self.parts = parts
324
324
325 def __eq__(self, other):
325 def __eq__(self, other):
326 return self.parts == other.parts
326 return self.parts == other.parts
327
327
328 def __hash__(self):
328 def __hash__(self):
329 return hash(self.parts)
329 return hash(self.parts)
330
330
331
331
def run_test_concurrently(times, raise_catched_exc=True):
    """
    Add this decorator to small pieces of code that you want to test
    concurrently

    ex:

    @test_concurrently(25)
    def my_test_function():
        ...
    """
    def test_concurrently_decorator(test_func):
        def wrapper(*args, **kwargs):
            exceptions = []

            def call_test_func():
                try:
                    test_func(*args, **kwargs)
                except Exception as e:
                    # Remember the failure so the main thread can report it;
                    # optionally also let it propagate inside the worker.
                    exceptions.append(e)
                    if raise_catched_exc:
                        raise

            workers = [
                threading.Thread(target=call_test_func)
                for _ in range(times)]
            for worker in workers:
                worker.start()
            for worker in workers:
                worker.join()
            if exceptions:
                raise Exception(
                    'test_concurrently intercepted %s exceptions: %s' % (
                        len(exceptions), exceptions))
        return wrapper
    return test_concurrently_decorator
367
367
368
368
def wait_for_url(url, timeout=10):
    """
    Wait until URL becomes reachable.

    It polls the URL until the timeout is reached or it became reachable.
    If will call to `py.test.fail` in case the URL is not reachable.
    """
    deadline = time.time() + timeout
    last_check = 0
    poll_interval = 0.1

    while deadline > last_check:
        last_check = time.time()
        if is_url_reachable(url):
            break
        elif (last_check + poll_interval) > time.time():
            # Go to sleep because not enough time has passed since last check.
            time.sleep(poll_interval)
        else:
            pytest.fail("Timeout while waiting for URL {}".format(url))
389
389
390
390
def is_url_reachable(url):
    """Return True when `url` can be opened, False (and log) otherwise."""
    try:
        urllib.request.urlopen(url)
        return True
    except urllib.error.URLError:
        log.exception('URL `{}` reach error'.format(url))
        return False
398
398
399
399
def repo_on_filesystem(repo_name):
    """Return True if a vcs repository named `repo_name` exists on disk."""
    from rhodecode.lib import vcs
    from rhodecode.tests import TESTS_TMP_PATH
    instance = vcs.get_vcs_instance(
        os.path.join(TESTS_TMP_PATH, repo_name), create=False)
    return instance is not None
406
406
407
407
def commit_change(
        repo, filename, content, message, vcs_type, parent=None, newfile=False):
    """Commit `content` into `filename` of the named repository.

    With `newfile` the file is created via `create_nodes`; otherwise the
    existing file is changed via `commit_change`. Returns the new commit.
    """
    from rhodecode.tests import TEST_USER_ADMIN_LOGIN

    repo = Repository.get_by_repo_name(repo)
    # Fall back to an empty commit when no parent was supplied.
    parent_commit = parent if parent else EmptyCommit(alias=vcs_type)
    author = '{} <admin@rhodecode.com>'.format(TEST_USER_ADMIN_LOGIN)

    if newfile:
        commit = ScmModel().create_nodes(
            user=TEST_USER_ADMIN_LOGIN, repo=repo,
            message=message,
            nodes={filename: {'content': content}},
            parent_commit=parent_commit,
            author=author,
        )
    else:
        commit = ScmModel().commit_change(
            repo=repo.scm_instance(), repo_name=repo.repo_name,
            commit=parent, user=TEST_USER_ADMIN_LOGIN,
            author=author,
            message=message,
            content=content,
            f_path=filename
        )
    return commit
440
440
441
441
def permission_update_data_generator(csrf_token, default=None, grant=None, revoke=None):
    """Build form-data tuples for a permission-update POST.

    :param csrf_token: token included as the first form field.
    :param default: permission for the default user (required).
    :param grant: iterable of (obj_id, perm, obj_name, obj_type) to grant.
    :param revoke: iterable of (obj_id, obj_type) to revoke.
    :raises ValueError: when no default permission is given.
    """
    if not default:
        raise ValueError('Permission for default user must be given')
    form_data = [('csrf_token', csrf_token)]
    # add default
    form_data.append(('u_perm_1', default))

    for cnt, (obj_id, perm, obj_name, obj_type) in enumerate(grant or (), 1):
        form_data.extend([
            ('perm_new_member_perm_new{}'.format(cnt), perm),
            ('perm_new_member_id_new{}'.format(cnt), obj_id),
            ('perm_new_member_name_new{}'.format(cnt), obj_name),
            ('perm_new_member_type_new{}'.format(cnt), obj_type),

        ])
    for obj_id, obj_type in (revoke or ()):
        form_data.extend([
            ('perm_del_member_id_{}'.format(obj_id), obj_id),
            ('perm_del_member_type_{}'.format(obj_id), obj_type),
        ])
    return form_data
@@ -1,195 +1,195 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2020 RhodeCode GmbH
3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import os
21 import os
22 import datetime
22 import datetime
23 import subprocess32
23 import subprocess
24
24
25 import pytest
25 import pytest
26
26
27 from rhodecode.lib.vcs.exceptions import VCSError
27 from rhodecode.lib.vcs.exceptions import VCSError
28 from rhodecode.lib.vcs.utils import author_email, author_name
28 from rhodecode.lib.vcs.utils import author_email, author_name
29 from rhodecode.lib.vcs.utils.helpers import get_scm
29 from rhodecode.lib.vcs.utils.helpers import get_scm
30 from rhodecode.lib.vcs.utils.helpers import get_scms_for_path
30 from rhodecode.lib.vcs.utils.helpers import get_scms_for_path
31 from rhodecode.lib.vcs.utils.helpers import parse_datetime
31 from rhodecode.lib.vcs.utils.helpers import parse_datetime
32 from rhodecode.lib.vcs.utils.paths import get_dirs_for_path
32 from rhodecode.lib.vcs.utils.paths import get_dirs_for_path
33
33
34
34
@pytest.mark.usefixtures("baseapp")
class TestPaths(object):
    """Tests for path helper utilities."""

    def _test_get_dirs_for_path(self, path, expected):
        """
        Tests if get_dirs_for_path returns same as expected.
        """
        expected = sorted(expected)
        result = sorted(get_dirs_for_path(path))
        assert result == expected, (
            "%s != %s which was expected result for path %s"
            % (result, expected, path))

    def test_get_dirs_for_path(self):
        cases = (
            ('foo/bar/baz/file', ['foo', 'foo/bar', 'foo/bar/baz']),
            ('foo/bar/', ['foo', 'foo/bar']),
            ('foo/bar', ['foo']),
        )
        for path, expected in cases:
            self._test_get_dirs_for_path(path, expected)

    def test_get_scms_for_path(self, tmpdir):
        repo_root = tmpdir.strpath
        assert get_scms_for_path(repo_root) == []

        # an unrelated dot-directory must not be detected as an scm
        os.mkdir(os.path.join(repo_root, '.tux'))
        assert get_scms_for_path(repo_root) == []

        os.mkdir(os.path.join(repo_root, '.git'))
        assert set(get_scms_for_path(repo_root)) == {'git'}

        os.mkdir(os.path.join(repo_root, '.hg'))
        assert set(get_scms_for_path(repo_root)) == {'git', 'hg'}
70
70
71
71
class TestGetScm(object):
    """Tests for `get_scm` detection of repository type at a path."""

    def test_existing_repository(self, vcs_repository_support):
        alias, repo = vcs_repository_support
        assert (alias, repo.path) == get_scm(repo.path)

    def test_raises_if_path_is_empty(self, tmpdir):
        with pytest.raises(VCSError):
            get_scm(str(tmpdir))

    def test_get_scm_error_path(self):
        with pytest.raises(VCSError):
            get_scm('err')

    def test_get_two_scms_for_path(self, tmpdir):
        multialias_repo_path = str(tmpdir)

        # python3: stdlib `subprocess` replaces the py2-only `subprocess32`
        # backport; `check_call` has the same signature and semantics.
        subprocess.check_call(['hg', 'init', multialias_repo_path])
        subprocess.check_call(['git', 'init', multialias_repo_path])

        # A path holding both an hg and a git repo is ambiguous.
        with pytest.raises(VCSError):
            get_scm(multialias_repo_path)

    def test_ignores_svn_working_copy(self, tmpdir):
        tmpdir.mkdir('.svn')
        with pytest.raises(VCSError):
            get_scm(tmpdir.strpath)
99
99
100
100
class TestParseDatetime(object):
    """Tests for `parse_datetime` text formats and relative specifiers."""

    def test_datetime_text(self):
        expected = datetime.datetime(2010, 4, 7, 21, 29, 41)
        assert parse_datetime('2010-04-07 21:29:41') == expected

    def test_no_seconds(self):
        expected = datetime.datetime(2010, 4, 7, 21, 29)
        assert parse_datetime('2010-04-07 21:29') == expected

    def test_date_only(self):
        expected = datetime.datetime(2010, 4, 7)
        assert parse_datetime('2010-04-07') == expected

    def test_another_format(self):
        expected = datetime.datetime(2010, 4, 7, 21, 29, 41)
        assert parse_datetime('04/07/10 21:29:41') == expected

    def test_now(self):
        delta = parse_datetime('now') - datetime.datetime.now()
        assert delta < datetime.timedelta(seconds=1)

    def test_today(self):
        today = datetime.date.today()
        expected = datetime.datetime(*today.timetuple()[:3])
        assert parse_datetime('today') == expected

    def test_yesterday(self):
        yesterday = datetime.date.today() - datetime.timedelta(days=1)
        expected = datetime.datetime(*yesterday.timetuple()[:3])
        assert parse_datetime('yesterday') == expected

    def test_tomorrow(self):
        tomorrow = datetime.date.today() + datetime.timedelta(days=1)
        # 'tomorrow' resolves to the very end of the next day
        args = tomorrow.timetuple()[:3] + (23, 59, 59)
        assert parse_datetime('tomorrow') == datetime.datetime(*args)

    def test_days(self):
        timestamp = datetime.datetime.today() - datetime.timedelta(days=3)
        args = timestamp.timetuple()[:3] + (0, 0, 0, 0)
        expected = datetime.datetime(*args)
        for text in ('3d', '3 d', '3 day', '3 days'):
            assert parse_datetime(text) == expected

    def test_weeks(self):
        timestamp = datetime.datetime.today() - datetime.timedelta(days=3 * 7)
        args = timestamp.timetuple()[:3] + (0, 0, 0, 0)
        expected = datetime.datetime(*args)
        for text in ('3w', '3 w', '3 week', '3 weeks'):
            assert parse_datetime(text) == expected

    def test_mixed(self):
        timestamp = (
            datetime.datetime.today() - datetime.timedelta(days=2 * 7 + 3))
        args = timestamp.timetuple()[:3] + (0, 0, 0, 0)
        expected = datetime.datetime(*args)
        for text in ('2w3d', '2w 3d', '2w 3 days', '2 weeks 3 days'):
            assert parse_datetime(text) == expected
166
166
@pytest.mark.parametrize("test_str, name, email", [
    ('Marcin Kuzminski <marcin@python-works.com>',
     'Marcin Kuzminski', 'marcin@python-works.com'),
    ('Marcin Kuzminski Spaces < marcin@python-works.com >',
     'Marcin Kuzminski Spaces', 'marcin@python-works.com'),
    ('Marcin Kuzminski <marcin.kuzminski@python-works.com>',
     'Marcin Kuzminski', 'marcin.kuzminski@python-works.com'),
    ('mrf RFC_SPEC <marcin+kuzminski@python-works.com>',
     'mrf RFC_SPEC', 'marcin+kuzminski@python-works.com'),
    ('username <user@email.com>',
     'username', 'user@email.com'),
    ('username <user@email.com',
     'username', 'user@email.com'),
    ('broken missing@email.com',
     'broken', 'missing@email.com'),
    ('<justemail@mail.com>',
     '', 'justemail@mail.com'),
    ('justname',
     'justname', ''),
    ('Mr Double Name withemail@email.com ',
     'Mr Double Name', 'withemail@email.com'),
])
class TestAuthorExtractors(object):
    """Checks splitting of an author string into name and email parts."""

    def test_author_email(self, test_str, name, email):
        assert author_email(test_str) == email

    def test_author_name(self, test_str, name, email):
        assert author_name(test_str) == name
@@ -1,118 +1,118 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2020 RhodeCode GmbH
3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Utilities for tests only. These are not or should not be used normally -
22 Utilities for tests only. These are not or should not be used normally -
23 functions here are crafted as we don't want to use ``vcs`` to verify tests.
23 functions here are crafted as we don't want to use ``vcs`` to verify tests.
24 """
24 """
25
25
26 import os
26 import os
27 import re
27 import re
28 import sys
28 import sys
29
29
30 from subprocess32 import Popen
30 from subprocess import Popen
31
31
32
32
33 class VCSTestError(Exception):
33 class VCSTestError(Exception):
34 pass
34 pass
35
35
36
36
37 def run_command(cmd, args):
37 def run_command(cmd, args):
38 """
38 """
39 Runs command on the system with given ``args``.
39 Runs command on the system with given ``args``.
40 """
40 """
41 command = ' '.join((cmd, args))
41 command = ' '.join((cmd, args))
42 p = Popen(command, shell=True)
42 p = Popen(command, shell=True)
43 status = os.waitpid(p.pid, 0)[1]
43 status = os.waitpid(p.pid, 0)[1]
44 return status
44 return status
45
45
46
46
47 def eprint(msg):
47 def eprint(msg):
48 """
48 """
49 Prints given ``msg`` into sys.stderr as nose test runner hides all output
49 Prints given ``msg`` into sys.stderr as nose test runner hides all output
50 from sys.stdout by default and if we want to pipe stream somewhere we don't
50 from sys.stdout by default and if we want to pipe stream somewhere we don't
51 need those verbose messages anyway.
51 need those verbose messages anyway.
52 Appends line break.
52 Appends line break.
53 """
53 """
54 sys.stderr.write(msg)
54 sys.stderr.write(msg)
55 sys.stderr.write('\n')
55 sys.stderr.write('\n')
56
56
57
57
58 # TODO: Revisit once we have CI running, if this is not helping us, remove it
58 # TODO: Revisit once we have CI running, if this is not helping us, remove it
59 class SCMFetcher(object):
59 class SCMFetcher(object):
60
60
61 def __init__(self, alias, test_repo_path):
61 def __init__(self, alias, test_repo_path):
62 """
62 """
63 :param clone_cmd: command which would clone remote repository; pass
63 :param clone_cmd: command which would clone remote repository; pass
64 only first bits - remote path and destination would be appended
64 only first bits - remote path and destination would be appended
65 using ``remote_repo`` and ``test_repo_path``
65 using ``remote_repo`` and ``test_repo_path``
66 """
66 """
67 self.alias = alias
67 self.alias = alias
68 self.test_repo_path = test_repo_path
68 self.test_repo_path = test_repo_path
69
69
70 def setup(self):
70 def setup(self):
71 if not os.path.isdir(self.test_repo_path):
71 if not os.path.isdir(self.test_repo_path):
72 self.fetch_repo()
72 self.fetch_repo()
73
73
74 def fetch_repo(self):
74 def fetch_repo(self):
75 """
75 """
76 Tries to fetch repository from remote path.
76 Tries to fetch repository from remote path.
77 """
77 """
78 remote = self.remote_repo
78 remote = self.remote_repo
79 eprint(
79 eprint(
80 "Fetching repository %s into %s" % (remote, self.test_repo_path))
80 "Fetching repository %s into %s" % (remote, self.test_repo_path))
81 run_command(self.clone_cmd, '%s %s' % (remote, self.test_repo_path))
81 run_command(self.clone_cmd, '%s %s' % (remote, self.test_repo_path))
82
82
83
83
84 def get_normalized_path(path):
84 def get_normalized_path(path):
85 """
85 """
86 If given path exists, new path would be generated and returned. Otherwise
86 If given path exists, new path would be generated and returned. Otherwise
87 same whats given is returned. Assumes that there would be no more than
87 same whats given is returned. Assumes that there would be no more than
88 10000 same named files.
88 10000 same named files.
89 """
89 """
90 if os.path.exists(path):
90 if os.path.exists(path):
91 dir, basename = os.path.split(path)
91 dir, basename = os.path.split(path)
92 splitted_name = basename.split('.')
92 splitted_name = basename.split('.')
93 if len(splitted_name) > 1:
93 if len(splitted_name) > 1:
94 ext = splitted_name[-1]
94 ext = splitted_name[-1]
95 else:
95 else:
96 ext = None
96 ext = None
97 name = '.'.join(splitted_name[:-1])
97 name = '.'.join(splitted_name[:-1])
98 matcher = re.compile(r'^.*-(\d{5})$')
98 matcher = re.compile(r'^.*-(\d{5})$')
99 start = 0
99 start = 0
100 m = matcher.match(name)
100 m = matcher.match(name)
101 if not m:
101 if not m:
102 # Haven't append number yet so return first
102 # Haven't append number yet so return first
103 newname = '%s-00000' % name
103 newname = '%s-00000' % name
104 newpath = os.path.join(dir, newname)
104 newpath = os.path.join(dir, newname)
105 if ext:
105 if ext:
106 newpath = '.'.join((newpath, ext))
106 newpath = '.'.join((newpath, ext))
107 return get_normalized_path(newpath)
107 return get_normalized_path(newpath)
108 else:
108 else:
109 start = int(m.group(1)[-5:]) + 1
109 start = int(m.group(1)[-5:]) + 1
110 for x in range(start, 10000):
110 for x in range(start, 10000):
111 newname = name[:-5] + str(x).rjust(5, '0')
111 newname = name[:-5] + str(x).rjust(5, '0')
112 newpath = os.path.join(dir, newname)
112 newpath = os.path.join(dir, newname)
113 if ext:
113 if ext:
114 newpath = '.'.join((newpath, ext))
114 newpath = '.'.join((newpath, ext))
115 if not os.path.exists(newpath):
115 if not os.path.exists(newpath):
116 return newpath
116 return newpath
117 raise VCSTestError("Couldn't compute new path for %s" % path)
117 raise VCSTestError("Couldn't compute new path for %s" % path)
118 return path
118 return path
@@ -1,193 +1,193 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2020 RhodeCode GmbH
3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Base for test suite for making push/pull operations.
22 Base for test suite for making push/pull operations.
23
23
24 .. important::
24 .. important::
25
25
26 You must have git >= 1.8.5 for tests to work fine. With 68b939b git started
26 You must have git >= 1.8.5 for tests to work fine. With 68b939b git started
27 to redirect things to stderr instead of stdout.
27 to redirect things to stderr instead of stdout.
28 """
28 """
29
29
30 from os.path import join as jn
30 from os.path import join as jn
31 from subprocess32 import Popen, PIPE
31 from subprocess import Popen, PIPE
32 import logging
32 import logging
33 import os
33 import os
34 import tempfile
34 import tempfile
35
35
36 from rhodecode.tests import GIT_REPO, HG_REPO
36 from rhodecode.tests import GIT_REPO, HG_REPO
37
37
38 DEBUG = True
38 DEBUG = True
39 RC_LOG = os.path.join(tempfile.gettempdir(), 'rc.log')
39 RC_LOG = os.path.join(tempfile.gettempdir(), 'rc.log')
40 REPO_GROUP = 'a_repo_group'
40 REPO_GROUP = 'a_repo_group'
41 HG_REPO_WITH_GROUP = '%s/%s' % (REPO_GROUP, HG_REPO)
41 HG_REPO_WITH_GROUP = '%s/%s' % (REPO_GROUP, HG_REPO)
42 GIT_REPO_WITH_GROUP = '%s/%s' % (REPO_GROUP, GIT_REPO)
42 GIT_REPO_WITH_GROUP = '%s/%s' % (REPO_GROUP, GIT_REPO)
43
43
44 log = logging.getLogger(__name__)
44 log = logging.getLogger(__name__)
45
45
46
46
47 class Command(object):
47 class Command(object):
48
48
49 def __init__(self, cwd):
49 def __init__(self, cwd):
50 self.cwd = cwd
50 self.cwd = cwd
51 self.process = None
51 self.process = None
52
52
53 def execute(self, cmd, *args):
53 def execute(self, cmd, *args):
54 """
54 """
55 Runs command on the system with given ``args``.
55 Runs command on the system with given ``args``.
56 """
56 """
57
57
58 command = cmd + ' ' + ' '.join(args)
58 command = cmd + ' ' + ' '.join(args)
59 if DEBUG:
59 if DEBUG:
60 log.debug('*** CMD %s ***', command)
60 log.debug('*** CMD %s ***', command)
61
61
62 env = dict(os.environ)
62 env = dict(os.environ)
63 # Delete coverage variables, as they make the test fail for Mercurial
63 # Delete coverage variables, as they make the test fail for Mercurial
64 for key in env.keys():
64 for key in env.keys():
65 if key.startswith('COV_CORE_'):
65 if key.startswith('COV_CORE_'):
66 del env[key]
66 del env[key]
67
67
68 self.process = Popen(command, shell=True, stdout=PIPE, stderr=PIPE,
68 self.process = Popen(command, shell=True, stdout=PIPE, stderr=PIPE,
69 cwd=self.cwd, env=env)
69 cwd=self.cwd, env=env)
70 stdout, stderr = self.process.communicate()
70 stdout, stderr = self.process.communicate()
71 if DEBUG:
71 if DEBUG:
72 log.debug('STDOUT:%s', stdout)
72 log.debug('STDOUT:%s', stdout)
73 log.debug('STDERR:%s', stderr)
73 log.debug('STDERR:%s', stderr)
74 return stdout, stderr
74 return stdout, stderr
75
75
76 def assert_returncode_success(self):
76 def assert_returncode_success(self):
77 assert self.process.returncode == 0
77 assert self.process.returncode == 0
78
78
79
79
80 def _add_files(vcs, dest, clone_url=None, tags=None, target_branch=None, new_branch=False, **kwargs):
80 def _add_files(vcs, dest, clone_url=None, tags=None, target_branch=None, new_branch=False, **kwargs):
81 git_ident = "git config user.name {} && git config user.email {}".format(
81 git_ident = "git config user.name {} && git config user.email {}".format(
82 'Marcin KuΕΊminski', 'me@email.com')
82 'Marcin KuΕΊminski', 'me@email.com')
83 cwd = path = jn(dest)
83 cwd = path = jn(dest)
84
84
85 tags = tags or []
85 tags = tags or []
86 added_file = jn(path, '%s_setup.py' % tempfile._RandomNameSequence().next())
86 added_file = jn(path, '%s_setup.py' % tempfile._RandomNameSequence().next())
87 Command(cwd).execute('touch %s' % added_file)
87 Command(cwd).execute('touch %s' % added_file)
88 Command(cwd).execute('%s add %s' % (vcs, added_file))
88 Command(cwd).execute('%s add %s' % (vcs, added_file))
89 author_str = 'Marcin KuΕΊminski <me@email.com>'
89 author_str = 'Marcin KuΕΊminski <me@email.com>'
90
90
91 for i in range(kwargs.get('files_no', 3)):
91 for i in range(kwargs.get('files_no', 3)):
92 cmd = """echo 'added_line%s' >> %s""" % (i, added_file)
92 cmd = """echo 'added_line%s' >> %s""" % (i, added_file)
93 Command(cwd).execute(cmd)
93 Command(cwd).execute(cmd)
94
94
95 if vcs == 'hg':
95 if vcs == 'hg':
96 cmd = """hg commit -m 'committed new %s' -u '%s' %s """ % (
96 cmd = """hg commit -m 'committed new %s' -u '%s' %s """ % (
97 i, author_str, added_file
97 i, author_str, added_file
98 )
98 )
99 elif vcs == 'git':
99 elif vcs == 'git':
100 cmd = """%s && git commit -m 'committed new %s' %s""" % (
100 cmd = """%s && git commit -m 'committed new %s' %s""" % (
101 git_ident, i, added_file)
101 git_ident, i, added_file)
102 Command(cwd).execute(cmd)
102 Command(cwd).execute(cmd)
103
103
104 for tag in tags:
104 for tag in tags:
105 if vcs == 'hg':
105 if vcs == 'hg':
106 Command(cwd).execute(
106 Command(cwd).execute(
107 'hg tag -m "{}" -u "{}" '.format(tag['commit'], author_str), tag['name'])
107 'hg tag -m "{}" -u "{}" '.format(tag['commit'], author_str), tag['name'])
108 elif vcs == 'git':
108 elif vcs == 'git':
109 if tag['commit']:
109 if tag['commit']:
110 # annotated tag
110 # annotated tag
111 _stdout, _stderr = Command(cwd).execute(
111 _stdout, _stderr = Command(cwd).execute(
112 """%s && git tag -a %s -m "%s" """ % (
112 """%s && git tag -a %s -m "%s" """ % (
113 git_ident, tag['name'], tag['commit']))
113 git_ident, tag['name'], tag['commit']))
114 else:
114 else:
115 # lightweight tag
115 # lightweight tag
116 _stdout, _stderr = Command(cwd).execute(
116 _stdout, _stderr = Command(cwd).execute(
117 """%s && git tag %s""" % (
117 """%s && git tag %s""" % (
118 git_ident, tag['name']))
118 git_ident, tag['name']))
119
119
120
120
121 def _add_files_and_push(vcs, dest, clone_url=None, tags=None, target_branch=None,
121 def _add_files_and_push(vcs, dest, clone_url=None, tags=None, target_branch=None,
122 new_branch=False, **kwargs):
122 new_branch=False, **kwargs):
123 """
123 """
124 Generate some files, add it to DEST repo and push back
124 Generate some files, add it to DEST repo and push back
125 vcs is git or hg and defines what VCS we want to make those files for
125 vcs is git or hg and defines what VCS we want to make those files for
126 """
126 """
127 git_ident = "git config user.name {} && git config user.email {}".format(
127 git_ident = "git config user.name {} && git config user.email {}".format(
128 'Marcin KuΕΊminski', 'me@email.com')
128 'Marcin KuΕΊminski', 'me@email.com')
129 cwd = path = jn(dest)
129 cwd = path = jn(dest)
130
130
131 # commit some stuff into this repo
131 # commit some stuff into this repo
132 _add_files(vcs, dest, clone_url, tags, target_branch, new_branch, **kwargs)
132 _add_files(vcs, dest, clone_url, tags, target_branch, new_branch, **kwargs)
133
133
134 default_target_branch = {
134 default_target_branch = {
135 'git': 'master',
135 'git': 'master',
136 'hg': 'default'
136 'hg': 'default'
137 }.get(vcs)
137 }.get(vcs)
138
138
139 target_branch = target_branch or default_target_branch
139 target_branch = target_branch or default_target_branch
140
140
141 # PUSH it back
141 # PUSH it back
142 stdout = stderr = None
142 stdout = stderr = None
143 if vcs == 'hg':
143 if vcs == 'hg':
144 maybe_new_branch = ''
144 maybe_new_branch = ''
145 if new_branch:
145 if new_branch:
146 maybe_new_branch = '--new-branch'
146 maybe_new_branch = '--new-branch'
147 stdout, stderr = Command(cwd).execute(
147 stdout, stderr = Command(cwd).execute(
148 'hg push --verbose {} -r {} {}'.format(maybe_new_branch, target_branch, clone_url)
148 'hg push --verbose {} -r {} {}'.format(maybe_new_branch, target_branch, clone_url)
149 )
149 )
150 elif vcs == 'git':
150 elif vcs == 'git':
151 stdout, stderr = Command(cwd).execute(
151 stdout, stderr = Command(cwd).execute(
152 """{} &&
152 """{} &&
153 git push --verbose --tags {} {}""".format(git_ident, clone_url, target_branch)
153 git push --verbose --tags {} {}""".format(git_ident, clone_url, target_branch)
154 )
154 )
155
155
156 return stdout, stderr
156 return stdout, stderr
157
157
158
158
159 def _check_proper_git_push(
159 def _check_proper_git_push(
160 stdout, stderr, branch='master', should_set_default_branch=False):
160 stdout, stderr, branch='master', should_set_default_branch=False):
161 # Note: Git is writing most information to stderr intentionally
161 # Note: Git is writing most information to stderr intentionally
162 assert 'fatal' not in stderr
162 assert 'fatal' not in stderr
163 assert 'rejected' not in stderr
163 assert 'rejected' not in stderr
164 assert 'Pushing to' in stderr
164 assert 'Pushing to' in stderr
165 assert '%s -> %s' % (branch, branch) in stderr
165 assert '%s -> %s' % (branch, branch) in stderr
166
166
167 if should_set_default_branch:
167 if should_set_default_branch:
168 assert "Setting default branch to %s" % branch in stderr
168 assert "Setting default branch to %s" % branch in stderr
169 else:
169 else:
170 assert "Setting default branch" not in stderr
170 assert "Setting default branch" not in stderr
171
171
172
172
173 def _check_proper_hg_push(stdout, stderr, branch='default'):
173 def _check_proper_hg_push(stdout, stderr, branch='default'):
174 assert 'pushing to' in stdout
174 assert 'pushing to' in stdout
175 assert 'searching for changes' in stdout
175 assert 'searching for changes' in stdout
176
176
177 assert 'abort:' not in stderr
177 assert 'abort:' not in stderr
178
178
179
179
180 def _check_proper_clone(stdout, stderr, vcs):
180 def _check_proper_clone(stdout, stderr, vcs):
181 if vcs == 'hg':
181 if vcs == 'hg':
182 assert 'requesting all changes' in stdout
182 assert 'requesting all changes' in stdout
183 assert 'adding changesets' in stdout
183 assert 'adding changesets' in stdout
184 assert 'adding manifests' in stdout
184 assert 'adding manifests' in stdout
185 assert 'adding file changes' in stdout
185 assert 'adding file changes' in stdout
186
186
187 assert stderr == ''
187 assert stderr == ''
188
188
189 if vcs == 'git':
189 if vcs == 'git':
190 assert '' == stdout
190 assert '' == stdout
191 assert 'Cloning into' in stderr
191 assert 'Cloning into' in stderr
192 assert 'abort:' not in stderr
192 assert 'abort:' not in stderr
193 assert 'fatal:' not in stderr
193 assert 'fatal:' not in stderr
General Comments 0
You need to be logged in to leave comments. Login now