# -*- coding: utf-8 -*-

# Copyright (C) 2010-2020 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/
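
"""
Load-test helper: time a list of page URLs against a running server.

The script reads relative URLs, one per line, from the file passed via
--pages, calls requests.get() on "<server>/<page>" and prints the
min/max/avg response time over --repeat runs.

Example invocation (a sketch; pages.txt is a hypothetical file listing
paths such as _admin/gists, one per line):

    python time_urls.py --server http://rc.local.com --pages pages.txt \
        --repeat 5 --timeout 30
"""
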
import timeit
import logging
import click

log = logging.getLogger(__name__)


@click.command()
@click.option('--server', help='Server url to connect to. e.g http://rc.local.com', required=True)
@click.option('--pages', help='load pages to visit from a file', required=True, type=click.File())
@click.option('--repeat', help='number of times to repeat', default=10, type=int)
@click.option('--timeout', help='call timeout', default=60, type=int)
def main(server, repeat, pages, timeout):

    print("Repeating each URL %d times\n" % repeat)
    pages = pages.readlines()
    for page_url in pages:

        url = "%s/%s" % (server, page_url.strip())
        print(url)

        # time a single GET request; the URL is baked into the statement and
        # `requests` is imported via timeit's setup argument
        stmt = "requests.get('{}', timeout={})".format(url, timeout)
        t = timeit.Timer(stmt=stmt, setup="import requests")

        # run the request `repeat` times (one call per run) and report stats
        result = t.repeat(repeat=repeat, number=1)
        print(" %.4f (min) - %.4f (max) - %.4f (avg)\n" %
              (min(result), max(result), sum(result) / len(result)))


if __name__ == '__main__':
    main()
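
# Note: timeit.Timer also accepts a callable, which avoids embedding the URL
# inside a statement string (a sketch, not part of the original script; it
# assumes `requests` is imported at module level instead of via timeit's
# `setup` argument):
#
#     t = timeit.Timer(lambda: requests.get(url, timeout=timeout))
#     result = t.repeat(repeat=repeat, number=1)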