# tcc/scripts/testes.py
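"""Load-test script: fires batches of concurrent GET requests against the
configured endpoints, prints timing statistics, appends requests-per-second
results to a CSV file, and generates a graph per endpoint."""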

import requests
import concurrent.futures
import time
import sys
import os
from graph import generateGraph
from math import floor

if len(sys.argv) != 2 or sys.argv[1] in ('-h', '--help'):
    print("Usage: python testes.py <name>")
    sys.exit(1)

THREADS = 10
FRAMEWORK_NAME = sys.argv[1]
URL_BASE = 'http://172.26.48.1:9080'
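# Each entry pairs an endpoint with the batch sizes (number of requests) to test.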
API_REQUESTS = [
    ('/status/ok', range(0, 50_000, 5000)),
    ('/image/load-image', range(0, 50_000, 5000)),
    ('/image/load-big-image', range(0, 1_000, 10)),
]


def send_request(url):
    # Retry the request until it succeeds with HTTP 200, counting every
    # response by status-code class along the way.
    success = False
    responses = {
        2: 0,  # OK
        4: 0,  # Bad Request
        5: 0,  # Server Error
    }
    while not success:
        try:
            response = requests.get(url)
        except requests.RequestException:
            continue
        success = response.status_code == 200
        responses[floor(response.status_code / 100)] += 1
    return responses


def getFileName(endpoint):
    # Build a per-framework, per-endpoint CSV file name.
    endpoint = endpoint.replace('/', '_')
    return f"{FRAMEWORK_NAME}_{endpoint}.csv"


def record(filename, requests, reqpersec):
    # Append a "total requests, requests per second" row to the CSV file.
    with open(filename, "a") as file:
        file.write(f"{requests},{reqpersec}\n")


def run_tests(endpoint, num_requests):
    filename = getFileName(endpoint)

    # Start from a clean results file for this endpoint.
    if os.path.exists(filename):
        os.remove(filename)

    for num_request in num_requests:
        if num_request <= 0:
            continue

        ok_responses = 0
        bad_responses = 0
        server_errors = 0
        with concurrent.futures.ThreadPoolExecutor(max_workers=THREADS) as executor:
            url = f'{URL_BASE}{endpoint}'
            start_time = time.time()
            # with requests.Session() as session:
            #     futures = [executor.submit(send_request, session, url) for _ in range(num_request)]
            futures = [executor.submit(send_request, url) for _ in range(num_request)]
            concurrent.futures.wait(futures)
            elapsed_time = time.time() - start_time

            # Aggregate the per-request counters by status-code class.
            for future in futures:
                responses = future.result()
                ok_responses += responses[2]
                bad_responses += responses[4]
                server_errors += responses[5]

        print(f"{num_request}: {elapsed_time:.2f} seconds. "
              f"{elapsed_time/num_request:.4f} seconds per request. "
              f"{num_request/elapsed_time:.2f} requests per second. "
              f"[OK: {ok_responses}, Bad Request: {bad_responses}, Server Error: {server_errors}]")
        record(filename, num_request, f"{num_request/elapsed_time:.2f}")
        generateGraph(filename, FRAMEWORK_NAME, endpoint)

        time.sleep(3)


# Run the full sweep for every configured endpoint.
for endpoint, num_requests in API_REQUESTS:
    print(f"# {endpoint}")
    run_tests(endpoint, num_requests)