2023-08-23 14:01:18 +00:00
import requests
2023-08-31 21:47:04 +00:00
import docker
2023-08-23 14:01:18 +00:00
import concurrent . futures
import time
2023-08-24 23:52:15 +00:00
import sys
2023-08-24 23:57:34 +00:00
import os
2023-08-31 21:47:04 +00:00
from graph import generate_req_graph , generate_resource_graph
2023-08-29 18:55:28 +00:00
from math import floor
2023-08-23 14:01:18 +00:00
2023-08-31 21:47:04 +00:00
# CLI validation — the script is invoked as:
#   python testes.py <framework name> <container name>
# (Original compared against ' -h ' / ' --help ' with embedded spaces,
# which could never match a real flag.)
if len(sys.argv) != 3 or sys.argv[1] in ('-h', '--help'):
    print("Usage: python testes.py <framework name> <container name>")
    sys.exit(1)
2023-08-23 14:01:18 +00:00
2023-08-29 18:55:28 +00:00
# Benchmark configuration.
THREADS = 10                      # worker threads issuing requests concurrently
FRAMEWORK_NAME = sys.argv[1]      # label used in CSV/graph file names
CONTAINER_NAME = sys.argv[2]      # docker container sampled for CPU/RAM stats
URL_BASE = 'http://localhost:9080'

# (endpoint, request counts): each endpoint is benchmarked once per count in
# its range; the initial 0 is skipped by run_tests.
API_REQUESTS = [
    ('/status/ok', range(0, 30_000, 5_000)),
    ('/image/load-image', range(0, 30_000, 5_000)),
    ('/static/simpleimage.png', range(0, 30_000, 5_000)),
    ('/image/load-big-image', range(0, 1_000, 200)),
    ('/static/bigimage.png', range(0, 1_000, 200)),
    ('/static/video.mp4', range(0, 10_000, 1_000)),
]
2023-08-23 14:01:18 +00:00
2023-08-24 23:52:15 +00:00
def send_request(url):
    """GET *url* repeatedly until a 200 is received, tallying each attempt.

    Returns a dict mapping status-class digit to count:
        {2: ok, 4: client error, 5: server error}
    Every completed (non-exception) response is counted; network errors are
    retried without being counted.  NOTE(review): loops forever if the server
    never returns 200 — intentional for this load test, but worth confirming.
    """
    responses = {
        2: 0,  # OK
        4: 0,  # Bad Request
        5: 0,  # Server Error
    }
    success = False
    while not success:
        try:
            response = requests.get(url)
        except requests.exceptions.RequestException:
            # Connection refused/reset while the server is saturated: retry.
            # (Was a bare `except:`, which also swallowed KeyboardInterrupt
            # and made the retry loop impossible to Ctrl-C out of.)
            continue
        success = response.status_code == 200
        # `// 100` buckets by status class; .get() guards against 1xx/3xx
        # classes, which would KeyError in the original floor(x/100) lookup.
        bucket = response.status_code // 100
        responses[bucket] = responses.get(bucket, 0) + 1
    return responses
2023-08-23 14:01:18 +00:00
2023-08-31 21:47:04 +00:00
def getFileNames(endpoint, framework=None):
    """Return the [requests-csv, resource-csv] file names for *endpoint*.

    Slashes are stripped from the endpoint so it can be used as a file-name
    component (e.g. '/status/ok' -> 'statusok').  *framework* defaults to the
    module-level FRAMEWORK_NAME; the parameter was added (backward-compatibly)
    so the helper can be used and tested without that global.
    """
    if framework is None:
        framework = FRAMEWORK_NAME
    name = endpoint.replace('/', '')
    return [
        f"req_{framework}_{name}.csv",
        f"resource_{framework}_{name}.csv",
    ]
2023-08-24 23:52:15 +00:00
def record(filename, requests, reqpersec):
    """Append one 'requests,reqpersec' CSV row to *filename*.

    NOTE(review): the *requests* parameter shadows the imported `requests`
    module; kept as-is to preserve the call interface.
    """
    with open(filename, "a") as file:
        file.write(f"{requests},{reqpersec}\n")
2023-08-31 21:47:04 +00:00
def record_resource(filename, requests, cpu, ram):
    """Append one 'requests,cpu,ram' CSV row to *filename*.

    NOTE(review): *requests* shadows the imported `requests` module; kept
    for interface compatibility.
    """
    with open(filename, "a") as file:
        file.write(f"{requests},{cpu},{ram}\n")
2023-08-29 18:55:28 +00:00
def run_tests(endpoint, num_requests):
    """Benchmark *endpoint* once per request count in *num_requests*.

    For each count: fires that many GETs through a thread pool, samples
    container CPU/RAM once halfway through submission (so the reading
    reflects the server under load), then appends throughput and resource
    rows to the endpoint's CSV files and regenerates both graphs.  Any
    existing CSVs for this endpoint are removed first.
    """
    files = getFileNames(endpoint)
    for filename in files:
        if os.path.exists(filename):
            os.remove(filename)

    for num_request in num_requests:
        if num_request <= 0:
            continue

        ok_responses = 0
        bad_responses = 0
        server_errors = 0
        cpu, ram = 0, 0

        with concurrent.futures.ThreadPoolExecutor(max_workers=THREADS) as executor:
            url = f'{URL_BASE}{endpoint}'
            start_time = time.time()
            futures = []

            half = num_request // 2
            for i in range(num_request):
                futures.append(executor.submit(send_request, url))
                if i == half:
                    # Mid-submission snapshot of container resource usage.
                    cpu, ram = get_resource_usage()

            concurrent.futures.wait(futures)

        elapsed_time = time.time() - start_time

        for future in futures:
            responses = future.result()
            ok_responses += responses[2]
            bad_responses += responses[4]
            server_errors += responses[5]

        print(
            f"{num_request}: {elapsed_time:.2f} seconds. "
            f"{elapsed_time / num_request:.4f} seconds per request. "
            f"{num_request / elapsed_time:.2f} requests per second. "
            f"[OK: {ok_responses}, Bad Request: {bad_responses}, "
            f"Server Error: {server_errors}]"
        )

        record(files[0], num_request, f"{num_request / elapsed_time:.2f}")
        record_resource(files[1], num_request, cpu, ram)
        generate_req_graph(files[0], FRAMEWORK_NAME, endpoint)
        generate_resource_graph(files[1], FRAMEWORK_NAME, endpoint)

        # Let the server drain before the next batch size.
        time.sleep(3)
2023-08-23 14:01:18 +00:00
2023-08-31 21:47:04 +00:00
def get_resource_usage():
    """Sample the benchmarked container's CPU and RAM usage.

    Returns (cpu_percent, ram_percent) as two-decimal strings, or (0, 0)
    when the docker daemon or container stats are unavailable — the
    benchmark then proceeds without resource data.
    """
    try:
        client = docker.from_env()
        stats = client.containers.get(CONTAINER_NAME).stats(stream=False)
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # still propagate; docker raises library-specific errors here.
        return 0, 0  # unable to get stats
    return get_cpu_usage(stats), get_ram_usage(stats)
def get_cpu_usage(stats):
    """Compute container CPU usage (%) from a docker stats snapshot.

    Uses docker's delta formula: (cpu delta / system delta) * cpus * 100.
    Returns the percentage as a two-decimal string; '0.00' when no system
    time elapsed between the two readings (avoids ZeroDivisionError).
    """
    usage_delta = (stats['cpu_stats']['cpu_usage']['total_usage']
                   - stats['precpu_stats']['cpu_usage']['total_usage'])
    system_delta = (stats['cpu_stats']['system_cpu_usage']
                    - stats['precpu_stats']['system_cpu_usage'])
    # 'online_cpus' is absent from the stats payload on some platforms;
    # fall back to 1 instead of raising KeyError.
    num_cpus = stats['cpu_stats'].get('online_cpus', 1)
    if system_delta <= 0:
        return "0.00"
    percentage = (usage_delta / system_delta) * num_cpus * 100
    return f"{percentage:.2f}"
def get_ram_usage(stats):
    """Return container memory usage as a two-decimal percentage string."""
    usage = stats['memory_stats']['usage']
    limit = stats['memory_stats']['limit']
    if not limit:
        return "0.00"  # guard: an unset/zero limit would divide by zero
    percentage = (usage / limit) * 100
    return f"{percentage:.2f}"
2023-08-29 18:55:28 +00:00
# Entry point: benchmark every configured endpoint in sequence.
for endpoint, num_requests in API_REQUESTS:
    print(f"#{endpoint}")
    run_tests(endpoint, num_requests)