@@ -11,9 +11,9 @@ y_label = "Throughput in GiB/s"
 var_label = "Thread Counts"
 thread_counts = ["1t", "2t", "4t", "8t", "12t"]
 thread_counts_nice = ["1 Thread", "2 Threads", "4 Threads", "8 Threads", "12 Threads"]
-engine_counts = ["1mib-1e", "1mib-4e", "1gib-1e", "1gib-4e"]
+engine_counts = ["1mib-1e_PREVENT_FROM_DISPLAYING", "1mib-4e_PREVENT_FROM_DISPLAYING", "1gib-1e", "1gib-4e"]
 engine_counts_nice = ["1 E/WQ and Tasksize 1 MiB", "4 E/WQ and Tasksize 1 MiB", "1 E/WQ and Tasksize 1 GiB", "4 E/WQ and Tasksize 1 GiB"]
-title = "Per-Thread Throughput - 120 Copy Operations split on Threads Intra-Node on DDR"
+title = "Total Throughput - 120 Copy Operations split on Threads Intra-Node on DDR"
 index = [runid, x_label, var_label]
 data = []
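Suffixing the 1 MiB labels presumably makes the corresponding file lookups fail (the loader call further down is wrapped in a try block), so those configurations silently drop out of the plotted dataset. An explicit alternative would be to filter the list before iterating; a minimal, self-contained sketch of that idea (not the script's actual mechanism):

# Hypothetical sketch: filter suffixed labels out up front instead of
# relying on failed file lookups to exclude them from the plot.
engine_counts = ["1mib-1e_PREVENT_FROM_DISPLAYING", "1mib-4e_PREVENT_FROM_DISPLAYING", "1gib-1e", "1gib-4e"]
displayed = [e for e in engine_counts if not e.endswith("_PREVENT_FROM_DISPLAYING")]
print(displayed)  # ['1gib-1e', '1gib-4e']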
@@ -43,9 +43,9 @@ def load_time_mesurements(file_path):
     return {
         "total": sum([x / (iterations * 120) for x in list(chain([data["list"][i]["report"]["time"]["total"] for i in range(count)]))]),
-        "combined": [x / (120 / count) for x in list(chain(*[data["list"][i]["report"]["time"]["combined"] for i in range(count)]))],
-        "submission": [x / (120 / count) for x in list(chain(*[data["list"][i]["report"]["time"]["submission"] for i in range(count)]))],
-        "completion": [x / (120 / count) for x in list(chain(*[data["list"][i]["report"]["time"]["completion"] for i in range(count)]))]
+        "combined": [x / 120 for x in list(chain(*[data["list"][i]["report"]["time"]["combined"] for i in range(count)]))],
+        "submission": [x / 120 for x in list(chain(*[data["list"][i]["report"]["time"]["submission"] for i in range(count)]))],
+        "completion": [x / 120 for x in list(chain(*[data["list"][i]["report"]["time"]["completion"] for i in range(count)]))]
     }

 def process_file_to_dataset(file_path, engine_label, thread_count):
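This hunk changes the per-sample normalization from x / (120 / count) to x / 120: with 120 copy operations split across count submitting lists, the old divisor scaled each measured time to one list's share of the work, while the new one spreads it over all 120 operations, matching the title's switch from per-thread to total throughput. A small, self-contained illustration of the arithmetic (the sample values are made up):

# Illustration only: made-up combined time for one list's measurement.
count = 4            # number of per-thread result lists in this run
x = 60_000_000       # hypothetical combined time, in whatever unit the report uses

per_thread = x / (120 / count)  # old: divide by the 30 operations this list covers
total      = x / 120            # new: divide by all 120 operations of the run

print(per_thread, total)  # 2000000.0 500000.0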
@@ -55,13 +55,12 @@ def process_file_to_dataset(file_path, engine_label, thread_count):
     thread_count_nice = thread_counts_nice[threadc_index]
-    data_size = 0
-    if engine_label in ["1gib-1e", "1gib-4e"]:
-        data_size = 1024 * 1024 * 1024
-    else:
-        data_size = 1024 * 1024
+    if engine_label in ["1gib-1e", "1gib-4e"]: data_size = 1024 * 1024 * 1024
+    elif engine_label in ["1mib-1e", "1mib-4e"]: data_size = 1024 * 1024
+    else: data_size = 0
     try:
-        time = [load_time_mesurements(file_path)["total"]]
+        time = load_time_mesurements(file_path)["combined"]
         run_idx = 0
         for t in time:
            data.append({runid: run_idx, x_label: thread_count_nice, var_label: engine_nice, y_label: calc_throughput(data_size, t)})
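calc_throughput itself is not part of this diff; given the y-axis label "Throughput in GiB/s", a plausible sketch is shown below. The assumption that the per-operation times are in nanoseconds is mine and may not match the actual measurement unit.

def calc_throughput(size_bytes: int, time_ns: float) -> float:
    # Hypothetical helper: convert a byte count and a per-operation duration
    # (assumed to be in nanoseconds) into GiB/s.
    return (size_bytes / (1024 ** 3)) / (time_ns / 1_000_000_000)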