@@ -5,7 +5,7 @@ from itertools import chain
 import seaborn as sns
 import matplotlib.pyplot as plt

-from common import calc_throughput
+from common import calc_throughput, load_time_mesurements

 result_path = "benchmark-results/"
 output_path = "benchmark-plots/"
@@ -40,20 +40,8 @@ data = []

 # loads the measurements from a given file and processes them
 # so that they are normalized, meaning that the timings returned
 # are nanoseconds per element transfered
-def load_time_mesurements(file_path):
-    with open(file_path, 'r') as file:
-        data = json.load(file)
-    count = data["count"]
-    batch_size = data["list"][0]["task"]["batching"]["batch_size"] if data["list"][0]["task"]["batching"]["batch_size"] > 0 else 1
-    iterations = data["list"][0]["task"]["iterations"]
-
-    return {
-        "size": data["list"][0]["task"]["size"],
-        "total": sum([x / (iterations * batch_size * count * count) for x in list(chain([data["list"][i]["report"]["time"]["total"] for i in range(count)]))]),
-        "combined": [ x / (count * batch_size) for x in list(chain(*[data["list"][i]["report"]["time"]["combined"] for i in range(count)]))],
-        "submission": [ x / (count * batch_size) for x in list(chain(*[data["list"][i]["report"]["time"]["submission"] for i in range(count)]))],
-        "completion": [ x / (count * batch_size) for x in list(chain(*[data["list"][i]["report"]["time"]["completion"] for i in range(count)]))]
-    }
+def get_timing(file_path):
+    return load_time_mesurements(file_path)

 # procceses a single file and appends the desired timings
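
Reviewer note on the hunk above: the removed loader presumably moved into common.py unchanged, since the first hunk now imports load_time_mesurements from there. For reference, here is a hypothetical minimal result file that satisfies the field accesses in the removed body; the key layout is taken from the code, every value is invented. The snippet runs in the context of the new file, which imports load_time_mesurements from common:

    import json
    import tempfile

    # Smallest JSON shape the loader dereferences: one entry ("count": 1),
    # unbatched ("batch_size": 0 falls back to 1), a scalar "total", and
    # per-iteration lists for "combined", "submission" and "completion".
    example = {
        "count": 1,
        "list": [{
            "task": {"size": 1024, "iterations": 2, "batching": {"batch_size": 0}},
            "report": {"time": {
                "total": 250000.0,
                "combined": [130000.0, 120000.0],
                "submission": [20000.0, 15000.0],
                "completion": [110000.0, 105000.0],
            }},
        }],
    }

    # Round-trip through a temp file, since the loader takes a path.
    with tempfile.NamedTemporaryFile("w", suffix=".json", delete=False) as f:
        json.dump(example, f)
    print(load_time_mesurements(f.name))
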
@@ -61,12 +49,13 @@ def load_time_mesurements(file_path):
 # and ignores if the given file is not found as some
 # configurations may not be benchmarked
 def process_file_to_dataset(file_path, src_node, dst_node):
+    size = 1024*1024*1024
     try:
-        file_data = load_time_mesurements(file_path)
-        time = file_data["combined"]
+        timing = get_timing(file_path)
         run_idx = 0
-        for t in time:
-            data.append({ runid : run_idx, x_label : dst_node, y_label : calc_throughput(file_data["size"], t)})
+        for t in timing:
+            tp = calc_throughput(size, t)
+            data.append({ runid : run_idx, x_label : dst_node, y_label : tp})
             run_idx = run_idx + 1
     except FileNotFoundError:
         return
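
One thing worth double-checking in this hunk: if common.load_time_mesurements still returns the dict that the removed local copy returned, then get_timing hands that dict straight back, and `for t in timing:` iterates over its keys ("size", "total", ...) rather than over timing values. A minimal sketch of a wrapper that matches how the loop uses t, assuming the "combined" series is the intended source as in the old code:

    # Sketch only: return the per-element combined timings themselves, so the
    # caller iterates numbers, not dict keys. Assumes the dict layout removed
    # in the previous hunk.
    def get_timing(file_path):
        return load_time_mesurements(file_path)["combined"]
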
@@ -89,8 +78,7 @@ def plot_bar(table,title,node_config):
 def main(node_config,title):
     src_node = 0
     for dst_node in {8,11,12,15}:
-        size = "512mib" if node_config == "allnodes" and src_node == dst_node and src_node >= 8 else "1gib"
-        file = os.path.join(result_path, f"copy-n{src_node}ton{dst_node}-{size}-{node_config}-1e.json")
+        file = os.path.join(result_path, f"copy-n{src_node}ton{dst_node}-1gib-{node_config}-1e.json")
        process_file_to_dataset(file, src_node, dst_node)

    df = pd.DataFrame(data)
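
A final note on the helpers this file leans on: common.calc_throughput is not shown in the diff. A plausible shape, assuming `size` is the transfer size in bytes (the new code passes 1024*1024*1024) and `t` is a duration in nanoseconds for one transfer of that size; the actual implementation in common.py may differ:

    # Hypothetical calc_throughput: bytes over nanoseconds, expressed as GiB/s.
    def calc_throughput(size, t):
        gib = size / (1024 ** 3)        # bytes -> GiB
        seconds = t / 1_000_000_000     # ns -> s
        return gib / seconds

With the now-hardcoded 1 GiB size, a combined time of 250_000_000 ns (0.25 s) would map to 4.0 GiB/s.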