
add plotter for submit method with logscale to show optimal

master
Constantin Fürst 1 year ago
commit 8ba84587be
1. 96 benchmarks/benchmark-plotters/plot-opt-submitmethod.py
2. BIN benchmarks/benchmark-results/plot-opt-submitmethod.png

96
benchmarks/benchmark-plotters/plot-opt-submitmethod.py

@@ -0,0 +1,96 @@
import os
import json
import pandas as pd
from typing import List
import seaborn as sns
import matplotlib.pyplot as plt

runid = "Run ID"
x_label = "Size of Submitted Task"
y_label = "Throughput in GiB/s, LogScale"
var_label = "Submission Type"
sizes = ["1kib", "4kib", "1mib", "32mib"]
sizes_nice = ["1 KiB", "4 KiB", "1 MiB", "32 MiB"]
types = ["bs10", "bs50", "ms10", "ms50", "ssaw"]
types_nice = ["Batch, Size 10", "Batch, Size 50", "Multi-Submit, Count 10", "Multi-Submit, Count 50", "Single Submit"]
title = "Optimal Submission Method - Copy Operation tested Intra-Node on DDR"
index = [runid, x_label, var_label]
data = []

def calc_throughput(size_bytes, time_nanoseconds):
    # times are taken to be in nanoseconds; the 1e-9 factor converts to seconds
    time_seconds = time_nanoseconds * 1e-9
    size_gib = size_bytes / (1024 ** 3)
    throughput_gibs = size_gib / time_seconds
    return throughput_gibs

def index_from_element(value, array):
    for (idx, val) in enumerate(array):
        if val == value: return idx
    return 0

def load_and_process_submit_json(file_path):
    with open(file_path, 'r') as file:
        data = json.load(file)
        return data["list"][0]["report"]["time"]

# Function to build the dataset for the new benchmark:
# loads the results for all sizes of one submission type and appends
# the per-1-KiB throughput of every run to the global data list
def create_submit_dataset(file_paths, type_label):
    times = []

    type_index = index_from_element(type_label, types)
    type_nice = types_nice[type_index]

    for file_path in file_paths:
        time = load_and_process_submit_json(file_path)
        times.append(time["combined"])

    # Adjust time measurements based on type
    # which can contain multiple submissions
    if type_label in {"bs10", "ms10"}:
        times = [[t / 10 for t in time] for time in times]
    elif type_label in {"ms50", "bs50"}:
        times = [[t / 50 for t in time] for time in times]

    # normalize the per-submission times to the time per 1 KiB of data,
    # so that a single 1 KiB throughput calculation covers all sizes
    times[0] = [t / 1 for t in times[0]]
    times[1] = [t / 4 for t in times[1]]
    times[2] = [t / 1024 for t in times[2]]
    times[3] = [t / (32 * 1024) for t in times[3]]

    throughput = [[calc_throughput(1024, time) for time in t] for t in times]

    idx = 0
    for run_set in throughput:
        run_idx = 0
        for run in run_set:
            data.append({runid: run_idx, x_label: sizes_nice[idx], var_label: type_nice, y_label: throughput[idx][run_idx]})
            run_idx = run_idx + 1
        idx = idx + 1

# Main function to iterate over files and create plots for the new benchmark
def main():
    folder_path = "benchmark-results/"  # Replace with the actual path to your folder

    for type_label in types:
        file_paths = [os.path.join(folder_path, f"submit-{type_label}-{size}-1e.json") for size in sizes]
        create_submit_dataset(file_paths, type_label)

    df = pd.DataFrame(data)
    df.set_index(index, inplace=True)
    df = df.sort_values(y_label)

    sns.barplot(x=x_label, y=y_label, hue=var_label, data=df, palette="rocket", errorbar="sd").set(yscale="log")
    plt.title(title)
    plt.savefig(os.path.join(folder_path, "plot-opt-submitmethod.png"), bbox_inches='tight')
    plt.show()


if __name__ == "__main__":
    main()

BIN
benchmarks/benchmark-results/plot-opt-submitmethod.png

After: 623 × 453, 35 KiB
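
A quick sanity check of the per-KiB normalization in create_submit_dataset, using a made-up timing rather than a value from the committed results:

# hypothetical example values, not taken from the benchmark results
t_combined_ns = 3276800                      # one 32 MiB copy, combined time in ns
t_per_kib_ns = t_combined_ns / (32 * 1024)   # 100 ns per KiB of submitted data
gibs = calc_throughput(1024, t_per_kib_ns)   # (1024 / 2**30) GiB / 100e-9 s ≈ 9.54 GiB/s

Dividing each combined time by the task size in KiB means the single calc_throughput(1024, ...) call puts all four sizes on the same GiB/s scale for the log-scaled bar plot.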
