
update the submit method benchmark plotter

master
Constantin Fürst, 1 year ago
commit 573e312a69
  1. benchmarks/benchmark-plotters/plot-perf-submitmethod.py (23 lines changed)
  2. benchmarks/benchmark-results/plot-perf-submitmethod.png (BIN)

benchmarks/benchmark-plotters/plot-perf-submitmethod.py (23 lines changed)

@@ -10,8 +10,8 @@ runid = "Run ID"
 x_label = "Size of Submitted Task"
 y_label = "Throughput in GiB/s"
 var_label = "Submission Type"
-sizes = ["1kib", "4kib", "1mib", "1gib"]
-sizes_nice = ["1 KiB", "4 KiB", "1 MiB", "1 GiB"]
+sizes = ["1kib", "4kib", "1mib", "32mib"]
+sizes_nice = ["1 KiB", "4 KiB", "1 MiB", "32 MiB"]
 types = ["bs10", "bs50", "ms10", "ms50", "ssaw"]
 types_nice = ["Batch, Size 10", "Batch, Size 50", "Multi-Submit, Count 10", "Multi-Submit, Count 50", "Single Submit"]
 title = "Performance of Submission Methods - Copy Operation tested Intra-Node on DDR"
@@ -35,7 +35,12 @@ def index_from_element(value,array):
 def load_and_process_submit_json(file_path):
     with open(file_path, 'r') as file:
         data = json.load(file)
-        time = { "combined" : data["list"][0]["report"]["time"]["combined"], "submit" : data["list"][0]["report"]["time"]["submit"], "complete" : data["list"][0]["report"]["time"]["complete"]}
+        time = {
+            "combined" : data["list"][0]["report"]["time"]["combined"],
+            "submit" : data["list"][0]["report"]["time"]["submission"],
+            "complete" : data["list"][0]["report"]["time"]["completion"]
+        }
         return time

 # Function to plot the graph for the new benchmark
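Note: the accessors above imply a result-file layout along the following lines. This is a minimal sketch; only the nesting (list -> report -> time) and the keys combined, submission and completion are taken from the code, the numbers are placeholders.

# Hypothetical example of a submit-*.json result file as the updated loader reads it;
# the old one-liner read the keys "submit" and "complete" instead of
# "submission" and "completion".
example_result = {
    "list": [{
        "report": {
            "time": {
                "combined": 12345,
                "submission": 2345,
                "completion": 10000
            }
        }
    }]
}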
@@ -61,9 +66,9 @@ def plot_submit_graph(file_paths, type_label):
     times[0] = [t / 1 for t in times[0]]
     times[1] = [t / 4 for t in times[1]]
     times[2] = [t / (1024) for t in times[2]]
-    times[3] = [t / (1024*1024) for t in times[3]]
+    times[3] = [t / (32*1024) for t in times[3]]
-    throughput = [[calc_throughput(1024,time) for time in t] for t in times]
+    throughput = [[calc_throughput(1000*1000,time) for time in t] for t in times]
     idx = 0
     for run_set in throughput:
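Note: the divisors mirror the size list: each series is divided by its task size in KiB (1, 4, 1024, and now 32*1024 in place of the former 1024*1024 for 1 GiB), so all four series are reduced to time per KiB before calc_throughput is applied with a single reference size. A small sketch of that relationship; the mapping comes from the code above, the helper name is illustrative only.

# Illustrative only: divisor == task size expressed in KiB, matching the
# hard-coded factors above (1 KiB -> 1, 4 KiB -> 4, 1 MiB -> 1024, 32 MiB -> 32*1024).
size_in_kib = {"1kib": 1, "4kib": 4, "1mib": 1024, "32mib": 32 * 1024}

def normalize_per_kib(times_for_size, size_key):
    # scale every measured time down to a per-KiB time so that all
    # task sizes can share one throughput axis
    return [t / size_in_kib[size_key] for t in times_for_size]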
@@ -76,8 +81,7 @@ def plot_submit_graph(file_paths, type_label):
 # Main function to iterate over files and create plots for the new benchmark
 def main():
-    folder_path = "benchmark-results/submit-bench/" # Replace with the actual path to your folder
+    folder_path = "benchmark-results/" # Replace with the actual path to your folder
     for type_label in types:
         file_paths = [os.path.join(folder_path, f"submit-{type_label}-{size}-1e.json") for size in sizes]
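Note: with the updated folder and the size list from the top of the file, each submission type is loaded from four files. A quick sketch of what the f-string expands to, derived from the code rather than from the directory contents:

import os

folder_path = "benchmark-results/"
sizes = ["1kib", "4kib", "1mib", "32mib"]

# e.g. for type_label == "bs10" this builds:
#   benchmark-results/submit-bs10-1kib-1e.json ... benchmark-results/submit-bs10-32mib-1e.json
file_paths = [os.path.join(folder_path, f"submit-bs10-{size}-1e.json") for size in sizes]
print(file_paths)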
@@ -85,9 +89,10 @@ def main():
     df = pd.DataFrame(data)
     df.set_index(index, inplace=True)
-    print(df)
+    df = df.sort_values(y_label)
-    sns.barplot(x=x_label, y=y_label, hue=var_label, data=df, palette="rocket", errorbar="sd")
+    sns.catplot(x=x_label, y=y_label, hue=var_label, data=df, kind='bar', height=5, aspect=1, palette="viridis", errorbar="sd")
     plt.title(title)
     plt.savefig(os.path.join(folder_path, "plot-perf-submitmethod.png"), bbox_inches='tight')
     plt.show()
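Note: sns.catplot is a figure-level function; unlike the axes-level sns.barplot it replaces, it creates its own figure (sized via height and aspect) and returns a FacetGrid. A slightly more explicit variant would keep that grid and save through it; this is only a sketch reusing the script's variable names, not part of the commit:

# Sketch only, same parameters as in the diff; x_label, y_label, var_label,
# df, title and folder_path are the script's own variables.
g = sns.catplot(x=x_label, y=y_label, hue=var_label, data=df,
                kind='bar', height=5, aspect=1, palette="viridis", errorbar="sd")
g.figure.suptitle(title)
g.savefig(os.path.join(folder_path, "plot-perf-submitmethod.png"), bbox_inches='tight')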

benchmarks/benchmark-results/plot-perf-submitmethod.png (BIN)

After: 691 × 453 px, 36 KiB
