Browse Source

Use a uniform colormap in the plots and use a separate output folder for the plots

master
Constantin Fürst 12 months ago
parent
commit
c0c75aa51b
  1. 0
      benchmarks/benchmark-plots/plot-opt-submitmethod.png
  2. BIN
      benchmarks/benchmark-plots/plot-perf-allnodes-throughput.png
  3. BIN
      benchmarks/benchmark-plots/plot-perf-diff-throughput.png
  4. 0
      benchmarks/benchmark-plots/plot-perf-enginelocation.png
  5. 0
      benchmarks/benchmark-plots/plot-perf-mtsubmit.png
  6. BIN
      benchmarks/benchmark-plots/plot-perf-smart-throughput.png
  7. 7
      benchmarks/benchmark-plotters/plot-cost-mtsubmit.py
  8. 7
      benchmarks/benchmark-plotters/plot-perf-enginelocation.py
  9. 9
      benchmarks/benchmark-plotters/plot-perf-peakthroughput.py
  10. 7
      benchmarks/benchmark-plotters/plot-perf-submitmethod.py
  11. BIN
      benchmarks/benchmark-results/plot-perf-allnodes-throughput.png
  12. BIN
      benchmarks/benchmark-results/plot-perf-smart-throughput.png

0
benchmarks/benchmark-results/plot-opt-submitmethod.png → benchmarks/benchmark-plots/plot-opt-submitmethod.png

Before

Width: 594  |  Height: 488  |  Size: 39 KiB

After

Width: 594  |  Height: 488  |  Size: 39 KiB

BIN
benchmarks/benchmark-plots/plot-perf-allnodes-throughput.png

After

Width: 649  |  Height: 580  |  Size: 108 KiB

BIN
benchmarks/benchmark-plots/plot-perf-diff-throughput.png

After

Width: 652  |  Height: 545  |  Size: 92 KiB

0
benchmarks/benchmark-results/plot-perf-enginelocation.png → benchmarks/benchmark-plots/plot-perf-enginelocation.png

Before

Width: 563  |  Height: 488  |  Size: 40 KiB

After

Width: 563  |  Height: 488  |  Size: 40 KiB

0
benchmarks/benchmark-results/plot-perf-mtsubmit.png → benchmarks/benchmark-plots/plot-perf-mtsubmit.png

Before

Width: 563  |  Height: 522  |  Size: 37 KiB

After

Width: 563  |  Height: 522  |  Size: 37 KiB

BIN
benchmarks/benchmark-plots/plot-perf-smart-throughput.png

After

Width: 684  |  Height: 580  |  Size: 116 KiB

7
benchmarks/benchmark-plotters/plot-cost-mtsubmit.py

@@ -81,11 +81,12 @@ def process_file_to_dataset(file_path, engine_label, thread_count):
# process_file_to_dataset for them in order to build a dataframe # process_file_to_dataset for them in order to build a dataframe
# which is then displayed and saved # which is then displayed and saved
def main(): def main():
folder_path = "benchmark-results/"
result_path = "benchmark-results/"
output_path = "benchmark-plots/"
for engine_label in engine_counts: for engine_label in engine_counts:
for thread_count in thread_counts: for thread_count in thread_counts:
file = os.path.join(folder_path, f"mtsubmit-{thread_count}-{engine_label}.json")
file = os.path.join(result_path, f"mtsubmit-{thread_count}-{engine_label}.json")
process_file_to_dataset(file, engine_label, thread_count) process_file_to_dataset(file, engine_label, thread_count)
df = pd.DataFrame(data) df = pd.DataFrame(data)
@@ -94,7 +95,7 @@ def main():
sns.barplot(x=x_label, y=y_label, hue=var_label, data=df, palette="rocket", errorbar="sd") sns.barplot(x=x_label, y=y_label, hue=var_label, data=df, palette="rocket", errorbar="sd")
plt.title(title) plt.title(title)
plt.savefig(os.path.join(folder_path, "plot-perf-mtsubmit.png"), bbox_inches='tight')
plt.savefig(os.path.join(output_path, "plot-perf-mtsubmit.png"), bbox_inches='tight')
plt.show() plt.show()

7
benchmarks/benchmark-plotters/plot-perf-enginelocation.py

@@ -90,11 +90,12 @@ def create_copy_dataset(file_path, method_label, type_label):
# process_file_to_dataset for them in order to build a dataframe # process_file_to_dataset for them in order to build a dataframe
# which is then displayed and saved # which is then displayed and saved
def main(): def main():
folder_path = "benchmark-results/"
result_path = "benchmark-results/"
output_path = "benchmark-plots/"
for method_label in copy_methods: for method_label in copy_methods:
for type_label in types: for type_label in types:
file = os.path.join(folder_path, f"{method_label}-{type_label}-1e.json")
file = os.path.join(result_path, f"{method_label}-{type_label}-1e.json")
create_copy_dataset(file, method_label, type_label) create_copy_dataset(file, method_label, type_label)
df = pd.DataFrame(data) df = pd.DataFrame(data)
@@ -104,7 +105,7 @@ def main():
sns.barplot(x=x_label, y=y_label, hue=var_label, data=df, palette="rocket", errorbar="sd") sns.barplot(x=x_label, y=y_label, hue=var_label, data=df, palette="rocket", errorbar="sd")
plt.title(title) plt.title(title)
plt.savefig(os.path.join(folder_path, "plot-perf-enginelocation.png"), bbox_inches='tight')
plt.savefig(os.path.join(output_path, "plot-perf-enginelocation.png"), bbox_inches='tight')
plt.show() plt.show()
if __name__ == "__main__": if __name__ == "__main__":

9
benchmarks/benchmark-plotters/plot-perf-peakthroughput.py

@@ -7,7 +7,8 @@ import matplotlib.pyplot as plt
from common import calc_throughput from common import calc_throughput
folder_path = "benchmark-results/"
result_path = "benchmark-results/"
output_path = "benchmark-plots/"
runid = "Run ID" runid = "Run ID"
x_label = "Destination Node" x_label = "Destination Node"
@@ -75,10 +76,10 @@ def process_file_to_dataset(file_path, src_node, dst_node):
def plot_heatmap(table,title,node_config): def plot_heatmap(table,title,node_config):
plt.figure(figsize=(8, 6)) plt.figure(figsize=(8, 6))
sns.heatmap(table, annot=True, cmap="YlGn", fmt=".0f")
sns.heatmap(table, annot=True, cmap="rocket_r", fmt=".0f")
plt.title(title) plt.title(title)
plt.savefig(os.path.join(folder_path, f"plot-perf-{node_config}-throughput.png"), bbox_inches='tight')
plt.savefig(os.path.join(output_path, f"plot-perf-{node_config}-throughput.png"), bbox_inches='tight')
plt.show() plt.show()
@@ -89,7 +90,7 @@ def main(node_config,title):
for src_node in range(16): for src_node in range(16):
for dst_node in range(16): for dst_node in range(16):
size = "512mib" if node_config == "allnodes" and src_node == dst_node and src_node >= 8 else "1gib" size = "512mib" if node_config == "allnodes" and src_node == dst_node and src_node >= 8 else "1gib"
file = os.path.join(folder_path, f"copy-n{src_node}ton{dst_node}-{size}-{node_config}-1e.json")
file = os.path.join(result_path, f"copy-n{src_node}ton{dst_node}-{size}-{node_config}-1e.json")
process_file_to_dataset(file, src_node, dst_node) process_file_to_dataset(file, src_node, dst_node)
df = pd.DataFrame(data) df = pd.DataFrame(data)

7
benchmarks/benchmark-plotters/plot-perf-submitmethod.py

@@ -87,11 +87,12 @@ def process_file_to_dataset(file_path, type_label,size_label):
# process_file_to_dataset for them in order to build a dataframe # process_file_to_dataset for them in order to build a dataframe
# which is then displayed and saved # which is then displayed and saved
def main(): def main():
folder_path = "benchmark-results/"
result_path = "benchmark-results/"
output_path = "benchmark-plots/"
for type_label in types: for type_label in types:
for size in sizes: for size in sizes:
file = os.path.join(folder_path, f"submit-{type_label}-{size}-1e.json")
file = os.path.join(result_path, f"submit-{type_label}-{size}-1e.json")
process_file_to_dataset(file, type_label, size) process_file_to_dataset(file, type_label, size)
df = pd.DataFrame(data) df = pd.DataFrame(data)
@@ -101,7 +102,7 @@ def main():
sns.barplot(x=x_label, y=y_label, hue=var_label, data=df, palette="rocket", errorbar="sd") sns.barplot(x=x_label, y=y_label, hue=var_label, data=df, palette="rocket", errorbar="sd")
plt.title(title) plt.title(title)
plt.savefig(os.path.join(folder_path, "plot-opt-submitmethod.png"), bbox_inches='tight')
plt.savefig(os.path.join(output_path, "plot-opt-submitmethod.png"), bbox_inches='tight')
plt.show() plt.show()
if __name__ == "__main__": if __name__ == "__main__":

BIN
benchmarks/benchmark-results/plot-perf-allnodes-throughput.png

Before

Width: 649  |  Height: 580  |  Size: 105 KiB

BIN
benchmarks/benchmark-results/plot-perf-smart-throughput.png

Before

Width: 684  |  Height: 580  |  Size: 120 KiB

Loading…
Cancel
Save