Constantin Fürst
11 months ago
2 changed files with 0 additions and 223 deletions
-
112benchmarks/benchmark-plotters/plot-perf-enginelocation.py
-
111benchmarks/benchmark-plotters/plot-perf-peakthroughput.py
@ -1,112 +0,0 @@ |
|||||
import os |
|
||||
import json |
|
||||
import pandas as pd |
|
||||
import seaborn as sns |
|
||||
import matplotlib.pyplot as plt |
|
||||
|
|
||||
from common import calc_throughput, index_from_element |
|
||||
|
|
||||
# Column labels used for the pandas dataframe built in main().
runid = "Run ID"
x_label = "Copy Type"
y_label = "Throughput in GiB/s"
var_label = "Configuration"

# Benchmark type identifiers as used in the result file names, and their
# human-readable counterparts (the two lists are index-aligned).
types = ["intersock-n0ton4-1mib", "internode-n0ton1-1mib", "intersock-n0ton4-1gib", "internode-n0ton1-1gib"]
types_nice = ["Inter-Socket 1MiB", "Inter-Node 1MiB", "Inter-Socket 1GiB", "Inter-Node 1GiB"]

# Copy-method identifiers as used in the result file names, and their
# human-readable counterparts (the two lists are index-aligned).
copy_methods = ["dstcopy", "srccopy", "xcopy", "srcoutsidercopy", "dstoutsidercopy", "sockoutsidercopy", "nodeoutsidercopy"]
copy_methods_nice = [ "Engine on DST-Node", "Engine on SRC-Node", "Cross-Copy / Both Engines", "Engine on SRC-Socket, not SRC-Node", "Engine on DST-Socket, not DST-Node", "Engine on different Socket", "Engine on same Socket"]

# Plot title and a longer description of what is shown.
title = \
"""Throughput showing impact of Engine Location\n
Copy Operation on DDR with 1 Engine per WQ"""

description = \
"""Throughput showing impact of Engine Location\n
Some Configurations missing as they are not feesible\n
Copy Operation on DDR with 1 Engine per WQ"""

# Dataframe index columns and the global record list that
# create_copy_dataset() appends one row per measurement to.
index = [runid, x_label, var_label]
data = []
|
||||
|
|
||||
|
|
||||
# loads the measurements from a given file and processes them
# so that they are normalized, meaning that the timings returned
# are nanoseconds per element transfered
def load_time_mesurements(file_path, method_label):
    """Load one benchmark result file and normalize its timings.

    Returns a dict with "total" (time per iteration) and the
    "combined", "submission" and "completion" timing lists.

    :param file_path: path of the json result file to read
    :param method_label: copy-method identifier; "xcopy" gets special handling
    :raises FileNotFoundError: if file_path does not exist (callers catch this)
    """
    with open(file_path, 'r') as file:
        data = json.load(file)
        iterations = data["list"][0]["task"]["iterations"]

        if method_label == "xcopy":
            # xcopy runs on two engines that both copy 1/2 of the entire
            # specified size of 1gib, therefore the maximum time between
            # these two is going to be the total time for copy
            time0 = data["list"][0]["report"]["time"]
            time1 = data["list"][1]["report"]["time"]

            return {
                "total": max(time0["total"], time1["total"]) / iterations,
                "combined": [max(x, y) for x, y in zip(time0["combined"], time1["combined"])],
                # bug fix: the original dict listed "submission" twice, both
                # entries built from the completion timings, and omitted
                # "completion" entirely (the non-xcopy branch returns both)
                "submission": [max(x, y) for x, y in zip(time0["submission"], time1["submission"])],
                "completion": [max(x, y) for x, y in zip(time0["completion"], time1["completion"])],
            }
        else:
            return {
                "total": data["list"][0]["report"]["time"]["total"] / iterations,
                "combined": data["list"][0]["report"]["time"]["combined"],
                "submission": data["list"][0]["report"]["time"]["submission"],
                "completion": data["list"][0]["report"]["time"]["completion"]
            }
|
||||
|
|
||||
|
|
||||
# processes a single result file and appends the desired timings
# to the global data-array; handles multiple runs via a run id and
# silently skips files that were never benchmarked
def create_copy_dataset(file_path, method_label, type_label):
    """Read one result file and append throughput rows to the global data list.

    :param file_path: json result file for one method/type combination
    :param method_label: entry of copy_methods identifying the engine placement
    :param type_label: entry of types identifying transfer direction and size
    """
    nice_method = copy_methods_nice[index_from_element(method_label, copy_methods)]
    nice_type = types_nice[index_from_element(type_label, types)]

    # transferred data size is encoded in the type label
    if type_label in ["internode-n0ton1-1gib", "intersock-n0ton4-1gib"]:
        data_size = 1024 * 1024 * 1024
    elif type_label in ["internode-n0ton1-1mib", "intersock-n0ton4-1mib"]:
        data_size = 1024 * 1024
    else:
        data_size = 0

    try:
        totals = [load_time_mesurements(file_path, method_label)["total"]]
        for run_idx, total in enumerate(totals):
            data.append({
                runid: run_idx,
                x_label: nice_type,
                var_label: nice_method,
                y_label: calc_throughput(data_size, total),
            })
    except FileNotFoundError:
        # not every configuration was benchmarked; missing files are expected
        return
|
||||
|
|
||||
|
|
||||
# loops over all possible configuration combinations and calls
# create_copy_dataset for them in order to build a dataframe
# which is then displayed and saved
def main():
    """Build the dataframe for every method/type pair, then render and save the bar plot."""
    result_path = "benchmark-results/"
    output_path = "benchmark-plots/"

    for method_label in copy_methods:
        for type_label in types:
            result_file = os.path.join(result_path, f"{method_label}-{type_label}-1e.json")
            create_copy_dataset(result_file, method_label, type_label)

    frame = pd.DataFrame(data)
    frame.set_index(index, inplace=True)
    frame = frame.sort_values(y_label)

    # grouped bar chart: one group per copy type, one bar per configuration
    sns.barplot(x=x_label, y=y_label, hue=var_label, data=frame, palette="rocket", errorbar="sd")

    plt.title(title)
    plt.savefig(os.path.join(output_path, "plot-perf-enginelocation.png"), bbox_inches='tight')
    plt.show()
|
||||
|
|
||||
# Script entry point: build the dataset and render the plot.
if __name__ == "__main__":
    main()
|
@ -1,111 +0,0 @@ |
|||||
import os |
|
||||
import json |
|
||||
import pandas as pd |
|
||||
from itertools import chain |
|
||||
import seaborn as sns |
|
||||
import matplotlib.pyplot as plt |
|
||||
|
|
||||
from common import calc_throughput |
|
||||
|
|
||||
# Input/output directories for benchmark results and generated plots.
result_path = "benchmark-results/"
output_path = "benchmark-plots/"

# Column labels used for the pandas dataframe built in main().
runid = "Run ID"
x_label = "Destination Node"
y_label = "Source Node"
v_label = "Throughput"

# Titles for the three heatmaps generated by this script.
title_allnodes = \
"""Copy Throughput in GiB/s tested for 1GiB Elements\n
Using all 8 DSA Chiplets available on the System"""
title_smartnodes = \
"""Copy Throughput in GiB/s tested for 1GiB Elements\n
Using Cross-Copy for Intersocket and all 4 Chiplets of Socket for Intrasocket"""
title_difference = \
"""Gain in Copy Throughput in GiB/s of All-DSA vs. Smart Assignment"""

# Longer descriptions of the two benchmark configurations.
description_smartnodes = \
"""Copy Throughput in GiB/s tested for 1GiB Elements\n
Nodes of {8...15} are HBM accessors for their counterparts (minus 8)\n
Using all 4 DSA Chiplets of a Socket for Intra-Socket Operation\n
And using only the Source and Destination Nodes DSA for Inter-Socket"""
description_allnodes = \
"""Copy Throughput in GiB/s tested for 1GiB Elements\n
Nodes of {8...15} are HBM accessors for their counterparts (minus 8)\n
Using all 8 DSA Chiplets available on the System"""

# Dataframe index columns and the global record list that
# process_file_to_dataset() appends one row per measurement to.
index = [ runid, x_label, y_label]
data = []
|
||||
|
|
||||
|
|
||||
# loads the measurements from a given file and processes them
# so that they are normalized, meaning that the timings returned
# are nanoseconds per element transfered
def load_time_mesurements(file_path):
    """Load one benchmark result file and return its normalized timings.

    Timings are scaled by engine count, batch size and (for "total")
    iteration count so that results of different configurations compare.

    :param file_path: path of the json result file to read
    :raises FileNotFoundError: if file_path does not exist (callers catch this)
    """
    with open(file_path, 'r') as file:
        result = json.load(file)

    entries = result["list"]
    count = result["count"]
    task = entries[0]["task"]
    # a batch size of 0 marks "no batching", which divides like a batch of 1
    raw_batch = task["batching"]["batch_size"]
    batch_size = raw_batch if raw_batch > 0 else 1
    iterations = task["iterations"]

    def scaled_samples(key):
        # concatenate the per-engine timing lists, scaling every sample
        return [x / (count * batch_size) for i in range(count) for x in entries[i]["report"]["time"][key]]

    return {
        "size": task["size"],
        "total": sum(e["report"]["time"]["total"] / (iterations * batch_size * count * count) for e in entries),
        "combined": scaled_samples("combined"),
        "submission": scaled_samples("submission"),
        "completion": scaled_samples("completion"),
    }
|
||||
|
|
||||
|
|
||||
# processes a single result file and appends the desired timings
# to the global data-array; handles multiple runs via a run id and
# silently skips files that were never benchmarked
def process_file_to_dataset(file_path, src_node, dst_node):
    """Read one result file and append throughput rows to the global data list.

    :param file_path: json result file for one src/dst node pair
    :param src_node: numa node the copy reads from
    :param dst_node: numa node the copy writes to
    """
    try:
        measurement = load_time_mesurements(file_path)
        for run_idx, t in enumerate([measurement["total"]]):
            data.append({
                runid: run_idx,
                x_label: dst_node,
                y_label: src_node,
                v_label: calc_throughput(measurement["size"], t),
            })
    except FileNotFoundError:
        # not every node pair was benchmarked; missing files are expected
        return
|
||||
|
|
||||
|
|
||||
def plot_heatmap(table, title, node_config):
    """Render *table* as an annotated heatmap, save it as a PNG and display it.

    :param table: pivot table with source nodes as rows, destination nodes as columns
    :param title: title printed above the plot
    :param node_config: configuration name embedded in the output file name
    """
    plt.figure(figsize=(8, 6))

    sns.heatmap(table, annot=True, cmap="rocket_r", fmt=".0f")

    plt.title(title)
    target = os.path.join(output_path, f"plot-perf-{node_config}-throughput.png")
    plt.savefig(target, bbox_inches='tight')
    plt.show()
|
||||
|
|
||||
|
|
||||
# loops over all possible configuration combinations and calls
# process_file_to_dataset for them in order to build a dataframe
# which is then displayed and saved
def main(node_config, title):
    """Collect all 16x16 node-pair results for *node_config*, plot the heatmap and return the pivot table."""
    for src_node in range(16):
        for dst_node in range(16):
            # HBM self-copies in the allnodes run were measured with 512mib elements
            use_small = node_config == "allnodes" and src_node == dst_node and src_node >= 8
            size = "512mib" if use_small else "1gib"
            file = os.path.join(result_path, f"copy-n{src_node}ton{dst_node}-{size}-{node_config}-1e.json")
            process_file_to_dataset(file, src_node, dst_node)

    frame = pd.DataFrame(data)

    # reset the shared record list so a second main() call starts clean
    data.clear()
    frame.set_index(index, inplace=True)
    data_pivot = frame.pivot_table(index=y_label, columns=x_label, values=v_label)

    plot_heatmap(data_pivot, title, node_config)

    return data_pivot
|
||||
|
|
||||
|
|
||||
# Script entry point: plot the all-DSA and smart-assignment heatmaps,
# then the element-wise gain of the former over the latter.
if __name__ == "__main__":
    dall = main("allnodes", title_allnodes)
    dsmart = main("smart", title_smartnodes)
    ddiff = dall - dsmart
    plot_heatmap(ddiff,title_difference,"diff")
|
Write
Preview
Loading…
Cancel
Save
Reference in new issue