Browse Source

fix bugs that were introduced by changes to the plotter scripts

master
Constantin Fürst 1 year ago
parent
commit
eb4ea5162d
  1. 8
      benchmarks/benchmark-plotters/plot-cost-mtsubmit.py
  2. 23
      benchmarks/benchmark-plotters/plot-perf-enginelocation.py
  3. 16
      benchmarks/benchmark-plotters/plot-perf-peakthroughput.py
  4. 10
      benchmarks/benchmark-plotters/plot-perf-submitmethod.py

8
benchmarks/benchmark-plotters/plot-cost-mtsubmit.py

@@ -7,13 +7,13 @@ import matplotlib.pyplot as plt
runid = "Run ID" runid = "Run ID"
x_label = "Thread Count" x_label = "Thread Count"
y_label = "Throughput in GiB/s LogScale"
y_label = "Throughput in GiB/s"
var_label = "Thread Counts" var_label = "Thread Counts"
thread_counts = ["1t", "2t", "4t", "8t", "12t"] thread_counts = ["1t", "2t", "4t", "8t", "12t"]
thread_counts_nice = ["1 Thread", "2 Threads", "4 Threads", "8 Threads", "12 Threads"] thread_counts_nice = ["1 Thread", "2 Threads", "4 Threads", "8 Threads", "12 Threads"]
engine_counts = ["1mib-1e", "1mib-4e", "1gib-1e", "1gib-4e"] engine_counts = ["1mib-1e", "1mib-4e", "1gib-1e", "1gib-4e"]
engine_counts_nice = ["1 E/WQ and Tasksize 1 MiB", "4 E/WQ and Tasksize 1 MiB", "1 E/WQ and Tasksize 1 GiB", "4 E/WQ and Tasksize 1 GiB"] engine_counts_nice = ["1 E/WQ and Tasksize 1 MiB", "4 E/WQ and Tasksize 1 MiB", "1 E/WQ and Tasksize 1 GiB", "4 E/WQ and Tasksize 1 GiB"]
title = "Per-Thread Throughput - 120 Copy Operations split on Threads Intra-Node on DDR with Size 1 MiB"
title = "Per-Thread Throughput - 120 Copy Operations split on Threads Intra-Node on DDR"
index = [runid, x_label, var_label] index = [runid, x_label, var_label]
data = [] data = []
@@ -42,7 +42,7 @@ def load_time_mesurements(file_path):
# therefore we divide the result by 120/n_threads to get the per-element speed # therefore we divide the result by 120/n_threads to get the per-element speed
return { return {
"total" : sum([x / (iterations * (120 / count)) for x in list(chain(*[data["list"][i]["report"]["time"]["total"] for i in range(count)]))]),
"total" : sum([x / (iterations * 120) for x in list(chain([data["list"][i]["report"]["time"]["total"] for i in range(count)]))]),
"combined" : [x / (120 / count) for x in list(chain(*[data["list"][i]["report"]["time"]["combined"] for i in range(count)]))], "combined" : [x / (120 / count) for x in list(chain(*[data["list"][i]["report"]["time"]["combined"] for i in range(count)]))],
"submission" : [x / (120 / count) for x in list(chain(*[data["list"][i]["report"]["time"]["submission"] for i in range(count)]))], "submission" : [x / (120 / count) for x in list(chain(*[data["list"][i]["report"]["time"]["submission"] for i in range(count)]))],
"completion" : [x / (120 / count) for x in list(chain(*[data["list"][i]["report"]["time"]["completion"] for i in range(count)]))] "completion" : [x / (120 / count) for x in list(chain(*[data["list"][i]["report"]["time"]["completion"] for i in range(count)]))]
@@ -61,7 +61,7 @@ def process_file_to_dataset(file_path, engine_label, thread_count):
data_size = 1024*1024 data_size = 1024*1024
try: try:
time = load_time_mesurements(file_path)["total"]
time = [load_time_mesurements(file_path)["total"]]
run_idx = 0 run_idx = 0
for t in time: for t in time:
data.append({ runid : run_idx, x_label: thread_count_nice, var_label : engine_nice, y_label : calc_throughput(data_size, t)}) data.append({ runid : run_idx, x_label: thread_count_nice, var_label : engine_nice, y_label : calc_throughput(data_size, t)})

23
benchmarks/benchmark-plotters/plot-perf-enginelocation.py

@@ -12,7 +12,7 @@ var_label = "Configuration"
types = ["intersock-n0ton4-1mib", "internode-n0ton1-1mib", "intersock-n0ton4-1gib", "internode-n0ton1-1gib"] types = ["intersock-n0ton4-1mib", "internode-n0ton1-1mib", "intersock-n0ton4-1gib", "internode-n0ton1-1gib"]
types_nice = ["Inter-Socket Copy 1MiB", "Inter-Node Copy 1MiB", "Inter-Socket Copy 1GiB", "Inter-Node Copy 1GiB"] types_nice = ["Inter-Socket Copy 1MiB", "Inter-Node Copy 1MiB", "Inter-Socket Copy 1GiB", "Inter-Node Copy 1GiB"]
copy_methods = ["dstcopy", "srccopy", "xcopy", "srcoutsidercopy", "dstoutsidercopy", "sockoutsidercopy", "nodeoutsidercopy"] copy_methods = ["dstcopy", "srccopy", "xcopy", "srcoutsidercopy", "dstoutsidercopy", "sockoutsidercopy", "nodeoutsidercopy"]
copy_methods_nice = [ "Engine on DST-Node", "Engine on SRC-Node", "Cross-Copy / Both Engines", "Engine on SRC-Socket, not SRC-Node", "Engine on DST-Socket, not DST-Node", "Engine on different Socket", "Engine on same Socket but neither SRC nor DST Node"]
copy_methods_nice = [ "Engine on DST-Node", "Engine on SRC-Node", "Cross-Copy / Both Engines", "Engine on SRC-Socket, not SRC-Node", "Engine on DST-Socket, not DST-Node", "Engine on different Socket", "Engine on same Socket"]
title = "Performance of Engine Location - Copy Operation on DDR with 1 Engine per WQ" title = "Performance of Engine Location - Copy Operation on DDR with 1 Engine per WQ"
index = [runid, x_label, var_label] index = [runid, x_label, var_label]
@@ -45,14 +45,19 @@ def load_time_mesurements(file_path,method_label):
time1 = data["list"][1]["report"]["time"] time1 = data["list"][1]["report"]["time"]
return { return {
"total": max(time0["total"],time1["total"]),
"total": max(time0["total"],time1["total"]) / iterations,
"combined" : [max(x,y) for x,y in zip(time0["combined"], time1["combined"])], "combined" : [max(x,y) for x,y in zip(time0["combined"], time1["combined"])],
"submission" : [max(x,y) for x,y in zip(time0["completion"], time1["completion"])], "submission" : [max(x,y) for x,y in zip(time0["completion"], time1["completion"])],
"completion" : [max(x,y) for x,y in zip(time0["submission"], time1["submission"])]
"submission" : [max(x,y) for x,y in zip(time0["completion"], time1["completion"])],
} }
else: else:
return data["list"][0]["report"]["time"]
return {
"total": data["list"][0]["report"]["time"]["total"] / iterations,
"combined": data["list"][0]["report"]["time"]["combined"],
"submission": data["list"][0]["report"]["time"]["submission"],
"completion": data["list"][0]["report"]["time"]["completion"]
}
def create_copy_dataset(file_path, method_label, type_label): def create_copy_dataset(file_path, method_label, type_label):
method_index = index_from_element(method_label,copy_methods) method_index = index_from_element(method_label,copy_methods)
@@ -61,14 +66,14 @@ def create_copy_dataset(file_path, method_label, type_label):
type_nice = types_nice[type_index] type_nice = types_nice[type_index]
data_size = 0 data_size = 0
if type_label in ["internode-n0ton1-1mib", "intersock-n0ton4-1mib"]:
data_size = 1024 * 1024
else:
data_size = 1024*1024*1024
if type_label in ["internode-n0ton1-1gib", "intersock-n0ton4-1gib"]: data_size = 1024*1024*1024
elif type_label in ["internode-n0ton1-1mib", "intersock-n0ton4-1mib"]: data_size = 1024 * 1024
else: data_size = 0
try: try:
time = load_time_mesurements(file_path,method_label)["total"]
run_idx = 0 run_idx = 0
time = [load_time_mesurements(file_path,method_label)["total"]]
for t in time: for t in time:
data.append({ runid : run_idx, x_label: type_nice, var_label : method_nice, y_label : calc_throughput(data_size, t)}) data.append({ runid : run_idx, x_label: type_nice, var_label : method_nice, y_label : calc_throughput(data_size, t)})
run_idx = run_idx + 1 run_idx = run_idx + 1

16
benchmarks/benchmark-plotters/plot-perf-peakthroughput.py

@@ -12,6 +12,7 @@ y_label = "Source Node"
v_label = "Throughput" v_label = "Throughput"
title = "Copy Throughput for 1GiB Elements running on SRC Node" title = "Copy Throughput for 1GiB Elements running on SRC Node"
index = [ runid, x_label, y_label]
data = [] data = []
@@ -38,10 +39,10 @@ def load_time_mesurements(file_path):
iterations = data["list"][0]["task"]["iterations"] iterations = data["list"][0]["task"]["iterations"]
return { return {
"total": data["list"][0]["report"]["total"] / iterations,
"combined": data["list"][0]["report"]["combined"],
"submission": data["list"][0]["report"]["submission"],
"completion": data["list"][0]["report"]["completion"]
"total": data["list"][0]["report"]["time"]["total"] / iterations,
"combined": data["list"][0]["report"]["time"]["combined"],
"submission": data["list"][0]["report"]["time"]["submission"],
"completion": data["list"][0]["report"]["time"]["completion"]
} }
@@ -49,7 +50,7 @@ def process_file_to_dataset(file_path, src_node, dst_node):
data_size = 1024*1024*1024 data_size = 1024*1024*1024
try: try:
time = load_time_mesurements(file_path)["total"]
time = [load_time_mesurements(file_path)["total"]]
run_idx = 0 run_idx = 0
for t in time: for t in time:
data.append({ runid : run_idx, x_label : dst_node, y_label : src_node, v_label: calc_throughput(data_size, t)}) data.append({ runid : run_idx, x_label : dst_node, y_label : src_node, v_label: calc_throughput(data_size, t)})
@@ -67,9 +68,10 @@ def main():
process_file_to_dataset(file, src_node, dst_node) process_file_to_dataset(file, src_node, dst_node)
df = pd.DataFrame(data) df = pd.DataFrame(data)
data_pivot = df.pivot_table(index=y_label, columns=x_label, values=v_label, aggfunc=mean_without_outliers)
df.set_index(index, inplace=True)
data_pivot = df.pivot_table(index=y_label, columns=x_label, values=v_label)
sns.heatmap(data_pivot, annot=True, palette="rocket", fmt=".0f")
sns.heatmap(data_pivot, annot=True, cmap="rocket_r", fmt=".0f")
plt.title(title) plt.title(title)
plt.savefig(os.path.join(folder_path, "plot-perf-peakthroughput.png"), bbox_inches='tight') plt.savefig(os.path.join(folder_path, "plot-perf-peakthroughput.png"), bbox_inches='tight')

10
benchmarks/benchmark-plotters/plot-perf-submitmethod.py

@@ -47,10 +47,10 @@ def load_time_mesurements(file_path,type_label):
else: divisor = 1 else: divisor = 1
return { return {
"total": data["list"][0]["report"]["total"] / (iterations * divisor),
"combined": [ x / divisor for x in data["list"][0]["report"]["combined"]],
"submission": [ x / divisor for x in data["list"][0]["report"]["submission"]],
"completion": [ x / divisor for x in data["list"][0]["report"]["completion"]]
"total": data["list"][0]["report"]["time"]["total"] / (iterations * divisor),
"combined": [ x / divisor for x in data["list"][0]["report"]["time"]["combined"]],
"submission": [ x / divisor for x in data["list"][0]["report"]["time"]["submission"]],
"completion": [ x / divisor for x in data["list"][0]["report"]["time"]["completion"]]
} }
@@ -69,7 +69,7 @@ def process_file_to_dataset(file_path, type_label,size_label):
else: data_size = 0 else: data_size = 0
try: try:
time = load_time_mesurements(file_path,type_label)["total"]
time = [load_time_mesurements(file_path,type_label)["total"]]
run_idx = 0 run_idx = 0
for t in time: for t in time:
data.append({ runid : run_idx, x_label: type_nice, var_label : size_nice, y_label : calc_throughput(data_size, t)}) data.append({ runid : run_idx, x_label: type_nice, var_label : size_nice, y_label : calc_throughput(data_size, t)})

Loading…
Cancel
Save