diff --git a/benchmarks/benchmark-descriptors/engine-location-bench/outsidercopy-internode-n0ton1-1gib.json b/benchmarks/benchmark-descriptors/engine-location-bench/nodeoutsidercopy-internode-n0ton1-1gib.json
similarity index 100%
rename from benchmarks/benchmark-descriptors/engine-location-bench/outsidercopy-internode-n0ton1-1gib.json
rename to benchmarks/benchmark-descriptors/engine-location-bench/nodeoutsidercopy-internode-n0ton1-1gib.json
diff --git a/benchmarks/benchmark-descriptors/engine-location-bench/outsidercopy-internode-n0ton1-1mib.json b/benchmarks/benchmark-descriptors/engine-location-bench/nodeoutsidercopy-internode-n0ton1-1mib.json
similarity index 100%
rename from benchmarks/benchmark-descriptors/engine-location-bench/outsidercopy-internode-n0ton1-1mib.json
rename to benchmarks/benchmark-descriptors/engine-location-bench/nodeoutsidercopy-internode-n0ton1-1mib.json
diff --git a/benchmarks/benchmark-descriptors/engine-location-bench/sockoutsidercopy-internode-n0ton1-1gib.json b/benchmarks/benchmark-descriptors/engine-location-bench/sockoutsidercopy-internode-n0ton1-1gib.json
new file mode 100644
index 0000000..f2aa177
--- /dev/null
+++ b/benchmarks/benchmark-descriptors/engine-location-bench/sockoutsidercopy-internode-n0ton1-1gib.json
@@ -0,0 +1,21 @@
+{
+    "count": 1,
+    "list": [
+        {
+            "affinity": {
+                "nnode_dst": 1,
+                "nnode_src": 0,
+                "node": 3
+            },
+            "task": {
+                "batching": {
+                    "batch_size": 0,
+                    "batch_submit": false
+                },
+                "iterations": 1000,
+                "size": 1073741824
+            }
+        }
+    ],
+    "path": "hw"
+}
\ No newline at end of file
diff --git a/benchmarks/benchmark-descriptors/engine-location-bench/sockoutsidercopy-internode-n0ton1-1mib.json b/benchmarks/benchmark-descriptors/engine-location-bench/sockoutsidercopy-internode-n0ton1-1mib.json
new file mode 100644
index 0000000..461f6d1
--- /dev/null
+++ b/benchmarks/benchmark-descriptors/engine-location-bench/sockoutsidercopy-internode-n0ton1-1mib.json
@@ -0,0 +1,21 @@
+{
+    "count": 1,
+    "list": [
+        {
+            "affinity": {
+                "nnode_dst": 1,
+                "nnode_src": 0,
+                "node": 3
+            },
+            "task": {
+                "batching": {
+                    "batch_size": 0,
+                    "batch_submit": false
+                },
+                "iterations": 1000,
+                "size": 1048576
+            }
+        }
+    ],
+    "path": "hw"
+}
\ No newline at end of file
diff --git a/benchmarks/benchmark-plotters/plot-perf-enginelocation.py b/benchmarks/benchmark-plotters/plot-perf-enginelocation.py
index f78db1f..c4df13c 100644
--- a/benchmarks/benchmark-plotters/plot-perf-enginelocation.py
+++ b/benchmarks/benchmark-plotters/plot-perf-enginelocation.py
@@ -11,8 +11,8 @@ y_label = "Throughput in GiB/s"
 var_label = "Configuration"
 types = ["intersock-n0ton4-1mib", "internode-n0ton1-1mib", "intersock-n0ton4-1gib", "internode-n0ton1-1gib"]
 types_nice = ["Inter-Socket Copy 1MiB", "Inter-Node Copy 1MiB", "Inter-Socket Copy 1GiB", "Inter-Node Copy 1GiB"]
-copy_methods = ["dstcopy", "srccopy", "xcopy"]
-copy_methods_nice = [ "Engine on DST-Node", "Engine on SRC-Node", "Cross-Copy / Both Engines" ]
+copy_methods = ["dstcopy", "srccopy", "xcopy", "srcoutsidercopy", "dstoutsidercopy", "sockoutsidercopy", "nodeoutsidercopy"]
+copy_methods_nice = [ "Engine on DST-Node", "Engine on SRC-Node", "Cross-Copy / Both Engines", "Engine on SRC-Socket, not SRC-Node", "Engine on DST-Socket, not DST-Node", "Engine on different Socket", "Engine on same Socket but neither SRC nor DST Node"]
 title = "Performance of Engine Location - Copy Operation on DDR with 1 Engine per WQ"
 
 index = [runid, x_label, var_label]
@@ -34,6 +34,7 @@ def index_from_element(value,array):
 def load_and_process_copy_json(file_path,method_label):
     with open(file_path, 'r') as file:
         data = json.load(file)
+        iterations = data["list"][0]["task"]["iterations"]
 
     # Extracting time from JSON structure
     if method_label == "xcopy":
@@ -52,35 +53,35 @@ def load_and_process_copy_json(file_path,method_label):
     return data["list"][0]["report"]["time"]
 
 # Function to plot the graph for the new benchmark
-def create_copy_dataset(file_paths, method_label):
-    times = []
-
+def create_copy_dataset(file_path, method_label, type_label):
     method_index = index_from_element(method_label,copy_methods)
     method_nice = copy_methods_nice[method_index]
+    type_index = index_from_element(type_label, types)
+    type_nice = types_nice[type_index]
+    data_size = 0
 
-    idx = 0
-    for file_path in file_paths:
-        time = load_and_process_copy_json(file_path,method_label)
-        times.append(time["total"])
-        idx = idx + 1
-
-    throughput = [[calc_throughput(1024*1024,time) for time in t] for t in times]
+    if type_label in ["internode-n0ton1-1mib", "intersock-n0ton4-1mib"]:
+        data_size = 1024 * 1024
+    else:
+        data_size = 1024*1024*1024
 
-    idx = 0
-    for run_set in throughput:
+    try:
+        time = load_and_process_copy_json(file_path,method_label)["total"]
         run_idx = 0
-        for run in run_set:
-            data.append({ runid : run_idx, x_label: types_nice[idx], var_label : method_nice, y_label : throughput[idx][run_idx]})
+        for t in time:
+            data.append({ runid : run_idx, x_label: type_nice, var_label : method_nice, y_label : calc_throughput(data_size, t)})
             run_idx = run_idx + 1
-        idx = idx + 1
+    except FileNotFoundError:
+        return
 
 # Main function to iterate over files and create plots for the new benchmark
 def main():
     folder_path = "benchmark-results/"
 
     for method_label in copy_methods:
-        copy_file_paths = [os.path.join(folder_path, f"{method_label}-{type_label}-1e.json") for type_label in types]
-        create_copy_dataset(copy_file_paths, method_label)
+        for type_label in types:
+            file = os.path.join(folder_path, f"{method_label}-{type_label}-1e.json")
+            create_copy_dataset(file, method_label, type_label)
 
     df = pd.DataFrame(data)
     df.set_index(index, inplace=True)
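
Note: the two new sockoutsidercopy descriptors above differ only in the transfer "size" (1 MiB vs. 1 GiB); both place the engine on node 3 while copying from node 0 to node 1. As a rough illustration of that shared structure (not part of this patch, and the generator and its make_descriptor helper are hypothetical), size variants could be produced along these lines:

# Illustrative sketch only (not part of this patch): emits descriptors with the
# structure shown above for both transfer sizes, using the new
# "sockoutsidercopy-internode-n0ton1-<size>.json" naming scheme.
import json

def make_descriptor(size_bytes, src_node=0, dst_node=1, engine_node=3):
    # Mirrors the fields of the new descriptor files: one task entry,
    # hardware path, batching disabled, 1000 iterations.
    return {
        "count": 1,
        "list": [
            {
                "affinity": {
                    "nnode_dst": dst_node,
                    "nnode_src": src_node,
                    "node": engine_node
                },
                "task": {
                    "batching": {
                        "batch_size": 0,
                        "batch_submit": False
                    },
                    "iterations": 1000,
                    "size": size_bytes
                }
            }
        ],
        "path": "hw"
    }

for label, size in [("1mib", 1 << 20), ("1gib", 1 << 30)]:
    with open(f"sockoutsidercopy-internode-n0ton1-{label}.json", "w") as f:
        json.dump(make_descriptor(size), f, indent=4)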