
finalize plotter script and add timing results

master · Constantin Fürst · 11 months ago
commit bd23ae138e
  1. BIN  qdp_project/plots/plot-timing-cacheaccess.pdf
  2. BIN  qdp_project/plots/plot-timing-distprefetch.pdf
  3. BIN  qdp_project/plots/plot-timing-dram.pdf
  4. BIN  qdp_project/plots/plot-timing-hbm.pdf
  5. BIN  qdp_project/plots/plot-timing-prefetch.pdf
  6. 40   qdp_project/plotter.py

qdp_project/plotter.py

@@ -5,8 +5,10 @@ import seaborn as sns
 import matplotlib.pyplot as plt
 
 output_path = "./plots"
-prefetch_result = "./evaluation-results/qdp-xeonmax-prefetch-tca2-tcb1-tcj1-tmul8-wl4294967296-cs16777216.csv"
-dram_result = "./evaluation-results/qdp-xeonmax-dram-tca2-tcb0-tcj1-tmul8-wl4294967296-cs2097152.csv"
+hbm_result = "./evaluation-results/baseline/current-hbm/qdp-xeonmax-hbm-tca2-tcb0-tcj1-tmul16-wl4294967296-cs2097152.csv"
+dram_result = "./evaluation-results/baseline/current-dram/qdp-xeonmax-dram-tca2-tcb0-tcj1-tmul16-wl4294967296-cs2097152.csv"
+prefetch_result = "./evaluation-results/outofcacheallocation/qdp-xeonmax-prefetch-tca2-tcb1-tcj1-tmul16-wl4294967296-cs8388608.csv"
+distprefetch_result = "./evaluation-results/distprefetch/qdp-xeonmax-distprefetch-tca1-tcb1-tcj1-tmul32-wl4294967296-cs8388608.csv"
 
 tt_name = "rt-ns"
 function_names = [ "scana-run", "scanb-run", "aggrj-run" ]
@@ -14,30 +16,26 @@ fn_nice = [ "Scan A, Filter", "Scan B, Prefetch", "Aggregate, Project + Sum" ]
 def read_timings_from_csv(fname) -> tuple[list[float], list[str]]:
     t = {}
-    total_time = 0
-    # Read data from CSV file
+    row_count = 0
     with open(fname, newline='') as csvfile:
         reader = csv.DictReader(csvfile, delimiter=';')
         for row in reader:
-            total_time += int(row[tt_name])
+            row_count = row_count + 1
             for i in range(len(function_names)):
                 t[fn_nice[i]] = t.get(fn_nice[i], 0) + int(row[function_names[i]])
-    t = {key: value * 100 / total_time for key, value in t.items() if value != 0}
-    total = sum(list(t.values()))
-    if total < 100.0:
-        t["Waiting / Other"] = 100.0 - total
+    t = {key: value / (1000 * 1000 * row_count) for key, value in t.items() if value != 0}
     return list(t.values()), list(t.keys())
 
 def get_data_prefetch_cache_access() -> tuple[list[float], list[str]]:
-    total = 0.3
-    data = [ 0.07, 0.19, 0.04 ]
+    total = 0.47
+    data = [ 0.01, 0.01, 0.04, 0.42 ]
     data = [ x * 100 / total for x in data ]
-    keys = ["numa_alloc_onnode", "dml::make_mem_move_task", "dml::hardware_device::submit"]
+    keys = ["Cache::GetCacheNode", "Cache::Access Itself", "dml::hardware_device::submit", "dml::make_mem_move_task (operator new)"]
     return data,keys
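
(Note: the reworked read_timings_from_csv reports the mean runtime of each query stage in milliseconds per run instead of a percentage share of total runtime. Below is a minimal standalone sketch of the same conversion, assuming a semicolon-delimited CSV whose timing columns hold nanosecond values; the helper name is hypothetical, the column names match the script's function_names.)

import csv

def mean_ms_per_query(fname: str, columns: list[str]) -> dict[str, float]:
    # Sum the nanosecond timings of each column over all rows,
    # then divide by (1e6 * row count) to get mean milliseconds per query run.
    sums = {c: 0 for c in columns}
    rows = 0
    with open(fname, newline='') as f:
        for row in csv.DictReader(f, delimiter=';'):
            rows += 1
            for c in columns:
                sums[c] += int(row[c])
    return {c: ns / (1000 * 1000 * rows) for c, ns in sums.items()}

# e.g. mean_ms_per_query(dram_result, ["scana-run", "scanb-run", "aggrj-run"])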
@@ -45,13 +43,13 @@ def get_data_prefetch_total() -> tuple[list[float], list[str]]:
     return read_timings_from_csv(prefetch_result)
 
 def get_data_dram_total() -> tuple[list[float], list[str]]:
-    return read_timings_from_csv(dram_result)
+    return
 
 # loops over all possible configuration combinations and calls
 # process_file_to_dataset for them in order to build a dataframe
 # which is then displayed and saved
-def main(data: tuple[list[float], list[str]], fname):
-    palette_color = sns.color_palette('mako')
+def main(data: tuple[list[float], list[str]], fname, unit):
+    palette_color = sns.color_palette('mako_r')
 
     fig, ax = plt.subplots(figsize=(6, 3), subplot_kw=dict(aspect="equal"))
     wedges, texts = ax.pie(data[0], wedgeprops=dict(width=0.5), startangle=-40, colors=palette_color)
@@ -66,12 +64,14 @@ def main(data: tuple[list[float], list[str]], fname):
         horizontalalignment = {-1: "right", 1: "left"}[int(np.sign(x))]
         connectionstyle = f"angle,angleA=0,angleB={ang}"
         kw["arrowprops"].update({"connectionstyle": connectionstyle})
-        ax.annotate(f"{data[1][i]} - {data[0][i]:2.1f}%", xy=(x, y), xytext=(1.35*np.sign(x), 1.4*y), horizontalalignment=horizontalalignment, **kw)
+        ax.annotate(f"{data[1][i]} - {data[0][i]:2.2f} {unit}", xy=(x, y), xytext=(1.35*np.sign(x), 1.4*y), horizontalalignment=horizontalalignment, **kw)
 
     fig.savefig(os.path.join(output_path, fname), bbox_inches='tight')
 
 if __name__ == "__main__":
-    main(get_data_prefetch_cache_access(), "plot-timing-prefetch-cacheaccess.pdf")
-    main(get_data_prefetch_total(), "plot-timing-prefetch-totalexec.pdf")
-    main(get_data_dram_total(), "plot-timing-dram-totalexec.pdf")
+    main(get_data_prefetch_cache_access(), "plot-timing-cacheaccess.pdf", "%")
+    main(read_timings_from_csv(prefetch_result), "plot-timing-prefetch.pdf", "ms")
+    main(read_timings_from_csv(distprefetch_result), "plot-timing-distprefetch.pdf", "ms")
+    main(read_timings_from_csv(dram_result), "plot-timing-dram.pdf", "ms")
+    main(read_timings_from_csv(hbm_result), "plot-timing-hbm.pdf", "ms")
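
(For reference, main now takes a (values, labels) tuple plus a unit string and renders a donut-style pie with the unit appended to each wedge label. A minimal sketch of equivalent output follows; the plotted values and the output file name are invented for illustration, and labels are attached directly instead of via the script's annotate loop.)

import os
import matplotlib.pyplot as plt
import seaborn as sns

values = [1.20, 0.45, 2.35]  # made-up millisecond averages, illustration only
labels = ["Scan A, Filter", "Scan B, Prefetch", "Aggregate, Project + Sum"]
unit = "ms"

# Donut-style pie: a wedge width of 0.5 hollows out the center,
# mako_r supplies the reversed seaborn palette used by the script.
fig, ax = plt.subplots(figsize=(6, 3), subplot_kw=dict(aspect="equal"))
ax.pie(values, wedgeprops=dict(width=0.5), startangle=-40,
       colors=sns.color_palette('mako_r'),
       labels=[f"{l} - {v:2.2f} {unit}" for l, v in zip(labels, values)])

os.makedirs("./plots", exist_ok=True)
fig.savefig(os.path.join("./plots", "plot-timing-sketch.pdf"), bbox_inches='tight')  # hypothetical output name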