Browse Source

create new timing plots that are now normalized by the longest execution time

master
Constantin Fürst 10 months ago
parent
commit
dfaef5b330
  1. BIN
      qdp_project/plots/plot-timing-distprefetch.pdf
  2. BIN
      qdp_project/plots/plot-timing-dram.pdf
  3. BIN
      qdp_project/plots/plot-timing-hbm.pdf
  4. BIN
      qdp_project/plots/plot-timing-prefetch.pdf
  5. 67
      qdp_project/plotter.py
  6. BIN
      thesis/images/plot-timing-distprefetch.pdf
  7. BIN
      thesis/images/plot-timing-dram.pdf
  8. BIN
      thesis/images/plot-timing-hbm.pdf
  9. BIN
      thesis/images/plot-timing-prefetch.pdf

BIN
qdp_project/plots/plot-timing-distprefetch.pdf

BIN
qdp_project/plots/plot-timing-dram.pdf

BIN
qdp_project/plots/plot-timing-hbm.pdf

BIN
qdp_project/plots/plot-timing-prefetch.pdf

67
qdp_project/plotter.py

@@ -3,6 +3,7 @@ import csv
import numpy as np
import pandas as pd
import seaborn as sns
import plotly.express as px
import matplotlib.pyplot as plt
output_path = "./plots"
@@ -12,10 +13,11 @@ prefetch_result = "./evaluation-results/current/qdp-xeonmax-prefetch-tca1-tcb1-t
distprefetch_result = "./evaluation-results/current/qdp-xeonmax-distprefetch-tca1-tcb1-tcj1-tmul32-wl4294967296-cs8388608.csv"
tt_name = "rt-ns"
function_names = [ "scana-run", "scanb-run", "aggrj-run" ]
fn_nice = [ "Scan A", "Scan B", "Aggregate" ]
function_names = ["aggrj-run" , "scana-run", "scanb-run" ]
fn_nice_prefetch = [ "Aggregate" ,"Scan A", "Scan A and B (parallel)"]
fn_nice_normal = [ "Aggregate" , "Scan A", "NULL"]
def read_timings_from_csv(fname) -> tuple[list[float], list[str]]:
def read_timings_from_csv(fname, fn_nice) -> tuple[list[float], list[str]]:
t = {}
row_count = 0
@@ -29,6 +31,9 @@ def read_timings_from_csv(fname) -> tuple[list[float], list[str]]:
t = {key: value / (1000 * 1000 * row_count) for key, value in t.items() if value != 0}
if fn_nice[2] in t.keys():
t[fn_nice[1]] = t[fn_nice[1]] - t[fn_nice[2]]
return list(t.values()), list(t.keys())
@@ -130,34 +135,50 @@ def tex_table(df, fname):
# loops over all possible configuration combinations and calls
# process_file_to_dataset for them in order to build a dataframe
# which is then displayed and saved
def donut_plot(data: tuple[list[float], list[str]], fname):
palette_color = sns.color_palette('mako_r')
fig, ax = plt.subplots(figsize=(6, 3), subplot_kw=dict(aspect="equal"))
def donut_plot(data: tuple[list[float], list[str]], maxtime, fname):
# pad to maxtime
data[0].append(maxtime - sum(data[0]))
data[1].append("NULL")
# pad to only display semi-circle
data[0].append(sum(data[0]))
data[1].append("NULL")
wedges, texts = ax.pie(data[0], wedgeprops=dict(width=0.5), startangle=-40, colors=palette_color)
fig, (ax, lax) = plt.subplots(nrows=2, gridspec_kw={"height_ratios":[4, 1]})
palette_color = sns.color_palette('mako_r')
wedges, texts = ax.pie(data[0], wedgeprops=dict(width=0.5), colors=palette_color)
wedges[-1].set_visible(False)
wedges[-2].set_visible(False)
ax.set_ylim(-0.0, 1.0)
bbox_props = dict(boxstyle="square,pad=0.3", fc="w", ec="k", lw=0.72)
kw = dict(arrowprops=dict(arrowstyle="-"), bbox=bbox_props, zorder=0, va="center")
legend_labels = [f"{data[0][i]:3.2f} ms - {data[1][i]}" for i in range(len(data[0])) if data[1][i] != "NULL"]
lax.legend(wedges, legend_labels, borderaxespad=0, loc="upper center")
lax.set_ylim(0.0, 0.25)
lax.axis("off")
for i, p in enumerate(wedges):
ang = (p.theta2 - p.theta1)/2. + p.theta1
y = np.sin(np.deg2rad(ang))
x = np.cos(np.deg2rad(ang))
horizontalalignment = {-1: "right", 1: "left"}[int(np.sign(x))]
connectionstyle = f"angle,angleA=0,angleB={ang}"
kw["arrowprops"].update({"connectionstyle": connectionstyle})
ax.annotate(f"{data[1][i]} - {data[0][i]:2.2f} ms", xy=(x, y), xytext=(1.35*np.sign(x), 1.4*y), horizontalalignment=horizontalalignment, **kw)
plt.tight_layout()
plt.rcParams.update({'font.size': 18})
plt.rcParams.update({'font.size': 16})
fig.savefig(os.path.join(output_path, fname), bbox_inches='tight')
def main():
donut_plot(read_timings_from_csv(prefetch_result), "plot-timing-prefetch.pdf")
donut_plot(read_timings_from_csv(distprefetch_result), "plot-timing-distprefetch.pdf")
donut_plot(read_timings_from_csv(dram_result), "plot-timing-dram.pdf")
donut_plot(read_timings_from_csv(hbm_result), "plot-timing-hbm.pdf")
donut_plot(read_timings_from_csv(prefetch_result), "plot-timing-prefetch.pdf")
timings = [
read_timings_from_csv(prefetch_result, fn_nice_prefetch),
read_timings_from_csv(distprefetch_result, fn_nice_prefetch),
read_timings_from_csv(dram_result, fn_nice_normal),
read_timings_from_csv(hbm_result, fn_nice_normal)
]
maxtime = max([sum(timings[0][0]), sum(timings[1][0]), sum(timings[2][0]), sum(timings[3][0])])
donut_plot(timings[0], maxtime, "plot-timing-prefetch.pdf")
donut_plot(timings[1], maxtime, "plot-timing-distprefetch.pdf")
donut_plot(timings[2], maxtime, "plot-timing-dram.pdf")
donut_plot(timings[3], maxtime, "plot-timing-hbm.pdf")
donut_plot(read_timings_from_csv(prefetch_result, fn_nice_prefetch), maxtime, "plot-timing-prefetch.pdf")
tex_table(generate_speedup_table(), "table-qdp-speedup.tex")
tex_table(generate_rawtime_base_table(), "table-qdp-baseline.tex")

BIN
thesis/images/plot-timing-distprefetch.pdf

BIN
thesis/images/plot-timing-dram.pdf

BIN
thesis/images/plot-timing-hbm.pdf

BIN
thesis/images/plot-timing-prefetch.pdf

Loading…
Cancel
Save