Commit b58419f2 authored by Yaman Umuroglu

[Test] save end2end key results to txtfiles

parent a5041d92
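This commit starts recording a small per-configuration results file alongside the ONNX checkpoints: the export timestamp, rtlsim performance numbers, post-synthesis resource counts and measured throughput are merged into one text file per (topology, wbits, abits) combination, stored as a Python dict literal. As an illustration only (the path and values below are assumptions, not actual results from this commit), the file for the tfc-w1a1 configuration might look like this if FINN_INST_NAME resolves to finn_dev_user:

# /tmp/finn_dev_user/end2end_tfc_w1a1.txt -- illustrative contents only
{'datetime': '2020-06-01 12:00:00', 'cycles_rtlsim': '4712', 'fclk[mhz]': 100.0, 'throughput[images/s]': 123456.0}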
@@ -93,6 +93,7 @@ from finn.transformation.merge_onnx_models import MergeONNXModels
from finn.transformation.insert_topk import InsertTopK
from finn.core.datatype import DataType
from dataset_loading import mnist, cifar
from datetime import datetime

build_dir = "/tmp/" + os.environ["FINN_INST_NAME"]
target_clk_ns = 10
@@ -104,6 +105,18 @@ def get_checkpoint_name(topology, wbits, abits, step):
    return build_dir + "/end2end_%s_w%da%d_%s.onnx" % (topology, wbits, abits, step)


def update_dashboard_data(topology, wbits, abits, key, val):
    # one stats file per (topology, wbits, abits) configuration, holding a
    # Python dict literal with all key/value results reported so far
    stats_file = build_dir + "/end2end_%s_w%da%d.txt" % (topology, wbits, abits)
    stats_dict = dict()
    if os.path.isfile(stats_file):
        with open(stats_file, "r") as f:
            stats_dict_txt = f.read()
        stats_dict = eval(stats_dict_txt)
    # insert/overwrite the given key and write the whole dict back out
    stats_dict[key] = val
    with open(stats_file, "w") as f:
        f.write(str(stats_dict))


def fold_tfc(model):
    fc_layers = model.get_nodes_by_op_type("StreamingFCLayer_Batch")
    # (PE, SIMD, in_fifo_depth, out_fifo_depth, ramstyle) for each layer
@@ -275,6 +288,8 @@ class TestEnd2End:
        (model, ishape) = get_trained_network_and_ishape(topology, wbits, abits)
        chkpt_name = get_checkpoint_name(topology, wbits, abits, "export")
        bo.export_finn_onnx(model, ishape, chkpt_name)
        dtstr = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        update_dashboard_data(topology, wbits, abits, "datetime", dtstr)
        assert os.path.isfile(chkpt_name)

    def test_import_and_tidy(self, topology, wbits, abits):
@@ -450,7 +465,9 @@ class TestEnd2End:
        y = execute_parent(parent_chkpt, rtlsim_chkpt, input_tensor_npy)
        model = ModelWrapper(rtlsim_chkpt)
        perf["cycles_rtlsim"] = model.get_metadata_prop("cycles_rtlsim")
        # warnings.warn("Estimated & rtlsim performance: " + str(perf))
        for (k, v) in perf.items():
            update_dashboard_data(topology, wbits, abits, k, v)
        assert np.isclose(y, output_tensor_npy).all()

    @pytest.mark.slow
@@ -502,14 +519,17 @@ class TestEnd2End:
        cfg = get_build_env(kind, target_clk_ns)
        model = model.transform(cfg["build_fxn"])
        model = model.transform(AnnotateResources("synth"))
        # warnings.warn(
        #     "Post-synthesis resources (excluding shell): "
        #     + model.get_metadata_prop("res_total_synth")
        # )
        # warnings.warn(
        #     "Post-synthesis resources (all inclusive): "
        #     + model.get_metadata_prop("res_total_top_synth")
        # )
        synth_dct = eval(model.get_metadata_prop("res_total_top_synth"))
        for (k, v) in synth_dct.items():
            update_dashboard_data(topology, wbits, abits, k, v)
        model.save(get_checkpoint_name(topology, wbits, abits, "build_" + kind))

    @pytest.mark.parametrize("kind", ["zynq", "alveo"])
@@ -599,3 +619,14 @@ class TestEnd2End:
            )
        ret_str += "\n" + "-----------------------------"
        warnings.warn(ret_str)
        largest_bsize = bsize_range[-1]
        update_dashboard_data(
            topology, wbits, abits, "fclk[mhz]", ret[largest_bsize]["fclk[mhz]"]
        )
        update_dashboard_data(
            topology,
            wbits,
            abits,
            "throughput[images/s]",
            ret[largest_bsize]["throughput[images/s]"],
        )
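Not part of this commit: below is a minimal sketch of how a dashboard script could later aggregate these per-configuration files, assuming the build_dir layout and end2end_*.txt naming used above. collect_dashboard_data is a hypothetical helper, and ast.literal_eval is used here as a safer stand-in for the eval() call in the test.

# Minimal sketch (not part of this commit): aggregate the per-configuration
# result files written by update_dashboard_data into one dict, e.g. for
# rendering a dashboard. collect_dashboard_data is a hypothetical helper.
import ast
import glob
import os


def collect_dashboard_data(build_dir):
    results = dict()
    for stats_file in glob.glob(os.path.join(build_dir, "end2end_*.txt")):
        config = os.path.splitext(os.path.basename(stats_file))[0]
        with open(stats_file, "r") as f:
            # the test writes the dict with str(), so ast.literal_eval can
            # parse it back (a safer stand-in for the eval() used above)
            results[config] = ast.literal_eval(f.read())
    return results


# usage example: collect_dashboard_data("/tmp/" + os.environ["FINN_INST_NAME"])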