Skip to content
Snippets Groups Projects
Commit 2a9e731a authored by Yaman Umuroglu's avatar Yaman Umuroglu
Browse files

[HLSCustomOp] move more functionality to base class

parent 9c134a51
No related branches found
No related tags found
No related merge requests found
from abc import abstractmethod
import numpy as np
import os
import subprocess
from finn.custom_op import CustomOp
from finn.core.utils import CppBuilder
......@@ -77,6 +79,52 @@ class HLSCustomOp(CustomOp):
builder.build(code_gen_dir)
self.set_nodeattr("executable_path", builder.executable_path)
def dynamic_input_to_npy(self, context, count):
    """Dump the first `count` dynamic inputs of this node to .npy files.

    Each input tensor is read from the execution context and written as
    input_<i>.npy into the node's code generation directory, where the
    precompiled simulation executable expects to find it.

    Raises:
        Exception: if no codegen directory is set on this node (i.e. the
            codegen transformation has not been run yet).
    """
    code_gen_dir = self.get_nodeattr("code_gen_dir")
    if code_gen_dir == "":
        raise Exception(
            """
Found no codegen dir for this node, did you run the codegen transformation?
"""
        )
    # dynamic inputs are assumed to occupy indices 0..count-1 of node.input
    for idx in range(count):
        tensor_name = self.onnx_node.input[idx]
        target_path = os.path.join(code_gen_dir, "input_{}.npy".format(idx))
        np.save(target_path, context[tensor_name])
def npy_to_dynamic_output(self, context):
    """Read the simulation result back into the execution context.

    Loads <code_gen_dir>/output.npy and stores it under this node's
    first output name.

    TODO: support multi-output nodes as needed.
    """
    out_dir = self.get_nodeattr("code_gen_dir")
    result = np.load(out_dir + "/output.npy")
    context[self.onnx_node.output[0]] = result
def exec_precompiled_singlenode_model(self):
    """Run this node's precompiled simulation executable and wait for it.

    Raises:
        Exception: if no executable has been built for this node, or if
            the executable exits with a nonzero return code (previously
            such failures were silently ignored, leaving stale or missing
            output files to be picked up downstream).
    """
    executable_path = self.get_nodeattr("executable_path")
    if executable_path == "":
        raise Exception(
            """
Found no executable for this node, did you run the codegen and
compilation transformations?
"""
        )
    process_execute = subprocess.Popen(executable_path, stdout=subprocess.PIPE)
    process_execute.communicate()
    # surface simulation failures instead of continuing silently
    if process_execute.returncode != 0:
        raise Exception(
            "Executable %s returned nonzero exit code %d"
            % (executable_path, process_execute.returncode)
        )
def execute_node(self, context, graph):
    """Execute this node via its precompiled C++ simulation.

    Writes the node's input tensor to a .npy file, invokes the compiled
    executable, then reads the produced output .npy back into context.
    """
    # stage the single dynamic input for the executable to consume
    self.dynamic_input_to_npy(context, 1)
    # run the node's precompiled simulation binary
    self.exec_precompiled_singlenode_model()
    # collect the result the executable wrote to disk
    self.npy_to_dynamic_output(context)
def generate_params(self, model):
    # default no-op hook: this base implementation generates no parameter
    # files; ops with parameters (presumably weights/thresholds) are
    # expected to override this — TODO confirm against subclasses
    pass
......
import os
import subprocess
import numpy as np
......@@ -245,14 +244,10 @@ class StreamingFCLayer_Batch(HLSCustomOp):
elif in_ind > 2:
raise Exception("Unexpected input found for StreamingFCLayer")
in_ind += 1
# execute precompiled executable
executable_path = self.get_nodeattr("executable_path")
assert executable_path != ""
process_execute = subprocess.Popen(executable_path, stdout=subprocess.PIPE)
process_execute.communicate()
# execute the precompiled model
super().exec_precompiled_singlenode_model()
# load output npy file
output = np.load("{}/output.npy".format(code_gen_dir))
context[node.output[0]] = output
super().npy_to_dynamic_output()
def global_includes(self):
    """Register the C++ headers this op's generated code must include."""
    header_lines = ['#include "weights.hpp"']
    self.code_gen_dict["$GLOBALS$"] = header_lines
......
import os
import subprocess
import numpy as np
from finn.custom_op.fpgadataflow import HLSCustomOp
......@@ -22,29 +17,6 @@ class StreamingMaxPool_Batch(HLSCustomOp):
def infer_node_datatype(self, model):
    # datatype inference is a no-op for this op: no output datatypes are
    # derived or annotated on the model here
    pass
def execute_node(self, context, graph):
    """Execute StreamingMaxPool_Batch via its precompiled C++ simulation.

    Saves the node's single input from context as input_0.npy in the
    codegen dir, runs the compiled executable, then loads output.npy into
    context under the node's first output name.

    Raises:
        Exception: if the node has more than one input, or if the codegen /
            compilation transformations have not been run for this node.
    """
    node = self.onnx_node
    code_gen_dir = self.get_nodeattr("code_gen_dir")
    # consistency with the base-class helper: fail loudly if codegen was skipped
    if code_gen_dir == "":
        raise Exception(
            "Found no codegen dir for this node, "
            "did you run the codegen transformation?"
        )
    # create a npy file for each input of the node (in_ind is input index)
    for in_ind, input_name in enumerate(node.input):
        if in_ind > 0:
            raise Exception("Unexpected input found for StreamingMaxPool_Batch")
        np.save(
            os.path.join(code_gen_dir, "input_{}.npy".format(in_ind)),
            context[input_name],
        )
    # execute precompiled executable; use a raise rather than assert so the
    # check survives python -O
    executable_path = self.get_nodeattr("executable_path")
    if executable_path == "":
        raise Exception(
            "Found no executable for this node, "
            "did you run the compilation transformation?"
        )
    process_execute = subprocess.Popen(executable_path, stdout=subprocess.PIPE)
    process_execute.communicate()
    # load output npy file
    output = np.load("{}/output.npy".format(code_gen_dir))
    context[node.output[0]] = output
def global_includes(self):
    """Register the C++ headers this op's generated code must include."""
    header_lines = ['#include "maxpool.h"']
    self.code_gen_dict["$GLOBALS$"] = header_lines
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment