Commit d5785d44 authored by Yaman Umuroglu

Merge branch 'feature/create_dataflow_partition' into dev

Parents: 2e90d20b 8ce07ce5
@@ -115,6 +115,19 @@ class ModelWrapper:
            qa.quant_parameter_tensor_names.append(dt)
            qnt_annotations.append(qa)

    def get_tensor_valueinfo(self, tensor_name):
        """Returns ValueInfoProto of tensor with given name, if it has one."""
        graph = self._model_proto.graph
        vi_names = [(x.name, x) for x in graph.input]
        vi_names += [(x.name, x) for x in graph.output]
        vi_names += [(x.name, x) for x in graph.value_info]
        try:
            vi_ind = [x[0] for x in vi_names].index(tensor_name)
            vi = vi_names[vi_ind][1]
            return vi
        except ValueError:
            return None

    def get_tensor_shape(self, tensor_name):
        """Returns the shape of tensor with given name, if it has ValueInfoProto."""
        graph = self._model_proto.graph
......
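For context, a minimal usage sketch of the new get_tensor_valueinfo helper (not part of this commit; the model filename and the shape read-out are illustrative):

from finn.core.modelwrapper import ModelWrapper

# load any FINN-ONNX model (path is a placeholder)
model = ModelWrapper("model.onnx")
# look up the ValueInfoProto of the first graph input
vi = model.get_tensor_valueinfo(model.graph.input[0].name)
if vi is not None:
    # the shape can be read from the type proto, much like get_tensor_shape does
    print([d.dim_value for d in vi.type.tensor_type.shape.dim])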
@@ -7,6 +7,7 @@ from finn.custom_op.fpgadataflow.streamingfclayer_batch import StreamingFCLayer_
from finn.custom_op.fpgadataflow.streamingmaxpool_batch import StreamingMaxPool_Batch
from finn.custom_op.fpgadataflow.tlastmarker import TLastMarker
from finn.custom_op.multithreshold import MultiThreshold
from finn.custom_op.streamingdataflowpartition import StreamingDataflowPartition
from finn.custom_op.xnorpopcount import XnorPopcountMatMul

# create a mapping of all known CustomOp names and classes
@@ -18,6 +19,7 @@ custom_op["StreamingMaxPool_Batch"] = StreamingMaxPool_Batch
custom_op["StreamingFCLayer_Batch"] = StreamingFCLayer_Batch
custom_op["ConvolutionInputGenerator"] = ConvolutionInputGenerator
custom_op["TLastMarker"] = TLastMarker
custom_op["StreamingDataflowPartition"] = StreamingDataflowPartition
def getCustomOp(node):
......
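As a rough sketch of how the registry dispatch is used with the new entry (node construction and attribute values are illustrative, not taken from this commit):

from onnx import helper
from finn.custom_op.registry import getCustomOp

# build a node whose op_type is a key in the custom_op mapping
node = helper.make_node(
    "StreamingDataflowPartition",
    ["df_in"],
    ["df_out"],
    domain="finn",
    model="df_model.onnx",  # placeholder filename
)
# getCustomOp looks up node.op_type and wraps the node in the matching CustomOp class
inst = getCustomOp(node)
print(inst.get_nodeattr("model"))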
from finn.custom_op import CustomOp


# note that the StreamingDataflowPartition node is only a meta/container node,
# it does not produce any HLS or bitfile by itself. it's a placeholder for
# a group of fpgadataflow nodes that have been separated out into a FINN-ONNX
# model of its own.
class StreamingDataflowPartition(CustomOp):
    def get_nodeattr_types(self):
        return {
            "model": ("s", True, ""),
        }

    def make_shape_compatible_op(self):
        pass

    def infer_node_datatype(self, model):
        pass

    def execute_node(self, context, graph):
        # TODO add RPC execution with synthesized bitfile?
        # whole-design rtlsim with PyVerilator may also be an alternative
        pass

    def verify_node(self):
        info_messages = []

        # verify number of attributes
        num_of_attr = 1
        if len(self.onnx_node.attribute) == num_of_attr:
            info_messages.append("The number of attributes is correct")
        else:
            info_messages.append(
                """The number of attributes is incorrect,
            {} should have {} attributes""".format(
                    self.onnx_node.op_type, num_of_attr
                )
            )

        # verify that "domain" is set to "finn"
        domain_value = self.onnx_node.domain
        if domain_value == "finn":
            info_messages.append("Attribute domain is set correctly")
        else:
            info_messages.append('Attribute domain should be set to "finn"')

        # verify that all necessary attributes exist
        try:
            self.get_nodeattr("model")
            info_messages.append("All necessary attributes exist")
        except Exception:
            info_messages.append(
                """The necessary attributes do not exist.
                StreamingDataflowPartition needs the following attribute(s):
                model"""
            )

        # verify the number of inputs
        if len(self.onnx_node.input) == 1:
            info_messages.append("The number of inputs is correct")
        else:
            info_messages.append("StreamingDataflowPartition needs 1 data input")

        return info_messages
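A sketch of how this container node is meant to be consumed downstream, assuming sdp_node is a StreamingDataflowPartition node taken from a parent graph (the variable name is illustrative):

from finn.core.modelwrapper import ModelWrapper
from finn.custom_op.registry import getCustomOp

# sdp_node: a StreamingDataflowPartition node from the parent model (assumed given)
sdp_inst = getCustomOp(sdp_node)
dataflow_model_filename = sdp_inst.get_nodeattr("model")
# the separated-out fpgadataflow nodes live in their own FINN-ONNX model
dataflow_model = ModelWrapper(dataflow_model_filename)
print([n.op_type for n in dataflow_model.graph.node])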
File added
import copy

from onnx import helper

from finn.transformation import Transformation
from finn.util.basic import get_by_name, make_build_dir


class CreateDataflowPartition(Transformation):
    """Split a graph into two graphs; one which contains non-FINN-dataflow nodes
    and a StreamingDataflowPartition node, and another which only contains
    FINN dataflow nodes. The StreamingDataflowPartition has a model attribute
    that indicates the filename for the second graph that only contains
    dataflow nodes. No action is taken if there are no dataflow nodes."""

    def __init__(self):
        super().__init__()

    def apply(self, model):
        # TODO we currently assume that all dataflow nodes are connected to
        # each other, forming a single partition. check the assumption and/or
        # improve this.
        all_nodes = list(model.graph.node)
        df_nodes = filter(
            lambda x: get_by_name(x.attribute, "backend") is not None, all_nodes
        )
        df_nodes = filter(
            lambda x: get_by_name(x.attribute, "backend").s.decode("UTF-8")
            == "fpgadataflow",
            df_nodes,
        )
        df_nodes = list(df_nodes)
        non_df_nodes = filter(lambda x: x not in df_nodes, all_nodes)
        non_df_nodes = list(non_df_nodes)

        if len(df_nodes) == 0:
            # no changes if no dataflow nodes are present
            return (model, False)
        else:
            # partition the model into two models
            df_model = copy.deepcopy(model)
            non_df_model = model
            # remove all non-dataflow nodes from the dataflow model
            for node_to_remove in non_df_nodes:
                df_model.graph.node.remove(node_to_remove)
            # identify the entry and exit points for the dataflow part
            df_in = df_model.graph.node[0].input[0]
            df_out = df_model.graph.node[-1].output[0]
            df_in_vi = df_model.get_tensor_valueinfo(df_in)
            df_out_vi = df_model.get_tensor_valueinfo(df_out)
            # set df graph in/out to be df_in/df_out
            df_model.graph.input.remove(df_model.graph.input[0])
            df_model.graph.input.insert(0, df_in_vi)
            df_model.graph.output.remove(df_model.graph.output[0])
            df_model.graph.output.insert(0, df_out_vi)
            df_model_dir = make_build_dir("dataflow_partition_")
            df_model_filename = df_model_dir + "/df_model.onnx"
            df_model.save(df_model_filename)
            # remove all dataflow nodes from the non-dataflow model
            # keep track of where the dataflow part starts
            df_start_ind = all_nodes.index(df_nodes[0])
            for node_to_remove in df_nodes:
                non_df_model.graph.node.remove(node_to_remove)
            # create StreamingDataflow node with df_in/df_out io
            df_node = helper.make_node(
                "StreamingDataflowPartition",
                [df_in],
                [df_out],
                # use the model attribute to mark the df model
                model=df_model_filename,
            )
            non_df_model.graph.node.insert(df_start_ind, df_node)

        return (non_df_model, False)
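A minimal usage sketch of the transformation (the input filename is a placeholder; the test below exercises the same flow on a packaged example model):

from finn.core.modelwrapper import ModelWrapper
from finn.transformation.fpgadataflow.create_dataflow_partition import (
    CreateDataflowPartition,
)

# parent model containing a contiguous group of backend="fpgadataflow" nodes
parent_model = ModelWrapper("model_with_hls_layers.onnx")  # placeholder path
parent_model = parent_model.transform(CreateDataflowPartition())
# the dataflow nodes are now replaced by a single StreamingDataflowPartition node
print([n.op_type for n in parent_model.graph.node])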
import os.path
from pkgutil import get_data

from finn.core.modelwrapper import ModelWrapper
from finn.custom_op.registry import getCustomOp
from finn.transformation.fpgadataflow.create_dataflow_partition import (
    CreateDataflowPartition,
)


def test_create_dataflow_partition():
    # load the onnx model
    raw_m = get_data(
        "finn", "data/onnx/finn-hls-model/tfc_w1_a1_after_conv_to_hls.onnx"
    )
    model = ModelWrapper(raw_m)
    model = model.transform(CreateDataflowPartition())
    assert model.graph.node[2].op_type == "StreamingDataflowPartition"
    sdp_node = getCustomOp(model.graph.node[2])
    assert sdp_node.__class__.__name__ == "StreamingDataflowPartition"
    assert os.path.isfile(sdp_node.get_nodeattr("model"))