Skip to content
Snippets Groups Projects
Commit 39350349 authored by auphelia's avatar auphelia
Browse files

[Test] Test for shape inference now creates a mixed model and performs shape...

[Test] Test for shape inference now creates a mixed model and performs shape inference. Afterwards the model is executed to check that it still works.
parent 9e015d88
No related branches found
No related tags found
No related merge requests found
No preview for this file type
import finn.core.onnx_exec as oxe
import onnx.shape_inference as si
import onnx.helper as helper
from finn.core.modelwrapper import ModelWrapper
import onnx.shape_inference as si
import finn.core.onnx_exec as oxe
from finn.core.modelwrapper import ModelWrapper
# NOTE(review): this span is a scraped GitLab *diff*, not clean source.
# Both the old and the new version of several lines are present
# (e.g. the two `if node.domain == ...:` conditions and the compact vs.
# expanded `filter(...)` calls), and the "......@@" hunk markers below
# stand for context lines that are NOT shown here. Do not treat this
# block as runnable Python; recover the clean file from the repository.
def infer_shapes(model):
"""Ensure every tensor in the model has a specified shape (ValueInfo)."""
# Iterate once over all nodes; FINN custom ops (domain "finn") are
# handled by executing them, standard ONNX ops via onnx shape inference.
for node in model.graph.node:
if node.domain == 'finn':
if node.domain == "finn":
# create an empty execution context
execution_context = model.make_empty_exec_context()
......@@ -24,10 +24,18 @@ def infer_shapes(model):
# onnx shape inference unfortunately does not take single node,
# it can only analyze entire models -- so we create a model which solely
# consists of our current node.
# Collect the ValueInfo entries that describe this node's inputs and
# outputs; they may live either in graph.input/graph.output or in
# graph.value_info (for intermediate tensors).
node_inputs = list(filter(lambda x: x.name in node.input, model.graph.input))
node_inputs += list(filter(lambda x: x.name in node.input, model.graph.value_info))
node_outputs = list(filter(lambda x: x.name in node.output, model.graph.output))
node_outputs += list(filter(lambda x: x.name in node.output, model.graph.value_info))
node_inputs = list(
filter(lambda x: x.name in node.input, model.graph.input)
)
node_inputs += list(
filter(lambda x: x.name in node.input, model.graph.value_info)
)
node_outputs = list(
filter(lambda x: x.name in node.output, model.graph.output)
)
node_outputs += list(
filter(lambda x: x.name in node.output, model.graph.value_info)
)
# Build a throwaway single-node model so onnx shape inference can run
# on it in isolation.
node_graph = helper.make_graph(
nodes=[node],
name="single-node-exec",
......@@ -39,10 +47,10 @@ def infer_shapes(model):
node_model = ModelWrapper(node_model)
# set the corresponding tensors in the whole model
# Copy the inferred output shapes back into the full model's ValueInfo.
for output in node.output:
model.set_tensor_shape(output, node_model.get_tensor_shape(output))
# single-step operation, no need to call multiple times so return
# model_was_changed = false
# Returns (model, model_was_changed); False tells the transform driver
# that no fixed-point re-application is needed.
return (model, False)
from pkgutil import get_data
import numpy as np
import onnx
import onnx.numpy_helper as np_helper
from onnx import TensorProto, helper
import finn.core.onnx_exec as oxe
import finn.transformation.infer_shapes as si
from finn.core.modelwrapper import ModelWrapper
def test_infer_shapes():
    """Build a mixed ONNX/FINN model and check shape inference on it.

    Loads the mnist-conv ONNX model, replaces its first Relu node with a
    new Relu followed by a FINN-domain MultiThreshold node, runs the
    infer_shapes transformation on the resulting mixed model, and finally
    executes the model to verify it still runs end to end.
    """
    # load the onnx model
    # NOTE: the diff residue assigning the "mixed-model" path first was dead
    # code; the mnist-conv model is the one actually used (its input tensor
    # "Input3" and test data set are consumed below).
    raw_m = get_data("finn", "data/onnx/mnist-conv/model.onnx")
    model = ModelWrapper(raw_m)
    graph = model.graph

    # 1-based index -> node mapping, so positions match the graph order
    node_dict = {ind: n for ind, n in enumerate(graph.node, start=1)}

    # a multi-thresholding node is to be inserted between the first Relu and
    # MaxPool node; grab the existing Relu so its tensor names can be reused,
    # then remove it from the graph
    relu_node = node_dict[4]
    assert relu_node.op_type == "Relu", "The wrong model was chosen for the check"
    graph.node.remove(relu_node)

    # create new tensors (thresholds as constant) and add them to the graph info
    mt_v0 = helper.make_tensor_value_info("mt_v0", TensorProto.FLOAT, [1, 8, 28, 28])
    mt_thresh0 = helper.make_tensor_value_info("mt_thresh0", TensorProto.FLOAT, [8, 7])
    graph.value_info.append(mt_v0)
    graph.value_info.append(mt_thresh0)

    # random numbers for the thresholds; thresholds for one channel have to be
    # sorted to guarantee the correct MultiThreshold behavior
    mt_thresh0_values = np.empty([8, 7], dtype=np.float32)
    for ch in range(len(mt_thresh0_values)):
        mt_thresh0_values[ch] = np.sort(np.random.random_sample(7) * 10)
    model.set_initializer(mt_thresh0.name, mt_thresh0_values)

    # create and insert a new Relu node and one multi-thresholding node
    new_relu_node = helper.make_node("Relu", [relu_node.input[0]], ["mt_v0"])
    mt_node = helper.make_node(
        "MultiThreshold", ["mt_v0", "mt_thresh0"], [relu_node.output[0]], domain="finn"
    )
    graph.node.insert(4, new_relu_node)
    graph.node.insert(5, mt_node)

    # test shape inference on the mixed model
    model = model.transform_single(si.infer_shapes)

    # execution with input values from the mnist-conv model
    raw_i = get_data("finn", "data/onnx/mnist-conv/test_data_set_0/input_0.pb")
    input_tensor = onnx.load_tensor_from_string(raw_i)
    # run using FINN-based execution; raises if shapes were inferred wrongly
    input_dict = {"Input3": np_helper.to_array(input_tensor)}
    oxe.execute_onnx(model, input_dict)
Loading…
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment