Unverified commit dfdb17dd authored by Yaman Umuroglu, committed by GitHub

Merge pull request #175 from Xilinx/feature/move_flatten_past_topk

Feature/move flatten past topk
parents a465a031 1616619c
@@ -29,6 +29,7 @@
import numpy as np
import warnings
from onnx import helper as oh
from onnx import TensorProto
from finn.transformation import Transformation
import finn.core.data_layout as DataLayout
@@ -676,6 +677,66 @@ class MoveMaxPoolPastMultiThreshold(Transformation):
        model = model.transform(InferShapes())
        return (model, graph_modified)

class MoveFlattenPastTopK(Transformation):
    """Move a Flatten node past a succeeding TopK node, if the "axis" attribute
    of the TopK is set to -1 and the data layout before the Flatten is NHWC
    with H=W=1."""

    def apply(self, model):
        graph = model.graph
        node_ind = 0
        graph_modified = False
        for n in graph.node:
            node_ind += 1
            if n.op_type == "Flatten":
                consumer = model.find_consumer(n.output[0])
                if consumer is not None and consumer.op_type == "TopK":
                    axis = get_by_name(consumer.attribute, "axis")
                    if axis is None or axis.i != -1:
                        continue
                    start_name = n.input[0]
                    data_layout = model.get_tensor_layout(start_name)
                    if data_layout != DataLayout.NHWC:
                        warnings.warn(
                            """Transformation can't be applied. The input
                            to Flatten has to have DataLayout.NHWC."""
                        )
                        continue
                    (b, h, w, c) = model.get_tensor_shape(start_name)
                    if h != 1 or w != 1:
                        continue
                    # get parameter k from topk
                    k = model.get_tensor_shape(consumer.output[1])[-1]
                    # swap connections
                    # new tensor because dims change
                    middle_name = model.make_new_valueinfo_name()
                    topk_indices = oh.make_tensor_value_info(
                        middle_name, TensorProto.INT64, [b, h, w, k]
                    )
                    end_name = consumer.output[1]
                    graph.value_info.append(topk_indices)
                    # remove old nodes
                    graph.node.remove(n)
                    graph.node.remove(consumer)
                    # set inputs and outputs correctly
                    consumer.input[0] = start_name
                    consumer.output[1] = middle_name
                    model.set_tensor_shape(consumer.output[0], (b, h, w, k))
                    n.input[0] = middle_name
                    n.output[0] = end_name
                    # insert them back in
                    graph.node.insert(node_ind - 1, consumer)
                    graph.node.insert(node_ind, n)
                    graph_modified = True

        model = model.transform(InferShapes())
        return (model, graph_modified)

class MoveFlattenPastAffine(Transformation):
    """Moves a node that implements a (1, -1) reshape past a MatMul, Mul or Add node."""
...
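
For context (not part of the diff), a minimal usage sketch of the new transformation, assuming a FINN model whose graph ends in Flatten followed by TopK with axis=-1 and whose input to the Flatten is NHWC with shape (N, 1, 1, C); the file names below are placeholders:

from finn.core.modelwrapper import ModelWrapper
from finn.transformation.infer_data_layouts import InferDataLayouts
from finn.transformation.streamline.reorder import MoveFlattenPastTopK

# load the model and annotate tensor layouts, since the transformation
# only fires when the Flatten input is tagged as DataLayout.NHWC
model = ModelWrapper("model.onnx")  # placeholder file name
model = model.transform(InferDataLayouts())

# reorder Flatten -> TopK into TopK -> Flatten; the Flatten now operates
# on the TopK indices tensor of shape (N, 1, 1, k)
model = model.transform(MoveFlattenPastTopK())
model.save("model_reordered.onnx")  # placeholder file name

The reordering leaves the computed result unchanged, which is what the test added below verifies via compare_execution.
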
# Copyright (c) 2020, Xilinx
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of FINN nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import pytest
from onnx import TensorProto, helper
from finn.core.modelwrapper import ModelWrapper
from finn.core.datatype import DataType
import finn.core.data_layout as DataLayout
from finn.util.basic import gen_finn_dt_tensor
from finn.transformation.insert_topk import InsertTopK
from finn.transformation.infer_shapes import InferShapes
from finn.transformation.infer_datatypes import InferDataTypes
from finn.transformation.infer_data_layouts import InferDataLayouts
from finn.transformation.general import GiveUniqueNodeNames, GiveReadableTensorNames
from finn.transformation.streamline.reorder import MoveFlattenPastTopK
import finn.core.onnx_exec as oxe
# data layout
@pytest.mark.parametrize("data_layout", [DataLayout.NHWC, DataLayout.NCHW])
# batch size
@pytest.mark.parametrize("batch_size", [1, 2])
def test_move_flatten_past_topk(data_layout, batch_size):
    if data_layout == DataLayout.NHWC:
        ishape = [batch_size, 1, 1, 1024]
        oshape = [batch_size, 1024]
    else:
        ishape = [batch_size, 1024, 1, 1]
        oshape = [batch_size, 1024]

    inp = helper.make_tensor_value_info("inp", TensorProto.FLOAT, ishape)
    outp = helper.make_tensor_value_info("outp", TensorProto.FLOAT, oshape)

    flatten_node = helper.make_node("Flatten", ["inp"], ["outp"])

    graph = helper.make_graph(
        nodes=[flatten_node], name="move-flatten-graph", inputs=[inp], outputs=[outp],
    )

    model = helper.make_model(graph, producer_name="move_flatten_model")
    model = ModelWrapper(model)
    model.set_tensor_datatype("inp", DataType.INT2)
    model.set_tensor_layout("inp", data_layout)
    model = model.transform(InsertTopK())
    model = model.transform(InferShapes())
    model = model.transform(InferDataTypes())
    model = model.transform(InferDataLayouts())
    model = model.transform(GiveUniqueNodeNames())
    model = model.transform(GiveReadableTensorNames())

    # compare execution before and after transformation
    inp_values = gen_finn_dt_tensor(DataType.INT2, ishape)
    idict = {model.graph.input[0].name: inp_values}
    model_transformed = model.transform(MoveFlattenPastTopK())
    assert oxe.compare_execution(model, model_transformed, idict)

    # depending on the data layout, check whether the graph is transformed or not
    if data_layout == DataLayout.NHWC:
        # check if nodes have a new order in the transformed graph
        assert model.graph != model_transformed.graph
        assert model_transformed.graph.node[-1].op_type == "Flatten"
    else:
        assert model.graph == model_transformed.graph
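
As a quick sanity check beyond the assertions, the node ordering can also be printed directly; in the NHWC case the graph built above contains only the Flatten node and the TopK appended by InsertTopK, so the expected before/after op_type sequences are:

# before the transformation: Flatten feeds the TopK inserted at the graph output
print([n.op_type for n in model.graph.node])              # expected: ['Flatten', 'TopK']
# after the transformation: TopK runs first, Flatten is applied to its indices output
print([n.op_type for n in model_transformed.graph.node])  # expected: ['TopK', 'Flatten']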