Skip to content
Snippets Groups Projects
Commit 680ca436 authored by Tobi-Alonso's avatar Tobi-Alonso
Browse files

[fpgadataflow] Move MakePYNQDriver pass after CreateDataflowPartition in prep for ext_weights support

parent 5ad35261
No related branches found
No related tags found
No related merge requests found
......@@ -125,15 +125,16 @@ class FINNExampleOverlay(Overlay):
layer_w = np.fromiter(
[int(x, 16) for x in dat.strip().split()], dtype=np.uint32
)
layer_ind = int(w_filename.split("_")[0])
rt_weight_dict[layer_ind] = layer_w
for layer_ind in rt_weight_dict.keys():
cand_if_name = "StreamingDataflowPartition_1/s_axilite_%d" % layer_ind
sdp_ind = int(w_filename.split("_")[0])
layer_ind = int(w_filename.split("_")[1])
rt_weight_dict[(sdp_ind,layer_ind)] = layer_w
for sdp_ind,layer_ind in rt_weight_dict.keys():
cand_if_name = "StreamingDataflowPartition_%d/s_axilite_%d" % (sdp_ind,layer_ind)
if cand_if_name in self.ip_dict.keys():
layer_mmio = getattr(
self.StreamingDataflowPartition_1, "s_axilite_%d" % layer_ind
getattr(self,"StreamingDataflowPartition_%d" % sdp_ind), "s_axilite_%d" % layer_ind
).mmio
layer_w = rt_weight_dict[layer_ind]
layer_w = rt_weight_dict[(sdp_ind,layer_ind)]
layer_mmio.write_mm(0, layer_w.tobytes())
if verify:
new_w = np.copy(layer_mmio.array[: layer_w.shape[0]])
......
......@@ -37,12 +37,12 @@ import os
import warnings
import pkg_resources as pk
from . import template_driver
from finn.core.modelwrapper import ModelWrapper
class MakePYNQDriver(Transformation):
"""Create PYNQ Python code to correctly interface the generated
accelerator, including data packing/unpacking. Should be called
after conversion to HLS layers and folding, but prior to the creation of
after conversion to HLS layers, folding and the creation of
dataflow partitions for correct operation.
platform: one of ["zynq-iodma", "alveo"]
......@@ -149,24 +149,29 @@ class MakePYNQDriver(Transformation):
# generate weight files for runtime-writable layers
weights_dir = pynq_driver_dir + "/runtime_weights"
rt_layer_ind = 0
os.makedirs(weights_dir)
for node in model.graph.node:
if node.op_type in ["StreamingFCLayer_Batch", "Thresholding_Batch"]:
node_inst = getCustomOp(node)
is_rt_weights = node_inst.get_nodeattr("runtime_writeable_weights")
if is_rt_weights == 1:
fcl_w = model.get_initializer(node.input[1])
w_filename = weights_dir + "/%d_%s.dat" % (rt_layer_ind, node.name)
node_inst.make_weight_file(fcl_w, "decoupled_runtime", w_filename)
rt_layer_ind += 1
elif node.op_type == "StreamingDataflowPartition":
warnings.warn(
"""Please call MakePYNQDriver prior to
CreateDataflowPartition. Can only extract runtime-writable
weights from HLSCustomOp instances and not StreamingDataflowPartition.
"""
)
else:
continue
return (model, False)
for sdp_ind, sdp_node in enumerate(model.graph.node):
assert sdp_node.op_type == "StreamingDataflowPartition"
# get dataflow model
sdp_node = getCustomOp(sdp_node)
dataflow_model_filename = sdp_node.get_nodeattr("model")
dataflow_model = ModelWrapper(dataflow_model_filename)
rt_layer_ind = 0
for node in dataflow_model.graph.node:
if node.op_type in ["StreamingFCLayer_Batch", "Thresholding_Batch"]:
node_inst = getCustomOp(node)
is_rt_weights = node_inst.get_nodeattr("runtime_writeable_weights")
if is_rt_weights == 1:
fcl_w = dataflow_model.get_initializer(node.input[1])
w_filename = weights_dir + "/%d_%d_%s.dat" % (sdp_ind,rt_layer_ind, node.name)
node_inst.make_weight_file(fcl_w, "decoupled_runtime", w_filename)
rt_layer_ind += 1
elif node.op_type == "StreamingDataflowPartition":
warnings.warn(
"""Nested StreamingDataflowPartition are not supported
"""
)
else:
continue
return (model, False)
\ No newline at end of file
......@@ -276,7 +276,10 @@ class MakeZYNQProject(Transformation):
class ZynqBuild(Transformation):
"""Best-effort attempt at building the accelerator for Zynq."""
"""Best-effort attempt at building the accelerator for Zynq.
It assumes the model has only fpgadataflow nodes
"""
def __init__(self, platform, period_ns, enable_debug=False):
super().__init__()
......@@ -290,7 +293,6 @@ class ZynqBuild(Transformation):
model = model.transform(InferDataLayouts())
# prepare at global level, then break up into kernels
prep_transforms = [
MakePYNQDriver(platform="zynq-iodma"),
InsertIODMA(64),
InsertDWC(),
Floorplan(),
......@@ -325,6 +327,10 @@ class ZynqBuild(Transformation):
model = model.transform(
MakeZYNQProject(self.platform, enable_debug=self.enable_debug)
)
# set platform attribute for correct remote execution
model.set_metadata_prop("platform", "zynq-iodma")
# create driver
model = model.transform(MakePYNQDriver(platform="zynq-iodma"))
return (model, False)
......@@ -330,6 +330,7 @@ class VitisLink(Transformation):
class VitisBuild(Transformation):
"""Best-effort attempt at building the accelerator with Vitis.
It assumes the model has only fpgadataflow nodes
fpga_part: string identifying the target FPGA
period_ns: target clock period
......@@ -365,7 +366,6 @@ class VitisBuild(Transformation):
model = model.transform(InferDataLayouts())
# prepare at global level, then break up into kernels
prep_transforms = [
MakePYNQDriver(platform="alveo"),
InsertIODMA(512),
InsertDWC(),
]
......@@ -416,4 +416,6 @@ class VitisBuild(Transformation):
# set platform attribute for correct remote execution
model.set_metadata_prop("platform", "alveo")
#create driver
model = model.transform(MakePYNQDriver(platform="alveo"))
return (model, False)
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment