From 680ca4369920233fb626bb52f892f0954fac2e8c Mon Sep 17 00:00:00 2001
From: Tobi-Alonso <tobi.alonso@gmail.com>
Date: Wed, 3 Mar 2021 15:54:19 +0000
Subject: [PATCH] [fpgadataflow] Move MakePYNQDriver pass after
 CreateDataflowPartition in prep for ext_weights support

---
 .../qnn-data/templates/driver/driver_base.py | 13 ++---
 .../fpgadataflow/make_pynq_driver.py         | 49 ++++++++++---------
 .../fpgadataflow/make_zynq_proj.py           | 10 +++-
 .../fpgadataflow/vitis_build.py              |  4 +-
 4 files changed, 45 insertions(+), 31 deletions(-)

diff --git a/src/finn/qnn-data/templates/driver/driver_base.py b/src/finn/qnn-data/templates/driver/driver_base.py
index ef16a537c..58ef94bfd 100644
--- a/src/finn/qnn-data/templates/driver/driver_base.py
+++ b/src/finn/qnn-data/templates/driver/driver_base.py
@@ -125,15 +125,16 @@ class FINNExampleOverlay(Overlay):
                 layer_w = np.fromiter(
                     [int(x, 16) for x in dat.strip().split()], dtype=np.uint32
                 )
-                layer_ind = int(w_filename.split("_")[0])
-                rt_weight_dict[layer_ind] = layer_w
-        for layer_ind in rt_weight_dict.keys():
-            cand_if_name = "StreamingDataflowPartition_1/s_axilite_%d" % layer_ind
+                sdp_ind = int(w_filename.split("_")[0])
+                layer_ind = int(w_filename.split("_")[1])
+                rt_weight_dict[(sdp_ind, layer_ind)] = layer_w
+        for sdp_ind, layer_ind in rt_weight_dict.keys():
+            cand_if_name = "StreamingDataflowPartition_%d/s_axilite_%d" % (sdp_ind, layer_ind)
             if cand_if_name in self.ip_dict.keys():
                 layer_mmio = getattr(
-                    self.StreamingDataflowPartition_1, "s_axilite_%d" % layer_ind
+                    getattr(self, "StreamingDataflowPartition_%d" % sdp_ind), "s_axilite_%d" % layer_ind
                 ).mmio
-                layer_w = rt_weight_dict[layer_ind]
+                layer_w = rt_weight_dict[(sdp_ind, layer_ind)]
                 layer_mmio.write_mm(0, layer_w.tobytes())
                 if verify:
                     new_w = np.copy(layer_mmio.array[: layer_w.shape[0]])
diff --git a/src/finn/transformation/fpgadataflow/make_pynq_driver.py b/src/finn/transformation/fpgadataflow/make_pynq_driver.py
index 84dc01e53..ac5cdb4f5 100644
--- a/src/finn/transformation/fpgadataflow/make_pynq_driver.py
+++ b/src/finn/transformation/fpgadataflow/make_pynq_driver.py
@@ -37,12 +37,12 @@
 import os
 import warnings
 import pkg_resources as pk
 from . import template_driver
-
+from finn.core.modelwrapper import ModelWrapper
 class MakePYNQDriver(Transformation):
     """Create PYNQ Python code to correctly interface the generated
     accelerator, including data packing/unpacking. Should be called
-    after conversion to HLS layers and folding, but prior to the creation of
+    after conversion to HLS layers, folding, and the creation of
     dataflow partitions for correct operation.
 
     platform: one of ["zynq-iodma", "alveo"]
@@ -149,24 +149,29 @@
 
         # generate weight files for runtime-writable layers
        weights_dir = pynq_driver_dir + "/runtime_weights"
-        rt_layer_ind = 0
+
         os.makedirs(weights_dir)
-        for node in model.graph.node:
-            if node.op_type in ["StreamingFCLayer_Batch", "Thresholding_Batch"]:
-                node_inst = getCustomOp(node)
-                is_rt_weights = node_inst.get_nodeattr("runtime_writeable_weights")
-                if is_rt_weights == 1:
-                    fcl_w = model.get_initializer(node.input[1])
-                    w_filename = weights_dir + "/%d_%s.dat" % (rt_layer_ind, node.name)
-                    node_inst.make_weight_file(fcl_w, "decoupled_runtime", w_filename)
-                    rt_layer_ind += 1
-            elif node.op_type == "StreamingDataflowPartition":
-                warnings.warn(
-                    """Please call MakePYNQDriver prior to
-                    CreateDataflowPartition. Can only extract runtime-writable
-                    weights from HLSCustomOp instances and not StreamingDataflowPartition.
- """ - ) - else: - continue - return (model, False) + for sdp_ind, sdp_node in enumerate(model.graph.node): + assert sdp_node.op_type == "StreamingDataflowPartition" + # get dataflow model + sdp_node = getCustomOp(sdp_node) + dataflow_model_filename = sdp_node.get_nodeattr("model") + dataflow_model = ModelWrapper(dataflow_model_filename) + rt_layer_ind = 0 + for node in dataflow_model.graph.node: + if node.op_type in ["StreamingFCLayer_Batch", "Thresholding_Batch"]: + node_inst = getCustomOp(node) + is_rt_weights = node_inst.get_nodeattr("runtime_writeable_weights") + if is_rt_weights == 1: + fcl_w = dataflow_model.get_initializer(node.input[1]) + w_filename = weights_dir + "/%d_%d_%s.dat" % (sdp_ind,rt_layer_ind, node.name) + node_inst.make_weight_file(fcl_w, "decoupled_runtime", w_filename) + rt_layer_ind += 1 + elif node.op_type == "StreamingDataflowPartition": + warnings.warn( + """Nested StreamingDataflowPartition are not supported + """ + ) + else: + continue + return (model, False) \ No newline at end of file diff --git a/src/finn/transformation/fpgadataflow/make_zynq_proj.py b/src/finn/transformation/fpgadataflow/make_zynq_proj.py index 99c08dc05..26de772a1 100644 --- a/src/finn/transformation/fpgadataflow/make_zynq_proj.py +++ b/src/finn/transformation/fpgadataflow/make_zynq_proj.py @@ -276,7 +276,10 @@ class MakeZYNQProject(Transformation): class ZynqBuild(Transformation): - """Best-effort attempt at building the accelerator for Zynq.""" + """Best-effort attempt at building the accelerator for Zynq. + It assumes the model has only fpgadataflow nodes + + """ def __init__(self, platform, period_ns, enable_debug=False): super().__init__() @@ -290,7 +293,6 @@ class ZynqBuild(Transformation): model = model.transform(InferDataLayouts()) # prepare at global level, then break up into kernels prep_transforms = [ - MakePYNQDriver(platform="zynq-iodma"), InsertIODMA(64), InsertDWC(), Floorplan(), @@ -325,6 +327,10 @@ class ZynqBuild(Transformation): model = model.transform( MakeZYNQProject(self.platform, enable_debug=self.enable_debug) ) + # set platform attribute for correct remote execution model.set_metadata_prop("platform", "zynq-iodma") + + # create driver + model = model.transform(MakePYNQDriver(platform="zynq-iodma")) return (model, False) diff --git a/src/finn/transformation/fpgadataflow/vitis_build.py b/src/finn/transformation/fpgadataflow/vitis_build.py index 11cf46bb6..0fe427609 100644 --- a/src/finn/transformation/fpgadataflow/vitis_build.py +++ b/src/finn/transformation/fpgadataflow/vitis_build.py @@ -330,6 +330,7 @@ class VitisLink(Transformation): class VitisBuild(Transformation): """Best-effort attempt at building the accelerator with Vitis. + It assumes the model has only fpgadataflow nodes fpga_part: string identifying the target FPGA period_ns: target clock period @@ -365,7 +366,6 @@ class VitisBuild(Transformation): model = model.transform(InferDataLayouts()) # prepare at global level, then break up into kernels prep_transforms = [ - MakePYNQDriver(platform="alveo"), InsertIODMA(512), InsertDWC(), ] @@ -416,4 +416,6 @@ class VitisBuild(Transformation): # set platform attribute for correct remote execution model.set_metadata_prop("platform", "alveo") + #create driver + model = model.transform(MakePYNQDriver(platform="alveo")) return (model, False) -- GitLab