diff --git a/src/finn/analysis/fpgadataflow/post_synth_res.py b/src/finn/analysis/fpgadataflow/post_synth_res.py
index 72533b54ea7b2193a3a9e0d7ee7c8c60646ead0e..81accba23220d3f25e8560443ff22cf59d3733e9 100644
--- a/src/finn/analysis/fpgadataflow/post_synth_res.py
+++ b/src/finn/analysis/fpgadataflow/post_synth_res.py
@@ -55,36 +55,35 @@ def post_synth_res(model, override_synth_report_filename=None):
         raise Exception("Please run synthesis first")
 
     for node in model.graph.node:
-        if _is_fpgadataflow_node(node):
-            if node.op_type == "StreamingDataflowPartition":
-                sdp_model = ModelWrapper(getCustomOp(node).get_nodeattr("model"))
-                sdp_res_dict = post_synth_res(sdp_model, synth_report_filename)
-                res_dict.update(sdp_res_dict)
-            else:
-                row = root.findall(".//*[@contents='%s']/.." % node.name)
-                if row != []:
-                    node_dict = {}
-                    row = row[0].getchildren()
-                    """ Expected XML structure:
-    <tablerow class="" suppressoutput="0" wordwrap="0">
-        <tableheader class="" contents="Instance" halign="3" width="-1"/>
-        <tableheader class="" contents="Module" halign="3" width="-1"/>
-        <tableheader class="" contents="Total LUTs" halign="3" width="-1"/>
-        <tableheader class="" contents="Logic LUTs" halign="3" width="-1"/>
-        <tableheader class="" contents="LUTRAMs" halign="3" width="-1"/>
-        <tableheader class="" contents="SRLs" halign="3" width="-1"/>
-        <tableheader class="" contents="FFs" halign="3" width="-1"/>
-        <tableheader class="" contents="RAMB36" halign="3" width="-1"/>
-        <tableheader class="" contents="RAMB18" halign="3" width="-1"/>
-        <tableheader class="" contents="DSP48 Blocks" halign="3" width="-1"/>
-    </tablerow>
-                    """
-                    node_dict["LUT"] = int(row[2].attrib["contents"])
-                    node_dict["SRL"] = int(row[5].attrib["contents"])
-                    node_dict["FF"] = int(row[6].attrib["contents"])
-                    node_dict["BRAM_36K"] = int(row[7].attrib["contents"])
-                    node_dict["BRAM_18K"] = int(row[8].attrib["contents"])
-                    node_dict["DSP48"] = int(row[9].attrib["contents"])
-                    res_dict[node.name] = node_dict
+        if node.op_type == "StreamingDataflowPartition":
+            sdp_model = ModelWrapper(getCustomOp(node).get_nodeattr("model"))
+            sdp_res_dict = post_synth_res(sdp_model, synth_report_filename)
+            res_dict.update(sdp_res_dict)
+        elif _is_fpgadataflow_node(node):
+            row = root.findall(".//*[@contents='%s']/.." % node.name)
+            if row != []:
+                node_dict = {}
+                row = list(row[0])  # getchildren() is deprecated/removed in newer ElementTree
+                """ Expected XML structure:
+<tablerow class="" suppressoutput="0" wordwrap="0">
+    <tableheader class="" contents="Instance" halign="3" width="-1"/>
+    <tableheader class="" contents="Module" halign="3" width="-1"/>
+    <tableheader class="" contents="Total LUTs" halign="3" width="-1"/>
+    <tableheader class="" contents="Logic LUTs" halign="3" width="-1"/>
+    <tableheader class="" contents="LUTRAMs" halign="3" width="-1"/>
+    <tableheader class="" contents="SRLs" halign="3" width="-1"/>
+    <tableheader class="" contents="FFs" halign="3" width="-1"/>
+    <tableheader class="" contents="RAMB36" halign="3" width="-1"/>
+    <tableheader class="" contents="RAMB18" halign="3" width="-1"/>
+    <tableheader class="" contents="DSP48 Blocks" halign="3" width="-1"/>
+</tablerow>
+                """
+                node_dict["LUT"] = int(row[2].attrib["contents"])
+                node_dict["SRL"] = int(row[5].attrib["contents"])
+                node_dict["FF"] = int(row[6].attrib["contents"])
+                node_dict["BRAM_36K"] = int(row[7].attrib["contents"])
+                node_dict["BRAM_18K"] = int(row[8].attrib["contents"])
+                node_dict["DSP48"] = int(row[9].attrib["contents"])
+                res_dict[node.name] = node_dict
 
     return res_dict
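
Note: post_synth_res is an analysis pass, so the usual entry point is ModelWrapper.analysis. A minimal usage sketch (the filename is hypothetical, and it assumes synthesis has already run so the model carries the "vivado_synth_rpt" metadata property that the pass parses):

    from finn.core.modelwrapper import ModelWrapper
    from finn.analysis.fpgadataflow.post_synth_res import post_synth_res

    model = ModelWrapper("post_synth.onnx")  # hypothetical: model saved after synthesis
    res = model.analysis(post_synth_res)
    # res maps node names to per-node resource dicts, e.g.
    # {"<node name>": {"LUT": ..., "SRL": ..., "FF": ...,
    #                  "BRAM_36K": ..., "BRAM_18K": ..., "DSP48": ...}}

With the reordered checks, StreamingDataflowPartition nodes are now handled before the fpgadataflow check, so child partitions are recursed into regardless of their domain attribute.
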
diff --git a/src/finn/custom_op/streamingdataflowpartition.py b/src/finn/custom_op/streamingdataflowpartition.py
index bce4dde426b8838d6c86638a3641d51ab259a6db..31cd38fea3c5a9e88084c3332d46aebdb065f800 100644
--- a/src/finn/custom_op/streamingdataflowpartition.py
+++ b/src/finn/custom_op/streamingdataflowpartition.py
@@ -36,7 +36,12 @@ class StreamingDataflowPartition(CustomOp):
     bitfile by itself."""
 
     def get_nodeattr_types(self):
-        return {"model": ("s", True, "")}
+        return {
+            "model": ("s", True, ""),
+            "res_estimate": ("s", False, ""),
+            "res_hls": ("s", False, ""),
+            "res_synth": ("s", False, ""),
+        }
 
     def make_shape_compatible_op(self, model):
         pass
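
The three new optional string attributes are the storage targets that AnnotateResources writes into, one per mode. A sketch of reading an annotation back after the transform has run; the ast.literal_eval conversion is an assumption on my part, based on the dict being stored via str(...):

    import ast
    from finn.custom_op.registry import getCustomOp

    for node in model.graph.node:
        if node.op_type == "StreamingDataflowPartition":
            res_str = getCustomOp(node).get_nodeattr("res_synth")
            if res_str != "":
                res = ast.literal_eval(res_str)  # parse the stringified dict back
                print(node.name, res)
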
diff --git a/src/finn/transformation/fpgadataflow/annotate_resources.py b/src/finn/transformation/fpgadataflow/annotate_resources.py
index aba22d16caf715f3495055489c185d925a33f7e0..da6fa1ff738690308a9b7686a5c92d7395ab50c8 100644
--- a/src/finn/transformation/fpgadataflow/annotate_resources.py
+++ b/src/finn/transformation/fpgadataflow/annotate_resources.py
@@ -66,26 +66,25 @@ class AnnotateResources(Transformation):
         children_dict = {}
         # annotate node resources
         for node in graph.node:
-            if _is_fpgadataflow_node(node):
-                if node.name in self.res_dict.keys():
-                    op_inst = registry.getCustomOp(node)
-                    op_inst.set_nodeattr(
-                        "res_" + self.mode, str(self.res_dict[node.name])
-                    )
-                    children_dict[node.name] = self.res_dict[node.name]
-                elif node.op_type == "StreamingDataflowPartition":
-                    # recurse into model to manually annotate per-layer resources
-                    sdp_model_filename = getCustomOp(node).get_nodeattr("model")
-                    sdp_model = ModelWrapper(sdp_model_filename)
-                    sdp_model = sdp_model.transform(
-                        AnnotateResources(self.mode, self.res_dict)
-                    )
-                    sdp_dict = sdp_model.get_metadata_prop("res_total_" + self.mode)
-                    # save transformed model
-                    sdp_model.save(sdp_model_filename)
-                    # set res attribute for sdp node
-                    getCustomOp(node).set_nodeattr("res_" + self.mode, str(sdp_dict))
-                    children_dict[node.name] = sdp_dict
+            if _is_fpgadataflow_node(node) and node.name in self.res_dict.keys():
+                op_inst = registry.getCustomOp(node)
+                op_inst.set_nodeattr("res_" + self.mode, str(self.res_dict[node.name]))
+                children_dict[node.name] = self.res_dict[node.name]
+            elif node.op_type == "StreamingDataflowPartition":
+                # recurse into model to manually annotate per-layer resources
+                sdp_model_filename = getCustomOp(node).get_nodeattr("model")
+                sdp_model = ModelWrapper(sdp_model_filename)
+                sdp_model = sdp_model.transform(
+                    AnnotateResources(self.mode, self.res_dict)
+                )
+                sdp_dict = sdp_model.get_metadata_prop("res_total_" + self.mode)
+                sdp_dict = eval(sdp_dict)
+                # save transformed model
+                sdp_model.save(sdp_model_filename)
+                # set res attribute for sdp node
+                getCustomOp(node).set_nodeattr("res_" + self.mode, str(sdp_dict))
+                children_dict[node.name] = sdp_dict
+        self.res_dict.update(children_dict)
         total_dict = {}
         for lname in children_dict.keys():
             layer_res_dict = self.res_dict[lname]
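
For reference, a sketch of how the transform would typically be applied to the parent model; the single-argument constructor is an assumption based on the recursive call above (which passes a pre-computed res_dict as the second argument), and "estimate"/"hls"/"synth" are the modes implied by the node attributes added in streamingdataflowpartition.py:

    from finn.core.modelwrapper import ModelWrapper
    from finn.transformation.fpgadataflow.annotate_resources import AnnotateResources

    model = ModelWrapper("parent_model.onnx")  # hypothetical filename
    model = model.transform(AnnotateResources("estimate"))
    # per-node values land in the res_<mode> node attributes,
    # graph-level totals in the res_total_<mode> metadata property
    print(model.get_metadata_prop("res_total_estimate"))

The added eval() turns the child partition's stringified total back into a dict before it enters children_dict, so nested StreamingDataflowPartition results can be aggregated like any other layer's.
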
diff --git a/src/finn/transformation/fpgadataflow/create_dataflow_partition.py b/src/finn/transformation/fpgadataflow/create_dataflow_partition.py
index 7197e68be2fbdf5fc39b7ed202e88672614514ec..5ec4ab14d65d63523856a6bb107bf75c1ca5a261 100644
--- a/src/finn/transformation/fpgadataflow/create_dataflow_partition.py
+++ b/src/finn/transformation/fpgadataflow/create_dataflow_partition.py
@@ -125,6 +125,7 @@ class CreateDataflowPartition(Transformation):
                     [df_out],
                     # use the model attribute to mark the df model
                     model=df_model_filename,
+                    domain="finn",
                 )
                 non_df_model.graph.node.insert(df_start_ind, df_node)
                 model = non_df_model
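
The added domain="finn" presumably ensures the freshly created StreamingDataflowPartition node is treated as a FINN custom op wherever FINN dispatches on node.domain. A minimal sketch of the resulting node construction (tensor names and the model path are placeholders):

    from onnx import helper

    df_node = helper.make_node(
        "StreamingDataflowPartition",
        ["df_in"],                   # hypothetical input tensor name
        ["df_out"],                  # hypothetical output tensor name
        model="/tmp/df_model.onnx",  # hypothetical child model path
        domain="finn",               # marks the node as a FINN custom op
    )
    assert df_node.domain == "finn"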