diff --git a/src/finn/core/onnx_exec.py b/src/finn/core/onnx_exec.py
index d085eedf7f607f644b0873b21852aea5253fb2a6..6b8c01958a0f0dec5c9651fc98dfd794a23f39f3 100644
--- a/src/finn/core/onnx_exec.py
+++ b/src/finn/core/onnx_exec.py
@@ -24,6 +24,8 @@
 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
+import copy
+
 import numpy as np
 import onnx
 import onnx.helper as helper
@@ -31,6 +33,8 @@ import onnx.shape_inference as si
 import onnxruntime as rt
 from onnx import numpy_helper as np_helper
 
+import finn.transformation.general as tx
+
 
 def valueinfo_to_tensor(vi):
     """Creates an all-zeroes numpy tensor from a ValueInfoProto."""
@@ -132,3 +136,20 @@ def execute_onnx(model, input_dict, return_full_exec_context=False):
             out_name = out_tensor.name
             output_dict[out_name] = execution_context[out_name]
         return output_dict
+
+
+def execute_onnx_and_make_model(model, input_dict):
+    """Execute given ONNX model with given named inputs and return a new model
+    where an initializer is provided for each tensor."""
+
+    model = si.infer_shapes(model)
+    # retrieve the full execution context
+    execution_context = execute_onnx(model, input_dict, return_full_exec_context=True)
+    new_model = copy.deepcopy(model)
+    # provide an initializer carrying the computed value for every tensor
+    for tensor_name, tensor_value in execution_context.items():
+        tx.set_initializer(new_model, tensor_name, tensor_value)
+    # expose every intermediate value_info entry as a graph output
+    for vi in new_model.graph.value_info:
+        new_model.graph.output.append(vi)
+    return new_model
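
For reference, a minimal usage sketch of the new helper (not part of this diff): it assumes a small hand-built ONNX graph, and the tensor names, graph name, and output filename are made up for illustration. The intent is to show how execute_onnx_and_make_model could be called to bake every computed tensor value into a copy of the model for later inspection.

    import numpy as np
    import onnx
    import onnx.helper as helper
    from onnx import TensorProto

    import finn.core.onnx_exec as oxe

    # build a tiny two-node graph: out = Relu(in0 + in0)
    in0 = helper.make_tensor_value_info("in0", TensorProto.FLOAT, [1, 4])
    out = helper.make_tensor_value_info("out", TensorProto.FLOAT, [1, 4])
    add_node = helper.make_node("Add", ["in0", "in0"], ["add_out"])
    relu_node = helper.make_node("Relu", ["add_out"], ["out"])
    graph = helper.make_graph([add_node, relu_node], "example_graph", [in0], [out])
    model = helper.make_model(graph)

    # run the model on a random input and annotate every tensor with its value
    input_dict = {"in0": np.random.randn(1, 4).astype(np.float32)}
    annotated_model = oxe.execute_onnx_and_make_model(model, input_dict)
    onnx.save(annotated_model, "example_annotated.onnx")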