diff --git a/notebooks/basics/1_how_to_work_with_onnx.ipynb b/notebooks/basics/1_how_to_work_with_onnx.ipynb
index c5044a1ee8aeca06197008df8ae90f27080a606f..29b2751aff73706d5590c6641b86104368816922 100644
--- a/notebooks/basics/1_how_to_work_with_onnx.ipynb
+++ b/notebooks/basics/1_how_to_work_with_onnx.ipynb
@@ -6,7 +6,7 @@
    "source": [
     "# FINN - How to work with ONNX\n",
     "\n",
-    "This notebook should give an overview of ONNX ProtoBuf, help to create and manipulate an ONNX model and use FINN functions to work with it. There may be overlaps to other notebooks, like [FINN-ModelWrapper](FINN-ModelWrapper.ipynb) and [FINN-CustomOps](FINN-CustomOps.ipynb), but this notebook will give an overview about the handling of ONNX models in FINN."
+    "This notebook should give an overview of ONNX ProtoBuf, help to create and manipulate an ONNX model and use FINN functions to work with it. There may be overlaps to other notebooks, like [ModelWrapper](2_modelwrapper.ipynb) and [CustomOps](../internals/2_custom_op.ipynb), but this notebook will give an overview of the handling of ONNX models in FINN."
    ]
   },
   {
@@ -310,16 +310,16 @@
      "output_type": "stream",
      "text": [
       "The output of the ONNX model is: \n",
-      "[[ 5.  7.  3.  9.]\n",
-      " [10.  1. 14. 12.]\n",
-      " [ 3.  7.  6. 20.]\n",
-      " [ 9.  1.  6. 10.]]\n",
+      "[[12.  9. 14.  8.]\n",
+      " [ 9.  9.  4.  6.]\n",
+      " [ 3. 19.  9.  5.]\n",
+      " [ 8. 22.  7.  2.]]\n",
       "\n",
       "The output of the reference function is: \n",
-      "[[ 5.  7.  3.  9.]\n",
-      " [10.  1. 14. 12.]\n",
-      " [ 3.  7.  6. 20.]\n",
-      " [ 9.  1.  6. 10.]]\n",
+      "[[12.  9. 14.  8.]\n",
+      " [ 9.  9.  4.  6.]\n",
+      " [ 3. 19.  9.  5.]\n",
+      " [ 8. 22.  7.  2.]]\n",
       "\n",
       "The results are the same!\n"
      ]
@@ -358,12 +358,12 @@
    "source": [
     "In the following we assume that we do not know the appearance of the model, so we first try to identify whether there are two consecutive adders in the graph and then convert them into a sum node. \n",
     "\n",
-    "Here we make use of FINN. FINN provides a thin wrapper around the model which provides several additional helper functions to manipulate the graph. The code can be found [here](https://github.com/Xilinx/finn/blob/dev/src/finn/core/modelwrapper.py) and you can find a more detailed description in the notebook [FINN-ModelWrapper](FINN-ModelWrapper.ipynb)."
+    "Here we make use of FINN. FINN provides a thin wrapper around the model which provides several additional helper functions to manipulate the graph. The code can be found [here](https://github.com/Xilinx/finn/blob/master/src/finn/core/modelwrapper.py) and you can find a more detailed description in the notebook [ModelWrapper](2_modelwrapper.ipynb)."
    ]
   },
   {
    "cell_type": "code",
-   "execution_count": 13,
+   "execution_count": 14,
    "metadata": {},
    "outputs": [],
    "source": [
@@ -380,7 +380,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 14,
+   "execution_count": 15,
    "metadata": {},
    "outputs": [],
    "source": [
@@ -404,7 +404,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 15,
+   "execution_count": 16,
    "metadata": {},
    "outputs": [],
    "source": [
@@ -427,7 +427,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 16,
+   "execution_count": 17,
    "metadata": {},
    "outputs": [
     {
@@ -455,7 +455,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 17,
+   "execution_count": 18,
    "metadata": {},
    "outputs": [],
    "source": [
@@ -484,7 +484,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 18,
+   "execution_count": 19,
    "metadata": {},
    "outputs": [],
    "source": [
@@ -514,7 +514,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 19,
+   "execution_count": 20,
    "metadata": {},
    "outputs": [
     {
@@ -550,7 +550,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 20,
+   "execution_count": 21,
    "metadata": {},
    "outputs": [
     {
@@ -585,7 +585,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 21,
+   "execution_count": 22,
    "metadata": {},
    "outputs": [],
    "source": [
@@ -601,7 +601,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 22,
+   "execution_count": 23,
    "metadata": {},
    "outputs": [],
    "source": [
@@ -622,7 +622,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 23,
+   "execution_count": 24,
    "metadata": {},
    "outputs": [],
    "source": [
@@ -650,7 +650,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 24,
+   "execution_count": 25,
    "metadata": {},
    "outputs": [],
    "source": [
@@ -660,7 +660,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 25,
+   "execution_count": 26,
    "metadata": {},
    "outputs": [
     {
@@ -680,7 +680,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 26,
+   "execution_count": 27,
    "metadata": {},
    "outputs": [
     {
@@ -710,7 +710,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 27,
+   "execution_count": 28,
    "metadata": {},
    "outputs": [],
    "source": [
@@ -720,7 +720,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 28,
+   "execution_count": 29,
    "metadata": {},
    "outputs": [
     {
@@ -728,16 +728,16 @@
      "output_type": "stream",
      "text": [
       "The output of the manipulated ONNX model is: \n",
-      "[[ 5.  7.  3.  9.]\n",
-      " [10.  1. 14. 12.]\n",
-      " [ 3.  7.  6. 20.]\n",
-      " [ 9.  1.  6. 10.]]\n",
+      "[[12.  9. 14.  8.]\n",
+      " [ 9.  9.  4.  6.]\n",
+      " [ 3. 19.  9.  5.]\n",
+      " [ 8. 22.  7.  2.]]\n",
       "\n",
       "The output of the reference function is: \n",
-      "[[ 5.  7.  3.  9.]\n",
-      " [10.  1. 14. 12.]\n",
-      " [ 3.  7.  6. 20.]\n",
-      " [ 9.  1.  6. 10.]]\n",
+      "[[12.  9. 14.  8.]\n",
+      " [ 9.  9.  4.  6.]\n",
+      " [ 3. 19.  9.  5.]\n",
+      " [ 8. 22.  7.  2.]]\n",
       "\n",
       "The results are the same!\n"
      ]
diff --git a/notebooks/basics/2_modelwrapper.ipynb b/notebooks/basics/2_modelwrapper.ipynb
index ca9c4c6e43584cfcb12c795e5896e726a40fc5d6..6b3cd0337d938c100e0f71e61f8505a5b7377505 100644
--- a/notebooks/basics/2_modelwrapper.ipynb
+++ b/notebooks/basics/2_modelwrapper.ipynb
@@ -42,17 +42,17 @@
    "source": [
     "### Create a ModelWrapper instance\n",
     "\n",
-    "<font size=\"3\">Here we use a premade ONNX file on disk to load up the ModelWrapper, but this could have been produced from e.g. a trained Brevitas PyTorch model. See [this notebook](brevitas-network-import.ipynb) for more details.</font>"
+    "<font size=\"3\">Here we use a premade ONNX file on disk to load up the ModelWrapper, but this could have been produced from e.g. a trained Brevitas PyTorch model. See [this notebook](3_brevitas_network_import.ipynb) for more details.</font>"
    ]
   },
   {
    "cell_type": "code",
-   "execution_count": 2,
+   "execution_count": 3,
    "metadata": {},
    "outputs": [],
    "source": [
     "from finn.core.modelwrapper import ModelWrapper\n",
-    "model = ModelWrapper(\"LFCW1A1.onnx\")"
+    "model = ModelWrapper(\"../LFCW1A1.onnx\")"
    ]
   },
   {
@@ -66,7 +66,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 13,
+   "execution_count": 4,
    "metadata": {},
    "outputs": [
     {
@@ -132,7 +132,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 14,
+   "execution_count": 5,
    "metadata": {},
    "outputs": [
     {
@@ -162,7 +162,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 18,
+   "execution_count": 6,
    "metadata": {},
    "outputs": [
     {
@@ -208,7 +208,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 21,
+   "execution_count": 7,
    "metadata": {},
    "outputs": [
     {
@@ -235,7 +235,7 @@
     "\n",
     "Optionally, the dtype (container datatype) of the tensor can also be specified as third argument. By default it is set to TensorProto.FLOAT. \n",
     "    \n",
-    "**Important:** dtype should not be confused with FINN data type, which specifies the quantization annotation. See the remarks about FINN-ONNX in [this notebook](finn-basics.ipynb). It is safest to use floating point tensors as the container data type for best compatibility inside FINN.</font>"
+    "**Important:** dtype should not be confused with FINN data type, which specifies the quantization annotation. See the remarks about FINN-ONNX in [this notebook](0_getting_started.ipynb). It is safest to use floating point tensors as the container data type for best compatibility inside FINN.</font>"
    ]
   },
   {
@@ -249,12 +249,12 @@
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "<font size=\"3\">FINN introduces its [own data types](https://github.com/Xilinx/finn/blob/dev/src/finn/core/datatype.py) because ONNX does not natively support precisions less than 8 bits. FINN is about quantized neural networks, so precision of i.e. 4 bits, 3 bits, 2 bits or 1 bit are of interest. To represent the data within FINN, float tensors are used with additional annotation to specify the quantized data type of a tensor. The following helper functions are about this quantization annotation.</font>"
+    "<font size=\"3\">FINN introduces its [own data types](https://github.com/Xilinx/finn/blob/master/src/finn/core/datatype.py) because ONNX does not natively support precisions less than 8 bits. FINN is about quantized neural networks, so precisions of e.g. 4 bits, 3 bits, 2 bits or 1 bit are of interest. To represent the data within FINN, float tensors are used with additional annotation to specify the quantized data type of a tensor. The following helper functions are about this quantization annotation.</font>"
    ]
   },
   {
    "cell_type": "code",
-   "execution_count": 30,
+   "execution_count": 8,
    "metadata": {},
    "outputs": [
     {
@@ -291,7 +291,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 39,
+   "execution_count": 9,
    "metadata": {},
    "outputs": [
     {
@@ -330,7 +330,7 @@
    "metadata": {},
    "source": [
     "### More helper functions\n",
-    "<font size=\"3\">ModelWrapper contains more useful functions, if you are interested please have a look at the [Python code](https://github.com/Xilinx/finn/blob/dev/src/finn/core/modelwrapper.py) directly. Additionally, in the folder notebooks/ a Jupyter notebook about transformation passes [FINN-HowToTransformationPass](FINN-HowToTransformationPass.ipynb) and one about analysis passes [FINN-HowToAnalysisPass](FINN-HowToAnalysisPass.ipynb) can be found.</font>"
+    "<font size=\"3\">ModelWrapper contains more useful functions; if you are interested, please have a look at the [Python code](https://github.com/Xilinx/finn/blob/master/src/finn/core/modelwrapper.py) directly.</font>"
    ]
   },
   {
diff --git a/notebooks/basics/3_brevitas_network_import.ipynb b/notebooks/basics/3_brevitas_network_import.ipynb
index 404242908bca1c34ea600cc9616817975e35deca..30026e7aaa541641d4068ca0a7a0a3cf7c14088f 100644
--- a/notebooks/basics/3_brevitas_network_import.ipynb
+++ b/notebooks/basics/3_brevitas_network_import.ipynb
@@ -17,10 +17,11 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 2,
+   "execution_count": 1,
    "metadata": {},
    "outputs": [],
    "source": [
+    "import onnx\n",
     "import inspect\n",
     "\n",
     "def showSrc(what):\n",
@@ -38,7 +39,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 3,
+   "execution_count": 2,
    "metadata": {},
    "outputs": [
     {
@@ -103,7 +104,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 4,
+   "execution_count": 3,
    "metadata": {},
    "outputs": [
     {
@@ -298,7 +299,7 @@
        ")"
       ]
      },
-     "execution_count": 4,
+     "execution_count": 3,
      "metadata": {},
      "output_type": "execute_result"
     }
@@ -322,12 +323,12 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 6,
+   "execution_count": 10,
    "metadata": {},
    "outputs": [
     {
      "data": {
-      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAPsAAAD4CAYAAAAq5pAIAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8li6FKAAATB0lEQVR4nO3dfWxd5X0H8O/3XttxXhwSJ8GYJECIYIPSNQUPWoEmCm2agqbAtFGiFZGK1f0DpNKyF8S0FU3ahmAtmraOyW0iwsroulJWptEXSJkoaKAkKCThNbwkEC/EhLzY5MW5vve3P3zo3ODze8w999xzm+f7kSzb9+dz7pNrf3Pt+zvP89DMICInvlLRAxCR5lDYRSKhsItEQmEXiYTCLhKJtmbeWQenWSdmNvMuTwwM1LM0VBg4ecZuDcvl9FNXq9nO3ZZ+bgCwsfrPz/Z2/9yVSt3nztNRHMIxG530m5op7CRXAPh7AGUA3zGzO7yv78RMXMTLs9zliSkQOC8wAGBjY/XfdXuHf+5QIGt+vTz7pNRa9cBB/9wB5bnz3Hr13X3pxcB/Ym09p7r1scH/detFecbWp9bq/jWeZBnAtwB8DsC5AFaRPLfe84lIvrL8zX4hgFfN7HUzOwbgewBWNmZYItJoWcK+EMBbEz7fldz2K0j2k9xIcmMFoxnuTkSyyP3VeDMbMLM+M+trx7S8705EUmQJ+yCAxRM+X5TcJiItKEvYNwA4i+QSkh0ArgXwcGOGJSKNVnfrzczGSN4E4KcYb72tNbPnGzayiJTnzHHr1f376z53acYMt147fNg/QaAtWJ4926277bWS31IMCrQkvfYa2/wf/eqeoXpG1NIy9dnN7BEAjzRoLCKSI10uKxIJhV0kEgq7SCQUdpFIKOwikVDYRSLR1PnssQr1uu3YsWznn5m+RkDt0KFs5w6MvTo8XPe523p73HpoGmmwF+5cI1DuPcW/77d2+acOTQ2uZPue5kHP7CKRUNhFIqGwi0RCYReJhMIuEgmFXSQSar01QXAaaUhgmmmm9lqe5w7IukJreU76yrWAP7021FoLTd3N/D0tgJ7ZRSKhsItEQmEXiYTCLhIJhV0kEgq7SCQUdpFIqM/eBOV53W7d3W0UQHn+fP8ORp1ttU72dzq1Gf4uPaUhfxnrvcvPdOvzvrgztVap+UtBt31tlluvvfCaW88iy9TdVqVndpFIKOwikVDYRSKhsItEQmEXiYTCLhIJhV0kEuqzN0Gojx7y8t2L3Porn1qTWjti/pLGpcD/93tr/vE9Zb9P75nGdre+fNZqt14u+2O3SnqN0/xxm3ftAoBSV5dbr42MuPUiZAo7yR0ARgBUAYyZWV8jBiUijdeIZ/ZPmdneBpxHRHKkv9lFIpE17AbgZyQ3keyf7AtI9pPcSHJjBf7fQSKSn6y/xl9iZoMkTwbwKMmXzOyJiV9gZgMABgBgNrst4/2JSJ0yPbOb2WDyfgjAQwAubMSgRKTx6g47yZkku97/GMByANsaNTARaawsv8b3AHiI4+uOtwH4VzP7SUNGdYIJbXt8YOVvufUvfPQXbr3M9P+zd1b8v5y6Sn4f/bQ2f0551WpufdTGUmv7a/7a6x/7h+f8c9f8H98tf/HbqbVpP97gHtu28FS3nnXN+yLUHXYzex3Axxo4FhHJkVpvIpFQ2EUiobCLREJhF4mEwi4SCZo176K22ey2i3h50+7v18Wql/w2zurZQ279tcp7qbWl7X7r7GDtiFsvw9/SOTRFtob01tysUqd7bMgbzr8bAJ47dkpq7R+/dI17bPm/n/Xrc+e69ep+fwnuvDxj6zFs+yb9pumZXSQSCrtIJBR2kUgo7CKRUNhFIqGwi0RCYReJxK/XUtJ0er7ONM/xst8vtlqG6w0C0zwRuJZhzW1Xu/VT71zr1pfPSO+lh6agjtSqbv2y+//ErS99wO8n1zrTl4t
+c4W/HPOG/m+69SWBawjmlNKvT/jjK/2lpH/j1YVufWzXoFtvRXpmF4mEwi4SCYVdJBIKu0gkFHaRSCjsIpFQ2EUi0fz57KVP13+CJo61kbLOfT6y0t97Y9856ZdLjE13D4W1+Y/pmf8W6KNvecm/gwzO2+Q/F311wRNufVFgGWzPZ09d5tY1n11EWpbCLhIJhV0kEgq7SCQUdpFIKOwikVDYRSLR/Pns3rzz0LzwImWYS5+15zr9Pze59UWPlFNrVvG3ZC7POcmtVw8cdOuh7ahRS/+e1o4edQ996m7/+oKv/o3fZ8+C7R1uvag+ehbBZ3aSa0kOkdw24bZuko+S3J68968wEJHCTeXX+HsBrDjutlsBrDezswCsTz4XkRYWDLuZPQFg33E3rwSwLvl4HYCrGjwuEWmwev9m7zGz3cnHbwPoSftCkv0A+gGgE4G/70QkN5lfjbfxmTSpsynMbMDM+sysrx3+In8ikp96w76HZC8AJO/9bUZFpHD1hv1hANcnH18P4EeNGY6I5CX4NzvJBwBcCmA+yV0Avg7gDgDfJ3kDgJ0A/M2uJwqsU143rw8OZF5XPnS8xwL/5vL8eW69uvfdTOd3j61mu7ahdnQ08AX1j23ulgNuPct89Yr54ypND+wdH6hXh4c/7JByFwy7ma1KKV3e4LGISI50uaxIJBR2kUgo7CKRUNhFIqGwi0RCWzYngls21/ypou59t/kPc3Wf32IKKc/rdk7ut5hCU1iDAq21Umd6i4rT/XWu31ru/LumwNuu+r1aoGU4LXC159hYHSMqlp7ZRSKhsItEQmEXiYTCLhIJhV0kEgq7SCQUdpFItFafPTRN1RNYhtrGitvu2QK97qyq7x6/ROD/Y6Bf7PXBAYCd/vGhPr23XHRbYBnrpb/7mlvfWz3k1ueXZ6bWRkLLlgeuHzghl5IWkRODwi4SCYVdJBIKu0gkFHaRSCjsIpFQ2EUi0dw+O/253eE55fn2q12l9G2RWU6vTUWe2ypXLj7PPXbvR/0+etsR/3uyYN2zbt01w5/PPnDm/W49yyLY3z1wgVv3rl0AgFJXl1uvjYx86DHlTc/sIpFQ2EUiobCLREJhF4mEwi4SCYVdJBIKu0gkmjyfnf767lbJ8a79ufKlwBrmnJk+N5od7e6xb646w61b4Ltwxoo33Pqc9vTH9NaF33KPPafdH3st0M3+0hf9zXxrlj62L5z8Y/fYo+b3+HvK/jUCd+1bmlp76rNL3GNLM/wtl1uxjx4SfGYnuZbkEMltE267neQgyc3J2xX5DlNEsprKr/H3Algxye13m9my5O2Rxg5LRBotGHYzewKAf+2giLS8LC/Q3URyS/Jr/ty0LyLZT3IjyY0VS1+PTETyVW/Y7wGwFMAyALsBfCPtC81swMz6zKyvnf7ihiKSn7rCbmZ7zKxqZjUA3wZwYWOHJSKNVlfYSfZO+PRqANvSvlZEWkOwz07yAQCXAphPcheArwO4lOQyAAZgB4AvT+nezNy522zv8A8PzPv2lM89263v+L15bv03P7M9tTaw5N/dY731ywHgjcp7bn1J+yy3vmss/fhFbf6xIW9Ujrj1f178mFufUUr/nr5S8dd9Py3j2Be0pffCX/6a32c/669OvOevYNjNbNUkN6/JYSwikiNdLisSCYVdJBIKu0gkFHaRSCjsIpFo/pbNzpLMWVprIS/fkHpFLwDgtWv/ya1vGk0fW6i1FtJV8qffrj/iL1W99Wj6ctErZ/ktpAVl/0cg1PYL2V89nFo7u91/3CrmLx1+2Pyfl9Wzh1Jrl33+LvfYaz6y2q13/6H/uLXils56ZheJhMIuEgmFXSQSCrtIJBR2kUgo7CKRUNhFItH8Pruz7XKWrYlDW+i+9Hl/SWXA72VfMC19qubTR/1+8EMH/e2BH7vnk259/sD/uPXKp9PP/8rfnuIee+OCx936R/xZx/jJYX8558Vt6VNkz//pH7nHTt/p3/knr9zi1tec9mR
q7aj51zY8vewHbn3Fg1e6dVymPruIFERhF4mEwi4SCYVdJBIKu0gkFHaRSCjsIpGgBbbFbaSTSvPsE53pG77WjvrbQ7Wdvji1NvbmLvfYe3f+wq2HNoteWJ6RWit721ADWHPQ73Uv63zTrb9dne3WXx/tSa1d3fW8e2y3s9Qz4C8FDQAvHkufrw4Aq//yltTanPv86wdC2hYtdOv8bvp20/cu9fvo71T9PvzMUmAr69Mucet5ecbWY9j2TTp4PbOLREJhF4mEwi4SCYVdJBIKu0gkFHaRSCjsIpFoap99NrvtIl6ey7lD2z3v+cGZbv2pC+5z616/ebezZTIA9Aa2Hg5t2byobbpbH7X0qwRmlTrdY28avMit//w//Ln4J2/2r1CY9l8bUmttvf71B2N73nHrpY52t+5dt1FZ3uce+/N7v+PWz3nqOrd+2h9sdet5ydRnJ7mY5OMkXyD5PMmvJLd3k3yU5Pbkvb8Lg4gUaiq/xo8BuMXMzgXwCQA3kjwXwK0A1pvZWQDWJ5+LSIsKht3MdpvZs8nHIwBeBLAQwEoA65IvWwfgqrwGKSLZfag16EieAeDjAJ4B0GNmu5PS2wAmvUCbZD+AfgDoRPr15SKSrym/Gk9yFoAHAdxsZsMTazb+Kt+kr/SZ2YCZ9ZlZXzv8xQlFJD9TCjvJdowH/X4z+2Fy8x6SvUm9F0D6lpkiUrhg640kMf43+T4zu3nC7XcBeNfM7iB5K4BuM/tT71yh1lvbktPdsYy9sdOte0qdfgsKZ5/hlof+On1K4/knD7rHvjo83613dYy69Z37/UZHz53pbUE+7W/ZXJ7lb5tsx7Jtox2atuzhNP83QRv1HzfQmaYa+Lkvz5/n1qv7Dvj37SyZniev9TaVv9kvBnAdgK0kNye33QbgDgDfJ3kDgJ0ArmnEYEUkH8Gwm9mTANL+i8znChkRaThdLisSCYVdJBIKu0gkFHaRSCjsIpFo6pbNLJdQnpW+LHKoj+71Pqt73/Xve7o/TbS65SW3vuD303u+bwb6vR30l1seDfR8T+30e7peL7s0w79EuTo87NazKs1M7+PXDh3yjw302auBx50d6dcfhHr0oZ+n8mx/ee+8H9d66JldJBIKu0gkFHaRSCjsIpFQ2EUiobCLREJhF4lEU/vsVq25/cfgHGKn9xlaSrq6f78/uIDywt7U2tjrO/yDA330UleXW6+NjPjn95TL9R8LoDznJLdePXDQrdcO+9cYuOcO9apL/r8tON/dO3XB1yfkQc/sIpFQ2EUiobCLREJhF4mEwi4SCYVdJBIKu0gkmtpnDwnNIfZYJdv65iHBXnoGmfroOZ871EcPynNL8BzXZs9yfUCr0jO7SCQUdpFIKOwikVDYRSKhsItEQmEXiYTCLhKJYNhJLib5OMkXSD5P8ivJ7beTHCS5OXm7Iv/hiki9pnJRzRiAW8zsWZJdADaRfDSp3W1mf5ff8ESkUaayP/tuALuTj0dIvghgYd4DE5HG+lB/s5M8A8DHATyT3HQTyS0k15Kcm3JMP8mNJDdWUP8yQSKSzZTDTnIWgAcB3GxmwwDuAbAUwDKMP/N/Y7LjzGzAzPrMrK8d/t5dIpKfKYWdZDvGg36/mf0QAMxsj5lVzawG4NsALsxvmCKS1VRejSeANQBeNLNvTrh94nKrVwPY1vjhiUijTOXV+IsBXAdgK8nNyW23AVhFchkAA7ADwJdzGaGINMRUXo1/EgAnKT3S+OGISF50BZ1IJBR2kUgo7CKRUNhFIqGwi0RCYReJhMIuEgmFXSQSCrtIJBR2kUgo7CKRUNhFIqGwi0RCYReJBC3PLXWPvzPyHQA7J9w0H8Depg3gw2nVsbXquACNrV6NHNvpZrZgskJTw/6BOyc3mllfYQNwtOrYWnVcgMZWr2aNTb/Gi0RCYReJRNFhHyj4/j2tOrZWHRegsdWrKWMr9G92EWmeop/ZRaRJFHaRSBQSdpIrSL5M8lWStxYxhjQkd5D
cmmxDvbHgsawlOURy24Tbukk+SnJ78n7SPfYKGltLbOPtbDNe6GNX9PbnTf+bnWQZwCsAPgNgF4ANAFaZ2QtNHUgKkjsA9JlZ4RdgkPwdAO8BuM/MzktuuxPAPjO7I/mPcq6Z/VmLjO12AO8VvY13sltR78RtxgFcBWA1CnzsnHFdgyY8bkU8s18I4FUze93MjgH4HoCVBYyj5ZnZEwD2HXfzSgDrko/XYfyHpelSxtYSzGy3mT2bfDwC4P1txgt97JxxNUURYV8I4K0Jn+9Ca+33bgB+RnITyf6iBzOJHjPbnXz8NoCeIgczieA23s103DbjLfPY1bP9eVZ6ge6DLjGz8wF8DsCNya+rLcnG/wZrpd7plLbxbpZJthn/pSIfu3q3P8+qiLAPAlg84fNFyW0twcwGk/dDAB5C621Fvef9HXST90MFj+eXWmkb78m2GUcLPHZFbn9eRNg3ADiL5BKSHQCuBfBwAeP4AJIzkxdOQHImgOVova2oHwZwffLx9QB+VOBYfkWrbOOdts04Cn7sCt/+3Mya/gbgCoy/Iv8agD8vYgwp4zoTwHPJ2/NFjw3AAxj/ta6C8dc2bgAwD8B6ANsBPAagu4XG9i8AtgLYgvFg9RY0tksw/iv6FgCbk7crin7snHE15XHT5bIikdALdCKRUNhFIqGwi0RCYReJhMIuEgmFXSQSCrtIJP4PSkcHEGlbZOgAAAAASUVORK5CYII=\n",
+      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAPsAAAD4CAYAAAAq5pAIAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjMsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+AADFEAAARX0lEQVR4nO3dfYyVZXrH8d/FoDAw8iYRCaisG/5QqmUbgk1KyOKmxlUMbKJm/aPauAmarMmqTVqz/UOSaqJVa/pH3YStL9CsmiWoq0a7a82mWo1GNFQQW1CULGR4E5H3t+HqH/NgZ3We6549z3nOc9z7+0kmM3Ouec65OTM/zsv13Pdt7i4Af/xGNT0AAJ1B2IFMEHYgE4QdyARhBzIxupM3Zma89Z+ZUaPKH09OnTpV23VXvf6enp6wPjAw0PJ1183dbbjLK4XdzK6U9M+SeiT9q7vfV+X6cmU27O/mS6k/6ip/eKNHx38CqcCk6r29vaW1Q4cOhcem9PX1hfUDBw6U1lIt50mTJoX1zz77LKx3o5afxptZj6R/kfR9SRdLusHMLm7XwAC0V5XX7PMlfeTuW9z9uKSnJS1pz7AAtFuVsM+Q9Lsh328rLvs9ZrbMzNaa2doKtwWgotrfoHP3FZJWSLxBBzSpyiP7dknnDfl+ZnEZgC5UJezvSJptZt8yszMl/VDS8+0ZFoB2a/lpvLufNLPbJP1ag623x9z9g7aNLCPjx48P6wcPHmz5useMGRPWjx07FtZTbcFx48aF9ai9lmoppqSOj9prqT76vn37WhpTN6v0mt3dX5L0UpvGAqBGnC4LZIKwA5kg7EAmCDuQCcIOZIKwA5mwTq4um+vpsqled6qXffTo0bA+duzYlo9Nia676vWfffbZYb3qNNLofp06dWp47O7du8N6amrwyZMnw3qdyuaz88gOZIKwA5kg7EAmCDuQCcIOZIKwA5mg9fYNkGrNVfkd1nnddUtNDa6yem1q6m5qanCTS03TegMyR9iBTBB2IBOEHcgEYQcyQdiBTBB2IBP02TvgrLPOCuvRbqOSNHHixLB+4sSJ0lpqN9LUFNbPP/88rC9YsCCs33rrraW1VC/6jjvuCOtbt24N601OM20SfXYgc4QdyARhBzJB2IFMEHYgE4QdyARhBzJBn/0b4JFHHgnrUS871Wuuuox1b29vWI+ktk2+5JJLwvqmTZvC+vHjx0trZ5xxRnhsdO6ClP53HzlyJKzXqazPXmnLZjP7VNIBSQOSTrr7vCrXB6A+lcJeWOTue9pwPQBqxGt2IBNVw+6SfmNm75rZsuF+wMyWmdlaM1tb8bYAVFD1afwCd99uZudIesXM/sfdXxv6A+6+QtIKiTfogCZVemR39+3F512SnpU0vx2DAtB+LYfdzMab2Vmnv5Z0haQN7RoYgPaq8jR+mqRniz7taElPuvu/t2VUf2RSWzYvWrQorF922WVhPeqVHzx4MDw21W/u6+sL66nzNKI566m11x999NGWr1uS7rzzztLaW2+9FR5b93bSTWg57O6+RdKftnEsAGpE6w3IBGEHMkHYgUwQdiAThB3IBFNcu0Bqqubs2bPD+v79+0trEyZMCI+NpoFK6SmwVbZ8TrX9UlJLcO/du7e0tnTp0vDYdevWhfVUSzLV8qwTS0kDmSPsQCYIO5AJwg5kgrADmSDsQCYIO5CJdiw42TFRT7fOfnBK6thU/ZZbbgnrq1atCuszZ85s+bZTffZ77rknrK9evTqsn3nmmaW1K664Ijz2wQcfDOuprbCj2168eHF47LZt28L6nj3fvDVWeWQHMkHYgUwQdiAThB3IBGEHMkHYgUwQdiATHZ/Pnup3Rzo51naqOvd54cKFYf2iiy4qrY0bNy48dvTo+FSLNWvWhPUtW7aE9SpSyz3PmTMnrKfu90jq75T57AC6FmEHMkHYgUwQdiAThB3IBGEHMkHYgUx
0vM8+alT5/y9V54XXqcpc+lOnTlW67eg+S9VPnjwZHjt+/PiwfujQobCe2o46+p2l5tJfffXVYf3pp58O61X67Kk17VP3a5Na7rOb2WNmtsvMNgy5bIqZvWJmm4vPk9s5WADtN5Kn8U9IuvIrl90l6VV3ny3p1eJ7AF0sGXZ3f03SV/fRWSJpZfH1SknxXjoAGtfqGnTT3L2/+HqHpGllP2hmyyQta/F2ALRJ5QUn3d2jDRvdfYWkFRIbOwJNarX1ttPMpktS8XlX+4YEoA6thv15STcVX98k6VftGQ6AuiT77Gb2lKTvSpoqaaekuyU9J+mXks6XtFXS9e5evhn2/19XbU/jq64bX7UeSfVkU3uoR/uvV9Xb2xvWjxw5EtZT5wBUOcfgwgsvDOsff/xxy9edGldqTfqUw4cPVzq+irI+e/I1u7vfUFL6XqURAegoTpcFMkHYgUwQdiAThB3IBGEHMsGWzYVUC3JgYCCsR3p6esJ61WWHozZRqsWUmsKakrr+aNvkqCZJixYtamlMp0W/0xMnToTHpqa4Vvl7aAqP7EAmCDuQCcIOZIKwA5kg7EAmCDuQCcIOZKKr+ux1budcdTnnKuq+7QMHDpTWUv3iVK87dXyqTx8tF51axvq6664L60ePHg3rY8eOLa2l+uyp31mTWzK3ikd2IBOEHcgEYQcyQdiBTBB2IBOEHcgEYQcy0fE+ezS3u5t75dGSyanllFPq3Fb50ksvDY+dM2dOWE8tJf3cc8+F9UjUB5ekhQsXhvUqW3inlqGOzl2Qqi/B3QQe2YFMEHYgE4QdyARhBzJB2IFMEHYgE4QdyETH++zRnPU6++ipufKped1RT3j06PhuXLp0aVhPHb9kyZKwPmbMmNLa3Llzw2MnTZoU1lO97Ndff73l42fPnh0em1qbPdXrXr9+fWnt8ssvD4+N7lOpO/voKclHdjN7zMx2mdmGIZctN7PtZrau+Liq3mECqGokT+OfkHTlMJc/7O5zi4+X2jssAO2WDLu7vyZpbwfGAqBGVd6gu83M3i+e5k8u+yEzW2Zma81sbYXbAlBRq2H/maRvS5orqV/SQ2U/6O4r3H2eu89r8bYAtEFLYXf3ne4+4O6nJP1c0vz2DgtAu7UUdjObPuTbH0jaUPazALqDpfqoZvaUpO9Kmippp6S7i+/nSnJJn0q6xd37kzdmFt5Yqt+cmvcdmTVrVli/5pprwvrixYtLa6l516l526m509H+61K8hnlfX194bErVed3R7/SLL74Ij504cWJYT9m8eXNpbdWqVeGxDz1U+spUUnf32d192JNKkifVuPsNw1z8aOURAegoTpcFMkHYgUwQdiAThB3IBGEHMpFsvbX1xsw8Wna5zimud999d1hfvnx5WN+zZ09pberUqa0M6UuprYf37o2nJkT1Cy64IDw21RZMbdmccuzYsdJaahpp6u8h1YqNpi2ntlx++eWXw/rNN98c1pvc0rms9cYjO5AJwg5kgrADmSDsQCYIO5AJwg5kgrADmeh4nz2qV9maODXVMtX3rLLt8q5du8L61q1bw/oDDzwQ1levXh3W580rXwTo4YcfDo9Nbdk8eXLpimOSpG3btoX16Hf6xBNPhMd+8sknYf3aa68N69HU46rTa1988cWwnpoyXSf67EDmCDuQCcIOZIKwA5kg7EAmCDuQCcIOZKKjffZRo0Z5ND/6+PHj4fHnnHNOaW337t3hsak+e2rudNQvTm0HvWnTprA+ZcqUsJ5atjha7vn8888Pj03NZ08t771v376wfuONN5bWXnjhhfDYlNQ6AtFy0YsWLQqPTa0xkLpfUst/14k+O5A5wg5kgrADmSDsQCYIO5AJwg5kgrADmeiq+exVpPqeK1euDOvXX399y9d/+PDh8Nhx48aF9dS2yKl5/gMDA6W11Lrvb775Zlh/8sknw/q6devC+htvvFFaS51fkOrhp37n0Xkb8+fPD499++23w/rjjz8e1lPryte
p5T67mZ1nZr81s41m9oGZ/aS4fIqZvWJmm4vP8SoHABo1kqfxJyX9jbtfLOnPJf3YzC6WdJekV919tqRXi+8BdKlk2N29393fK74+IOlDSTMkLZF0+rnxSklL6xokgOriFz1fYWazJH1H0tuSprl7f1HaIWlayTHLJC1rfYgA2mHE78abWZ+kNZJud/f9Q2s++C7fsG++ufsKd5/n7uWrIgKo3YjCbmZnaDDov3D3Z4qLd5rZ9KI+XVK8xCqARiVbbzY4f3OlpL3ufvuQyx+Q9Jm732dmd0ma4u5/m7iu8MbOPffccCw7duwI65Fo+15JmjlzZli/9957S2szZswIj01tuZzaujjaLlqS7r///tLaxo0bw2NTU1xT2yKnpKYtR1JtwxMnToT1aOpx6u9+woQJYb3qlOk6lbXeRvKa/S8k/ZWk9WZ2uqn6U0n3Sfqlmf1I0lZJcaMaQKOSYXf3/5JU9l/k99o7HAB14XRZIBOEHcgEYQcyQdiBTBB2IBMdneLa09PjUV83NVU06n3u37+/tCZJfX19YT3VN416vlX6vVK655s6RyDqZad6+MeOHQvrVUW/79Ryzampwam/lyq/s5SqY6sTS0kDmSPsQCYIO5AJwg5kgrADmSDsQCYIO5CJrlpKOjWHOOqlp5YVrjove/r06aW1/v7+0tpI9Pb2hvXUls11XndqGetDhw6F9SpzylNGjYofq6rMKW/6/IQq6LMDmSPsQCYIO5AJwg5kgrADmSDsQCYIO5CJruqzA6iOPjuQOcIOZIKwA5kg7EAmCDuQCcIOZIKwA5lIht3MzjOz35rZRjP7wMx+Uly+3My2m9m64uOq+ocLoFXJk2rMbLqk6e7+npmdJeldSUs1uB/7QXd/cMQ3xkk1QO3KTqoZyf7s/ZL6i68PmNmHkma0d3gA6vYHvWY3s1mSviPp7eKi28zsfTN7zMwmlxyzzMzWmtnaSiMFUMmIz403sz5J/ynpXnd/xsymSdojySX9gwaf6t+cuA6exgM1K3saP6Kwm9kZkl6U9Gt3/6dh6rMkvejuf5K4HsIO1KzliTA2uDzoo5I+HBr04o27034gaUPVQQKoz0jejV8g6XVJ6yWdXpv3p5JukDRXg0/jP5V0S/FmXnRdPLIDNav0NL5dCDtQP+azA5kj7EAmCDuQCcIOZIKwA5kg7EAmCDuQCcIOZIKwA5kg7EAmCDuQCcIOZIKwA5kg7EAmkgtOttkeSVuHfD+1uKwbdevYunVcEmNrVTvHdkFZoaPz2b9242Zr3X1eYwMIdOvYunVcEmNrVafGxtN4IBOEHchE02Ff0fDtR7p1bN06LomxtaojY2v0NTuAzmn6kR1AhxB2IBONhN3MrjSz/zWzj8zsribGUMbMPjWz9cU21I3uT1fsobfLzDYMuWyKmb1iZpuLz8PusdfQ2LpiG+9gm/FG77umtz/v+Gt2M+uRtEnSX0raJukdSTe4+8aODqSEmX0qaZ67N34ChpktlHRQ0qrTW2uZ2T9K2uvu9xX/UU5297/rkrEt1x+4jXdNYyvbZvyv1eB9187tz1vRxCP7fEkfufsWdz8u6WlJSxoYR9dz99ck7f3KxUskrSy+XqnBP5aOKxlbV3D3fnd/r/j6gKTT24w3et8F4+qIJsI+Q9Lvhny/Td2137tL+o2ZvWtmy5oezDCmDdlma4ekaU0OZhjJbbw76SvbjHfNfdfK9udV8Qbd1y1w9z+T9H1JPy6ernYlH3wN1k29059J+rYG9wDsl/RQk4MpthlfI+l2d98/tNbkfTfMuDpyvzUR9u2Szhvy/czisq7g7tuLz7skPavBlx3dZOfpHXSLz7saHs+X3H2nuw+4+ylJP1eD912xzfgaSb9w92eKixu/74YbV6futybC/o6k2Wb2LTM7U9IPJT3fwDi+xszGF2+cyMzGS7pC3bcV9fOSbiq+vknSrxocy+/plm28y7YZV8P3XePbn7t7xz8kXaXBd+Q/lvT3TYy
hZFwXSvrv4uODpscm6SkNPq07ocH3Nn4k6WxJr0raLOk/JE3porH9mwa39n5fg8Ga3tDYFmjwKfr7ktYVH1c1fd8F4+rI/cbpskAmeIMOyARhBzJB2IFMEHYgE4QdyARhBzJB2IFM/B+tIjCppYWKvAAAAABJRU5ErkJggg==\n",
       "text/plain": [
        "<Figure size 432x288 with 1 Axes>"
       ]
@@ -347,12 +348,12 @@
     "input_tensor = onnx.load_tensor_from_string(raw_i)\n",
     "input_tensor_npy = nph.to_array(input_tensor)\n",
     "input_tensor_pyt = torch.from_numpy(input_tensor_npy).float()\n",
-    "imgplot = plt.imshow(input_tensor_npy.reshape(28,28))"
+    "imgplot = plt.imshow(input_tensor_npy.reshape(28,28), cmap='gray')"
    ]
   },
   {
    "cell_type": "code",
-   "execution_count": 7,
+   "execution_count": 11,
    "metadata": {},
    "outputs": [
     {
@@ -362,7 +363,7 @@
        "        1.1795e-04, 5.0158e-05, 1.0517e-01, 2.4597e-05])"
       ]
      },
-     "execution_count": 7,
+     "execution_count": 11,
      "metadata": {},
      "output_type": "execute_result"
     }
@@ -377,12 +378,12 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 8,
+   "execution_count": 12,
    "metadata": {},
    "outputs": [
     {
      "data": {
-      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYIAAAEICAYAAABS0fM3AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8li6FKAAAa3klEQVR4nO3debxdZXn28d9FEgRCBCFxgASIGCxxeAuNgKJCRSqoQB1qwRcrVkXfFkVBK1oriIqvI7WKVgQnZCgC2qABxFdE3yKBMBsmQwQSQAlzGEoYrv6xni2bk3P2WSRZ68BZ1/fz2Z+z13jfe59z1r3X86z1bNkmIiK6a62xTiAiIsZWCkFERMelEEREdFwKQUREx6UQRER0XApBRETHpRBE1CTpMEk/KM83k3SvpAmrsJ+PSTpmzWc4MOaOkn5Xcv7rNmPHk18KwTgk6XpJrx5m/s6SHi0Hg97j9L7lW0n6oaTbJN0t6XJJB63KwW5I3AMkLZD0oKTvPsFt3yLpPEn3S/rlKOv2v77lkq6R9I7VyX0ktm+0vb7tR2rktHTItkfYflcTeQ1wOPC1kvOPV3dnkr4r6dNrIK94Ekgh6J6by8Gg99gDQNKWwHxgCfAi2xsAfwPMAaasbkzg08C3V2HbO4B/Bf5v3Vi21weeDnwE+Jak2UNXkjRxFXJ5KtscWLgqG3bwveqcFILo+SRwnu2DbN8CYPsa22+1fdfQlSX9paQr+qbPlnRh3/Sve00Qtk8rn0JvH2Y/z5D0E0nLJN1Znk/vLbf9c9snUxWT2lz5MXAnMFvSFpIs6Z2SbgR+UeLvUM447pJ0maSd+3KbKenccnZxNjC1b1lvfxPL9EaSviPp5vI6fixpMnAGsEnfGdgm/U1MZds9JS0sOfxS0tZ9y66X9KFydna3pP+QtE5ZNrW8X3dJuqO85yv9T0u6DngucHrJ4Wklj7llu0WS3t23/mGSTpH0A0n3APsNeq/73ot3SFpSXv97Jb2k5H2XpK/1rb+lpF9Iur2cfR4vacO+5dtKuqS87z8sr/nTfctfL+nSst/zJL14UH4xuhSC6Hk1cMoTWP98YFY5GE0CXkx1wJsiaV2qM4lf19jPWsB3qD6xbgY8AHxt4BY1SFpL0huADYEr+hbtBGwNvEbSpsBPqc5WNgI+BJwqaVpZ9wTgIqoC8Cng7QNCHgesB7wAeCZwpO37gN15/FnY4wqapK2AE4EPANOAeVQH7LX7VnsLsBswk+p93q/MPxhYWrZ7FvAxYKUxY2xvCdwI7FFyeBA4qWy7CfBm4AhJr+rbbC+qv4cNgeMHvO5+2wOzgL+lOov7Z6q/qxcAb5G0U+9lA58tsbcGZgCHlfdjbeBHwHepficnAm/oBZC0DdWZ5XuAjYFvAnMlPa1mjjGMFILu2aR8kuo93lLmbwzcUncnth8ALgReCfwFcBnwX8COwA7A72yvdAYwzH5ut32q7fttLwc+Q3WwXlWbSLoLuA04FHib7Wv6lh9m+76S/77APNvzbD9q+2xgAfBaSZsBLwH+xfaDtn8FnM4wJD2H6oD/Xtt32n7I9rk18/1b4Ke2z7b9EPBFYF3gZX3r/Jvtm23fUXL48zL/IeA5wOYl5q9dY/AwSTOofk8fsf3fti8FjgH+rm+139j+cXlfHqj5Wj5V9vcz4D7gRNu32r6J6kPBNgC2F5XX+6DtZcCXeex3vgMwsbzmh2yfBlzQF2N/4Ju259t+xPb3gAfLdrGK0vbXPTfbnj7M/NupDirDkvTvVAdOgCNsHwGcC+xM9cnyXKpmmJ2o/jFrHQglrQccSfWJ9xll9hRJE0briB3BSK+vZ0nf882Bv5G0R9+8ScA5VJ9W7yyf6ntuoPr0OtQM4A7bd65CvpuU/QJg+1FJS4BN+9b5Q9/z+8s2AF+g+iT9M0kAR9uu05eyScl3ed+8G6jO4nqW8MT9se/5A8NMrw8
g6VnAV4BXUPU/rUX1t9PL7aYhBW3o7+ztkt7XN29tHntPYhXkjCB6fg68aaSFtt/b17xxRJndKwSvLM/PpSoEO1GzEFA1bzwf2N7208u+oGo+aMLQA8xxtjfse0wuB9NbgGeUdv6ezUbY5xJgo/527hHiDedmqoMbAKqO6DOAm0Z9IfZy2wfbfi6wJ3CQpF1G267E3EhS/0UAmw2J2eSwxEeU/b+o/M735bHf9y3ApuV96OkvvkuAzwz5na1n+8QG8x33UgjGr0mS1ul7jHb2dyjwMklfkPRsAEnPKx2Gwx3gAM6jOohvB1xgeyHVQW174Fe9lSRNLB2cE4AJQ/KZQvVp8S5JG5U86Nt2Qtl2IrBW2XbSE3gfBvkBsIek1/TiqLrcc7rtG6iaiT4paW1JLwf2GG4npXP9DODrqjq/J0nqFbQ/AhtL2mCEHE4GXidpl/K6DqY6ozpvtORLp+nzykHzbuAR4NHRtrO9pOz/s+U1vxh4Z3k/2jAFuBe4u/TTfLhv2W+oXscB5e9mL6q/r55vAe+VtL0qkyW9bkhRiycohWD8mkd1gO09Dhu0su3rgJcCWwALJd0NnEp1MFw+wjb3ARcDC22vKLN/A9xg+9a+VT9ecjiE6tPfA2UeVJ2K61K16Z8PnDkkzNvK+t+gakp4gOpgsNrKAXEvqk7WZVSfNj/MY/8Xb6UqandQFajvD9jd26ja7K8GbqXq/MX21VQdnotLn8zjmjBK/8W+wFep3oM9qDp1VzC6WVRncvdSve9ft31Oje0A9qH6Xd9M1Tl7qO2f19x2dX0S2JaqeP0UOK23oLzuN1IVpruo3pufUBVHbC8A3k11QcGdwCJGuaopRqd8MU1EPJlJmg/8u+3vjHUu41XOCCLiSUXSTpKeXZqG3k51yezQM8VYg3LVUEQ82Tyfqu9kMrAYeHPvJsdoRpqGIiI6Lk1DEREd95RrGpo6daq32GKLsU4jIuIp5aKLLrrN9rThlj3lCsEWW2zBggULxjqNiIinFEk3jLQsTUMRER2XQhAR0XEpBBERHZdCEBHRcSkEEREdl0IQEdFxKQQRER2XQhAR0XEpBBERHfeUu7M4nrgjz7628Rgf3HWrxmNERDNyRhAR0XEpBBERHZdCEBHRcSkEEREdl0IQEdFxKQQRER2XQhAR0XEpBBERHZdCEBHRcSkEEREdl0IQEdFxKQQRER2XQhAR0XEpBBERHZdCEBHRcSkEEREdl0IQEdFxjRYCSbtJukbSIkmHDLN8M0nnSLpE0uWSXttkPhERsbLGCoGkCcBRwO7AbGAfSbOHrPZx4GTb2wB7A19vKp+IiBhek2cE2wGLbC+2vQI4CdhryDoGnl6ebwDc3GA+ERExjCYLwabAkr7ppWVev8OAfSUtBeYB7xtuR5L2l7RA0oJly5Y1kWtERGeNdWfxPsB3bU8HXgscJ2mlnGwfbXuO7TnTpk1rPcmIiPGsyUJwEzCjb3p6mdfvncDJALZ/A6wDTG0wp4iIGKLJQnAhMEvSTElrU3UGzx2yzo3ALgCStqYqBGn7iYhoUWOFwPbDwAHAWcBVVFcHLZR0uKQ9y2oHA++WdBlwIrCfbTeVU0RErGxikzu3PY+qE7h/3if6nl8J7NhkDhERMdhYdxZHRMQYSyGIiOi4FIKIiI5LIYiI6LgUgoiIjkshiIjouBSCiIiOSyGIiOi4FIKIiI5LIYiI6LgUgoiIjkshiIjouBSCiIiOSyGIiOi4FIKIiI5LIYiI6LgUgoiIjkshiIjouBSCiIiOSyGIiOi4FIKIiI4btRBIelEbiURExNioc0bwdUkXSPoHSRs0nlFERLRq1EJg+xXA/wZmABdJOkHSro1nFhERrajVR2D7d8DHgY8AOwH/JulqSW9sMrmIiGhenT6CF0s6ErgKeBWwh+2ty/MjG84vIiIaNrHGOl8FjgE+ZvuB3kzbN0v6eGOZRUREK+o0Df3I9nH9RUDSgQC2j2sss4iIaEWdQvB
3w8zbbw3nERERY2TEpiFJ+wBvBWZKmtu3aApwR9OJRUREOwb1EZwH3AJMBb7UN385cHmTSUVERHtGLAS2bwBuAF7aXjoREdG2QU1D/9/2yyUtB9y/CLDtpzeeXURENG7QGcHLy88p7aUTERFtG3RGsNGgDW2nwzgiYhwY1Fl8EVWTkIZZZuC5jWQUERGtGtQ0NLPNRCIiYmyMeEOZpD8rP7cd7lFn55J2k3SNpEWSDhlhnbdIulLSQkknrNrLiIiIVTWoaeggYH8efw9Bj6kGnRuRpAnAUcCuwFLgQklzbV/Zt84s4KPAjrbvlPTMJ5h/RESspkFNQ/uXn3+5ivveDlhkezGApJOAvYAr+9Z5N3CU7TtLrFtXMVZERKyiOsNQryPpIEmnSTpV0gckrVNj35sCS/qml5Z5/bYCtpL0X5LOl7TbCDnsL2mBpAXLli2rEToiIuqqM+jc94EXUA1H/bXyfE2NOjoRmAXsDOwDfEvShkNXsn207Tm250ybNm0NhY6ICKj3fQQvtD27b/ocSVeOuPZjbqL6esue6WVev6XAfNsPAb+XdC1VYbiwxv4jImINqHNGcLGkHXoTkrYHFtTY7kJglqSZktYG9gbmDlnnx1RnA0iaStVUtLjGviMiYg0ZdGfxFVRXB00CzpN0Y5neHLh6tB3bfljSAcBZwATg27YXSjocWGB7bln2V+UM4xHgw7ZvX90XFRER9Q1qGnr96u7c9jxg3pB5n+h7bqrLVA9a3VgREbFqRhuG+k/KNf51rhaKiIinkDqXj+4p6XfA74FzgeuBMxrOKyIiWlKns/hTwA7AtWX8oV2A8xvNKiIiWlOnEDxUOnDXkrSW7XOAOQ3nFRERLalzH8FdktYHfg0cL+lW4L5m04qIiLbUOSPYC3gA+ABwJnAdsEeTSUVERHtGPSOwfZ+kZ1MNIncHcFau9Y+IGD/qXDX0LuAC4I3Am4HzJf1904lFREQ76vQRfBjYpncWIGlj4Dzg200mFhER7ajTR3A7sLxvenmZFxER48CgsYZ6wz4sAuZL+k+qsYb2Ai5vIbeIiGjBoKahKeXndeXR85/NpRMREW0bNNbQJ/uny70E2L636aQiIqI9da4aeqGkS4CFwEJJF0l6QfOpRUREG+p0Fh8NHGR7c9ubAwcD32o2rYiIaEudQjC5jC8EgO1fApMbyygiIlpV5z6CxZL+hce+sH5f8nWSERHjRp0zgr8HpgGnAacCU8u8iIgYBwaeEUiaAPyz7fe3lE9ERLRs4BmB7UeAl7eUS0REjIE6fQSXSJoL/JC+7yGwfVpjWUVERGvqFIJ1qMYWelXfPFP1GURExFNcrdFHbd/WeCYRETEmRuwjkLSHpGXA5ZKWSnpZi3lFRERLBnUWfwZ4he1NgDcBn20npYiIaNOgQvCw7asBbM/nsdFIIyJiHBnUR/DMvu8kWGna9pebSysiItoyqBB8i8efBQydjoiIcaD29xFERMT4VGesoYiIGMdSCCIiOi6FICKi40bsIxhyxdBKctVQRMT4MOiqod4VQs8HXgLMLdN7ABc0mVRERLRn1KuGJP0K2Nb28jJ9GPDTVrKLiIjG1ekjeBawom96RZkXERHjQJ3RR78PXCDpR2X6r4HvNZdSRES0adRCYPszks4AXlFmvcP2Jc2mFRERbal7+eh6wD22vwIslTSzzkaSdpN0jaRFkg4ZsN6bJFnSnJr5RETEGjJqIZB0KPAR4KNl1iTgBzW2mwAcBewOzAb2kTR7mPWmAAcC8+unHRERa0qdM4I3AHtSvq/Y9s3UG3xuO2CR7cW2VwAnAXsNs96ngM8B/10r44iIWKPqFIIVtk31PcVImlxz35sCS/qml5Z5fyJpW2CG7YGXo0raX9ICSQuWLVtWM3xERNRRpxCcLOmbwIaS3g38HDhmdQNLWgv4MnDwaOvaPtr2HNtzpk2btrqhIyKiT52rhr4oaVfgHqq7jD9h++wa+74JmNE3Pb3M65kCvBD
4pSSAZwNzJe1pe0HN/CMiYjWNWggkfc72R4Czh5k3yIXArHKF0U3A3sBbewtt3w1M7dvnL4EPpQhERLSrTtPQrsPM2320jWw/DBwAnAVcBZxse6GkwyXt+cTSjIiIpgwaffT/AP8AbCnp8r5FU4Dz6uzc9jxg3pB5nxhh3Z3r7DMiItasQU1DJwBnAJ8F+m8GW277jkazioiI1ozYNGT7btvXA18B7rB9g+0bgIclbd9WghER0aw6fQTfAO7tm763zIuIiHGgTiFQuaEMANuPUm/U0oiIeAqoUwgWS3q/pEnlcSCwuOnEIiKiHXUKwXuBl1HdC7AU2B7Yv8mkIiKiPXXuLL6V6mawiIgYhwbdR/BPtj8v6auUAef62X5/o5lFREQrBp0RXFV+ZsiHiIhxbMRCYPv08jPfTxwRMY4Naho6nWGahHpsZ7ygiIhxYFDT0BfLzzdSDRHd+3rKfYA/NplURES0Z1DT0LkAkr5ku/9L5U+XlH6DiIhxos59BJMlPbc3Ub5foO7XVUZExJNcnaEiPkj1LWKLAQGbA+9pNKuIiGhNnRvKzpQ0C/izMutq2w82m1ZERLRl1KYhSesBHwYOsH0ZsJmk1zeeWUREtKJOH8F3gBXAS8v0TcCnG8soIiJaVacQbGn788BDALbvp+oriIiIcaBOIVghaV3KzWWStgTSRxARMU7UuWroUOBMYIak44Edgf2aTCoiItozsBBIEnA11d3FO1A1CR1o+7YWcouIiBYMLAS2LWme7RcBP20pp4iIaFGdPoKLJb2k8UwiImJM1Okj2B7YV9L1wH1UzUO2/eImE4uIiHbUKQSvaTyLiIgYM4O+j2Adqi+ufx5wBXCs7YfbSiwiItoxqI/ge8AcqiKwO/ClVjKKiIhWDWoaml2uFkLSscAF7aQUERFtGnRG8FDvSZqEIiLGr0FnBP9L0j3luYB1y3TvqqGnN55dREQ0btBXVU5oM5GIiBgbdW4oi4iIcSyFICKi41IIIiI6LoUgIqLjUggiIjqu0UIgaTdJ10haJOmQYZYfJOlKSZdL+n+SNm8yn4iIWFljhUDSBOAoquEpZgP7SJo9ZLVLgDllJNNTgM83lU9ERAyvyTOC7YBFthfbXgGcBOzVv4Ltc2zfXybPB6Y3mE9ERAyjyUKwKbCkb3ppmTeSdwJnDLdA0v6SFkhasGzZsjWYYkREPCk6iyXtSzXS6ReGW277aNtzbM+ZNm1au8lFRIxzdb6YZlXdBMzom55e5j2OpFcD/wzsZPvBBvOJiIhhNHlGcCEwS9JMSWsDewNz+1eQtA3wTWBP27c2mEtERIygsUJQhq4+ADgLuAo42fZCSYdL2rOs9gVgfeCHki6VNHeE3UVEREOabBrC9jxg3pB5n+h7/uom40dExOieFJ3FERExdlIIIiI6LoUgIqLjUggiIjouhSAiouNSCCIiOi6FICKi41IIIiI6LoUgIqLjUggiIjouhSAiouNSCCIiOi6FICKi41IIIiI6LoUgIqLjUggiIjqu0S+miYho05FnX9t4jA/uulXjMdqWM4KIiI5LIYiI6LgUgoiIjkshiIjouBSCiIiOSyGIiOi4FIKIiI5LIYiI6LgUgoiIjkshiIjouBSCiIiOSyGIiOi4FIKIiI5LIYiI6LgUgoiIjkshiIjouBSCiIiOSyGIiOi4FIKIiI5LIYiI6LgUgoiIjmu0EEjaTdI1khZJOmSY5U+T9B9l+XxJWzSZT0RErKyxQiBpAnAUsDswG9hH0uwhq70TuNP284Ajgc81lU9ERAxvYoP73g5YZHsxgKSTgL2AK/vW2Qs4rDw/BfiaJNl2Ewkdefa1Tez2cT6461aNx4iIWJOaLASbAkv6ppcC24+0ju2HJd0NbAzc1r+SpP2B/cvkvZKuaSTj4U0dms8gB41h7DUsrzuxE3sYa/Bvve3XvflIC5osBGuM7aOBo8citqQFtuckdmIndmKPl9hDNdlZfBMwo296epk37Dq
SJgIbALc3mFNERAzRZCG4EJglaaaktYG9gblD1pkLvL08fzPwi6b6ByIiYniNNQ2VNv8DgLOACcC3bS+UdDiwwPZc4FjgOEmLgDuoisWTzZg0SSV2Yid2YrdF+QAeEdFtubM4IqLjUggiIjouhWAEow2P0XDsb0u6VdJvW447Q9I5kq6UtFDSgS3GXkfSBZIuK7E/2VbsvhwmSLpE0k/GIPb1kq6QdKmkBS3H3lDSKZKulnSVpJe2FPf55fX2HvdI+kAbsUv8D5a/td9KOlHSOi3GPrDEXdjmax6R7TyGPKg6t68DngusDVwGzG4x/iuBbYHftvy6nwNsW55PAa5t63UDAtYvzycB84EdWn79BwEnAD9pM26JfT0wte24Jfb3gHeV52sDG45BDhOAPwCbtxRvU+D3wLpl+mRgv5ZivxD4LbAe1QU7PweeNxa/+94jZwTD+9PwGLZXAL3hMVph+1dUV1G1yvYtti8uz5cDV1H9w7QR27bvLZOTyqO1KxkkTQdeBxzTVswnA0kbUH3wOBbA9grbd41BKrsA19m+ocWYE4F1yz1M6wE3txR3a2C+7fttPwycC7yxpdjDSiEY3nDDY7RyQHyyKCPBbkP1ybytmBMkXQrcCpxtu7XYwL8C/wQ82mLMfgZ+JumiMqRKW2YCy4DvlGaxYyRNbjF+z97AiW0Fs30T8EXgRuAW4G7bP2sp/G+BV0jaWNJ6wGt5/M23rUshiJVIWh84FfiA7Xvaimv7Edt/TnUX+naSXthGXEmvB261fVEb8UbwctvbUo3W+4+SXtlS3IlUzZDfsL0NcB/Qdp/Y2sCewA9bjPkMqrP8mcAmwGRJ+7YR2/ZVVCMt/ww4E7gUeKSN2CNJIRheneExxiVJk6iKwPG2TxuLHErTxDnAbi2F3BHYU9L1VM2Ar5L0g5ZiA3/6hIrtW4EfUTVPtmEpsLTv7OsUqsLQpt2Bi23/scWYrwZ+b3uZ7YeA04CXtRXc9rG2/8L2K4E7qfrjxkwKwfDqDI8x7kgSVVvxVba/3HLsaZI2LM/XBXYFrm4jtu2P2p5uewuq3/UvbLfy6RBA0mRJU3rPgb+iaj5onO0/AEskPb/M2oXHDxXfhn1osVmouBHYQdJ65e9+F6o+sVZIemb5uRlV/8AJbcUezlNi9NG2eYThMdqKL+lEYGdgqqSlwKG2j20h9I7A24ArSls9wMdsz2sh9nOA75UvNFoLONl265dxjpFnAT+qjkdMBE6wfWaL8d8HHF8+9CwG3tFW4FL4dgXe01ZMANvzJZ0CXAw8DFxCu0M+nCppY+Ah4B/HqIP+TzLEREREx6VpKCKi41IIIiI6LoUgIqLjUggiIjouhSAiouNSCCIiOi6FICKi4/4HEHMv4f97kiwAAAAASUVORK5CYII=\n",
+      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYIAAAEICAYAAABS0fM3AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjMsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+AADFEAAAa3klEQVR4nO3debxdZXn28d9FEgRCBCFxgASIGCxxeAuNgKJCRSqoQB1qwRcrVkXfFkVBK1oriIqvI7WKVgQnZCgC2qABxFdE3yKBMBsmQwQSQAlzGEoYrv6xni2bk3P2WSRZ68BZ1/fz2Z+z13jfe59z1r3X86z1bNkmIiK6a62xTiAiIsZWCkFERMelEEREdFwKQUREx6UQRER0XApBRETHpRBE1CTpMEk/KM83k3SvpAmrsJ+PSTpmzWc4MOaOkn5Xcv7rNmPHk18KwTgk6XpJrx5m/s6SHi0Hg97j9L7lW0n6oaTbJN0t6XJJB63KwW5I3AMkLZD0oKTvPsFt3yLpPEn3S/rlKOv2v77lkq6R9I7VyX0ktm+0vb7tR2rktHTItkfYflcTeQ1wOPC1kvOPV3dnkr4r6dNrIK94Ekgh6J6by8Gg99gDQNKWwHxgCfAi2xsAfwPMAaasbkzg08C3V2HbO4B/Bf5v3Vi21weeDnwE+Jak2UNXkjRxFXJ5KtscWLgqG3bwveqcFILo+SRwnu2DbN8CYPsa22+1fdfQlSX9paQr+qbPlnRh3/Sve00Qtk8rn0JvH2Y/z5D0E0nLJN1Znk/vLbf9c9snUxWT2lz5MXAnMFvSFpIs6Z2SbgR+UeLvUM447pJ0maSd+3KbKenccnZxNjC1b1lvfxPL9EaSviPp5vI6fixpMnAGsEnfGdgm/U1MZds9JS0sOfxS0tZ9y66X9KFydna3pP+QtE5ZNrW8X3dJuqO85yv9T0u6DngucHrJ4Wklj7llu0WS3t23/mGSTpH0A0n3APsNeq/73ot3SFpSXv97Jb2k5H2XpK/1rb+lpF9Iur2cfR4vacO+5dtKuqS87z8sr/nTfctfL+nSst/zJL14UH4xuhSC6Hk1cMoTWP98YFY5GE0CXkx1wJsiaV2qM4lf19jPWsB3qD6xbgY8AHxt4BY1SFpL0huADYEr+hbtBGwNvEbSpsBPqc5WNgI+BJwqaVpZ9wTgIqoC8Cng7QNCHgesB7wAeCZwpO37gN15/FnY4wqapK2AE4EPANOAeVQH7LX7VnsLsBswk+p93q/MPxhYWrZ7FvAxYKUxY2xvCdwI7FFyeBA4qWy7CfBm4AhJr+rbbC+qv4cNgeMHvO5+2wOzgL+lOov7Z6q/qxcAb5G0U+9lA58tsbcGZgCHlfdjbeBHwHepficnAm/oBZC0DdWZ5XuAjYFvAnMlPa1mjjGMFILu2aR8kuo93lLmbwzcUncnth8ALgReCfwFcBnwX8COwA7A72yvdAYwzH5ut32q7fttLwc+Q3WwXlWbSLoLuA04FHib7Wv6lh9m+76S/77APNvzbD9q+2xgAfBaSZsBLwH+xfaDtn8FnM4wJD2H6oD/Xtt32n7I9rk18/1b4Ke2z7b9EPBFYF3gZX3r/Jvtm23fUXL48zL/IeA5wOYl5q9dY/AwSTOofk8fsf3fti8FjgH+rm+139j+cXlfHqj5Wj5V9vcz4D7gRNu32r6J6kPBNgC2F5XX+6DtZcCXeex3vgMwsbzmh2yfBlzQF2N/4Ju259t+xPb3gAfLdrGK0vbXPTfbnj7M/NupDirDkvTvVAdOgCNsHwGcC+xM9cnyXKpmmJ2o/jFrHQglrQccSfWJ9xll9hRJE0briB3BSK+vZ0nf882Bv5G0R9+8ScA5VJ9W7yyf6ntuoPr0OtQM4A7bd65CvpuU/QJg+1FJS4BN+9b5Q9/z+8s2AF+g+iT9M0kAR9uu05eyScl3ed+8G6jO4nqW8MT9se/5A8NMrw8
g6VnAV4BXUPU/rUX1t9PL7aYhBW3o7+ztkt7XN29tHntPYhXkjCB6fg68aaSFtt/b17xxRJndKwSvLM/PpSoEO1GzEFA1bzwf2N7208u+oGo+aMLQA8xxtjfse0wuB9NbgGeUdv6ezUbY5xJgo/527hHiDedmqoMbAKqO6DOAm0Z9IfZy2wfbfi6wJ3CQpF1G267E3EhS/0UAmw2J2eSwxEeU/b+o/M735bHf9y3ApuV96OkvvkuAzwz5na1n+8QG8x33UgjGr0mS1ul7jHb2dyjwMklfkPRsAEnPKx2Gwx3gAM6jOohvB1xgeyHVQW174Fe9lSRNLB2cE4AJQ/KZQvVp8S5JG5U86Nt2Qtl2IrBW2XbSE3gfBvkBsIek1/TiqLrcc7rtG6iaiT4paW1JLwf2GG4npXP9DODrqjq/J0nqFbQ/AhtL2mCEHE4GXidpl/K6DqY6ozpvtORLp+nzykHzbuAR4NHRtrO9pOz/s+U1vxh4Z3k/2jAFuBe4u/TTfLhv2W+oXscB5e9mL6q/r55vAe+VtL0qkyW9bkhRiycohWD8mkd1gO09Dhu0su3rgJcCWwALJd0NnEp1MFw+wjb3ARcDC22vKLN/A9xg+9a+VT9ecjiE6tPfA2UeVJ2K61K16Z8PnDkkzNvK+t+gakp4gOpgsNrKAXEvqk7WZVSfNj/MY/8Xb6UqandQFajvD9jd26ja7K8GbqXq/MX21VQdnotLn8zjmjBK/8W+wFep3oM9qDp1VzC6WVRncvdSve9ft31Oje0A9qH6Xd9M1Tl7qO2f19x2dX0S2JaqeP0UOK23oLzuN1IVpruo3pufUBVHbC8A3k11QcGdwCJGuaopRqd8MU1EPJlJmg/8u+3vjHUu41XOCCLiSUXSTpKeXZqG3k51yezQM8VYg3LVUEQ82Tyfqu9kMrAYeHPvJsdoRpqGIiI6Lk1DEREd95RrGpo6daq32GKLsU4jIuIp5aKLLrrN9rThlj3lCsEWW2zBggULxjqNiIinFEk3jLQsTUMRER2XQhAR0XEpBBERHZdCEBHRcSkEEREdl0IQEdFxKQQRER2XQhAR0XEpBBERHfeUu7M4nrgjz7628Rgf3HWrxmNERDNyRhAR0XEpBBERHZdCEBHRcSkEEREdl0IQEdFxKQQRER2XQhAR0XEpBBERHZdCEBHRcSkEEREdl0IQEdFxKQQRER2XQhAR0XEpBBERHZdCEBHRcSkEEREdl0IQEdFxjRYCSbtJukbSIkmHDLN8M0nnSLpE0uWSXttkPhERsbLGCoGkCcBRwO7AbGAfSbOHrPZx4GTb2wB7A19vKp+IiBhek2cE2wGLbC+2vQI4CdhryDoGnl6ebwDc3GA+ERExjCYLwabAkr7ppWVev8OAfSUtBeYB7xtuR5L2l7RA0oJly5Y1kWtERGeNdWfxPsB3bU8HXgscJ2mlnGwfbXuO7TnTpk1rPcmIiPGsyUJwEzCjb3p6mdfvncDJALZ/A6wDTG0wp4iIGKLJQnAhMEvSTElrU3UGzx2yzo3ALgCStqYqBGn7iYhoUWOFwPbDwAHAWcBVVFcHLZR0uKQ9y2oHA++WdBlwIrCfbTeVU0RErGxikzu3PY+qE7h/3if6nl8J7NhkDhERMdhYdxZHRMQYSyGIiOi4FIKIiI5LIYiI6LgUgoiIjkshiIjouBSCiIiOSyGIiOi4FIKIiI5LIYiI6LgUgoiIjkshiIjouBSCiIiOSyGIiOi4FIKIiI5LIYiI6LgUgoiIjkshiIjouBSCiIiOSyGIiOi4FIKIiI4btRBIelEbiURExNioc0bwdUkXSPoHSRs0nlFERLRq1EJg+xXA/wZmABdJOkHSro1nFhERrajVR2D7d8DHgY8AOwH/JulqSW9sMrmIiGhenT6CF0s6ErgKeBWwh+2ty/MjG84vIiIaNrHGOl8FjgE+ZvuB3kzbN0v6eGOZRUREK+o0Df3I9nH9RUDSgQC2j2sss4iIaEWdQvB
3w8zbbw3nERERY2TEpiFJ+wBvBWZKmtu3aApwR9OJRUREOwb1EZwH3AJMBb7UN385cHmTSUVERHtGLAS2bwBuAF7aXjoREdG2QU1D/9/2yyUtB9y/CLDtpzeeXURENG7QGcHLy88p7aUTERFtG3RGsNGgDW2nwzgiYhwY1Fl8EVWTkIZZZuC5jWQUERGtGtQ0NLPNRCIiYmyMeEOZpD8rP7cd7lFn55J2k3SNpEWSDhlhnbdIulLSQkknrNrLiIiIVTWoaeggYH8efw9Bj6kGnRuRpAnAUcCuwFLgQklzbV/Zt84s4KPAjrbvlPTMJ5h/RESspkFNQ/uXn3+5ivveDlhkezGApJOAvYAr+9Z5N3CU7TtLrFtXMVZERKyiOsNQryPpIEmnSTpV0gckrVNj35sCS/qml5Z5/bYCtpL0X5LOl7TbCDnsL2mBpAXLli2rEToiIuqqM+jc94EXUA1H/bXyfE2NOjoRmAXsDOwDfEvShkNXsn207Tm250ybNm0NhY6ICKj3fQQvtD27b/ocSVeOuPZjbqL6esue6WVev6XAfNsPAb+XdC1VYbiwxv4jImINqHNGcLGkHXoTkrYHFtTY7kJglqSZktYG9gbmDlnnx1RnA0iaStVUtLjGviMiYg0ZdGfxFVRXB00CzpN0Y5neHLh6tB3bfljSAcBZwATg27YXSjocWGB7bln2V+UM4xHgw7ZvX90XFRER9Q1qGnr96u7c9jxg3pB5n+h7bqrLVA9a3VgREbFqRhuG+k/KNf51rhaKiIinkDqXj+4p6XfA74FzgeuBMxrOKyIiWlKns/hTwA7AtWX8oV2A8xvNKiIiWlOnEDxUOnDXkrSW7XOAOQ3nFRERLalzH8FdktYHfg0cL+lW4L5m04qIiLbUOSPYC3gA+ABwJnAdsEeTSUVERHtGPSOwfZ+kZ1MNIncHcFau9Y+IGD/qXDX0LuAC4I3Am4HzJf1904lFREQ76vQRfBjYpncWIGlj4Dzg200mFhER7ajTR3A7sLxvenmZFxER48CgsYZ6wz4sAuZL+k+qsYb2Ai5vIbeIiGjBoKahKeXndeXR85/NpRMREW0bNNbQJ/uny70E2L636aQiIqI9da4aeqGkS4CFwEJJF0l6QfOpRUREG+p0Fh8NHGR7c9ubAwcD32o2rYiIaEudQjC5jC8EgO1fApMbyygiIlpV5z6CxZL+hce+sH5f8nWSERHjRp0zgr8HpgGnAacCU8u8iIgYBwaeEUiaAPyz7fe3lE9ERLRs4BmB7UeAl7eUS0REjIE6fQSXSJoL/JC+7yGwfVpjWUVERGvqFIJ1qMYWelXfPFP1GURExFNcrdFHbd/WeCYRETEmRuwjkLSHpGXA5ZKWSnpZi3lFRERLBnUWfwZ4he1NgDcBn20npYiIaNOgQvCw7asBbM/nsdFIIyJiHBnUR/DMvu8kWGna9pebSysiItoyqBB8i8efBQydjoiIcaD29xFERMT4VGesoYiIGMdSCCIiOi6FICKi40bsIxhyxdBKctVQRMT4MOiqod4VQs8HXgLMLdN7ABc0mVRERLRn1KuGJP0K2Nb28jJ9GPDTVrKLiIjG1ekjeBawom96RZkXERHjQJ3RR78PXCDpR2X6r4HvNZdSRES0adRCYPszks4AXlFmvcP2Jc2mFRERbal7+eh6wD22vwIslTSzzkaSdpN0jaRFkg4ZsN6bJFnSnJr5RETEGjJqIZB0KPAR4KNl1iTgBzW2mwAcBewOzAb2kTR7mPWmAAcC8+unHRERa0qdM4I3AHtSvq/Y9s3UG3xuO2CR7cW2VwAnAXsNs96ngM8B/10r44iIWKPqFIIVtk31PcVImlxz35sCS/qml5Z5fyJpW2CG7YGXo0raX9ICSQuWLVtWM3xERNRRpxCcLOmbwIaS3g38HDhmdQNLWgv4MnDwaOvaPtr2HNtzpk2btrqhIyKiT52rhr4oaVfgHqq7jD9h++wa+74JmNE3Pb3M65kCvBD
4pSSAZwNzJe1pe0HN/CMiYjWNWggkfc72R4Czh5k3yIXArHKF0U3A3sBbewtt3w1M7dvnL4EPpQhERLSrTtPQrsPM2320jWw/DBwAnAVcBZxse6GkwyXt+cTSjIiIpgwaffT/AP8AbCnp8r5FU4Dz6uzc9jxg3pB5nxhh3Z3r7DMiItasQU1DJwBnAJ8F+m8GW277jkazioiI1ozYNGT7btvXA18B7rB9g+0bgIclbd9WghER0aw6fQTfAO7tm763zIuIiHGgTiFQuaEMANuPUm/U0oiIeAqoUwgWS3q/pEnlcSCwuOnEIiKiHXUKwXuBl1HdC7AU2B7Yv8mkIiKiPXXuLL6V6mawiIgYhwbdR/BPtj8v6auUAef62X5/o5lFREQrBp0RXFV+ZsiHiIhxbMRCYPv08jPfTxwRMY4Naho6nWGahHpsZ7ygiIhxYFDT0BfLzzdSDRHd+3rKfYA/NplURES0Z1DT0LkAkr5ku/9L5U+XlH6DiIhxos59BJMlPbc3Ub5foO7XVUZExJNcnaEiPkj1LWKLAQGbA+9pNKuIiGhNnRvKzpQ0C/izMutq2w82m1ZERLRl1KYhSesBHwYOsH0ZsJmk1zeeWUREtKJOH8F3gBXAS8v0TcCnG8soIiJaVacQbGn788BDALbvp+oriIiIcaBOIVghaV3KzWWStgTSRxARMU7UuWroUOBMYIak44Edgf2aTCoiItozsBBIEnA11d3FO1A1CR1o+7YWcouIiBYMLAS2LWme7RcBP20pp4iIaFGdPoKLJb2k8UwiImJM1Okj2B7YV9L1wH1UzUO2/eImE4uIiHbUKQSvaTyLiIgYM4O+j2Adqi+ufx5wBXCs7YfbSiwiItoxqI/ge8AcqiKwO/ClVjKKiIhWDWoaml2uFkLSscAF7aQUERFtGnRG8FDvSZqEIiLGr0FnBP9L0j3luYB1y3TvqqGnN55dREQ0btBXVU5oM5GIiBgbdW4oi4iIcSyFICKi41IIIiI6LoUgIqLjUggiIjqu0UIgaTdJ10haJOmQYZYfJOlKSZdL+n+SNm8yn4iIWFljhUDSBOAoquEpZgP7SJo9ZLVLgDllJNNTgM83lU9ERAyvyTOC7YBFthfbXgGcBOzVv4Ltc2zfXybPB6Y3mE9ERAyjyUKwKbCkb3ppmTeSdwJnDLdA0v6SFkhasGzZsjWYYkREPCk6iyXtSzXS6ReGW277aNtzbM+ZNm1au8lFRIxzdb6YZlXdBMzom55e5j2OpFcD/wzsZPvBBvOJiIhhNHlGcCEwS9JMSWsDewNz+1eQtA3wTWBP27c2mEtERIygsUJQhq4+ADgLuAo42fZCSYdL2rOs9gVgfeCHki6VNHeE3UVEREOabBrC9jxg3pB5n+h7/uom40dExOieFJ3FERExdlIIIiI6LoUgIqLjUggiIjouhSAiouNSCCIiOi6FICKi41IIIiI6LoUgIqLjUggiIjouhSAiouNSCCIiOi6FICKi41IIIiI6LoUgIqLjUggiIjqu0S+miYho05FnX9t4jA/uulXjMdqWM4KIiI5LIYiI6LgUgoiIjkshiIjouBSCiIiOSyGIiOi4FIKIiI5LIYiI6LgUgoiIjkshiIjouBSCiIiOSyGIiOi4FIKIiI5LIYiI6LgUgoiIjkshiIjouBSCiIiOSyGIiOi4FIKIiI5LIYiI6LgUgoiIjmu0EEjaTdI1khZJOmSY5U+T9B9l+XxJWzSZT0RErKyxQiBpAnAUsDswG9hH0uwhq70TuNP284Ajgc81lU9ERAxvYoP73g5YZHsxgKSTgL2AK/vW2Qs4rDw/BfiaJNl2Ewkdefa1Tez2cT6461aNx4iIWJOaLASbAkv6ppcC24+0ju2HJd0NbAzc1r+SpP2B/cvkvZKuaSTj4U0dms8gB41h7DUsrzuxE3sYa/Bvve3XvflIC5osBGuM7aOBo8citqQFtuckdmIndmKPl9hDNdlZfBMwo296epk37Dq
SJgIbALc3mFNERAzRZCG4EJglaaaktYG9gblD1pkLvL08fzPwi6b6ByIiYniNNQ2VNv8DgLOACcC3bS+UdDiwwPZc4FjgOEmLgDuoisWTzZg0SSV2Yid2YrdF+QAeEdFtubM4IqLjUggiIjouhWAEow2P0XDsb0u6VdJvW447Q9I5kq6UtFDSgS3GXkfSBZIuK7E/2VbsvhwmSLpE0k/GIPb1kq6QdKmkBS3H3lDSKZKulnSVpJe2FPf55fX2HvdI+kAbsUv8D5a/td9KOlHSOi3GPrDEXdjmax6R7TyGPKg6t68DngusDVwGzG4x/iuBbYHftvy6nwNsW55PAa5t63UDAtYvzycB84EdWn79BwEnAD9pM26JfT0wte24Jfb3gHeV52sDG45BDhOAPwCbtxRvU+D3wLpl+mRgv5ZivxD4LbAe1QU7PweeNxa/+94jZwTD+9PwGLZXAL3hMVph+1dUV1G1yvYtti8uz5cDV1H9w7QR27bvLZOTyqO1KxkkTQdeBxzTVswnA0kbUH3wOBbA9grbd41BKrsA19m+ocWYE4F1yz1M6wE3txR3a2C+7fttPwycC7yxpdjDSiEY3nDDY7RyQHyyKCPBbkP1ybytmBMkXQrcCpxtu7XYwL8C/wQ82mLMfgZ+JumiMqRKW2YCy4DvlGaxYyRNbjF+z97AiW0Fs30T8EXgRuAW4G7bP2sp/G+BV0jaWNJ6wGt5/M23rUshiJVIWh84FfiA7Xvaimv7Edt/TnUX+naSXthGXEmvB261fVEb8UbwctvbUo3W+4+SXtlS3IlUzZDfsL0NcB/Qdp/Y2sCewA9bjPkMqrP8mcAmwGRJ+7YR2/ZVVCMt/ww4E7gUeKSN2CNJIRheneExxiVJk6iKwPG2TxuLHErTxDnAbi2F3BHYU9L1VM2Ar5L0g5ZiA3/6hIrtW4EfUTVPtmEpsLTv7OsUqsLQpt2Bi23/scWYrwZ+b3uZ7YeA04CXtRXc9rG2/8L2K4E7qfrjxkwKwfDqDI8x7kgSVVvxVba/3HLsaZI2LM/XBXYFrm4jtu2P2p5uewuq3/UvbLfy6RBA0mRJU3rPgb+iaj5onO0/AEskPb/M2oXHDxXfhn1osVmouBHYQdJ65e9+F6o+sVZIemb5uRlV/8AJbcUezlNi9NG2eYThMdqKL+lEYGdgqqSlwKG2j20h9I7A24ArSls9wMdsz2sh9nOA75UvNFoLONl265dxjpFnAT+qjkdMBE6wfWaL8d8HHF8+9CwG3tFW4FL4dgXe01ZMANvzJZ0CXAw8DFxCu0M+nCppY+Ah4B/HqIP+TzLEREREx6VpKCKi41IIIiI6LoUgIqLjUggiIjouhSAiouNSCCIiOi6FICKi4/4HEHMv4f97kiwAAAAASUVORK5CYII=\n",
       "text/plain": [
        "<Figure size 432x288 with 1 Axes>"
       ]
@@ -421,18 +422,9 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 9,
+   "execution_count": 13,
    "metadata": {},
-   "outputs": [
-    {
-     "name": "stderr",
-     "output_type": "stream",
-     "text": [
-      "/workspace/brevitas_cnv_lfc/training_scripts/models/LFC.py:73: TracerWarning: torch.tensor results are registered as constants in the trace. You can safely ignore this warning if you use this function to create tensors out of constant variables that would be the same every time you call this function. In any other case, this might cause the trace to be incorrect.\n",
-      "  x = 2.0 * x - torch.tensor([1.0])\n"
-     ]
-    }
-   ],
+   "outputs": [],
    "source": [
     "import brevitas.onnx as bo\n",
     "export_onnx_path = \"/tmp/LFCW1A1.onnx\"\n",
@@ -449,13 +441,15 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 10,
+   "execution_count": 14,
    "metadata": {},
    "outputs": [
     {
      "name": "stdout",
      "output_type": "stream",
      "text": [
+      "\n",
+      "Stopping http://0.0.0.0:8081\n",
       "Serving '/tmp/LFCW1A1.onnx' at http://0.0.0.0:8081\n"
      ]
     }
@@ -467,7 +461,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 11,
+   "execution_count": 15,
    "metadata": {},
    "outputs": [
     {
@@ -506,7 +500,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 22,
+   "execution_count": 16,
    "metadata": {},
    "outputs": [
     {
@@ -518,7 +512,7 @@
        "op_type: \"MatMul\""
       ]
      },
-     "execution_count": 22,
+     "execution_count": 16,
      "metadata": {},
      "output_type": "execute_result"
     }
@@ -538,7 +532,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 23,
+   "execution_count": 17,
    "metadata": {},
    "outputs": [
     {
@@ -553,7 +547,7 @@
        "       [-1.,  1.,  1., ..., -1., -1.,  1.]], dtype=float32)"
       ]
      },
-     "execution_count": 23,
+     "execution_count": 17,
      "metadata": {},
      "output_type": "execute_result"
     }
@@ -571,7 +565,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 24,
+   "execution_count": 18,
    "metadata": {},
    "outputs": [
     {
@@ -580,7 +574,7 @@
        "<DataType.BIPOLAR: 8>"
       ]
      },
-     "execution_count": 24,
+     "execution_count": 18,
      "metadata": {},
      "output_type": "execute_result"
     }
@@ -591,7 +585,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 25,
+   "execution_count": 19,
    "metadata": {},
    "outputs": [
     {
@@ -600,7 +594,7 @@
        "[784, 1024]"
       ]
      },
-     "execution_count": 25,
+     "execution_count": 19,
      "metadata": {},
      "output_type": "execute_result"
     }
@@ -618,7 +612,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 14,
+   "execution_count": 20,
    "metadata": {},
    "outputs": [
     {
@@ -643,7 +637,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 15,
+   "execution_count": 21,
    "metadata": {},
    "outputs": [
     {
@@ -673,7 +667,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 18,
+   "execution_count": 22,
    "metadata": {},
    "outputs": [
     {
@@ -684,7 +678,7 @@
        "      dtype=float32)"
       ]
      },
-     "execution_count": 18,
+     "execution_count": 22,
      "metadata": {},
      "output_type": "execute_result"
     }
@@ -700,7 +694,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 19,
+   "execution_count": 23,
    "metadata": {},
    "outputs": [
     {
@@ -709,7 +703,7 @@
        "True"
       ]
      },
-     "execution_count": 19,
+     "execution_count": 23,
      "metadata": {},
      "output_type": "execute_result"
     }
diff --git a/notebooks/end2end_example/tfc_end2end_example.ipynb b/notebooks/end2end_example/tfc_end2end_example.ipynb
index 0eb2e2270ef3afe27b011f6940cc910f82331bb3..27c5c3eead98a030276bfa515cf3dc836c91d721 100644
--- a/notebooks/end2end_example/tfc_end2end_example.ipynb
+++ b/notebooks/end2end_example/tfc_end2end_example.ipynb
@@ -884,12 +884,32 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 24,
+   "execution_count": 1,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "dict_keys(['Ultra96', 'Pynq-Z1'])\n"
+     ]
+    }
+   ],
+   "source": [
+    "# print the names of the supported PYNQ boards\n",
+    "from finn.util.basic import pynq_part_map\n",
+    "print(pynq_part_map.keys())"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 3,
    "metadata": {},
    "outputs": [],
    "source": [
-    "fpga_part = \"xczu3eg-sbva484-1-e\"\n",
+    "# change this if you have a different PYNQ board, see list above\n",
     "pynq_board = \"Ultra96\"\n",
+    "fpga_part = pynq_part_map[pynq_board]\n",
     "target_clk_ns = 5"
    ]
   },
diff --git a/notebooks/internals/0_custom_analysis_pass.ipynb b/notebooks/internals/0_custom_analysis_pass.ipynb
index 58a89356b05dff89a093ec6bafc7c05a5826d97b..3db1d1c47acef301f7b89a05980aa68477ba567f 100644
--- a/notebooks/internals/0_custom_analysis_pass.ipynb
+++ b/notebooks/internals/0_custom_analysis_pass.ipynb
@@ -13,7 +13,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 2,
+   "execution_count": 1,
    "metadata": {},
    "outputs": [],
    "source": [
@@ -58,13 +58,13 @@
      "name": "stdout",
      "output_type": "stream",
      "text": [
-      "Serving 'LFCW1A1.onnx' at http://0.0.0.0:8081\n"
+      "Serving '../LFCW1A1.onnx' at http://0.0.0.0:8081\n"
      ]
     }
    ],
    "source": [
     "import netron\n",
-    "netron.start('LFCW1A1.onnx', port=8081, host=\"0.0.0.0\")"
+    "netron.start('../LFCW1A1.onnx', port=8081, host=\"0.0.0.0\")"
    ]
   },
   {
@@ -99,12 +99,12 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 5,
+   "execution_count": 6,
    "metadata": {},
    "outputs": [],
    "source": [
     "from finn.core.modelwrapper import ModelWrapper\n",
-    "model = ModelWrapper('LFCW1A1.onnx')"
+    "model = ModelWrapper('../LFCW1A1.onnx')"
    ]
   },
   {
@@ -116,7 +116,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 6,
+   "execution_count": 7,
    "metadata": {},
    "outputs": [],
    "source": [
@@ -146,7 +146,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 7,
+   "execution_count": 8,
    "metadata": {},
    "outputs": [
     {
@@ -154,7 +154,7 @@
      "output_type": "stream",
      "text": [
       "    def analysis(self, analysis_fxn):\n",
-      "        \"\"\"Run given anaylsis_fxn on this model and return resulting dict.\"\"\"\n",
+      "        \"\"\"Runs given anaylsis_fxn on this model and return resulting dict.\"\"\"\n",
       "        return analysis_fxn(self)\n",
       "\n"
      ]
@@ -173,7 +173,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 8,
+   "execution_count": 9,
    "metadata": {},
    "outputs": [
     {
diff --git a/notebooks/internals/1_custom_transformation_pass.ipynb b/notebooks/internals/1_custom_transformation_pass.ipynb
index 29e888deb19afaa427fa61819912bc9ba72b17a2..f0405c0db3b02cf19476ed6cc8d293a93df00b30 100644
--- a/notebooks/internals/1_custom_transformation_pass.ipynb
+++ b/notebooks/internals/1_custom_transformation_pass.ipynb
@@ -55,6 +55,7 @@
       "    def transform(self, transformation, make_deepcopy=True):\n",
       "        \"\"\"Applies given Transformation repeatedly until no more changes can be made\n",
       "        and returns a transformed ModelWrapper instance.\n",
+      "        \n",
       "        If make_deepcopy is specified, operates on a new (deep)copy of model.\n",
       "        \"\"\"\n",
       "        transformed_model = self\n",
@@ -109,6 +110,8 @@
      "output_type": "stream",
      "text": [
       "class Transformation(ABC):\n",
+      "    \"\"\"Transformation class all transformations are based on. Contains only \n",
+      "    abstract method apply() every transformation has to fill.\"\"\"\n",
       "    def __init__(self):\n",
       "        super().__init__()\n",
       "\n",
@@ -145,39 +148,37 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 3,
+   "execution_count": 4,
    "metadata": {},
    "outputs": [],
    "source": [
     "import onnx\n",
-    "onnx_model = onnx.load('LFCW1A1.onnx')\n",
+    "onnx_model = onnx.load('../LFCW1A1.onnx')\n",
     "from finn.core.modelwrapper import ModelWrapper\n",
     "onnx_model = ModelWrapper(onnx_model)"
    ]
   },
   {
    "cell_type": "code",
-   "execution_count": 4,
+   "execution_count": 5,
    "metadata": {},
    "outputs": [
     {
      "name": "stdout",
      "output_type": "stream",
      "text": [
-      "\n",
-      "Stopping http://0.0.0.0:8081\n",
-      "Serving 'LFCW1A1.onnx' at http://0.0.0.0:8081\n"
+      "Serving '../LFCW1A1.onnx' at http://0.0.0.0:8081\n"
      ]
     }
    ],
    "source": [
     "import netron\n",
-    "netron.start('LFCW1A1.onnx', port=8081, host=\"0.0.0.0\")"
+    "netron.start('../LFCW1A1.onnx', port=8081, host=\"0.0.0.0\")"
    ]
   },
   {
    "cell_type": "code",
-   "execution_count": 5,
+   "execution_count": 6,
    "metadata": {},
    "outputs": [
     {
@@ -200,7 +201,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 6,
+   "execution_count": 7,
    "metadata": {},
    "outputs": [],
    "source": [
@@ -237,7 +238,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 10,
+   "execution_count": 8,
    "metadata": {},
    "outputs": [],
    "source": [
@@ -247,7 +248,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 11,
+   "execution_count": 9,
    "metadata": {},
    "outputs": [
     {
@@ -266,7 +267,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 12,
+   "execution_count": 10,
    "metadata": {},
    "outputs": [
     {
diff --git a/notebooks/internals/2_custom_op.ipynb b/notebooks/internals/2_custom_op.ipynb
index def670e46ff50a539df6a5e00788749c396bbd57..7e91d8c4048b3c3ffb547fe9fdaa7fa726263f8d 100644
--- a/notebooks/internals/2_custom_op.ipynb
+++ b/notebooks/internals/2_custom_op.ipynb
@@ -90,7 +90,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 3,
+   "execution_count": 2,
    "metadata": {},
    "outputs": [
     {
@@ -98,6 +98,9 @@
      "output_type": "stream",
      "text": [
       "class CustomOp(ABC):\n",
+      "    \"\"\"CustomOp class all custom op nodes are based on. Contains different functions \n",
+      "    every custom node should have. Some as abstract methods, these have to be filled when\n",
+      "    writing a new custom op node.\"\"\"\n",
       "    def __init__(self, onnx_node):\n",
       "        super().__init__()\n",
       "        self.onnx_node = onnx_node\n",
@@ -172,6 +175,13 @@
       "        \"\"\"Execute this CustomOp instance, given the execution context and\n",
       "        ONNX graph.\"\"\"\n",
       "        pass\n",
+      "\n",
+      "    @abstractmethod\n",
+      "    def verify_node(self):\n",
+      "        \"\"\"Verifies that all attributes the node needs are there and\n",
+      "        that particular attributes are set correctly. Also checks if\n",
+      "        the number of inputs is equal to the expected number.\"\"\"\n",
+      "        pass\n",
       "\n"
      ]
     }
@@ -190,7 +200,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 5,
+   "execution_count": 3,
    "metadata": {},
    "outputs": [
     {
@@ -198,6 +208,7 @@
      "output_type": "stream",
      "text": [
       "class MultiThreshold(CustomOp):\n",
+      "    \"\"\"Class that corresponds to a multithresholding node.\"\"\"\n",
       "    def get_nodeattr_types(self):\n",
       "        return {\n",
       "            \"out_dtype\": (\"s\", True, \"\"),\n",
@@ -226,6 +237,52 @@
       "        output = multithreshold(v, thresholds, out_scale, out_bias)\n",
       "        # setting context according to output\n",
       "        context[node.output[0]] = output\n",
+      "\n",
+      "    def verify_node(self):\n",
+      "        info_messages = []\n",
+      "\n",
+      "        # verify number of attributes\n",
+      "        num_of_attr = 3\n",
+      "        if len(self.onnx_node.attribute) == num_of_attr:\n",
+      "            info_messages.append(\"The number of attributes is correct\")\n",
+      "        else:\n",
+      "            info_messages.append(\n",
+      "                \"\"\"The number of attributes is incorrect,\n",
+      "            {} should have {} attributes\"\"\".format(\n",
+      "                    self.onnx_node.op_type, num_of_attr\n",
+      "                )\n",
+      "            )\n",
+      "\n",
+      "        # verify that \"domain\" is set to \"finn\"\n",
+      "        domain_value = self.onnx_node.domain\n",
+      "        if domain_value == \"finn\":\n",
+      "            info_messages.append(\"Attribute domain is set correctly\")\n",
+      "        else:\n",
+      "            info_messages.append('Attribute domain should be set to \"finn\"')\n",
+      "\n",
+      "        # verify that all necessary attributes exist\n",
+      "        try:\n",
+      "            self.get_nodeattr(\"out_scale\")\n",
+      "            self.get_nodeattr(\"out_bias\")\n",
+      "            self.get_nodeattr(\"out_dtype\")\n",
+      "            info_messages.append(\"All necessary attributes exist\")\n",
+      "        except Exception:\n",
+      "            info_messages.append(\n",
+      "                \"\"\"The necessary attributes do not exist.\n",
+      "                MultiThreshold needs the following attributes:\n",
+      "                out_scale, out_bias, out_dtype\"\"\"\n",
+      "            )\n",
+      "\n",
+      "        # verify the number of inputs\n",
+      "        if len(self.onnx_node.input) == 2:\n",
+      "            info_messages.append(\"The number of inputs is correct\")\n",
+      "        else:\n",
+      "            info_messages.append(\n",
+      "                \"\"\"MultiThreshold needs 2 inputs\n",
+      "                    (data input and threshold values)\"\"\"\n",
+      "            )\n",
+      "\n",
+      "        return info_messages\n",
       "\n"
      ]
     }
@@ -323,7 +380,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 3,
+   "execution_count": 4,
    "metadata": {},
    "outputs": [
     {
@@ -331,46 +388,119 @@
      "output_type": "stream",
      "text": [
       "class HLSCustomOp(CustomOp):\n",
+      "    \"\"\"HLSCustomOp class all custom ops that correspond to a finn-hlslib \n",
+      "    function are based on. Contains different functions every fpgadataflow \n",
+      "    custom node should have. Some as abstract methods, these have to be filled\n",
+      "    when writing a new fpgadataflow custom op node.\"\"\"\n",
       "    def __init__(self, onnx_node):\n",
       "        super().__init__(onnx_node)\n",
-      "        # template for single node execution\n",
-      "        self.docompute_template = \"\"\"\n",
-      "        #include \"cnpy.h\"\n",
-      "        #include \"npy2apintstream.hpp\"\n",
-      "        #include <vector>\n",
-      "        #include \"bnn-library.h\"\n",
       "\n",
-      "        // includes for network parameters\n",
-      "        $GLOBALS$\n",
+      "        self.code_gen_dict = {}\n",
       "\n",
-      "        // defines for network parameters\n",
-      "        $DEFINES$\n",
+      "        # getting templates from templates.py\n",
       "\n",
-      "        int main(){\n",
+      "        # template for single node execution\n",
+      "        self.docompute_template = templates.docompute_template\n",
       "\n",
-      "        $STREAMDECLARATIONS$\n",
+      "        # templates for single node ip generation\n",
+      "        # cpp file\n",
+      "        self.ipgen_template = templates.ipgen_template\n",
+      "        # tcl script\n",
+      "        self.ipgentcl_template = templates.ipgentcl_template\n",
       "\n",
-      "        $READNPYDATA$\n",
+      "    def get_nodeattr_types(self):\n",
+      "        return {\n",
+      "            \"backend\": (\"s\", True, \"fpgadataflow\"),\n",
+      "            \"code_gen_dir_npysim\": (\"s\", False, \"\"),\n",
+      "            \"code_gen_dir_ipgen\": (\"s\", False, \"\"),\n",
+      "            \"executable_path\": (\"s\", False, \"\"),\n",
+      "            \"ipgen_path\": (\"s\", False, \"\"),\n",
+      "            \"exec_mode\": (\"s\", False, \"\"),\n",
+      "            \"sim_cycles\": (\"i\", False, 0),\n",
+      "            \"rtlsim_trace\": (\"s\", False, \"\"),\n",
+      "        }\n",
       "\n",
-      "        $DOCOMPUTE$\n",
+      "    def node_res_estimation(self):\n",
+      "        \"\"\"Returns summarized resource estimation of BRAMs and LUTs \n",
+      "        of the node.\"\"\"\n",
+      "        resources = []\n",
+      "        resources.append(\"BRAMs: \" + str(self.bram_estimation()))\n",
+      "        resources.append(\"LUTs: \" + str(self.lut_estimation()))\n",
+      "        return resources\n",
+      "\n",
+      "    def bram_estimation(self):\n",
+      "        \"\"\"Function for BRAM resource estimation, is member function of \n",
+      "        HLSCustomOp class but has to be filled by every node\"\"\"\n",
+      "        return 0\n",
+      "\n",
+      "    def lut_estimation(self):\n",
+      "        \"\"\"Function for LUT resource estimation, is member function of\n",
+      "        HLSCustomOp class but has to be filled by every node\"\"\"\n",
+      "        return 0\n",
+      "\n",
+      "    def code_generation_ipgen(self, model, fpgapart, clk):\n",
+      "        \"\"\"Generates c++ code and tcl script for ip generation.\"\"\"\n",
+      "        node = self.onnx_node\n",
       "\n",
-      "        $DATAOUTSTREAM$\n",
+      "        # generate top cpp file for ip generation\n",
+      "        path = self.get_nodeattr(\"code_gen_dir_ipgen\")\n",
+      "        self.generate_params(model, path)\n",
+      "        self.global_includes()\n",
+      "        self.defines(\"ipgen\")\n",
+      "        self.blackboxfunction()\n",
+      "        self.pragmas()\n",
+      "        self.docompute()\n",
       "\n",
-      "        $SAVEASCNPY$\n",
+      "        template = self.ipgen_template\n",
       "\n",
-      "        }\n",
+      "        for key in self.code_gen_dict:\n",
+      "            # transform list into long string separated by '\\n'\n",
+      "            code_gen_line = \"\\n\".join(self.code_gen_dict[key])\n",
+      "            template = template.replace(key, code_gen_line)\n",
+      "        code_gen_dir = self.get_nodeattr(\"code_gen_dir_ipgen\")\n",
+      "        f = open(os.path.join(code_gen_dir, \"top_{}.cpp\".format(node.name)), \"w\")\n",
+      "        f.write(template)\n",
+      "        f.close()\n",
+      "        self.code_gen_dict.clear()\n",
       "\n",
-      "        \"\"\"\n",
-      "        self.code_gen_dict = {}\n",
+      "        # generate tcl script for ip generation\n",
+      "        self.code_gen_dict[\"$PROJECTNAME$\"] = [\"project_{}\".format(node.name)]\n",
+      "        self.code_gen_dict[\"$HWSRCDIR$\"] = [code_gen_dir]\n",
+      "        self.code_gen_dict[\"$FPGAPART$\"] = [fpgapart]\n",
+      "        self.code_gen_dict[\"$FINNHLSLIBDIR$\"] = [\"/workspace/finn-hlslib\"]\n",
+      "        self.code_gen_dict[\"$TOPFXN$\"] = [node.name]\n",
+      "        self.code_gen_dict[\"$CLKPERIOD$\"] = [str(clk)]\n",
       "\n",
-      "    def get_nodeattr_types(self):\n",
-      "        return {\"code_gen_dir\": (\"s\", False, \"\"), \"executable_path\": (\"s\", False, \"\")}\n",
+      "        template = self.ipgentcl_template\n",
+      "\n",
+      "        for key in self.code_gen_dict:\n",
+      "            # transform list into long string separated by '\\n'\n",
+      "            code_gen_line = \"\\n\".join(self.code_gen_dict[key])\n",
+      "            template = template.replace(key, code_gen_line)\n",
+      "        code_gen_dir = self.get_nodeattr(\"code_gen_dir_ipgen\")\n",
+      "        f = open(os.path.join(code_gen_dir, \"hls_syn_{}.tcl\".format(node.name)), \"w\")\n",
+      "        f.write(template)\n",
+      "        f.close()\n",
+      "        self.code_gen_dict.clear()\n",
       "\n",
-      "    def code_generation(self, model):\n",
+      "    def ipgen_singlenode_code(self):\n",
+      "        \"\"\"Builds the bash script for ip generation using the IPGenBuilder from \n",
+      "        finn.util.fpgadataflow.\"\"\"\n",
       "        node = self.onnx_node\n",
-      "        self.generate_params(model)\n",
+      "        code_gen_dir = self.get_nodeattr(\"code_gen_dir_ipgen\")\n",
+      "        builder = IPGenBuilder()\n",
+      "        builder.append_tcl(code_gen_dir + \"/hls_syn_{}.tcl\".format(node.name))\n",
+      "        builder.set_ipgen_path(code_gen_dir + \"/project_{}\".format(node.name))\n",
+      "        builder.build(code_gen_dir)\n",
+      "        self.set_nodeattr(\"ipgen_path\", builder.ipgen_path)\n",
+      "\n",
+      "    def code_generation_npysim(self, model):\n",
+      "        \"\"\"Generates c++ code for simulation (npysim).\"\"\"\n",
+      "        node = self.onnx_node\n",
+      "        path = self.get_nodeattr(\"code_gen_dir_npysim\")\n",
+      "        self.generate_params(model, path)\n",
       "        self.global_includes()\n",
-      "        self.defines()\n",
+      "        self.defines(\"npysim\")\n",
       "        self.read_npy_data()\n",
       "        self.strm_decl()\n",
       "        self.docompute()\n",
@@ -383,18 +513,23 @@
       "            # transform list into long string separated by '\\n'\n",
       "            code_gen_line = \"\\n\".join(self.code_gen_dict[key])\n",
       "            template = template.replace(key, code_gen_line)\n",
-      "        code_gen_dir = self.get_nodeattr(\"code_gen_dir\")\n",
+      "        code_gen_dir = self.get_nodeattr(\"code_gen_dir_npysim\")\n",
       "        f = open(os.path.join(code_gen_dir, \"execute_{}.cpp\".format(node.op_type)), \"w\")\n",
       "        f.write(template)\n",
       "        f.close()\n",
+      "        self.code_gen_dict.clear()\n",
       "\n",
       "    def compile_singlenode_code(self):\n",
-      "        code_gen_dir = self.get_nodeattr(\"code_gen_dir\")\n",
+      "        \"\"\"Builds the bash script for compilation using the CppBuilder from\n",
+      "        finn.util.basic and executes the script to produce the executable.\"\"\"\n",
+      "        code_gen_dir = self.get_nodeattr(\"code_gen_dir_npysim\")\n",
       "        builder = CppBuilder()\n",
+      "        # to enable additional debug features please uncomment the next line\n",
+      "        # builder.append_includes(\"-DDEBUG\")\n",
       "        builder.append_includes(\"-I/workspace/finn/src/finn/data/cpp\")\n",
       "        builder.append_includes(\"-I/workspace/cnpy/\")\n",
       "        builder.append_includes(\"-I/workspace/finn-hlslib\")\n",
-      "        builder.append_includes(\"-I/workspace/vivado-hlslib\")\n",
+      "        builder.append_includes(\"-I{}/include\".format(os.environ[\"VIVADO_PATH\"]))\n",
       "        builder.append_includes(\"--std=c++11\")\n",
       "        builder.append_sources(code_gen_dir + \"/*.cpp\")\n",
       "        builder.append_sources(\"/workspace/cnpy/cnpy.cpp\")\n",
@@ -404,12 +539,15 @@
       "        self.set_nodeattr(\"executable_path\", builder.executable_path)\n",
       "\n",
       "    def dynamic_input_to_npy(self, context, count):\n",
+      "        \"\"\"Saves input (given context) into .npy files. \n",
+      "        \n",
+      "        Count indicates the number of inputs that have to be saved.\"\"\"\n",
       "        node = self.onnx_node\n",
-      "        code_gen_dir = self.get_nodeattr(\"code_gen_dir\")\n",
+      "        code_gen_dir = self.get_nodeattr(\"code_gen_dir_npysim\")\n",
       "        if code_gen_dir == \"\":\n",
       "            raise Exception(\n",
       "                \"\"\"\n",
-      "Found no codegen dir for this node, did you run the codegen transformation?\n",
+      "Found no codegen dir for this node, did you run the codegen_npysim transformation?\n",
       "            \"\"\"\n",
       "            )\n",
       "        # create a npy file for each input of the node (in_ind is input index)\n",
@@ -422,14 +560,16 @@
       "            )\n",
       "\n",
       "    def npy_to_dynamic_output(self, context):\n",
+      "        \"\"\"Reads the output from a .npy file and saves it at the right place in \n",
+      "        the context dictionary.\"\"\"\n",
       "        # TODO support multi-output nodes as needed\n",
       "        node = self.onnx_node\n",
-      "        code_gen_dir = self.get_nodeattr(\"code_gen_dir\")\n",
+      "        code_gen_dir = self.get_nodeattr(\"code_gen_dir_npysim\")\n",
       "        output = np.load(\"{}/output.npy\".format(code_gen_dir))\n",
       "        context[node.output[0]] = output\n",
       "\n",
       "    def exec_precompiled_singlenode_model(self):\n",
-      "        # execute precompiled executable\n",
+      "        \"\"\"Executes precompiled executable.\"\"\"\n",
       "        executable_path = self.get_nodeattr(\"executable_path\")\n",
       "        if executable_path == \"\":\n",
       "            raise Exception(\n",
@@ -441,44 +581,194 @@
       "        process_execute = subprocess.Popen(executable_path, stdout=subprocess.PIPE)\n",
       "        process_execute.communicate()\n",
       "\n",
+      "    def reset_rtlsim(self, sim):\n",
+      "        \"\"\"Sets reset input in pyverilator to zero, toggles the clock and sets it\n",
+      "        back to one.\"\"\"\n",
+      "        sim.io.ap_rst_n = 0\n",
+      "        sim.io.ap_clk = 1\n",
+      "        sim.io.ap_clk = 0\n",
+      "        sim.io.ap_rst_n = 1\n",
+      "\n",
+      "    def toggle_clk(self, sim):\n",
+      "        \"\"\"Toggles the clock input in pyverilator once.\"\"\"\n",
+      "        sim.io.ap_clk = 1\n",
+      "        sim.io.ap_clk = 0\n",
+      "\n",
+      "    def rtlsim(self, sim, inp):\n",
+      "        \"\"\"Runs the pyverilator simulation by passing the input values to the simulation,\n",
+      "        toggling the clock and observing the execution time. The function also contains an \n",
+      "        observation loop that can abort the simulation if no output value is produced \n",
+      "        after 100 cycles.\"\"\"\n",
+      "        \n",
+      "        trace_file = self.get_nodeattr(\"rtlsim_trace\")\n",
+      "        if trace_file != \"\":\n",
+      "            if trace_file == \"default\":\n",
+      "                trace_file = self.onnx_node.name + \".vcd\"\n",
+      "            sim.start_vcd_trace(trace_file)\n",
+      "        inputs = inp\n",
+      "        outputs = []\n",
+      "        sim.io.out_V_V_TREADY = 1\n",
+      "\n",
+      "        # observe if output is completely calculated\n",
+      "        # observation_count will contain the number of cycles the calculation ran\n",
+      "        num_out_values = self.get_number_output_values()\n",
+      "        output_observed = False\n",
+      "        observation_count = 0\n",
+      "\n",
+      "        # avoid infinite looping of simulation by aborting when there is no change in\n",
+      "        # output values after 100 cycles\n",
+      "        no_change_count = 0\n",
+      "        old_outputs = outputs\n",
+      "        liveness_threshold = pyverilate_get_liveness_threshold_cycles()\n",
+      "\n",
+      "        while not (output_observed):\n",
+      "            sim.io.in0_V_V_TVALID = 1 if len(inputs) > 0 else 0\n",
+      "            sim.io.in0_V_V_TDATA = inputs[0] if len(inputs) > 0 else 0\n",
+      "            if sim.io.in0_V_V_TREADY == 1 and sim.io.in0_V_V_TVALID == 1:\n",
+      "                inputs = inputs[1:]\n",
+      "            if sim.io.out_V_V_TVALID == 1 and sim.io.out_V_V_TREADY == 1:\n",
+      "                outputs = outputs + [sim.io.out_V_V_TDATA]\n",
+      "            sim.io.ap_clk = 1\n",
+      "            sim.io.ap_clk = 0\n",
+      "\n",
+      "            observation_count = observation_count + 1\n",
+      "            no_change_count = no_change_count + 1\n",
+      "\n",
+      "            if len(outputs) == num_out_values:\n",
+      "                self.set_nodeattr(\"sim_cycles\", observation_count)\n",
+      "                output_observed = True\n",
+      "\n",
+      "            if no_change_count == liveness_threshold:\n",
+      "                if old_outputs == outputs:\n",
+      "                    if trace_file != \"\":\n",
+      "                        sim.flush_vcd_trace()\n",
+      "                        sim.stop_vcd_trace()\n",
+      "                    raise Exception(\n",
+      "                        \"Error in simulation! Takes too long to produce output. \"\n",
+      "                        \"Consider setting the LIVENESS_THRESHOLD env.var. to a \"\n",
+      "                        \"larger value.\"\n",
+      "                    )\n",
+      "                else:\n",
+      "                    no_change_count = 0\n",
+      "                    old_outputs = outputs\n",
+      "        if trace_file != \"\":\n",
+      "            sim.flush_vcd_trace()\n",
+      "            sim.stop_vcd_trace()\n",
+      "        return outputs\n",
+      "\n",
       "    def execute_node(self, context, graph):\n",
-      "        # save input(s)\n",
-      "        self.dynamic_input_to_npy(context, 1)\n",
-      "        # execute the precompiled model\n",
-      "        self.exec_precompiled_singlenode_model()\n",
-      "        # load output npy file\n",
-      "        self.npy_to_dynamic_output(context)\n",
-      "\n",
-      "    def generate_params(self, model):\n",
+      "        \"\"\"Executes single node using npysim or rtlsim.\"\"\"\n",
+      "        mode = self.get_nodeattr(\"exec_mode\")\n",
+      "        if mode == \"npysim\":\n",
+      "            # save input(s)\n",
+      "            self.dynamic_input_to_npy(context, 1)\n",
+      "            # execute the precompiled model\n",
+      "            self.exec_precompiled_singlenode_model()\n",
+      "            # load output npy file\n",
+      "            self.npy_to_dynamic_output(context)\n",
+      "        elif mode == \"rtlsim\":\n",
+      "            pass\n",
+      "\n",
+      "        else:\n",
+      "            raise Exception(\n",
+      "                \"\"\"Invalid value for attribute exec_mode! Is currently set to: {}\n",
+      "            has to be set to one of the following value (\"npysim\", \"rtlsim\")\"\"\".format(\n",
+      "                    mode\n",
+      "                )\n",
+      "            )\n",
+      "\n",
+      "    def generate_params(self, model, path):\n",
+      "        \"\"\"Function to generate parameters (i.e. weights and thresholds), \n",
+      "        is member function of HLSCustomOp class but has to be filled \n",
+      "        by every node.\"\"\"\n",
+      "        pass\n",
+      "\n",
+      "    @abstractmethod\n",
+      "    def get_number_output_values(self):\n",
+      "        \"\"\"Function to get the number of expected output values, \n",
+      "        is member function of HLSCustomOp class but has to be filled \n",
+      "        by every node.\"\"\"\n",
       "        pass\n",
       "\n",
       "    @abstractmethod\n",
       "    def global_includes(self):\n",
+      "        \"\"\"Function to set the global includes for c++ code that has to be generated\n",
+      "        for npysim or rtlsim, is member function of HLSCustomOp class but has to \n",
+      "        be filled by every node.\"\"\"\n",
       "        pass\n",
       "\n",
       "    @abstractmethod\n",
-      "    def defines(self):\n",
+      "    def defines(self, var):\n",
+      "        \"\"\"Function to set the define commands for c++ code that has to be generated\n",
+      "        for npysim or rtlsim, is member function of HLSCustomOp class but has to \n",
+      "        be filled by every node.\n",
+      "        \n",
+      "        var: makes it possible to reuse the function for different c++ code generation.\n",
+      "        I.e. if set to \"ipgen\" in StreamingFCLayer_Batch additional PRAGMA defines are\n",
+      "        added.\"\"\"\n",
       "        pass\n",
       "\n",
       "    @abstractmethod\n",
       "    def read_npy_data(self):\n",
+      "        \"\"\"Function to generate the commands for reading data from .npy file in c++, \n",
+      "        is member function of HLSCustomOp class but has to be filled by every node.\"\"\"\n",
       "        pass\n",
       "\n",
       "    @abstractmethod\n",
       "    def strm_decl(self):\n",
+      "        \"\"\"Function to generate the commands for the stream declaration in c++,\n",
+      "        is member function of HLSCustomOp class but has to be filled\n",
+      "        by every node.\"\"\"\n",
       "        pass\n",
       "\n",
       "    @abstractmethod\n",
       "    def docompute(self):\n",
+      "        \"\"\"Function to generate the commands for the computational part of the \n",
+      "        c++ code, is member function of HLSCustomOp class but has to be filled\n",
+      "        by every node.\"\"\"\n",
       "        pass\n",
       "\n",
       "    @abstractmethod\n",
       "    def dataoutstrm(self):\n",
+      "        \"\"\"Function to generate the commands for reading out data from c++ and converting it \n",
+      "        into npy format, is member function of HLSCustomOp class but has to be filled \n",
+      "        by every node.\"\"\"\n",
       "        pass\n",
       "\n",
       "    @abstractmethod\n",
       "    def save_as_npy(self):\n",
+      "        \"\"\"Function to generate the commands for saving data in .npy file in c++,\n",
+      "        is member function of HLSCustomOp class but has to be filled by every node.\"\"\"\n",
+      "        pass\n",
+      "\n",
+      "    @abstractmethod\n",
+      "    def blackboxfunction(self):\n",
+      "        \"\"\"Function to generate a blackbox function in c++ from which an IP block \n",
+      "        will be generated, is member function of HLSCustomOp class but has to be filled \n",
+      "        by every node.\"\"\"\n",
+      "        pass\n",
+      "\n",
+      "    @abstractmethod\n",
+      "    def pragmas(self):\n",
+      "        \"\"\"Function to generate the pragma commands in c++, is member function of \n",
+      "        HLSCustomOp class but has to be filled by every node.\"\"\"\n",
       "        pass\n",
+      "\n",
+      "    def get_folded_input_shape(self):\n",
+      "        \"\"\"Returns folded input shape (according to synapse folding), if implemented.\"\"\"\n",
+      "        raise Exception(\"get_folded_input_shape not implemented for this op\")\n",
+      "\n",
+      "    def get_folded_output_shape(self):\n",
+      "        \"\"\"Returns folded output shape (according to neuron folding), if implemented.\"\"\"\n",
+      "        raise Exception(\"get_folded_output_shape not implemented for this op\")\n",
+      "\n",
+      "    def get_instream_width(self):\n",
+      "        \"\"\"Returns input stream width, if implemented.\"\"\"\n",
+      "        raise Exception(\"get_instream_width not implemented for this op\")\n",
+      "\n",
+      "    def get_outstream_width(self):\n",
+      "        \"\"\"Returns output stream width, if implemented.\"\"\"\n",
+      "        raise Exception(\"get_outstream_width not implemented for this op\")\n",
       "\n"
      ]
     }
@@ -555,7 +845,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 4,
+   "execution_count": 5,
    "metadata": {
     "scrolled": true
    },
@@ -564,7 +854,9 @@
      "name": "stdout",
      "output_type": "stream",
      "text": [
-      "    def generate_params(self, model):\n",
+      "    def generate_params(self, model, path):\n",
+      "        \"\"\"Saves weights into params.h and if existing thresholds into thresh.h.\"\"\"\n",
+      "        code_gen_dir = path\n",
       "        # weights\n",
       "        weights = model.get_initializer(self.onnx_node.input[1])\n",
       "        # convert weights into hlslib-compatible format\n",
@@ -578,7 +870,7 @@
       "            weight_tensor, export_wdt, \"weights\", True, True\n",
       "        )\n",
       "        # write weights into params.h\n",
-      "        code_gen_dir = self.get_nodeattr(\"code_gen_dir\")\n",
+      "        # code_gen_dir = self.get_nodeattr(\"code_gen_dir_npysim\")\n",
       "        f_weights = open(\"{}/params.h\".format(code_gen_dir), \"w\")\n",
       "\n",
       "        if export_wdt.bitwidth() != 1:\n",
@@ -598,6 +890,7 @@
       "            )\n",
       "        f_weights.write(weight_hls_code)\n",
       "        f_weights.close()\n",
+      "\n",
       "        # thresholds\n",
       "        if len(self.onnx_node.input) > 2:\n",
       "            thresholds = model.get_initializer(self.onnx_node.input[2])\n",
@@ -619,7 +912,7 @@
       "                    threshold_tensor, tdt, \"thresholds\", False, True\n",
       "                )\n",
       "                # write thresholds into thresh.h\n",
-      "                code_gen_dir = self.get_nodeattr(\"code_gen_dir\")\n",
+      "                # code_gen_dir = self.get_nodeattr(\"code_gen_dir_npysim\")\n",
       "                f_thresh = open(\"{}/thresh.h\".format(code_gen_dir), \"w\")\n",
       "                tdt_hls = tdt.get_hls_datatype_str()\n",
       "                # use binary to export bipolar activations\n",
diff --git a/notebooks/internals/3_verify_hls_custom_op.ipynb b/notebooks/internals/3_verify_hls_custom_op.ipynb
index e4d1deef2b8d5e13ad0e28feb7b919c7959d2a1f..5c3b43cd03d45be03a6c853a19169fbcc5c5acbf 100644
--- a/notebooks/internals/3_verify_hls_custom_op.ipynb
+++ b/notebooks/internals/3_verify_hls_custom_op.ipynb
@@ -478,7 +478,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 18,
+   "execution_count": null,
    "metadata": {},
    "outputs": [
     {