diff --git a/docker/quicktest.sh b/docker/quicktest.sh
index b06feccdc578a59c8ef00531871e1211c2a407e5..b4ad37232fa69754a86e9064d7592d7474e8617e 100755
--- a/docker/quicktest.sh
+++ b/docker/quicktest.sh
@@ -5,8 +5,8 @@
 cd $FINN_ROOT
 # check if command line argument is empty or not present
 if [ -z $1 ]; then
-  echo "Running quicktest: not (vivado or slow) with pytest-xdist"
-  python setup.py test --addopts "-m 'not (vivado or slow or vitis)' --dist=loadfile -n $PYTEST_PARALLEL"
+  echo "Running quicktest: not (vivado or slow or board) with pytest-xdist"
+  python setup.py test --addopts "-m 'not (vivado or slow or vitis or board)' --dist=loadfile -n $PYTEST_PARALLEL"
 elif [ $1 = "main" ]; then
   echo "Running main test suite: not (rtlsim or end2end) with pytest-xdist"
   python setup.py test --addopts "-k 'not (rtlsim or end2end)' --dist=loadfile -n $PYTEST_PARALLEL"
diff --git a/docs/finn/getting_started.rst b/docs/finn/getting_started.rst
index bff31cde45122ebc25f515422ffc523f4f78e3be..25538a3ad1fa21b8e841b5f3fadc5388137d68bb 100644
--- a/docs/finn/getting_started.rst
+++ b/docs/finn/getting_started.rst
@@ -41,8 +41,7 @@ System Requirements
 * Docker `without root <https://docs.docker.com/engine/install/linux-postinstall/#manage-docker-as-a-non-root-user>`_
 * A working Vivado 2019.1 or 2020.1 installation
 * A ``VIVADO_PATH`` environment variable pointing to the Vivado installation directory (e.g. the directory where settings64.sh is located)
-* *(optional)* A PYNQ board with a network connection
-   * the ``bitstring`` package must be installed on the PYNQ: ``sudo pip3 install bitstring``
+* *(optional)* A PYNQ board with a network connection (see `PYNQ board first-time setup`_ below)
 * *(optional)* An Alveo board, and a working Vitis 2020.1 installation if you want to use Vitis and Alveo (see `Alveo first-time setup`_ below)
 
 We also recommend running the FINN compiler on a system with sufficiently
@@ -139,6 +138,24 @@ As of FINN v0.4b we also have preliminary support for `Xilinx Alveo boards <http
 
 **Vivado IPI support for any Xilinx FPGA:** FINN generates a Vivado IP Integrator (IPI) design from the neural network with AXI stream (FIFO) in-out interfaces, which can be integrated onto any Xilinx FPGA as part of a larger system. It's up to you to take the FINN-generated accelerator (what we call "stitched IP" in the tutorials), wire it up to your FPGA design and send/receive neural network data to/from the accelerator.
 
+PYNQ board first-time setup
+****************************
+We use *host* to refer to the PC running the FINN Docker environment, which will build the accelerator+driver and package it up, and *target* to refer to the PYNQ board. To be able to access the target from the host, you'll need to set up SSH public key authentication:
+
+Start on the target side:
+
+1. Note down the IP address of your PYNQ board. This IP address must be accessible from the host.
+2. Ensure the ``bitstring`` package is installed: ``sudo pip3 install bitstring``
+
+Continue on the host side (replace ``<PYNQ_IP>`` and ``<PYNQ_USERNAME>`` with the IP address of your board, noted in the first step above, and its username):
+
+1. Launch the Docker container from the directory where you cloned finn with ``./run-docker.sh``
+2. Go into the ``ssh_keys`` directory (e.g. ``cd /workspace/finn/ssh_keys``)
+3. Run ``ssh-keygen`` to create a key pair, e.g. ``id_rsa`` (private key) and ``id_rsa.pub`` (public key)
+4. Run ``ssh-copy-id -i id_rsa.pub <PYNQ_USERNAME>@<PYNQ_IP>`` to install the keys on the remote system
+5. Test that you can ``ssh <PYNQ_USERNAME>@<PYNQ_IP>`` without having to enter a password (see the stricter check below). If it doesn't work, pass the ``-v`` flag to the ssh command to help you debug.
+
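+As a stricter check, you can disable password authentication for this test, which is what the example notebooks and board tests do: ``ssh -o PreferredAuthentications=publickey -o PasswordAuthentication=no <PYNQ_USERNAME>@<PYNQ_IP> echo BoardIsAccessible`` should print ``BoardIsAccessible`` without asking for a password.
+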
+
 Alveo first-time setup
 **********************
 We use *host* to refer to the PC running the FINN Docker environment, which will build the accelerator+driver and package it up, and *target* to refer to the PC where the Alveo card is installed. These two can be the same PC, or connected over the network -- FINN includes some utilities to make it easier to test on remote PCs too. Prior to first usage, you need to set up both the host and the target in the following manner:
@@ -150,7 +167,7 @@ On the target side:
 3. Create a conda environment named *finn-pynq-alveo* by following this guide `to set up PYNQ for Alveo <https://pynq.readthedocs.io/en/latest/getting_started/alveo_getting_started.html>`_. It's best to follow the recommended environment.yml (set of package versions) in this guide.
 4. Activate the environment with `conda activate finn-pynq-alveo` and install the bitstring package with ``pip install bitstring``.
 5. Done! You should now be able to e.g. ``import pynq`` in Python scripts.
-6. (optional) If you don't want to specify the ``ALVEO_PASSWORD`` environment variable, you can `set up public key authentication <https://www.digitalocean.com/community/tutorials/how-to-configure-ssh-key-based-authentication-on-a-linux-server>`_. Copy your private key to the ``finn/ssh_keys`` folder on the host to get password-less deployment and remote execution.
+
 
 
 On the host side:
@@ -159,4 +176,5 @@ On the host side:
 2. Install Xilinx XRT and set up the ``XILINX_XRT`` environment variable to point to your installation. *This must be the same path as the target's XRT (target step 1)*
 3. Install the Vitis platform files for Alveo and set up the ``PLATFORM_REPO_PATHS`` environment variable to point to your installation. *This must be the same path as the target's platform files (target step 2)*
 4. Set up the ``ALVEO_*`` environment variables accordingly for your target, see description of environment variables above.
+5. `Set up public key authentication <https://www.digitalocean.com/community/tutorials/how-to-configure-ssh-key-based-authentication-on-a-linux-server>`_. Copy your private key to the ``finn/ssh_keys`` folder on the host to get password-less deployment and remote execution.
-5. Done! You can try the ``test_end2end_vitis`` tests in the FINN Docker to verify your setup, although this will take some time.
+6. Done! You can try the ``test_end2end_vitis`` tests in the FINN Docker to verify your setup, although this will take some time.
diff --git a/notebooks/end2end_example/bnn-pynq/cnv_end2end_example.ipynb b/notebooks/end2end_example/bnn-pynq/cnv_end2end_example.ipynb
index 4130f35d7a371711fe1f6bf494358e3c93d8c136..a141caf423f5238245b509e077d32c6bd1a85fcd 100644
--- a/notebooks/end2end_example/bnn-pynq/cnv_end2end_example.ipynb
+++ b/notebooks/end2end_example/bnn-pynq/cnv_end2end_example.ipynb
@@ -624,17 +624,30 @@
    "source": [
     "## 5. Deployment and Remote Execution\n",
     "\n",
-    "Now that we're done with the hardware generation, we can generate a Python driver for accelerator and copy the necessary files onto our PYNQ board."
+    "Now that we're done with the hardware generation, we can generate a Python driver for accelerator and copy the necessary files onto our PYNQ board.\n",
+    "\n",
+    "**Make sure you've [set up the SSH keys for your PYNQ board](https://finn-dev.readthedocs.io/en/latest/getting_started.html#pynq-board-first-time-setup) before executing this step.**"
    ]
   },
   {
    "cell_type": "code",
-   "execution_count": 21,
+   "execution_count": 7,
    "metadata": {},
-   "outputs": [],
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Welcome to PYNQ Linux, based on Ubuntu 18.04 (GNU/Linux 5.4.0-xilinx-v2020.1 armv7l)\r\n",
+      "\r\n",
+      " * Pure upstream Kubernetes 1.21, smallest, simplest cluster ops!\r\n",
+      "\r\n",
+      "     https://microk8s.io/\r\n"
+     ]
+    }
+   ],
    "source": [
     "import os\n",
-    "from finn.transformation.fpgadataflow.make_deployment import DeployToPYNQ\n",
     "\n",
     "# set up the following values according to your own environment\n",
     "# FINN will use ssh to deploy and run the generated accelerator\n",
@@ -643,6 +656,20 @@
     "password = os.getenv(\"PYNQ_PASSWORD\", \"xilinx\")\n",
     "port = os.getenv(\"PYNQ_PORT\", 22)\n",
     "target_dir = os.getenv(\"PYNQ_TARGET_DIR\", \"/home/xilinx/finn_cnv_end2end_example\")\n",
+    "# set up ssh options to only allow publickey authentication\n",
+    "options = \"-o PreferredAuthentications=publickey -o PasswordAuthentication=no\"\n",
+    "\n",
+    "# test access to PYNQ board\n",
+    "! ssh {options} {username}@{ip} -p {port} cat /var/run/motd.dynamic"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 21,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from finn.transformation.fpgadataflow.make_deployment import DeployToPYNQ\n",
     "\n",
     "model = ModelWrapper(build_dir + \"/end2end_cnv_w1a1_synth.onnx\")\n",
     "model = model.transform(DeployToPYNQ(ip, port, username, password, target_dir))\n",
@@ -689,7 +716,7 @@
     }
    ],
    "source": [
-    "! sshpass -p {password} ssh {username}@{ip} -p {port} 'ls -l {target_dir_pynq}'"
+    "! ssh {options} {username}@{ip} -p {port} 'ls -l {target_dir_pynq}'"
    ]
   },
   {
@@ -795,7 +822,7 @@
    "source": [
     "### Validating the Accuracy on a PYNQ Board <a id='validation'></a>\n",
     "\n",
-    "All the command line prompts here are meant to be executed with `sudo` on the PYNQ board, so we'll use a workaround (`sshpass` and `echo password | sudo -S command`) to get that working from this notebook running on the host computer.\n",
+    "All the command line prompts here are meant to be executed with `sudo` on the PYNQ board, so we'll use a workaround (`echo password | sudo -S command`) to get that working from this notebook running on the host computer.\n",
     "\n",
     "**Ensure that your PYNQ board has a working internet connecting for the next steps, since some there is some downloading involved.**\n",
     "\n",
@@ -824,7 +851,7 @@
     }
    ],
    "source": [
-    "! sshpass -p {password} ssh -t {username}@{ip} -p {port} 'echo {password} | sudo -S pip3 install git+https://github.com/fbcotter/dataset_loading.git@0.0.4#egg=dataset_loading'"
+    "! ssh {options} -t {username}@{ip} -p {port} 'echo {password} | sudo -S pip3 install git+https://github.com/fbcotter/dataset_loading.git@0.0.4#egg=dataset_loading'"
    ]
   },
   {
@@ -866,7 +893,7 @@
     }
    ],
    "source": [
-    "! sshpass -p {password} ssh -t {username}@{ip} -p {port} 'cd {target_dir_pynq}; echo {password} | sudo -S python3.6 validate.py --dataset cifar10 --batchsize 1000'"
+    "! ssh {options} -t {username}@{ip} -p {port} 'cd {target_dir_pynq}; echo {password} | sudo -S python3.6 validate.py --dataset cifar10 --batchsize 1000'"
    ]
   },
   {
diff --git a/notebooks/end2end_example/bnn-pynq/tfc_end2end_example.ipynb b/notebooks/end2end_example/bnn-pynq/tfc_end2end_example.ipynb
index 8cbff4fcea58d452b1e35c0dab647a8f922dc2c0..5ed4b170b4eeee4b438d9539d2317a7d5eab5df2 100644
--- a/notebooks/end2end_example/bnn-pynq/tfc_end2end_example.ipynb
+++ b/notebooks/end2end_example/bnn-pynq/tfc_end2end_example.ipynb
@@ -1337,7 +1337,43 @@
    "source": [
     "### Deployment and Remote Execution <a id='deploy'></a>\n",
     "\n",
-    "We'll now use the `DeployToPYNQ` transformation to create a deployment folder with the bitfile and driver file(s), and copy that to the PYNQ board. You can change the default IP address, username, password and target folder for the PYNQ below."
+    "We'll now use the `DeployToPYNQ` transformation to create a deployment folder with the bitfile and driver file(s), and copy that to the PYNQ board. You can change the default IP address, username, password and target folder for the PYNQ below.\n",
+    "\n",
+    "**Make sure you've [set up the SSH keys for your PYNQ board](https://finn-dev.readthedocs.io/en/latest/getting_started.html#pynq-board-first-time-setup) before executing this step.**"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 1,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Welcome to PYNQ Linux, based on Ubuntu 18.04 (GNU/Linux 5.4.0-xilinx-v2020.1 armv7l)\r\n",
+      "\r\n",
+      " * Pure upstream Kubernetes 1.21, smallest, simplest cluster ops!\r\n",
+      "\r\n",
+      "     https://microk8s.io/\r\n"
+     ]
+    }
+   ],
+   "source": [
+    "import os\n",
+    "\n",
+    "# set up the following values according to your own environment\n",
+    "# FINN will use ssh to deploy and run the generated accelerator\n",
+    "ip = os.getenv(\"PYNQ_IP\", \"192.168.2.99\")\n",
+    "username = os.getenv(\"PYNQ_USERNAME\", \"xilinx\")\n",
+    "password = os.getenv(\"PYNQ_PASSWORD\", \"xilinx\")\n",
+    "port = os.getenv(\"PYNQ_PORT\", 22)\n",
+    "target_dir = os.getenv(\"PYNQ_TARGET_DIR\", \"/home/xilinx/finn_tfc_end2end_example\")\n",
+    "# set up ssh options to only allow publickey authentication\n",
+    "options = \"-o PreferredAuthentications=publickey -o PasswordAuthentication=no\"\n",
+    "\n",
+    "# test access to PYNQ board\n",
+    "! ssh {options} {username}@{ip} -p {port} cat /var/run/motd.dynamic"
    ]
   },
   {
@@ -1347,11 +1383,7 @@
    "outputs": [],
    "source": [
     "from finn.transformation.fpgadataflow.make_deployment import DeployToPYNQ\n",
-    "ip = \"192.168.2.99\"\n",
-    "port = \"22\"\n",
-    "username = \"xilinx\"\n",
-    "password = \"xilinx\"\n",
-    "target_dir = \"/home/xilinx/finn_tfc_end2end_example\"\n",
+    "\n",
     "model = model.transform(DeployToPYNQ(ip, port, username, password, target_dir))\n",
     "model.save(build_dir + \"/tfc_w1_a1_pynq_deploy.onnx\")"
    ]
@@ -1456,7 +1488,7 @@
     }
    ],
    "source": [
-    "! sshpass -p {password} ssh {username}@{ip} -p {port} 'ls -l {target_dir_pynq}'"
+    "! ssh {options} {username}@{ip} -p {port} 'ls -l {target_dir_pynq}'"
    ]
   },
   {
@@ -1578,7 +1610,7 @@
    "source": [
     "### Validating the Accuracy on a PYNQ Board <a id='validation'></a>\n",
     "\n",
-    "All the command line prompts here are meant to be executed with `sudo` on the PYNQ board, so we'll use a workaround (`sshpass` and `echo password | sudo -S command`) to get that working from this notebook running on the host computer.\n",
+    "All the command line prompts here are meant to be executed with `sudo` on the PYNQ board, so we'll use a workaround (`echo password | sudo -S command`) to get that working from this notebook running on the host computer.\n",
     "\n",
     "**Ensure that your PYNQ board has a working internet connecting for the next steps, since some there is some downloading involved.**\n",
     "\n",
@@ -1587,7 +1619,7 @@
     "\n",
     "Command to execute on PYNQ:\n",
     "\n",
-    "```pip3 install git+https://github.com/fbcotter/dataset_loading.git@0.0.4#egg=dataset_loading```"
+    "```sudo pip3 install git+https://github.com/fbcotter/dataset_loading.git@0.0.4#egg=dataset_loading```"
    ]
   },
   {
@@ -1609,7 +1641,7 @@
     }
    ],
    "source": [
-    "! sshpass -p {password} ssh -t {username}@{ip} -p {port} 'echo {password} | sudo -S pip3 install git+https://github.com/fbcotter/dataset_loading.git@0.0.4#egg=dataset_loading'"
+    "! ssh {options} -t {username}@{ip} -p {port} 'echo {password} | sudo -S pip3 install git+https://github.com/fbcotter/dataset_loading.git@0.0.4#egg=dataset_loading'"
    ]
   },
   {
@@ -1656,7 +1688,7 @@
     }
    ],
    "source": [
-    "! sshpass -p {password} ssh -t {username}@{ip} -p {port} 'cd {target_dir_pynq}; echo {password} | sudo -S python3.6 validate.py --dataset mnist --batchsize 1000'"
+    "! ssh {options} -t {username}@{ip} -p {port} 'cd {target_dir_pynq}; echo {password} | sudo -S python3.6 validate.py --dataset mnist --batchsize 1000'"
    ]
   },
   {
diff --git a/setup.cfg b/setup.cfg
index 45fe40156acd966fed302522e9e8ca716a4d331c..e98077ddf1e60f69513d85193374414636a0f355 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -109,6 +109,7 @@ markers =
     slow: marks tests as slow (deselect with '-m "not slow"')
     vivado: mark tests that require Vivado or Vivado HLS
     vitis: mark tests that require Vitis
+    board: mark tests that require a PYNQ board
 norecursedirs =
     dist
     build
diff --git a/src/finn/custom_op/fpgadataflow/tlastmarker.py b/src/finn/custom_op/fpgadataflow/tlastmarker.py
index bedaf0984c39ef7603e6829961d7a3efb6ff489f..70edaee9cfc0662411d005325e781f13b4f1b510 100644
--- a/src/finn/custom_op/fpgadataflow/tlastmarker.py
+++ b/src/finn/custom_op/fpgadataflow/tlastmarker.py
@@ -243,12 +243,13 @@ class TLastMarker(HLSCustomOp):
 
     def get_verilog_top_module_intf_names(self):
         intf_names = super().get_verilog_top_module_intf_names()
+        stream_width = self.get_nodeattr("StreamWidth")
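+        # s_axis/m_axis entries are (interface name, stream width) tuples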
         if self.get_nodeattr("Direction") == "in":
-            intf_names["s_axis"] = ["in0"]
-            intf_names["m_axis"] = ["out_V_V"]
+            intf_names["s_axis"] = [("in0", stream_width)]
+            intf_names["m_axis"] = [("out_V_V", stream_width)]
         else:
-            intf_names["s_axis"] = ["in0_V_V"]
-            intf_names["m_axis"] = ["out_r"]
+            intf_names["s_axis"] = [("in0_V_V", stream_width)]
+            intf_names["m_axis"] = [("out_r", stream_width)]
         if self.get_nodeattr("DynIters") == 1:
             intf_names["axilite"] = ["s_axi_control"]
         return intf_names
diff --git a/src/finn/transformation/fpgadataflow/make_deployment.py b/src/finn/transformation/fpgadataflow/make_deployment.py
index 6d37f567c9a20cf692df126c1c3560324b61d06d..84d3f4cd94c9e0870b90941f7c46447da4cee631 100644
--- a/src/finn/transformation/fpgadataflow/make_deployment.py
+++ b/src/finn/transformation/fpgadataflow/make_deployment.py
@@ -26,7 +26,6 @@
 # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
-import warnings
 import os
 import subprocess
 from distutils.dir_util import copy_tree
@@ -98,22 +97,17 @@ class DeployToPYNQ(Transformation):
         copy_tree(pynq_driver_dir, deployment_dir)
         model.set_metadata_prop("pynq_deploy_dir", deployment_dir)
         model.set_metadata_prop("exec_mode", "remote_pynq")
-        if self.password == "":
-            prefix = ""  # assume we are using an ssh key
-            warnings.warn("Empty password, make sure you've set up an ssh key")
-        else:
-            prefix = "sshpass -p %s " % self.password
 
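+        # deployment now relies on passwordless (public key) SSH access to the board;
+        # see "PYNQ board first-time setup" in the getting started docs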
         # create target directory on PYNQ board
-        cmd = prefix + 'ssh {}@{} -p {} "mkdir -p {}"'.format(
+        cmd = 'ssh {}@{} -p {} "mkdir -p {}"'.format(
             self.username, self.ip, self.port, self.target_dir
         )
         bash_command = ["/bin/bash", "-c", cmd]
         process_compile = subprocess.Popen(bash_command, stdout=subprocess.PIPE)
         process_compile.communicate()
-        # copy directory to PYNQ board using scp and sshpass
-        cmd = prefix + "scp -P{} -r {} {}@{}:{}".format(
-            self.port, deployment_dir, self.username, self.ip, self.target_dir,
+        # copy directory to PYNQ board using scp
+        cmd = "scp -P{} -r {} {}@{}:{}".format(
+            self.port, deployment_dir, self.username, self.ip, self.target_dir
         )
         bash_command = ["/bin/bash", "-c", cmd]
         process_compile = subprocess.Popen(bash_command, stdout=subprocess.PIPE)
diff --git a/tests/end2end/test_end2end_access_board.py b/tests/end2end/test_end2end_access_board.py
new file mode 100644
index 0000000000000000000000000000000000000000..21b495c74ca8179e1f9e1e3955e665c4c81763b8
--- /dev/null
+++ b/tests/end2end/test_end2end_access_board.py
@@ -0,0 +1,53 @@
+# Copyright (c) 2021, Xilinx
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+#   list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+#   this list of conditions and the following disclaimer in the documentation
+#   and/or other materials provided with the distribution.
+#
+# * Neither the name of FINN nor the names of its
+#   contributors may be used to endorse or promote products derived from
+#   this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+import pytest
+import subprocess
+from finn.util.test import get_build_env
+
+
+@pytest.mark.board
+def test_end2end_access_board():
+    build_env = get_build_env("zynq", 5)
+    if build_env["ip"] == "":
+        pytest.skip("PYNQ board IP address not specified")
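+    # use key-based authentication only, so the test fails fast if SSH keys are not set up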
+    remote_cmd_base = [
+        "ssh",
+        "-o",
+        "PreferredAuthentications=publickey",
+        "-o",
+        "PasswordAuthentication=no",
+        "%s@%s" % (build_env["username"], build_env["ip"]),
+    ]
+    test_text = "BoardIsAccessible"
+    echo_cmd = remote_cmd_base + ["echo %s" % test_text]
+    verif_res = subprocess.run(
+        echo_cmd, stdout=subprocess.PIPE, universal_newlines=True
+    )
+    assert verif_res.returncode == 0
+    assert verif_res.stdout.split("\n")[0] == test_text
diff --git a/tests/end2end/test_end2end_cybsec_mlp.py b/tests/end2end/test_end2end_cybsec_mlp.py
index eedbf97f389754440a116cf8755c25d597c433ee..4dba19f586e3235a582e72d4c3936a60ebc4a703 100644
--- a/tests/end2end/test_end2end_cybsec_mlp.py
+++ b/tests/end2end/test_end2end_cybsec_mlp.py
@@ -213,22 +213,9 @@ echo %s | sudo -S python3.6 validate-unsw-nb15.py --batchsize=10 --limit_batches
         build_env["ip"],
         build_env["target_dir"],
     )
-    rsync_res = subprocess.run(
-        [
-            "sshpass",
-            "-p",
-            build_env["password"],
-            "rsync",
-            "-avz",
-            deploy_dir,
-            remote_target,
-        ]
-    )
+    rsync_res = subprocess.run(["rsync", "-avz", deploy_dir, remote_target])
     assert rsync_res.returncode == 0
     remote_verif_cmd = [
-        "sshpass",
-        "-p",
-        build_env["password"],
         "ssh",
         "%s@%s" % (build_env["username"], build_env["ip"]),
         "sh",
diff --git a/tests/end2end/test_ext_weights.py b/tests/end2end/test_ext_weights.py
index 0407395ed57dc07c6700efcebbb1fc8a767877bb..aa0ce7a6c6c6148ca28b58428ad60d7eb0347bea 100644
--- a/tests/end2end/test_ext_weights.py
+++ b/tests/end2end/test_ext_weights.py
@@ -44,6 +44,14 @@ onnx_zip_url = "https://github.com/Xilinx/finn-examples"
 onnx_zip_url += "/releases/download/v0.0.1a/onnx-models-bnn-pynq.zip"
 onnx_zip_local = build_dir + "/onnx-models-bnn-pynq.zip"
 onnx_dir_local = build_dir + "/onnx-models-bnn-pynq"
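+# MNIST dataset files (idx format); downloaded locally and copied to the board by test_end2end_ext_weights_dataset below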
+mnist_url = "https://raw.githubusercontent.com/fgnt/mnist/master"
+mnist_local = build_dir + "/mnist"
+mnist_files = [
+    "train-images-idx3-ubyte.gz",
+    "train-labels-idx1-ubyte.gz",
+    "t10k-images-idx3-ubyte.gz",
+    "t10k-labels-idx1-ubyte.gz",
+]
 
 
 def get_checkpoint_name(step):
@@ -98,6 +106,22 @@ def test_end2end_ext_weights_build():
     shutil.copytree(output_dir + "/deploy", get_checkpoint_name("build"))
 
 
+@pytest.mark.board
+def test_end2end_ext_weights_dataset():
+    # make sure we have local copies of mnist dataset files
+    subprocess.check_output(["mkdir", "-p", mnist_local])
+    for f in mnist_files:
+        if not os.path.isfile(mnist_local + "/" + f):
+            wget.download(mnist_url + "/" + f, out=mnist_local + "/" + f)
+        assert os.path.isfile(mnist_local + "/" + f)
+    # rsync to board
+    build_env = get_build_env(build_kind, target_clk_ns)
+    mnist_target = "%s@%s:%s" % (build_env["username"], build_env["ip"], "/tmp/")
+
+    rsync_dataset_cmd = ["rsync", "-rv", mnist_local + "/", mnist_target]
+    subprocess.check_output(rsync_dataset_cmd)
+
+
 def test_end2end_ext_weights_run_on_hw():
     build_env = get_build_env(build_kind, target_clk_ns)
     deploy_dir = get_checkpoint_name("build")
@@ -124,22 +148,9 @@ echo %s | sudo -S python3.6 validate.py --dataset mnist --bitfile %s
         build_env["ip"],
         build_env["target_dir"],
     )
-    rsync_res = subprocess.run(
-        [
-            "sshpass",
-            "-p",
-            build_env["password"],
-            "rsync",
-            "-avz",
-            deploy_dir,
-            remote_target,
-        ]
-    )
+    rsync_res = subprocess.run(["rsync", "-avz", deploy_dir, remote_target])
     assert rsync_res.returncode == 0
     remote_verif_cmd = [
-        "sshpass",
-        "-p",
-        build_env["password"],
         "ssh",
         "%s@%s" % (build_env["username"], build_env["ip"]),
         "sh",