diff --git a/docs/finn/getting_started.rst b/docs/finn/getting_started.rst
index 9b3111b70eae97a3644e1de23c368bd5b09f7927..c575ca7e3b3b04eb651fb1135d949baad038f1ad 100644
--- a/docs/finn/getting_started.rst
+++ b/docs/finn/getting_started.rst
@@ -107,9 +107,6 @@ These are summarized below:
 * (optional) ``LOCALHOST_URL`` (default localhost) sets the base URL for accessing e.g. Netron from inside the container. Useful when running FINN remotely.
 * (optional) ``NETRON_PORT`` (default 8081) changes the port for Netron inside Docker
 * (optional) ``PYNQ_BOARD`` or ``ALVEO_BOARD`` specifies the type of PYNQ/Alveo board used (see "supported hardware" below) for the test suite
-* (optional) ``PYNQ_IP`` and ``PYNQ_PORT`` (or ``ALVEO_IP`` and ``ALVEO_PORT``) specify ip address and port number to access the PYNQ board / Alveo target
-* (optional) ``PYNQ_USERNAME`` and ``PYNQ_PASSWORD`` (or ``ALVEO_USERNAME`` and ``ALVEO_PASSWORD``) specify the PYNQ board / Alveo host access credentials for the test suite. For PYNQ, password is always needed to run as sudo. For Alveo, you can leave the password empty and place your ssh private key in the ``finn/ssh_keys`` folder to use keypair authentication.
-* (optional) ``PYNQ_TARGET_DIR`` (or ``ALVEO_TARGET_DIR``) specifies the target dir on the PYNQ board / Alveo host for the test suite
 * (optional) ``IMAGENET_VAL_PATH`` specifies the path to the ImageNet validation directory for tests.
 * (optional) ``FINN_DOCKER_PREBUILT`` (default 0) if set to 1 then skip Docker image building and use the image tagged with ``FINN_DOCKER_TAG``.
 * (optional) ``FINN_DOCKER_TAG`` (autogenerated) specifies the Docker image tag to use.
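
Note: with the remote-deployment variables gone, getting a design onto a PYNQ/Alveo board is no longer handled by the test suite. The builder flow can instead emit a self-contained deployment package that is copied to the board manually. A minimal sketch, assuming the standard ``finn.builder`` API (model path, clock period and board name below are placeholders):

```python
# Sketch: produce a deployment package via the builder flow, replacing the
# removed DeployToPYNQ/remote-exec path. "model.onnx" and "Pynq-Z1" are
# placeholders.
from finn.builder.build_dataflow import build_dataflow_cfg
from finn.builder.build_dataflow_config import (
    DataflowBuildConfig,
    DataflowOutputType,
    ShellFlowType,
)

cfg = DataflowBuildConfig(
    output_dir="build_output",
    synth_clk_period_ns=10.0,
    board="Pynq-Z1",
    shell_flow_type=ShellFlowType.VIVADO_ZYNQ,
    generate_outputs=[DataflowOutputType.DEPLOYMENT_PACKAGE],
)
build_dataflow_cfg("model.onnx", cfg)
# build_output/deploy (bitfile + driver) can then be copied to the board
# by hand, e.g. with scp or rsync.
```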
diff --git a/docs/finn/source_code/finn.core.rst b/docs/finn/source_code/finn.core.rst
index afa1ecffa08213db6a282076c6fdf59694f9e13e..28cb47eaf70cade96a1146559cbbd92248923a34 100644
--- a/docs/finn/source_code/finn.core.rst
+++ b/docs/finn/source_code/finn.core.rst
@@ -54,14 +54,6 @@ finn.core.onnx\_exec
    :undoc-members:
    :show-inheritance:
 
-finn.core.remote\_exec
------------------------------
-
-.. automodule:: finn.core.remote_exec
-   :members:
-   :undoc-members:
-   :show-inheritance:
-
 finn.core.rtlsim\_exec
 -----------------------------
 
diff --git a/docs/finn/source_code/finn.util.rst b/docs/finn/source_code/finn.util.rst
index 7ba3b252abfa0086a8c0281eb9a792fb239d6ec3..aebd0604f4c555f2b1bc637bc4c3d94b35309722 100644
--- a/docs/finn/source_code/finn.util.rst
+++ b/docs/finn/source_code/finn.util.rst
@@ -99,14 +99,6 @@ finn.util.fpgadataflow
    :undoc-members:
    :show-inheritance:
 
-finn.util.gdrive
------------------------------
-
-.. automodule:: finn.util.gdrive
-  :members:
-  :undoc-members:
-  :show-inheritance:
-
 finn.util.hls
 ---------------
 
diff --git a/src/finn/core/onnx_exec.py b/src/finn/core/onnx_exec.py
index 2695113661ed286c94ae9cb5f20ca99cc1fced7f..daecb59743d1b843e9d7fd40fdbf5bf10fac2fe1 100644
--- a/src/finn/core/onnx_exec.py
+++ b/src/finn/core/onnx_exec.py
@@ -31,7 +31,6 @@ import numpy as np
 import qonnx.analysis.topology as ta
 from qonnx.core.onnx_exec import execute_onnx as execute_onnx_base
 
-from finn.core.remote_exec import remote_exec
 from finn.core.rtlsim_exec import rtlsim_exec
 
 
@@ -51,7 +50,6 @@ def execute_onnx(
 
     # check if model has an execution mode set
     # if None, execute model node using the QONNX-provided execute_onnx impl
-    # if set to "remote_pynq" execute model on PYNQ board
     # if set to "rtlsim" execute model using pyverilator
     model_exec_mode = model.get_metadata_prop("exec_mode")
     if (model_exec_mode is None) or (model_exec_mode == ""):
@@ -91,22 +89,17 @@ def execute_onnx(
 
     # check if model has an execution mode set
     # if None, execute model node by node using execute_node()
-    # if set to "remote_pynq" execute model on PYNQ board
     # if set to "rtlsim" execute model using pyverilator
     model_exec_mode = model.get_metadata_prop("exec_mode")
     if (model_exec_mode is None) or (model_exec_mode == ""):
         return execute_onnx_base()
-    elif model_exec_mode == "remote_pynq":
-        # use remote exec metadata built into model to execute on a remote PYNQ
-        remote_exec(model, execution_context)
     elif model_exec_mode == "rtlsim":
         # use stitched IP for rtlsim
         rtlsim_exec(model, execution_context)
     else:
         raise Exception(
-            """Metadata property "exec_mode" is set to an unknown value.
-        Can be left unset or has to be set to "remote_pynq" for remote execution
-        on PYNQ board or "rtlsim" for execution using pyverilator!"""
+            """Metadata property "exec_mode" is set to an unknown value. Can be left
+            unset or has to be set to "rtlsim" for execution using pyverilator!"""
         )
 
     if return_full_exec_context:
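
Note: ``execute_onnx`` now dispatches on just two ``exec_mode`` values. A minimal sketch of both remaining paths (model path is a placeholder; the rtlsim branch assumes the model has been through ``CreateStitchedIP`` so the rtlsim metadata is present):

```python
import numpy as np
from qonnx.core.modelwrapper import ModelWrapper

from finn.core.onnx_exec import execute_onnx

model = ModelWrapper("model.onnx")  # placeholder path
iname = model.graph.input[0].name
inp = {iname: np.zeros(model.get_tensor_shape(iname), dtype=np.float32)}

# exec_mode unset/empty: delegate to the QONNX node-by-node executor
out = execute_onnx(model, inp)

# exec_mode "rtlsim": simulate the stitched IP with pyverilator
model.set_metadata_prop("exec_mode", "rtlsim")
out_rtlsim = execute_onnx(model, inp)
```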
diff --git a/src/finn/core/remote_exec.py b/src/finn/core/remote_exec.py
deleted file mode 100644
index f487b48f86f1ef0440ed4a8bf371083369dd096c..0000000000000000000000000000000000000000
--- a/src/finn/core/remote_exec.py
+++ /dev/null
@@ -1,119 +0,0 @@
-# Copyright (c) 2020 Xilinx, Inc.
-# All rights reserved.
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are met:
-#
-# * Redistributions of source code must retain the above copyright notice, this
-#   list of conditions and the following disclaimer.
-#
-# * Redistributions in binary form must reproduce the above copyright notice,
-#   this list of conditions and the following disclaimer in the documentation
-#   and/or other materials provided with the distribution.
-#
-# * Neither the name of Xilinx nor the names of its
-#   contributors may be used to endorse or promote products derived from
-#   this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
-# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
-# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
-# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
-# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
-# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
-# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-import numpy as np
-import os
-import subprocess
-import warnings
-
-
-def remote_exec(model, execution_context):
-    """Executes the given model remotely on the pynq board. The metadata properties
-    related to the pynq board have to be set. The execution context contains the
-    input values."""
-    # TODO fix for multi input-output
-    pynq_ip = model.get_metadata_prop("pynq_ip")
-    pynq_port = int(model.get_metadata_prop("pynq_port"))
-    pynq_username = model.get_metadata_prop("pynq_username")
-    pynq_password = model.get_metadata_prop("pynq_password")
-    pynq_target_dir = model.get_metadata_prop("pynq_target_dir")
-    deployment_dir = model.get_metadata_prop("pynq_deploy_dir")
-    platform = model.get_metadata_prop("platform")
-    assert platform in ["alveo", "zynq-iodma"]
-    bitfile = model.get_metadata_prop("bitfile")
-    bitfile = os.path.basename(bitfile)
-    if pynq_password == "":
-        if "zynq" in platform:
-            raise Exception("PYNQ board remote exec needs password for sudo")
-        else:
-            local_prefix = ""  # assume we are using an ssh key
-            warnings.warn("Empty password, make sure you've set up an ssh key")
-    else:
-        local_prefix = "sshpass -p %s " % pynq_password
-
-    if platform == "alveo":
-        # Alveo can run without sudo
-        remote_prefix = ""
-    elif "zynq" in platform:
-        # PYNQ Zynq boards need to execute with sudo
-        remote_prefix = "echo %s | sudo -S " % pynq_password
-
-    inp = execution_context[model.graph.input[0].name]
-    # make copy of array before saving it
-    inp = inp.copy()
-    batchsize = inp.shape[0]
-    np.save(os.path.join(deployment_dir, "input.npy"), inp)
-    # extracting last folder of absolute path (deployment_dir)
-    deployment_folder = os.path.basename(os.path.normpath(deployment_dir))
-    # copy input to PYNQ board
-    cmd = local_prefix + "scp -P{} -r {}/input.npy {}@{}:{}/{}".format(
-        pynq_port,
-        deployment_dir,
-        pynq_username,
-        pynq_ip,
-        pynq_target_dir,
-        deployment_folder,
-    )
-    bash_command = ["/bin/bash", "-c", cmd]
-    process_scp_in = subprocess.Popen(bash_command, stdout=subprocess.PIPE)
-    process_scp_in.communicate()
-
-    # use platform attribute for correct remote execution
-    if platform == "alveo":
-        remote_cmd = "bash -ic 'bash alveo_run.sh execute %d' \"" % batchsize
-    else:
-        remote_cmd = (
-            "python3.6 driver.py --exec_mode=execute --batchsize={} "
-            "--bitfile={} --inputfile=input.npy --outputfile=output.npy "
-            '--platform={} "'
-        ).format(batchsize, bitfile, platform)
-    cmd = (
-        local_prefix + 'ssh {}@{} -p {} "cd {}/{}; ' + remote_prefix + remote_cmd
-    ).format(pynq_username, pynq_ip, pynq_port, pynq_target_dir, deployment_folder)
-    bash_command = ["/bin/bash", "-c", cmd]
-    process_exec_accel = subprocess.Popen(bash_command, stdout=subprocess.PIPE)
-    process_exec_accel.communicate()
-    # remove stale output file from local dir, if any
-    try:
-        os.remove("{}/output.npy".format(deployment_dir))
-    except FileNotFoundError:
-        pass
-    # copy generated output to local
-    cmd = local_prefix + "scp -P{} {}@{}:{}/{}/output.npy {}".format(
-        pynq_port,
-        pynq_username,
-        pynq_ip,
-        pynq_target_dir,
-        deployment_folder,
-        deployment_dir,
-    )
-    bash_command = ["/bin/bash", "-c", cmd]
-    process_scp_out = subprocess.Popen(bash_command, stdout=subprocess.PIPE)
-    process_scp_out.communicate()
-    outp = np.load("{}/output.npy".format(deployment_dir))
-    execution_context[model.graph.output[0].name] = outp
diff --git a/src/finn/core/throughput_test.py b/src/finn/core/throughput_test.py
index 3533fd13399a4ba4392d66af785979afc32cab29..08633be33b8ab6d096275aca2c362a8bac43d704 100644
--- a/src/finn/core/throughput_test.py
+++ b/src/finn/core/throughput_test.py
@@ -28,90 +28,11 @@
 
 import numpy as np
 import os
-import subprocess
-import warnings
 from qonnx.util.basic import gen_finn_dt_tensor
 
 from finn.core.rtlsim_exec import rtlsim_exec
 
 
-def throughput_test_remote(model, batchsize=1000, timeout=None):
-    """Runs the throughput test for the given model remotely on the pynq board.
-    The metadata properties related to the pynq board have to be set.
-    Additionally a timeout for the SSH communication can be set.
-    Returns a dictionary with results of the throughput test. Returns None
-    if the test fails."""
-
-    pynq_ip = model.get_metadata_prop("pynq_ip")
-    pynq_port = int(model.get_metadata_prop("pynq_port"))
-    pynq_username = model.get_metadata_prop("pynq_username")
-    pynq_password = model.get_metadata_prop("pynq_password")
-    pynq_target_dir = model.get_metadata_prop("pynq_target_dir")
-    deployment_dir = model.get_metadata_prop("pynq_deploy_dir")
-    # extracting last folder of absolute path (deployment_dir)
-    deployment_folder = os.path.basename(os.path.normpath(deployment_dir))
-    platform = model.get_metadata_prop("platform")
-    assert platform in ["alveo", "zynq-iodma"]
-    bitfile = model.get_metadata_prop("bitfile")
-    bitfile = os.path.basename(bitfile)
-    if pynq_password == "":
-        if "zynq" in platform:
-            raise Exception("PYNQ board remote exec needs password for sudo")
-        else:
-            local_prefix = ""  # assume we are using an ssh key
-            warnings.warn("Empty password, make sure you've set up an ssh key")
-    else:
-        local_prefix = "sshpass -p %s " % pynq_password
-
-    if platform == "alveo":
-        # Alveo can run without sudo but needs correct environment
-        remote_prefix = "conda activate finn-pynq-alveo; "
-    elif "zynq" in platform:
-        # PYNQ Zynq boards need to execute with sudo
-        remote_prefix = "echo %s | sudo -S " % pynq_password
-
-    # use platform attribute for correct remote execution
-    if platform == "alveo":
-        remote_cmd = "bash -ic 'bash alveo_run.sh throughput_test %d' \"" % batchsize
-    else:
-        remote_cmd = (
-            "python3.6 driver.py --exec_mode=throughput_test --batchsize={} "
-            "--bitfile={} --inputfile=input.npy --outputfile=output.npy "
-            '--platform={} "'
-        ).format(batchsize, bitfile, platform)
-    cmd = (
-        local_prefix + 'ssh {}@{} -p {} "cd {}/{}; ' + remote_prefix + remote_cmd
-    ).format(pynq_username, pynq_ip, pynq_port, pynq_target_dir, deployment_folder)
-    bash_command = ["/bin/bash", "-c", cmd]
-    process_throughput_test = subprocess.Popen(bash_command, stdout=subprocess.PIPE)
-    process_throughput_test.communicate(timeout=timeout)
-
-    # remove any pre-existing metrics file
-    try:
-        os.remove("{}/nw_metrics.txt".format(deployment_dir))
-    except FileNotFoundError:
-        pass
-
-    cmd = local_prefix + "scp -P{} {}@{}:{}/{}/nw_metrics.txt {}".format(
-        pynq_port,
-        pynq_username,
-        pynq_ip,
-        pynq_target_dir,
-        deployment_folder,
-        deployment_dir,
-    )
-    bash_command = ["/bin/bash", "-c", cmd]
-    process_compile = subprocess.Popen(bash_command, stdout=subprocess.PIPE)
-    process_compile.communicate(timeout=timeout)
-
-    try:
-        with open("{}/nw_metrics.txt".format(deployment_dir), "r") as file:
-            res = eval(file.read())
-        return res
-    except FileNotFoundError:
-        return None
-
-
 def throughput_test_rtlsim(model, batchsize=100):
     """Runs a throughput test for the given IP-stitched model. When combined
     with tracing, useful to determine bottlenecks and required FIFO sizes."""
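
Note: ``throughput_test_rtlsim`` is now the only in-repo throughput test; on-board measurements go through the generated PYNQ driver instead. A minimal usage sketch (placeholder model path, assuming stitched-IP rtlsim metadata is set):

```python
from qonnx.core.modelwrapper import ModelWrapper

from finn.core.throughput_test import throughput_test_rtlsim

model = ModelWrapper("stitched_ip.onnx")  # placeholder path
res = throughput_test_rtlsim(model, batchsize=100)
print("cycles for batch of 100:", res["cycles"])
```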
diff --git a/src/finn/transformation/fpgadataflow/make_deployment.py b/src/finn/transformation/fpgadataflow/make_deployment.py
deleted file mode 100644
index d4684dc83ce1f22ecae2ca04af5e5973519db4f6..0000000000000000000000000000000000000000
--- a/src/finn/transformation/fpgadataflow/make_deployment.py
+++ /dev/null
@@ -1,116 +0,0 @@
-# Copyright (c) 2020, Xilinx
-# All rights reserved.
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are met:
-#
-# * Redistributions of source code must retain the above copyright notice, this
-#   list of conditions and the following disclaimer.
-#
-# * Redistributions in binary form must reproduce the above copyright notice,
-#   this list of conditions and the following disclaimer in the documentation
-#   and/or other materials provided with the distribution.
-#
-# * Neither the name of FINN nor the names of its
-#   contributors may be used to endorse or promote products derived from
-#   this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
-# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
-# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
-# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
-# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
-# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
-# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-import os
-import subprocess
-from distutils.dir_util import copy_tree
-from qonnx.transformation.base import Transformation
-from shutil import copy
-
-import finn.transformation.fpgadataflow.templates as templates
-from finn.util.basic import make_build_dir
-
-
-class DeployToPYNQ(Transformation):
-    """Collects all necessary files for deployment and copies them to the PYNQ board.
-    Expects information about PYNQ board to make scp possible:
-
-    IP address of board, username and password for board and target directory where
-    the files are stored on the board"""
-
-    def __init__(self, ip, port, username, password, target_dir):
-        super().__init__()
-        self.ip = ip
-        self.port = port
-        self.username = username
-        self.password = password
-        self.target_dir = target_dir
-
-    def apply(self, model):
-        # set metadata properties accordingly to user input specifications
-        model.set_metadata_prop("pynq_ip", self.ip)
-        model.set_metadata_prop("pynq_port", str(self.port))
-        model.set_metadata_prop("pynq_username", self.username)
-        model.set_metadata_prop("pynq_password", self.password)
-        model.set_metadata_prop("pynq_target_dir", self.target_dir)
-
-        # create directory for deployment files
-        deployment_dir = make_build_dir(prefix="pynq_deployment_")
-        model.set_metadata_prop("pynq_deployment_dir", deployment_dir)
-
-        # get and copy necessary files
-        # .bit and .hwh file
-        bitfile = model.get_metadata_prop("bitfile")
-        hwh_file = model.get_metadata_prop("hw_handoff")
-        deploy_files = [bitfile, hwh_file]
-
-        for dfile in deploy_files:
-            if dfile is not None:
-                copy(dfile, deployment_dir)
-
-        # helper script for Alveo
-        platform = model.get_metadata_prop("platform")
-        if platform == "alveo":
-            alveo_run_sh = templates.alveo_run_sh_template
-            fill_dict = {
-                "$REMOTE_DEPLOY_DIR$": self.target_dir
-                + "/"
-                + os.path.basename(deployment_dir),
-                "$CONDA_ENV_NAME$": "finn-pynq-alveo",
-                "$REMOTE_XRT$": os.environ["XILINX_XRT"],
-                "$REMOTE_PLATFORM_REPO_PATHS$": os.environ["PLATFORM_REPO_PATHS"],
-                "$BITFILE$": os.path.basename(bitfile),
-            }
-            for key, value in fill_dict.items():
-                alveo_run_sh = alveo_run_sh.replace(key, value)
-            alveo_run_sh_path = deployment_dir + "/alveo_run.sh"
-            with open(alveo_run_sh_path, "w") as f:
-                f.write(alveo_run_sh)
-
-        # driver.py and python libraries
-        pynq_driver_dir = model.get_metadata_prop("pynq_driver_dir")
-        copy_tree(pynq_driver_dir, deployment_dir)
-        model.set_metadata_prop("pynq_deploy_dir", deployment_dir)
-        model.set_metadata_prop("exec_mode", "remote_pynq")
-
-        # create target directory on PYNQ board
-        cmd = 'ssh {}@{} -p {} "mkdir -p {}"'.format(
-            self.username, self.ip, self.port, self.target_dir
-        )
-        bash_command = ["/bin/bash", "-c", cmd]
-        process_compile = subprocess.Popen(bash_command, stdout=subprocess.PIPE)
-        process_compile.communicate()
-        # copy directory to PYNQ board using scp
-        cmd = "scp -P{} -r {} {}@{}:{}".format(
-            self.port, deployment_dir, self.username, self.ip, self.target_dir
-        )
-        bash_command = ["/bin/bash", "-c", cmd]
-        process_compile = subprocess.Popen(bash_command, stdout=subprocess.PIPE)
-        process_compile.communicate()
-
-        return (model, False)
diff --git a/src/finn/transformation/fpgadataflow/template_driver.py b/src/finn/transformation/fpgadataflow/template_driver.py
index 05ee6ad920d7e921dc9611a7936e28288ba53a0a..158825191e3372fc133e01b47ea7138b0aba899f 100644
--- a/src/finn/transformation/fpgadataflow/template_driver.py
+++ b/src/finn/transformation/fpgadataflow/template_driver.py
@@ -135,5 +135,5 @@ if __name__ == "__main__":
         file.close()
         print("Results written to nw_metrics.txt")
     else:
-        raise Exception("Exec mode has to be set to remote_pynq or throughput_test")
+        raise Exception("Exec mode has to be set to execute or throughput_test")
 """
diff --git a/src/finn/transformation/fpgadataflow/templates.py b/src/finn/transformation/fpgadataflow/templates.py
index f52bad0ffb35ae4714acc24aef368d01967db426..ce1545b5be88d87f65d5c39028057b5f4b651a41 100644
--- a/src/finn/transformation/fpgadataflow/templates.py
+++ b/src/finn/transformation/fpgadataflow/templates.py
@@ -242,22 +242,6 @@ report_utilization -hierarchical -hierarchical_depth 4 -file synth_report.xml -f
 close_project
 """
 
-alveo_run_sh_template = """#!/bin/bash
-
-if [ "$#" -ne 2 ]; then
-    echo "Usage: alveo_run.sh <exec_mode={execute, throughput_test}> <batch_size>"
-    exit -1
-fi
-
-cd $REMOTE_DEPLOY_DIR$
-eval "$(conda shell.bash hook)"
-conda activate $CONDA_ENV_NAME$
-source $REMOTE_XRT$/setup.sh
-export PLATFORM_REPO_PATHS=$REMOTE_PLATFORM_REPO_PATHS$
-python3.6 driver.py --exec_mode=$1 --batchsize=$2 --bitfile=$BITFILE$ \
-    --inputfile=input.npy --outputfile=output.npy --platform=alveo
-"""
-
 vitis_gen_xml_report_tcl_template = """
 open_project $VITIS_PROJ_PATH$/_x/link/vivado/vpl/prj/prj.xpr
 open_run impl_1
diff --git a/src/finn/util/gdrive.py b/src/finn/util/gdrive.py
deleted file mode 100644
index d525437300b6aee081bb073d40a517b5e3aa14be..0000000000000000000000000000000000000000
--- a/src/finn/util/gdrive.py
+++ /dev/null
@@ -1,65 +0,0 @@
-# Copyright (c) 2020, Xilinx
-# All rights reserved.
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are met:
-#
-# * Redistributions of source code must retain the above copyright notice, this
-#   list of conditions and the following disclaimer.
-#
-# * Redistributions in binary form must reproduce the above copyright notice,
-#   this list of conditions and the following disclaimer in the documentation
-#   and/or other materials provided with the distribution.
-#
-# * Neither the name of FINN nor the names of its
-#   contributors may be used to endorse or promote products derived from
-#   this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
-# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
-# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
-# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
-# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
-# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
-# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-import gspread
-import os
-import warnings
-from datetime import datetime
-
-from finn.util.basic import get_finn_root
-
-
-def upload_to_end2end_dashboard(data_dict):
-    gdrive_key = get_finn_root() + "/gdrive-key/service_account.json"
-    if not os.path.isfile(gdrive_key):
-        warnings.warn("Google Drive key not found, skipping dashboard upload")
-        return
-    gc = gspread.service_account(filename=gdrive_key)
-    spreadsheet = gc.open("finn-end2end-dashboard")
-    worksheet = spreadsheet.get_worksheet(0)
-    keys = list(data_dict.keys())
-    vals = list(data_dict.values())
-    # check against existing header
-    existing_keys = worksheet.row_values(1)
-    if not set(existing_keys).issuperset(set(keys)):
-        # create new worksheet
-        dtstr = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
-        worksheet = spreadsheet.add_worksheet(
-            title="Dashboard " + dtstr, rows=10, cols=len(keys), index=0
-        )
-        # create header row with keys
-        worksheet.update("A1:1", [keys])
-        # freeze and make header bold
-        worksheet.freeze(rows=1)
-        worksheet.format("A1:1", {"textFormat": {"bold": True}})
-    # insert values into new row at appropriate positions
-    worksheet.insert_row([], index=2)
-    for i in range(len(keys)):
-        colind = existing_keys.index(keys[i])
-        col_letter = chr(ord("A") + colind)
-        worksheet.update("%s2" % col_letter, vals[i])
diff --git a/src/finn/util/test.py b/src/finn/util/test.py
index bd8bde2820fa87ed972d699cae905d7f6cc310ff..4250079ef3e994f62a3e9f9150eb5b66371b5895 100644
--- a/src/finn/util/test.py
+++ b/src/finn/util/test.py
@@ -114,25 +114,14 @@ def get_build_env(kind, target_clk_ns):
     if kind == "zynq":
         ret["board"] = os.getenv("PYNQ_BOARD", default="Pynq-Z1")
         ret["part"] = pynq_part_map[ret["board"]]
-        ret["ip"] = os.getenv("PYNQ_IP", "")
-        ret["username"] = os.getenv("PYNQ_USERNAME", "xilinx")
-        ret["password"] = os.getenv("PYNQ_PASSWORD", "xilinx")
-        ret["port"] = os.getenv("PYNQ_PORT", 22)
-        ret["target_dir"] = os.getenv("PYNQ_TARGET_DIR", "/home/xilinx/finn")
         ret["build_fxn"] = ZynqBuild(ret["board"], target_clk_ns)
     elif kind == "alveo":
         ret["board"] = os.getenv("ALVEO_BOARD", default="U250")
         ret["part"] = alveo_part_map[ret["board"]]
-        ret["platform"] = alveo_default_platform[ret["board"]]
-        ret["ip"] = os.getenv("ALVEO_IP", "")
-        ret["username"] = os.getenv("ALVEO_USERNAME", "")
-        ret["password"] = os.getenv("ALVEO_PASSWORD", "")
-        ret["port"] = os.getenv("ALVEO_PORT", 22)
-        ret["target_dir"] = os.getenv("ALVEO_TARGET_DIR", "/tmp/finn_alveo_deploy")
         ret["build_fxn"] = VitisBuild(
             ret["part"],
             target_clk_ns,
-            ret["platform"],
+            alveo_default_platform[ret["board"]],
             strategy=VitisOptStrategy.BUILD_SPEED,
         )
     else:
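
Note: ``get_build_env`` is correspondingly slimmed down; it now returns only the board name, FPGA part and a build transform, with all remote-access fields removed. A usage sketch:

```python
from finn.util.test import get_build_env

cfg = get_build_env("zynq", target_clk_ns=5)
print(cfg["board"], cfg["part"])
# model = model.transform(cfg["build_fxn"])  # runs ZynqBuild / VitisBuild
```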
diff --git a/tests/end2end/test_end2end_access_board.py b/tests/end2end/test_end2end_access_board.py
deleted file mode 100644
index ba3c49195b298059149303c63ef2db8ab6e16039..0000000000000000000000000000000000000000
--- a/tests/end2end/test_end2end_access_board.py
+++ /dev/null
@@ -1,56 +0,0 @@
-# Copyright (c) 2021, Xilinx
-# All rights reserved.
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are met:
-#
-# * Redistributions of source code must retain the above copyright notice, this
-#   list of conditions and the following disclaimer.
-#
-# * Redistributions in binary form must reproduce the above copyright notice,
-#   this list of conditions and the following disclaimer in the documentation
-#   and/or other materials provided with the distribution.
-#
-# * Neither the name of FINN nor the names of its
-#   contributors may be used to endorse or promote products derived from
-#   this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
-# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
-# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
-# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
-# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
-# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
-# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-import pytest
-
-import subprocess
-
-from finn.util.test import get_build_env
-
-
-@pytest.mark.board
-@pytest.mark.end2end
-def test_end2end_access_board():
-    build_env = get_build_env("zynq", 5)
-    if build_env["ip"] == "":
-        pytest.skip("PYNQ board IP address not specified")
-    remote_cmd_base = [
-        "ssh",
-        "-o",
-        "PreferredAuthentications=publickey",
-        "-o",
-        "PasswordAuthentication=no",
-        "%s@%s" % (build_env["username"], build_env["ip"]),
-    ]
-    test_text = "BoardIsAccessible"
-    touch_cmd = remote_cmd_base + ["echo %s" % test_text]
-    verif_res = subprocess.run(
-        touch_cmd, stdout=subprocess.PIPE, universal_newlines=True
-    )
-    assert verif_res.returncode == 0
-    assert verif_res.stdout.split("\n")[0] == test_text
diff --git a/tests/end2end/test_end2end_bnn_pynq.py b/tests/end2end/test_end2end_bnn_pynq.py
index 62b76d2f1306a94bf850cf62e360cb0e63a8ce30..27aaa1986df15b4d6f51462679b1f8e7179684cb 100644
--- a/tests/end2end/test_end2end_bnn_pynq.py
+++ b/tests/end2end/test_end2end_bnn_pynq.py
@@ -34,13 +34,10 @@ import numpy as np
 # import pytorch before onnx, so we make sure to import onnx first
 import onnx  # NOQA
 import os
-import subprocess
 import torch
 import warnings
 from brevitas.export import export_finn_onnx, export_qonnx
-from collections import OrderedDict
 from dataset_loading import cifar, mnist
-from datetime import datetime
 from qonnx.core.datatype import DataType
 from qonnx.core.modelwrapper import ModelWrapper
 from qonnx.custom_op.registry import getCustomOp
@@ -59,13 +56,12 @@ from qonnx.transformation.insert_topk import InsertTopK
 from qonnx.transformation.lower_convs_to_matmul import LowerConvsToMatMul
 from qonnx.transformation.merge_onnx_models import MergeONNXModels
 from qonnx.util.cleanup import cleanup as qonnx_cleanup
-from scipy.stats import linregress
 
 import finn.transformation.fpgadataflow.convert_to_hls_layers as to_hls
 import finn.transformation.streamline.absorb as absorb
 from finn.analysis.fpgadataflow.dataflow_performance import dataflow_performance
 from finn.core.onnx_exec import execute_onnx
-from finn.core.throughput_test import throughput_test_remote, throughput_test_rtlsim
+from finn.core.throughput_test import throughput_test_rtlsim
 from finn.transformation.fpgadataflow.annotate_cycles import AnnotateCycles
 from finn.transformation.fpgadataflow.annotate_resources import AnnotateResources
 from finn.transformation.fpgadataflow.compile_cppsim import CompileCppSim
@@ -75,7 +71,6 @@ from finn.transformation.fpgadataflow.create_dataflow_partition import (
 from finn.transformation.fpgadataflow.create_stitched_ip import CreateStitchedIP
 from finn.transformation.fpgadataflow.hlssynth_ip import HLSSynthIP
 from finn.transformation.fpgadataflow.insert_dwc import InsertDWC
-from finn.transformation.fpgadataflow.make_deployment import DeployToPYNQ
 from finn.transformation.fpgadataflow.make_pynq_driver import MakePYNQDriver
 from finn.transformation.fpgadataflow.minimize_accumulator_width import (
     MinimizeAccumulatorWidth,
@@ -95,7 +90,6 @@ from finn.transformation.streamline.reorder import (
     MoveScalarLinearPastInvariants,
 )
 from finn.util.basic import get_finn_root
-from finn.util.gdrive import upload_to_end2end_dashboard
 from finn.util.pytorch import ToTensor
 from finn.util.test import (
     execute_parent,
@@ -122,24 +116,6 @@ def get_checkpoint_name(topology, wbits, abits, QONNX_export, step):
     )
 
 
-def get_dashboard_data(topology, wbits, abits):
-    stats_file = build_dir + "/end2end_%s_w%da%d.txt" % (topology, wbits, abits)
-    stats_dict = OrderedDict()
-    if os.path.isfile(stats_file):
-        with open(stats_file, "r") as f:
-            stats_dict_txt = f.read()
-        stats_dict = eval(stats_dict_txt)
-    return stats_dict
-
-
-def update_dashboard_data(topology, wbits, abits, key, val):
-    stats_dict = get_dashboard_data(topology, wbits, abits)
-    stats_dict[key] = val
-    stats_file = build_dir + "/end2end_%s_w%da%d.txt" % (topology, wbits, abits)
-    with open(stats_file, "w") as f:
-        f.write(str(stats_dict))
-
-
 def fold_tfc(model):
     fc_layers = model.get_nodes_by_op_type("MatrixVectorActivation")
     # (PE, SIMD, ramstyle) for each layer
@@ -335,15 +311,6 @@ class TestEnd2End:
             model.save(chkpt_name)
         else:
             export_finn_onnx(model, torch.randn(ishape), chkpt_name)
-        nname = "%s_w%da%d" % (topology, wbits, abits)
-        update_dashboard_data(topology, wbits, abits, "network", nname)
-        dtstr = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
-        update_dashboard_data(topology, wbits, abits, "datetime", dtstr)
-        finn_commit = subprocess.check_output(
-            ["git", "rev-parse", "HEAD"], cwd=get_finn_root()
-        )
-        finn_commit = finn_commit.decode("utf-8").strip()
-        update_dashboard_data(topology, wbits, abits, "finn-commit", finn_commit)
         assert os.path.isfile(chkpt_name)
 
     def test_import_and_tidy(self, topology, wbits, abits, QONNX_export):
@@ -644,10 +611,6 @@ class TestEnd2End:
         ret = throughput_test_rtlsim(model, batchsize=batchsize)
         res_cycles = ret["cycles"]
         est_cycles = latency + cycles_per_sample_est * batchsize
-        # warnings.warn("Estimated & rtlsim performance: " + str(perf))
-        # for (k, v) in perf.items():
-        #    update_dashboard_data(topology, wbits, abits, k, v)
-        update_dashboard_data(topology, wbits, abits, "cycles_rtlsim", latency)
         assert (abs(res_cycles - est_cycles) / res_cycles) < 0.15
 
     @pytest.mark.slow
@@ -691,10 +654,6 @@ class TestEnd2End:
         cfg = get_build_env(kind, target_clk_ns)
         model = model.transform(cfg["build_fxn"])
         model = model.transform(AnnotateResources("synth"))
-        synth_dct = eval(model.get_metadata_prop("res_total_top_synth"))
-        for (k, v) in synth_dct.items():
-            update_dashboard_data(topology, wbits, abits, k, v)
-        update_dashboard_data(topology, wbits, abits, "board", cfg["board"])
         model.save(
             get_checkpoint_name(topology, wbits, abits, QONNX_export, "build_" + kind)
         )
@@ -715,121 +674,3 @@ class TestEnd2End:
         model.save(
             get_checkpoint_name(topology, wbits, abits, QONNX_export, "driver_" + kind)
         )
-
-    @pytest.mark.parametrize("kind", ["zynq", "alveo"])
-    def test_deploy(self, topology, wbits, abits, QONNX_export, kind):
-        prev_chkpt_name = get_checkpoint_name(
-            topology, wbits, abits, QONNX_export, "driver_" + kind
-        )
-        model = load_test_checkpoint_or_skip(prev_chkpt_name)
-        cfg = get_build_env(kind, target_clk_ns)
-        if cfg["ip"] == "":
-            pytest.skip("PYNQ board IP address not specified")
-        model = model.transform(
-            DeployToPYNQ(
-                cfg["ip"],
-                cfg["port"],
-                cfg["username"],
-                cfg["password"],
-                cfg["target_dir"],
-            )
-        )
-        # save the model to be able to link it to the parent
-        model.save(
-            get_checkpoint_name(topology, wbits, abits, QONNX_export, "deploy_" + kind)
-        )
-
-    @pytest.mark.parametrize("kind", ["zynq", "alveo"])
-    def test_run_on_hw(self, topology, wbits, abits, QONNX_export, kind):
-        prev_chkpt_name = get_checkpoint_name(
-            topology, wbits, abits, QONNX_export, "deploy_" + kind
-        )
-        model = load_test_checkpoint_or_skip(prev_chkpt_name)  # NOQA
-        cfg = get_build_env(kind, target_clk_ns)
-        if cfg["ip"] == "":
-            pytest.skip("PYNQ board IP address not specified")
-        (input_tensor_npy, output_tensor_npy) = get_golden_io_pair(
-            topology, wbits, abits, return_topk=1
-        )
-        parent_model = load_test_checkpoint_or_skip(
-            get_checkpoint_name(topology, wbits, abits, QONNX_export, "dataflow_parent")
-        )
-        iname = parent_model.graph.input[0].name
-        oname = parent_model.graph.output[0].name
-        sdp_node = parent_model.get_nodes_by_op_type("StreamingDataflowPartition")[0]
-        sdp_node = getCustomOp(sdp_node)
-        sdp_node.set_nodeattr("model", prev_chkpt_name)
-        ret = execute_onnx(parent_model, {iname: input_tensor_npy}, True)
-        y = ret[oname]
-        assert np.isclose(y, output_tensor_npy).all()
-
-    @pytest.mark.parametrize("kind", ["zynq", "alveo"])
-    def test_throughput_hw(self, topology, wbits, abits, QONNX_export, kind):
-        prev_chkpt_name = get_checkpoint_name(
-            topology, wbits, abits, QONNX_export, "deploy_" + kind
-        )
-        end2end_example = "%s_w%da%d_%s" % (topology, wbits, abits, kind)
-        model = load_test_checkpoint_or_skip(prev_chkpt_name)  # NOQA
-        cfg = get_build_env(kind, target_clk_ns)
-        if cfg["ip"] == "":
-            pytest.skip("PYNQ board IP address not specified")
-        ret = dict()
-        # try a range of batch sizes, some may fail due to insufficient DMA
-        # buffers
-        bsize_range_in = [8**i for i in range(5)]
-        bsize_range = []
-        for bsize in bsize_range_in:
-            res = throughput_test_remote(model, bsize)
-            if res is not None:
-                ret[bsize] = res
-                bsize_range.append(bsize)
-            else:
-                # assume we reached largest possible N
-                break
-        y = [ret[key]["runtime[ms]"] for key in bsize_range]
-        lrret = linregress(bsize_range, y)
-        ret_str = ""
-        ret_str += "\n" + "%s Throughput Test Results" % end2end_example
-        ret_str += "\n" + "-----------------------------"
-        ret_str += "\n" + "From linear regression:"
-        ret_str += "\n" + "Invocation overhead: %f ms" % lrret.intercept
-        ret_str += "\n" + "Time per sample: %f ms" % lrret.slope
-        ret_str += "\n" + "Raw data:"
-
-        ret_str += "\n" + "{:<8} {:<16} {:<16} {:<16} {:<16} {:<16}".format(
-            "N", "runtime[ms]", "fclk[mhz]", "fps", "DRAM rd[MB/s]", "DRAM wr[MB/s]"
-        )
-        for k in bsize_range:
-            v = ret[k]
-            ret_str += "\n" + "{:<8} {:<16} {:<16} {:<16} {:<16} {:<16}".format(
-                k,
-                np.round(v["runtime[ms]"], 4),
-                v["fclk[mhz]"],
-                np.round(v["throughput[images/s]"], 2),
-                np.round(v["DRAM_in_bandwidth[MB/s]"], 2),
-                np.round(v["DRAM_out_bandwidth[MB/s]"], 2),
-            )
-        ret_str += "\n" + "-----------------------------"
-        warnings.warn(ret_str)
-        largest_bsize = bsize_range[-1]
-        update_dashboard_data(
-            topology, wbits, abits, "fclk[mhz]", ret[largest_bsize]["fclk[mhz]"]
-        )
-        update_dashboard_data(
-            topology,
-            wbits,
-            abits,
-            "throughput[images/s]",
-            ret[largest_bsize]["throughput[images/s]"],
-        )
-
-    def test_upload_results_to_dashboard(self, topology, wbits, abits, QONNX_export):
-        # ToDo: Extend the dashboard to also upload QONNX exported models?
-        if QONNX_export:
-            pytest.skip("Dashboard data upload is disabled for QONNX exported models.")
-        else:
-            dashboard_data = get_dashboard_data(topology, wbits, abits)
-            if len(dashboard_data.keys()) > 0:
-                upload_to_end2end_dashboard(dashboard_data)
-            else:
-                pytest.skip("No data to upload to dashboard")
diff --git a/tests/end2end/test_end2end_cybsec_mlp.py b/tests/end2end/test_end2end_cybsec_mlp.py
index d2a4d0287fc16d6bf4281be07a6a7ed5027150f1..5e402bdeb44a73a0cb750083cd6f4d431e9bb7c9 100644
--- a/tests/end2end/test_end2end_cybsec_mlp.py
+++ b/tests/end2end/test_end2end_cybsec_mlp.py
@@ -34,10 +34,8 @@ import json
 import numpy as np
 import os
 import shutil
-import subprocess
 import torch
 import torch.nn as nn
-import wget
 from brevitas.core.quant import QuantType
 from brevitas.export import export_finn_onnx, export_qonnx
 from brevitas.nn import QuantIdentity, QuantLinear, QuantReLU
@@ -225,62 +223,3 @@ def test_end2end_cybsec_mlp_build(QONNX_export):
         assert est_res_dict["total"]["LUT"] == 7904.0
         assert est_res_dict["total"]["BRAM_18K"] == 36.0
     shutil.copytree(output_dir + "/deploy", get_checkpoint_name("build", QONNX_export))
-
-
-@pytest.mark.end2end
-@pytest.mark.xfail
-@pytest.mark.parametrize("QONNX_export", [False, True])
-def test_end2end_cybsec_mlp_run_on_hw(QONNX_export):
-    build_env = get_build_env(build_kind, target_clk_ns)
-    assets_dir = pk.resource_filename("finn.qnn-data", "cybsec-mlp/")
-    deploy_dir = get_checkpoint_name("build", QONNX_export)
-    if not os.path.isdir(deploy_dir):
-        pytest.skip(deploy_dir + " not found from previous test step, skipping")
-    driver_dir = deploy_dir + "/driver"
-    assert os.path.isdir(driver_dir)
-    # put all assets into driver dir
-    shutil.copy(assets_dir + "/validate-unsw-nb15.py", driver_dir)
-    # put a copy of binarized dataset into driver dir
-    dataset_url = (
-        "https://zenodo.org/record/4519767/files/unsw_nb15_binarized.npz?download=1"
-    )
-    dataset_local = driver_dir + "/unsw_nb15_binarized.npz"
-    if not os.path.isfile(dataset_local):
-        wget.download(dataset_url, out=dataset_local)
-    assert os.path.isfile(dataset_local)
-    # create a shell script for running validation: 10 batches x 10 imgs
-    with open(driver_dir + "/validate.sh", "w") as f:
-        f.write(
-            """#!/bin/bash
-cd %s/driver
-echo %s | sudo -S python3.6 validate-unsw-nb15.py --batchsize=10 --limit_batches=10
-        """
-            % (
-                build_env["target_dir"] + "/end2end_cybsecmlp_build",
-                build_env["password"],
-            )
-        )
-    # set up rsync command
-    remote_target = "%s@%s:%s" % (
-        build_env["username"],
-        build_env["ip"],
-        build_env["target_dir"],
-    )
-    rsync_res = subprocess.run(["rsync", "-avz", deploy_dir, remote_target])
-    assert rsync_res.returncode == 0
-    remote_verif_cmd = [
-        "ssh",
-        "%s@%s" % (build_env["username"], build_env["ip"]),
-        "sh",
-        build_env["target_dir"] + "/end2end_cybsecmlp_build/driver/validate.sh",
-    ]
-    verif_res = subprocess.run(
-        remote_verif_cmd,
-        stdout=subprocess.PIPE,
-        universal_newlines=True,
-        input=build_env["password"],
-    )
-    assert verif_res.returncode == 0
-    log_output = verif_res.stdout.split("\n")
-    assert log_output[-3] == "batch 10 / 10 : total OK 93 NOK 7"
-    assert log_output[-2] == "Final accuracy: 93.000000"
diff --git a/tests/end2end/test_ext_weights.py b/tests/end2end/test_ext_weights.py
index 0a92c74a38d64ade37d576f3830f3a5628c94d88..bef2e0ffa77cb96ff45956e380aeb376def61228 100644
--- a/tests/end2end/test_ext_weights.py
+++ b/tests/end2end/test_ext_weights.py
@@ -110,69 +110,3 @@ def test_end2end_ext_weights_build():
     if os.path.isdir(get_checkpoint_name("build")):
         shutil.rmtree(get_checkpoint_name("build"))
     shutil.copytree(output_dir + "/deploy", get_checkpoint_name("build"))
-
-
-@pytest.mark.board
-@pytest.mark.end2end
-@pytest.mark.xfail
-def test_end2end_ext_weights_dataset():
-    # make sure we have local copies of mnist dataset files
-    subprocess.check_output(["mkdir", "-p", mnist_local])
-    for f in mnist_files:
-        if not os.path.isfile(mnist_local + "/" + f):
-            wget.download(mnist_url + "/" + f, out=mnist_local + "/" + f)
-        assert os.path.isfile(mnist_local + "/" + f)
-    # rsync to board
-    build_env = get_build_env(build_kind, target_clk_ns)
-    mnist_target = "%s@%s:%s" % (build_env["username"], build_env["ip"], "/tmp/")
-
-    rsync_dataset_cmd = ["rsync", "-rv", mnist_local + "/", mnist_target]
-    subprocess.check_output(rsync_dataset_cmd)
-
-
-@pytest.mark.end2end
-@pytest.mark.xfail
-def test_end2end_ext_weights_run_on_hw():
-    build_env = get_build_env(build_kind, target_clk_ns)
-    deploy_dir = get_checkpoint_name("build")
-    if not os.path.isdir(deploy_dir):
-        pytest.skip(deploy_dir + " not found from previous test step, skipping")
-    driver_dir = deploy_dir + "/driver"
-    assert os.path.isdir(driver_dir)
-    # create a shell script for running validation: 10 batches x 10 imgs
-    with open(driver_dir + "/validate.sh", "w") as f:
-        f.write(
-            """#!/bin/bash
-cd %s/driver
-echo %s | sudo -S python3.6 validate.py --dataset mnist --bitfile %s
-        """
-            % (
-                build_env["target_dir"] + "/end2end_ext_weights_build",
-                build_env["password"],
-                "../bitfile/finn-accel.bit",
-            )
-        )
-    # set up rsync command
-    remote_target = "%s@%s:%s" % (
-        build_env["username"],
-        build_env["ip"],
-        build_env["target_dir"],
-    )
-    rsync_res = subprocess.run(["rsync", "-avz", deploy_dir, remote_target])
-    assert rsync_res.returncode == 0
-    remote_verif_cmd = [
-        "ssh",
-        "%s@%s" % (build_env["username"], build_env["ip"]),
-        "sh",
-        build_env["target_dir"] + "/end2end_ext_weights_build/driver/validate.sh",
-    ]
-    verif_res = subprocess.run(
-        remote_verif_cmd,
-        stdout=subprocess.PIPE,
-        universal_newlines=True,
-        input=build_env["password"],
-    )
-    assert verif_res.returncode == 0
-    log_output = verif_res.stdout.split("\n")
-    assert log_output[-3] == "batch 100 / 100 : total OK 9296 NOK 704"
-    assert log_output[-2] == "Final accuracy: 92.960000"
diff --git a/tests/fpgadataflow/test_fpgadataflow_ipstitch.py b/tests/fpgadataflow/test_fpgadataflow_ipstitch.py
index b220338e6919e8eeaeef0f6e5343fed9b1dfca10..7e4069f5c481344560509d17c086ca2cbdbd0fda 100644
--- a/tests/fpgadataflow/test_fpgadataflow_ipstitch.py
+++ b/tests/fpgadataflow/test_fpgadataflow_ipstitch.py
@@ -206,7 +206,6 @@ def test_fpgadataflow_ipstitch_gen_model(mem_mode):
         assert sdp_node.__class__.__name__ == "StreamingDataflowPartition"
         assert os.path.isfile(sdp_node.get_nodeattr("model"))
         model = load_test_checkpoint_or_skip(sdp_node.get_nodeattr("model"))
-        model.set_metadata_prop("exec_mode", "remote_pynq")
     model = model.transform(InsertTLastMarker())
     model = model.transform(GiveUniqueNodeNames())
     model = model.transform(PrepareIP(test_fpga_part, 5))